file_name | prefix | suffix | middle | fim_type
---|---|---|---|---
large_string, 4–140 chars | large_string, 0–39k chars | large_string, 0–36.1k chars | large_string, 0–29.4k chars | large_string, 4 classes (conditional_block, identifier_body, random_line_split, identifier_name)
wavent.py:

#!/usr/bin/env python
"""
WaveNets Audio Generation Model
How-to-run example:
sampleRNN$
THEANO_FLAGS=mode=FAST_RUN,device=gpu1,floatX=float32,lib.cnmem=.95 python models/one_tier/wavent.py --dim 64 --q_levels 256 --q_type linear --which_set MUSIC --batch_size 8 --wavenet_blocks 4 --dilation_layers_per_block 10 --sequence_len_to_train 1600
"""
import time
from datetime import datetime
print "Experiment started at:", datetime.strftime(datetime.now(), '%Y-%m-%d %H:%M')
exp_start = time.time()
import os, sys
sys.path.insert(1, os.getcwd())
import argparse
import numpy
numpy.random.seed(123)
np = numpy
import random
random.seed(123)
import theano
import theano.tensor as T
import theano.ifelse
import lasagne
import scipy.io.wavfile
import lib
### Parsing passed args/hyperparameters ###
def get_args():
    def t_or_f(arg):
        ua = str(arg).upper()
        if 'TRUE'.startswith(ua):
            return True
        elif 'FALSE'.startswith(ua):
            return False
        else:
            raise ValueError('Arg is neither `True` nor `False`')

    def check_non_negative(value):
        ivalue = int(value)
        if ivalue < 0:
            raise argparse.ArgumentTypeError("%s is not non-negative!" % value)
        return ivalue

    def check_positive(value):
        ivalue = int(value)
        if ivalue < 1:
            raise argparse.ArgumentTypeError("%s is not positive!" % value)
        return ivalue

    def check_unit_interval(value):
        fvalue = float(value)
        if fvalue < 0 or fvalue > 1:
            raise argparse.ArgumentTypeError("%s is not in [0, 1] interval!" % value)
        return fvalue

    # No default values here. Indicate every single argument.
    parser = argparse.ArgumentParser(
        description='wavent.py\nNo default value! Indicate every argument.')

    # Hyperparameter arguments:
    parser.add_argument('--exp', help='Experiment name',
            type=str, required=False, default='_')
    parser.add_argument('--dim', help='Dimension of RNN and MLPs',
            type=check_positive, required=True)
    parser.add_argument('--q_levels', help='Number of bins for quantization of audio samples. Should be 256 for mu-law.',
            type=check_positive, required=True)
    parser.add_argument('--q_type', help='Quantization in linear scale, a-law companding, or mu-law companding. With mu-/a-law, quantization levels should be set to 256.',
            choices=['linear', 'a-law', 'mu-law'], required=True)
    #parser.add_argument('--nll_coeff', help='Value of alpha in [0, 1] for cost=alpha*NLL+(1-alpha)*FFT_cost',
    #        type=check_unit_interval, required=True)
    parser.add_argument('--which_set', help='ONOM, BLIZZ, MUSIC, or HUCK',
            choices=['ONOM', 'BLIZZ', 'MUSIC', 'HUCK'], required=True)
    parser.add_argument('--batch_size', help='size of mini-batch',
            type=check_positive, choices=[8, 16, 32, 64, 128, 256], required=True)
    parser.add_argument('--wavenet_blocks', help='Number of WaveNet blocks to use',
            type=check_positive, required=True)
    parser.add_argument('--dilation_layers_per_block', help='number of dilation layers per block',
            type=check_positive, required=True)
    parser.add_argument('--sequence_len_to_train', help='size of output map',
            type=check_positive, required=True)
    parser.add_argument('--debug', help='debug mode', required=False, default=False, action='store_true')
    parser.add_argument('--resume', help='Resume the same model from the last checkpoint. Order of params is important. [for now]',
            required=False, default=False, action='store_true')

    args = parser.parse_args()

    # Create tag for this experiment based on passed args
    tag = reduce(lambda a, b: a+b, sys.argv).replace('--resume', '').replace('/', '-').replace('--', '-').replace('True', 'T').replace('False', 'F')
    print "Created experiment tag for these args:"
    print tag

    return args, tag
args, tag = get_args()
# N_FRAMES = args.n_frames # How many 'frames' to include in each truncated BPTT pass
OVERLAP = (2**args.dilation_layers_per_block - 1)*args.wavenet_blocks + 1  # Receptive field of the stacked WaveNet blocks, in samples
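# Worked example (editor's note): with the docstring invocation
# (--wavenet_blocks 4 --dilation_layers_per_block 10),
# OVERLAP = (2**10 - 1)*4 + 1 = 4093 samples, i.e. ~0.26 s at 16 kHz.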
#GLOBAL_NORM = args.global_norm
DIM = args.dim # Model dimensionality.
Q_LEVELS = args.q_levels # How many levels to use when discretizing samples. e.g. 256 = 8-bit scalar quantization
Q_TYPE = args.q_type # 'linear', 'a-law', or 'mu-law'
#NLL_COEFF = args.nll_coeff
WHICH_SET = args.which_set
BATCH_SIZE = args.batch_size
#DATA_PATH = args.data_path
if Q_TYPE == 'mu-law' and Q_LEVELS != 256:
    raise ValueError('For mu-law quantization, Q_LEVELS should be exactly 256!')
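# Editor's illustration (not part of the original pipeline; the real
# encode/decode lives in datasets/dataset.py): mu-law companding maps a
# linear amplitude x in [-1, 1] to one of mu+1 = 256 discrete bins.
def _mu_law_encode_sketch(x, mu=255):
    # Compress logarithmically around zero; y stays in [-1, 1].
    y = numpy.sign(x) * numpy.log1p(mu * numpy.abs(x)) / numpy.log1p(mu)
    # Map to integer bins [0, mu]; x = 0 lands near bin 128 (= Q_ZERO).
    return numpy.floor((y + 1) / 2.0 * mu + 0.5).astype('int32')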
# Fixed hyperparams
GRAD_CLIP = 1 # Elementwise grad clip threshold
BITRATE = 16000
# Other constants
#TRAIN_MODE = 'iters' # To use PRINT_ITERS and STOP_ITERS
TRAIN_MODE = 'time' # To use PRINT_TIME and STOP_TIME
#TRAIN_MODE = 'time-iters'
# To use PRINT_TIME for validation,
# and (STOP_ITERS, STOP_TIME), whichever happened first, for stopping exp.
#TRAIN_MODE = 'iters-time'
# To use PRINT_ITERS for validation,
# and (STOP_ITERS, STOP_TIME), whichever happened first, for stopping exp.
PRINT_ITERS = 10000 # Print cost, generate samples, save model checkpoint every N iterations.
STOP_ITERS = 100000 # Stop after this many iterations
PRINT_TIME = 90*60 # Print cost, generate samples, save model checkpoint every N seconds.
STOP_TIME = 60*60*60 # Stop after this many seconds of actual training (not including time req'd to generate samples etc.)
N_SEQS = 10 # Number of samples to generate every time monitoring.
FOLDER_PREFIX = os.path.join('results_wavenets', tag)
SEQ_LEN = args.sequence_len_to_train # Total length (# of samples) of each truncated BPTT sequence
Q_ZERO = numpy.int32(Q_LEVELS//2) # Discrete value corresponding to zero amplitude
LEARNING_RATE = lib.floatX(numpy.float32(0.0001))
RESUME = args.resume
epoch_str = 'epoch'
iter_str = 'iter'
lowest_valid_str = 'lowest valid cost'
corresp_test_str = 'corresponding test cost'
train_nll_str, valid_nll_str, test_nll_str = \
'train NLL (bits)', 'valid NLL (bits)', 'test NLL (bits)'
if args.debug:
    import warnings
    warnings.warn('----------RUNNING IN DEBUG MODE----------')
    TRAIN_MODE = 'time-iters'
    PRINT_TIME = 100
    STOP_TIME = 300
    STOP_ITERS = 1000
### Create directories ###
# FOLDER_PREFIX: root, contains:
# log.txt, __note.txt, train_log.pkl, train_log.png [, model_settings.txt]
# FOLDER_PREFIX/params: saves all checkpoint params as pkl
# FOLDER_PREFIX/samples: keeps all checkpoint samples as wav
# FOLDER_PREFIX/best: keeps the best parameters, samples, ...
if not os.path.exists(FOLDER_PREFIX):
    os.makedirs(FOLDER_PREFIX)
PARAMS_PATH = os.path.join(FOLDER_PREFIX, 'params')
if not os.path.exists(PARAMS_PATH):
    os.makedirs(PARAMS_PATH)
SAMPLES_PATH = os.path.join(FOLDER_PREFIX, 'samples')
if not os.path.exists(SAMPLES_PATH):
    os.makedirs(SAMPLES_PATH)
BEST_PATH = os.path.join(FOLDER_PREFIX, 'best')
if not os.path.exists(BEST_PATH):
    os.makedirs(BEST_PATH)
lib.print_model_settings(locals(), path=FOLDER_PREFIX, sys_arg=True)
### Creating computation graph ###
def create_wavenet_block(inp, num_dilation_layer, input_dim, output_dim, name=None):
    assert name is not None
    layer_out = inp
    skip_contrib = []
    skip_weights = lib.param(name+".parametrized_weights", lib.floatX(numpy.ones((num_dilation_layer,))))
    for i in range(num_dilation_layer):
        layer_out, skip_c = lib.ops.dil_conv_1D(
            layer_out,
            output_dim,
            input_dim if i == 0 else output_dim,
            2,
            dilation=2**i,
            non_linearity='gated',
            name=name+".dilation_{}".format(i+1)
        )
        skip_c = skip_c*skip_weights[i]
        skip_contrib.append(skip_c)

    skip_out = skip_contrib[-1]
    j = 0
    for i in range(num_dilation_layer-1):
        j += 2**(num_dilation_layer-i-1)
        skip_out = skip_out + skip_contrib[num_dilation_layer-2 - i][:, j:]

    return layer_out, skip_out
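# Editor's gloss on the trimming above: each width-2 convolution with
# dilation d shortens the time axis by d samples, so skip_contrib[i] is
# longer than the final layer's output by sum(2**k for k in range(i+1, N))
# samples (N = num_dilation_layer). E.g. with N = 3, skip_contrib[1] is
# trimmed by 2**2 = 4 samples and skip_contrib[0] by 4 + 2 = 6, so all
# skip tensors align on the last (shortest) time axis before summation.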
def create_model(inp):
    out = (inp.astype(theano.config.floatX)/lib.floatX(Q_LEVELS-1) - lib.floatX(0.5))
    l_out = out.dimshuffle(0, 1, 'x')
    skips = []
    for i in range(args.wavenet_blocks):
        l_out, skip_out = create_wavenet_block(l_out, args.dilation_layers_per_block, 1 if i == 0 else args.dim, args.dim, name="block_{}".format(i+1))
        skips.append(skip_out)

    out = skips[-1]
    for i in range(args.wavenet_blocks - 1):
        out = out + skips[args.wavenet_blocks - 2 - i][:, (2**args.dilation_layers_per_block - 1)*(i+1):]

    for i in range(3):
        out = lib.ops.conv1d("out_{}".format(i+1), out, args.dim, args.dim, 1, non_linearity='relu')

    out = lib.ops.conv1d("final", out, args.dim, args.q_levels, 1, non_linearity='identity')

    return out
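# Editor's gloss: quantized int inputs in [0, Q_LEVELS-1] are rescaled to
# floats in [-0.5, 0.5] before the first block; across blocks the same
# left-trim alignment used inside a block is applied again, each block
# consuming (2**dilation_layers_per_block - 1) samples of receptive field.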
sequences = T.imatrix('sequences')
h0 = T.tensor3('h0')
reset = T.iscalar('reset')
mask = T.matrix('mask')
if args.debug:
    # Solely for debugging purposes.
    # Maybe I should set the compute_test_value=warn from here.
    sequences.tag.test_value = numpy.zeros((BATCH_SIZE, SEQ_LEN), dtype='int32')
input_sequences = sequences[:, :-1]
target_sequences = sequences[:, (2**args.dilation_layers_per_block - 1)*args.wavenet_blocks + 1:]
target_mask = mask[:, (2**args.dilation_layers_per_block - 1)*args.wavenet_blocks + 1:]
output = create_model(input_sequences)
cost = T.nnet.categorical_crossentropy(
    T.nnet.softmax(output.reshape((-1, Q_LEVELS))),
    target_sequences.flatten()
)
cost = cost.reshape(target_sequences.shape)
cost = cost * target_mask
# Don't use these lines; they could end up producing NaN,
# especially at the end of audio files where the mask is
# all zero for some of the shorter files in the mini-batch.
#cost = cost.sum(axis=1) / target_mask.sum(axis=1)
#cost = cost.mean(axis=0)
# Use this instead:
cost = cost.sum()
cost = cost / target_mask.sum()
# By default we report cross-entropy cost in bits.
# Switch to nats by commenting out this line:
# log_2(e) = 1.44269504089
cost = cost * lib.floatX(numpy.log2(numpy.e))
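# Editor's note on units: multiplying a nat-valued NLL by log2(e) converts
# it to bits; e.g. a uniform guess over 256 levels costs ln(256) ~= 5.545
# nats, i.e. exactly 8 bits.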
### Getting the params, grads, updates, and Theano functions ###
params = lib.get_params(cost, lambda x: hasattr(x, 'param') and x.param==True)
lib.print_params_info(params, path=FOLDER_PREFIX)
grads = T.grad(cost, wrt=params, disconnected_inputs='warn')
grads = [T.clip(g, lib.floatX(-GRAD_CLIP), lib.floatX(GRAD_CLIP)) for g in grads]
updates = lasagne.updates.adam(grads, params, learning_rate=LEARNING_RATE)
# Training function
train_fn = theano.function(
    [sequences, mask],
    cost,
    updates=updates,
    on_unused_input='warn'
)

# Validation and test function
test_fn = theano.function(
    [sequences, mask],
    cost,
    on_unused_input='warn'
)

# Sampling at frame level
generate_fn = theano.function(
    [sequences],
    lib.ops.softmax_and_sample(output),
    on_unused_input='warn'
)
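# Editor's summary of the compiled functions (inferred from the graph
# above): train_fn returns the masked mean NLL in bits and applies one
# Adam update; test_fn computes the same cost without updates;
# generate_fn samples one quantized value per output timestep from the
# softmax over Q_LEVELS bins.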
def generate_and_save_samples(tag):
    def write_audio_file(name, data):
        data = data.astype('float32')
        data -= data.min()
        data /= data.max()
        data -= 0.5
        data *= 0.95
        scipy.io.wavfile.write(
            os.path.join(SAMPLES_PATH, name+'.wav'),
            BITRATE,
            data)

    total_time = time.time()
    # Generate N_SEQS sample files, each 5 seconds long
    N_SECS = 5
    LENGTH = N_SECS*BITRATE
    if args.debug:
        LENGTH = 1024

    num_prev_samples_to_use = (2**args.dilation_layers_per_block - 1)*args.wavenet_blocks + 1
    samples = numpy.zeros((N_SEQS, LENGTH + num_prev_samples_to_use), dtype='int32')
    samples[:, :num_prev_samples_to_use] = Q_ZERO

    for t in range(LENGTH):
        samples[:, num_prev_samples_to_use+t:num_prev_samples_to_use+t+1] = generate_fn(samples[:, t:t + num_prev_samples_to_use+1])
        if (t > 2*BITRATE) and (t < 3*BITRATE):
            # Force silence (Q_ZERO) between seconds 2 and 3 of every sample.
            samples[:, num_prev_samples_to_use+t:num_prev_samples_to_use+t+1] = Q_ZERO

    total_time = time.time() - total_time
    log = "{} samples of {} seconds length generated in {} seconds."
    log = log.format(N_SEQS, N_SECS, total_time)
    print log,

    for i in xrange(N_SEQS):
        samp = samples[i, num_prev_samples_to_use:]
        if Q_TYPE == 'mu-law':
            from datasets.dataset import mu2linear
            samp = mu2linear(samp)
        elif Q_TYPE == 'a-law':
            raise NotImplementedError('a-law is not implemented')
        write_audio_file("sample_{}_{}".format(tag, i), samp)
### Import the data_feeder ###
# Handling WHICH_SET
if WHICH_SET == 'ONOM':
    from datasets.dataset import onom_train_feed_epoch as train_feeder
    from datasets.dataset import onom_valid_feed_epoch as valid_feeder
    from datasets.dataset import onom_test_feed_epoch as test_feeder
elif WHICH_SET == 'BLIZZ':
    from datasets.dataset import blizz_train_feed_epoch as train_feeder
    from datasets.dataset import blizz_valid_feed_epoch as valid_feeder
    from datasets.dataset import blizz_test_feed_epoch as test_feeder
elif WHICH_SET == 'MUSIC':
    from datasets.dataset import music_train_feed_epoch as train_feeder
    from datasets.dataset import music_valid_feed_epoch as valid_feeder
    from datasets.dataset import music_test_feed_epoch as test_feeder
elif WHICH_SET == 'HUCK':
    from datasets.dataset import huck_train_feed_epoch as train_feeder
    from datasets.dataset import huck_valid_feed_epoch as valid_feeder
    from datasets.dataset import huck_test_feed_epoch as test_feeder
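# Editor's note on the feeder protocol (inferred from usage below; exact
# shapes live in datasets/dataset.py): each *_feed_epoch(BATCH_SIZE,
# SEQ_LEN, OVERLAP, Q_LEVELS, Q_ZERO, Q_TYPE) call returns a generator
# yielding (seqs, reset, mask) triples, where seqs is an int32 batch of
# quantized samples and mask flags valid (non-padding) positions.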
def monitor(data_feeder):
    """
    Cost and time of test_fn on a given dataset section.
    Pass only one of `valid_feeder` or `test_feeder`.
    Don't pass `train_feeder`.
    :returns:
        Mean cost over the input dataset (data_feeder)
        Total time spent
    """
    _total_time = 0.
    _costs = []
    _data_feeder = data_feeder(BATCH_SIZE,
                               SEQ_LEN,
                               OVERLAP,
                               Q_LEVELS,
                               Q_ZERO,
                               Q_TYPE)
    for _seqs, _reset, _mask in _data_feeder:
        _start_time = time.time()
        _cost = test_fn(_seqs, _mask)
        _total_time += time.time() - _start_time
        _costs.append(_cost)

    return numpy.mean(_costs), _total_time
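# Editor's note: _total_time above wraps only the test_fn calls, so the
# reported monitoring time excludes data loading and feeding.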
print "Wall clock time spent before training started: {:.2f}h"\
.format((time.time()-exp_start)/3600.)
print "Training!"
total_iters = 0
total_time = 0.
last_print_time = 0.
last_print_iters = 0
costs = []
lowest_valid_cost = numpy.finfo(numpy.float32).max
corresponding_test_cost = numpy.finfo(numpy.float32).max
new_lowest_cost = False
end_of_batch = False
epoch = 0 # Mostly relevant for datasets other than Blizz
# Initial load train dataset
tr_feeder = train_feeder(BATCH_SIZE,
                         SEQ_LEN,
                         OVERLAP,
                         Q_LEVELS,
                         Q_ZERO,
                         Q_TYPE)
if RESUME:
    # Check that the checkpoint from the previous run is not corrupted.
    # Then overwrite some of the variables above.
    iters_to_consume, res_path, epoch, total_iters,\
        [lowest_valid_cost, corresponding_test_cost, test_cost] = \
        lib.resumable(path=FOLDER_PREFIX,
                      iter_key=iter_str,
                      epoch_key=epoch_str,
                      add_resume_counter=True,
                      other_keys=[lowest_valid_str,
                                  corresp_test_str,
                                  test_nll_str])
    # At this point we saved the pkl file.
    last_print_iters = total_iters
    print "### RESUMING JOB FROM EPOCH {}, ITER {}".format(epoch, total_iters)
    # Consume this many iterations to get back to the last position in the training data.
    consume_time = time.time()
    for i in xrange(iters_to_consume):
        tr_feeder.next()
    consume_time = time.time() - consume_time
    print "Train data ready in {:.2f}secs after consuming {} minibatches.".\
        format(consume_time, iters_to_consume)

    lib.load_params(res_path)
    print "Parameters from last available checkpoint loaded from path {}".format(res_path)
test_time = 0.0
while True:
    # THIS IS ONE ITERATION
    if total_iters % 500 == 0:
        print total_iters,

    total_iters += 1

    try:
        # Take as many mini-batches as possible from the train set
        mini_batch = tr_feeder.next()
    except StopIteration:
        # Mini-batches are finished; reload the feeder.
        # Basically, one epoch.
        tr_feeder = train_feeder(BATCH_SIZE,
                                 SEQ_LEN,
                                 OVERLAP,
                                 Q_LEVELS,
                                 Q_ZERO,
                                 Q_TYPE)

        # and start taking new mini-batches again.
        mini_batch = tr_feeder.next()
        epoch += 1
        end_of_batch = True
        print "[Another epoch]",

    seqs, reset, mask = mini_batch
    start_time = time.time()
    cost = train_fn(seqs, mask)
    total_time += time.time() - start_time
#print "This cost:", cost, "This h0.mean()", h0.mean()
    costs.append(cost)
    if (TRAIN_MODE=='iters' and total_iters-last_print_iters == PRINT_ITERS) or \
        (TRAIN_MODE=='time' and total_time-last_print_time >= PRINT_TIME) or \
        (TRAIN_MODE=='time-iters' and total_time-last_print_time >= PRINT_TIME) or \
        (TRAIN_MODE=='iters-time' and total_iters-last_print_iters >= PRINT_ITERS) or \
        end_of_batch:

        print "\nValidation!",
        valid_cost, valid_time = monitor(valid_feeder)
        print "Done!"

        # Only when the validation cost improves, get the cost for the test set.
        if valid_cost < lowest_valid_cost:
            lowest_valid_cost = valid_cost
            print "\n>>> Best validation cost of {} reached. Testing!"\
                .format(valid_cost),
            test_cost, test_time = monitor(test_feeder)
            print "Done!"
            # Report the last one, which is the lowest on the validation set:
            print ">>> test cost:{}\ttotal time:{}".format(test_cost, test_time)
            corresponding_test_cost = test_cost
            new_lowest_cost = True

        # Print the training progress to stdout
        print_info = "epoch:{}\ttotal iters:{}\twall clock time:{:.2f}h\n"
        print_info += ">>> Lowest valid cost:{}\t Corresponding test cost:{}\n"
        print_info += "\ttrain cost:{:.4f}\ttotal time:{:.2f}h\tper iter:{:.3f}s\n"
        print_info += "\tvalid cost:{:.4f}\ttotal time:{:.2f}h\n"
        print_info += "\ttest cost:{:.4f}\ttotal time:{:.2f}h"
        print_info = print_info.format(epoch,
                                       total_iters,
                                       (time.time()-exp_start)/3600,
                                       lowest_valid_cost,
                                       corresponding_test_cost,
                                       numpy.mean(costs),
                                       total_time/3600,
                                       total_time/total_iters,
                                       valid_cost,
                                       valid_time/3600,
                                       test_cost,
                                       test_time/3600)
        print print_info

        # Save and plot training progress
        x_axis_str = 'iter'
        train_nll_str, valid_nll_str, test_nll_str = \
            'train NLL (bits)', 'valid NLL (bits)', 'test NLL (bits)'
        training_info = {epoch_str : epoch,
                         x_axis_str : total_iters,
                         train_nll_str : numpy.mean(costs),
                         valid_nll_str : valid_cost,
                         test_nll_str : test_cost,
                         lowest_valid_str : lowest_valid_cost,
                         corresp_test_str : corresponding_test_cost,
                         'train time' : total_time,
                         'valid time' : valid_time,
                         'test time' : test_time,
                         'wall clock time' : time.time()-exp_start}
        lib.save_training_info(training_info, FOLDER_PREFIX)
        print "Train info saved!",

        y_axis_strs = [train_nll_str, valid_nll_str, test_nll_str]
        lib.plot_traing_info(x_axis_str, y_axis_strs, FOLDER_PREFIX)
        print "Plotted!"

        # Generate and save samples
        print "Sampling!",
        tag = "e{}_i{}_t{:.2f}_tr{:.4f}_v{:.4f}"
        tag = tag.format(epoch,
                         total_iters,
                         total_time/3600,
                         numpy.mean(costs),
                         valid_cost)
        tag += ("_best" if new_lowest_cost else "")
        # Generate samples
        generate_and_save_samples(tag)
        print "Done!"

        # Save model params
        lib.save_params(
            os.path.join(PARAMS_PATH, 'params_{}.pkl'.format(tag))
        )
        print "Params saved!"

        if total_iters-last_print_iters == PRINT_ITERS \
            or total_time-last_print_time >= PRINT_TIME:
            # If we are here because of end_of_batch, we shouldn't mess
            # with costs and last_print_iters.
            costs = []
            last_print_time += PRINT_TIME
            last_print_iters += PRINT_ITERS

        end_of_batch = False
        new_lowest_cost = False

        print "Validation Done!\nBack to Training..."

    if (TRAIN_MODE=='iters' and total_iters == STOP_ITERS) or \
        (TRAIN_MODE=='time' and total_time >= STOP_TIME) or \
        ((TRAIN_MODE=='time-iters' or TRAIN_MODE=='iters-time') and \
        (total_iters == STOP_ITERS or total_time >= STOP_TIME)):

        print "Done! Total iters:", total_iters, "Total time: ", total_time
        print "Experiment ended at:", datetime.strftime(datetime.now(), '%Y-%m-%d %H:%M')
        print "Wall clock time spent: {:.2f}h"\
            .format((time.time()-exp_start)/3600)

        sys.exit()
|
wavent.py | #!/usr/bin/env python
"""
WaveNets Audio Generation Model
How-to-run example:
sampleRNN$
THEANO_FLAGS=mode=FAST_RUN,device=gpu1,floatX=float32,lib.cnmem=.95 python models/one_tier/wavent.py --dim 64 --q_levels 256 --q_type linear --which_set MUSIC --batch_size 8 --wavenet_blocks 4 --dilation_layers_per_block 10 --sequence_len_to_train 1600
"""
import time
from datetime import datetime
print "Experiment started at:", datetime.strftime(datetime.now(), '%Y-%m-%d %H:%M')
exp_start = time.time()
import os, sys
sys.path.insert(1, os.getcwd())
import argparse
import numpy
numpy.random.seed(123)
np = numpy
import random
random.seed(123)
import theano
import theano.tensor as T
import theano.ifelse
import lasagne
import scipy.io.wavfile
import lib
### Parsing passed args/hyperparameters ###
def get_args():
def t_or_f(arg):
ua = str(arg).upper()
if 'TRUE'.startswith(ua):
return True
elif 'FALSE'.startswith(ua):
return False
else:
raise ValueError('Arg is neither `True` nor `False`')
def check_non_negative(value):
ivalue = int(value)
if ivalue < 0:
raise argparse.ArgumentTypeError("%s is not non-negative!" % value)
return ivalue
def check_positive(value):
ivalue = int(value)
if ivalue < 1:
raise argparse.ArgumentTypeError("%s is not positive!" % value)
return ivalue
def check_unit_interval(value):
|
# No default value here. Indicate every single arguement.
parser = argparse.ArgumentParser(
description='two_tier.py\nNo default value! Indicate every argument.')
# Hyperparameter arguements:
parser.add_argument('--exp', help='Experiment name',
type=str, required=False, default='_')
parser.add_argument('--dim', help='Dimension of RNN and MLPs',\
type=check_positive, required=True)
parser.add_argument('--q_levels', help='Number of bins for quantization of audio samples. Should be 256 for mu-law.',\
type=check_positive, required=True)
parser.add_argument('--q_type', help='Quantization in linear-scale, a-law-companding, or mu-law compandig. With mu-/a-law quantization level shoud be set as 256',\
choices=['linear', 'a-law', 'mu-law'], required=True)
#parser.add_argument('--nll_coeff', help='Value of alpha in [0, 1] for cost=alpha*NLL+(1-alpha)*FFT_cost',\
# type=check_unit_interval, required=True)
parser.add_argument('--which_set', help='ONOM, BLIZZ, or MUSIC',
choices=['ONOM', 'BLIZZ', 'MUSIC', 'HUCK'], required=True)
parser.add_argument('--batch_size', help='size of mini-batch',
type=check_positive, choices=[8, 16, 32, 64, 128, 256], required=True)
parser.add_argument('--wavenet_blocks', help='Number of wavnet blocks to use',
type=check_positive, required=True)
parser.add_argument('--dilation_layers_per_block', help='number of dilation layers per block',
type=check_positive, required=True)
parser.add_argument('--sequence_len_to_train', help='size of output map',
type=check_positive, required=True)
parser.add_argument('--debug', help='debug mode', required=False, default=False, action='store_true')
parser.add_argument('--resume', help='Resume the same model from the last checkpoint. Order of params are important. [for now]',\
required=False, default=False, action='store_true')
args = parser.parse_args()
# Create tag for this experiment based on passed args
tag = reduce(lambda a, b: a+b, sys.argv).replace('--resume', '').replace('/', '-').replace('--', '-').replace('True', 'T').replace('False', 'F')
print "Created experiment tag for these args:"
print tag
return args, tag
args, tag = get_args()
# N_FRAMES = args.n_frames # How many 'frames' to include in each truncated BPTT pass
OVERLAP = (2**args.dilation_layers_per_block - 1)*args.wavenet_blocks + 1# How many samples per frame
#GLOBAL_NORM = args.global_norm
DIM = args.dim # Model dimensionality.
Q_LEVELS = args.q_levels # How many levels to use when discretizing samples. e.g. 256 = 8-bit scalar quantization
Q_TYPE = args.q_type # log- or linear-scale
#NLL_COEFF = args.nll_coeff
WHICH_SET = args.which_set
BATCH_SIZE = args.batch_size
#DATA_PATH = args.data_path
if Q_TYPE == 'mu-law' and Q_LEVELS != 256:
raise ValueError('For mu-law Quantization levels should be exactly 256!')
# Fixed hyperparams
GRAD_CLIP = 1 # Elementwise grad clip threshold
BITRATE = 16000
# Other constants
#TRAIN_MODE = 'iters' # To use PRINT_ITERS and STOP_ITERS
TRAIN_MODE = 'time' # To use PRINT_TIME and STOP_TIME
#TRAIN_MODE = 'time-iters'
# To use PRINT_TIME for validation,
# and (STOP_ITERS, STOP_TIME), whichever happened first, for stopping exp.
#TRAIN_MODE = 'iters-time'
# To use PRINT_ITERS for validation,
# and (STOP_ITERS, STOP_TIME), whichever happened first, for stopping exp.
PRINT_ITERS = 10000 # Print cost, generate samples, save model checkpoint every N iterations.
STOP_ITERS = 100000 # Stop after this many iterations
PRINT_TIME = 90*60 # Print cost, generate samples, save model checkpoint every N seconds.
STOP_TIME = 60*60*60 # Stop after this many seconds of actual training (not including time req'd to generate samples etc.)
N_SEQS = 10 # Number of samples to generate every time monitoring.
FOLDER_PREFIX = os.path.join('results_wavenets', tag)
SEQ_LEN = args.sequence_len_to_train # Total length (# of samples) of each truncated BPTT sequence
Q_ZERO = numpy.int32(Q_LEVELS//2) # Discrete value correponding to zero amplitude
LEARNING_RATE = lib.floatX(numpy.float32(0.0001))
RESUME = args.resume
epoch_str = 'epoch'
iter_str = 'iter'
lowest_valid_str = 'lowest valid cost'
corresp_test_str = 'correponding test cost'
train_nll_str, valid_nll_str, test_nll_str = \
'train NLL (bits)', 'valid NLL (bits)', 'test NLL (bits)'
if args.debug:
import warnings
warnings.warn('----------RUNNING IN DEBUG MODE----------')
TRAIN_MODE = 'time-iters'
PRINT_TIME = 100
STOP_TIME = 300
STOP_ITERS = 1000
### Create directories ###
# FOLDER_PREFIX: root, contains:
# log.txt, __note.txt, train_log.pkl, train_log.png [, model_settings.txt]
# FOLDER_PREFIX/params: saves all checkpoint params as pkl
# FOLDER_PREFIX/samples: keeps all checkpoint samples as wav
# FOLDER_PREFIX/best: keeps the best parameters, samples, ...
if not os.path.exists(FOLDER_PREFIX):
os.makedirs(FOLDER_PREFIX)
PARAMS_PATH = os.path.join(FOLDER_PREFIX, 'params')
if not os.path.exists(PARAMS_PATH):
os.makedirs(PARAMS_PATH)
SAMPLES_PATH = os.path.join(FOLDER_PREFIX, 'samples')
if not os.path.exists(SAMPLES_PATH):
os.makedirs(SAMPLES_PATH)
BEST_PATH = os.path.join(FOLDER_PREFIX, 'best')
if not os.path.exists(BEST_PATH):
os.makedirs(BEST_PATH)
lib.print_model_settings(locals(), path=FOLDER_PREFIX, sys_arg=True)
### Creating computation graph ###
def create_wavenet_block(inp, num_dilation_layer, input_dim, output_dim, name =None):
assert name is not None
layer_out = inp
skip_contrib = []
skip_weights = lib.param(name+".parametrized_weights", lib.floatX(numpy.ones((num_dilation_layer,))))
for i in range(num_dilation_layer):
layer_out, skip_c = lib.ops.dil_conv_1D(
layer_out,
output_dim,
input_dim if i == 0 else output_dim,
2,
dilation = 2**i,
non_linearity = 'gated',
name = name+".dilation_{}".format(i+1)
)
skip_c = skip_c*skip_weights[i]
skip_contrib.append(skip_c)
skip_out = skip_contrib[-1]
j = 0
for i in range(num_dilation_layer-1):
j += 2**(num_dilation_layer-i-1)
skip_out = skip_out + skip_contrib[num_dilation_layer-2 - i][:,j:]
return layer_out, skip_out
def create_model(inp):
out = (inp.astype(theano.config.floatX)/lib.floatX(Q_LEVELS-1) - lib.floatX(0.5))
l_out = out.dimshuffle(0,1,'x')
skips = []
for i in range(args.wavenet_blocks):
l_out, skip_out = create_wavenet_block(l_out, args.dilation_layers_per_block, 1 if i == 0 else args.dim, args.dim, name = "block_{}".format(i+1))
skips.append(skip_out)
out = skips[-1]
for i in range(args.wavenet_blocks - 1):
out = out + skips[args.wavenet_blocks - 2 - i][:,(2**args.dilation_layers_per_block - 1)*(i+1):]
for i in range(3):
out = lib.ops.conv1d("out_{}".format(i+1), out, args.dim, args.dim, 1, non_linearity='relu')
out = lib.ops.conv1d("final", out, args.dim, args.q_levels, 1, non_linearity='identity')
return out
sequences = T.imatrix('sequences')
h0 = T.tensor3('h0')
reset = T.iscalar('reset')
mask = T.matrix('mask')
if args.debug:
# Solely for debugging purposes.
# Maybe I should set the compute_test_value=warn from here.
sequences.tag.test_value = numpy.zeros((BATCH_SIZE, SEQ_LEN), dtype='int32')
input_sequences = sequences[:, :-1]
target_sequences = sequences[:, (2**args.dilation_layers_per_block - 1)*args.wavenet_blocks + 1:]
target_mask = mask[:, (2**args.dilation_layers_per_block - 1)*args.wavenet_blocks + 1:]
output = create_model(input_sequences)
cost = T.nnet.categorical_crossentropy(
T.nnet.softmax(output.reshape((-1, Q_LEVELS))),
target_sequences.flatten()
)
cost = cost.reshape(target_sequences.shape)
cost = cost * target_mask
# Don't use these lines; could end up with NaN
# Specially at the end of audio files where mask is
# all zero for some of the shorter files in mini-batch.
#cost = cost.sum(axis=1) / target_mask.sum(axis=1)
#cost = cost.mean(axis=0)
# Use this one instead.
cost = cost.sum()
cost = cost / target_mask.sum()
# By default we report cross-entropy cost in bits.
# Switch to nats by commenting out this line:
# log_2(e) = 1.44269504089
cost = cost * lib.floatX(numpy.log2(numpy.e))
### Getting the params, grads, updates, and Theano functions ###
params = lib.get_params(cost, lambda x: hasattr(x, 'param') and x.param==True)
lib.print_params_info(params, path=FOLDER_PREFIX)
grads = T.grad(cost, wrt=params, disconnected_inputs='warn')
grads = [T.clip(g, lib.floatX(-GRAD_CLIP), lib.floatX(GRAD_CLIP)) for g in grads]
updates = lasagne.updates.adam(grads, params, learning_rate=LEARNING_RATE)
# Training function
train_fn = theano.function(
[sequences, mask],
cost,
updates=updates,
on_unused_input='warn'
)
# Validation and Test function
test_fn = theano.function(
[sequences, mask],
cost,
on_unused_input='warn'
)
# Sampling at frame level
generate_fn = theano.function(
[sequences],
lib.ops.softmax_and_sample(output),
on_unused_input='warn'
)
def generate_and_save_samples(tag):
def write_audio_file(name, data):
data = data.astype('float32')
data -= data.min()
data /= data.max()
data -= 0.5
data *= 0.95
scipy.io.wavfile.write(
os.path.join(SAMPLES_PATH, name+'.wav'),
BITRATE,
data)
total_time = time.time()
# Generate N_SEQS' sample files, each 5 seconds long
N_SECS = 5
LENGTH = N_SECS*BITRATE
if args.debug:
LENGTH = 1024
num_prev_samples_to_use = (2**args.dilation_layers_per_block - 1)*args.wavenet_blocks + 1
samples = numpy.zeros((N_SEQS, LENGTH + num_prev_samples_to_use), dtype='int32')
samples[:, :num_prev_samples_to_use] = Q_ZERO
for t in range(LENGTH):
samples[:,num_prev_samples_to_use+t:num_prev_samples_to_use+t+1] = generate_fn(samples[:, t:t + num_prev_samples_to_use+1])
if (t > 2*BITRATE) and( t < 3*BITRATE):
samples[:,num_prev_samples_to_use+t:num_prev_samples_to_use+t+1] = Q_ZERO
total_time = time.time() - total_time
log = "{} samples of {} seconds length generated in {} seconds."
log = log.format(N_SEQS, N_SECS, total_time)
print log,
for i in xrange(N_SEQS):
samp = samples[i, num_prev_samples_to_use: ]
if Q_TYPE == 'mu-law':
from datasets.dataset import mu2linear
samp = mu2linear(samp)
elif Q_TYPE == 'a-law':
raise NotImplementedError('a-law is not implemented')
write_audio_file("sample_{}_{}".format(tag, i), samp)
### Import the data_feeder ###
# Handling WHICH_SET
if WHICH_SET == 'ONOM':
from datasets.dataset import onom_train_feed_epoch as train_feeder
from datasets.dataset import onom_valid_feed_epoch as valid_feeder
from datasets.dataset import onom_test_feed_epoch as test_feeder
elif WHICH_SET == 'BLIZZ':
from datasets.dataset import blizz_train_feed_epoch as train_feeder
from datasets.dataset import blizz_valid_feed_epoch as valid_feeder
from datasets.dataset import blizz_test_feed_epoch as test_feeder
elif WHICH_SET == 'MUSIC':
from datasets.dataset import music_train_feed_epoch as train_feeder
from datasets.dataset import music_valid_feed_epoch as valid_feeder
from datasets.dataset import music_test_feed_epoch as test_feeder
elif WHICH_SET == 'HUCK':
from datasets.dataset import huck_train_feed_epoch as train_feeder
from datasets.dataset import huck_valid_feed_epoch as valid_feeder
from datasets.dataset import huck_test_feed_epoch as test_feeder
def monitor(data_feeder):
"""
Cost and time of test_fn on a given dataset section.
Pass only one of `valid_feeder` or `test_feeder`.
Don't pass `train_feed`.
:returns:
Mean cost over the input dataset (data_feeder)
Total time spent
"""
_total_time = 0.
_costs = []
_data_feeder = data_feeder(BATCH_SIZE,
SEQ_LEN,
OVERLAP,
Q_LEVELS,
Q_ZERO,
Q_TYPE)
for _seqs, _reset, _mask in _data_feeder:
_start_time = time.time()
_cost = test_fn(_seqs, _mask)
_total_time += time.time() - _start_time
_costs.append(_cost)
return numpy.mean(_costs), _total_time
print "Wall clock time spent before training started: {:.2f}h"\
.format((time.time()-exp_start)/3600.)
print "Training!"
total_iters = 0
total_time = 0.
last_print_time = 0.
last_print_iters = 0
costs = []
lowest_valid_cost = numpy.finfo(numpy.float32).max
corresponding_test_cost = numpy.finfo(numpy.float32).max
new_lowest_cost = False
end_of_batch = False
epoch = 0 # Important for mostly other datasets rather than Blizz
# Initial load train dataset
tr_feeder = train_feeder(BATCH_SIZE,
SEQ_LEN,
OVERLAP,
Q_LEVELS,
Q_ZERO,
Q_TYPE)
if RESUME:
# Check if checkpoint from previous run is not corrupted.
# Then overwrite some of the variables above.
iters_to_consume, res_path, epoch, total_iters,\
[lowest_valid_cost, corresponding_test_cost, test_cost] = \
lib.resumable(path=FOLDER_PREFIX,
iter_key=iter_str,
epoch_key=epoch_str,
add_resume_counter=True,
other_keys=[lowest_valid_str,
corresp_test_str,
test_nll_str])
# At this point we saved the pkl file.
last_print_iters = total_iters
print "### RESUMING JOB FROM EPOCH {}, ITER {}".format(epoch, total_iters)
# Consumes this much iters to get to the last point in training data.
consume_time = time.time()
for i in xrange(iters_to_consume):
tr_feeder.next()
consume_time = time.time() - consume_time
print "Train data ready in {:.2f}secs after consuming {} minibatches.".\
format(consume_time, iters_to_consume)
lib.load_params(res_path)
print "Parameters from last available checkpoint loaded from path {}".format(res_path)
test_time = 0.0
while True:
# THIS IS ONE ITERATION
if total_iters % 500 == 0:
print total_iters,
total_iters += 1
try:
# Take as many mini-batches as possible from train set
mini_batch = tr_feeder.next()
except StopIteration:
# Mini-batches are finished. Load it again.
# Basically, one epoch.
tr_feeder = train_feeder(BATCH_SIZE,
SEQ_LEN,
OVERLAP,
Q_LEVELS,
Q_ZERO,
Q_TYPE)
# and start taking new mini-batches again.
mini_batch = tr_feeder.next()
epoch += 1
end_of_batch = True
print "[Another epoch]",
seqs, reset, mask = mini_batch
##Remove this
# print seqs.shape
# targ = generate_fn(seqs)
# print targ.shape
#####
start_time = time.time()
cost = train_fn(seqs, mask)
total_time += time.time() - start_time
#print "This cost:", cost, "This h0.mean()", h0.mean()
costs.append(cost)
if (TRAIN_MODE=='iters' and total_iters-last_print_iters == PRINT_ITERS) or \
(TRAIN_MODE=='time' and total_time-last_print_time >= PRINT_TIME) or \
(TRAIN_MODE=='time-iters' and total_time-last_print_time >= PRINT_TIME) or \
(TRAIN_MODE=='iters-time' and total_iters-last_print_iters >= PRINT_ITERS) or \
end_of_batch:
print "\nValidation!",
valid_cost, valid_time = monitor(valid_feeder)
print "Done!"
# Only when the validation cost is improved get the cost for test set.
if valid_cost < lowest_valid_cost:
lowest_valid_cost = valid_cost
print "\n>>> Best validation cost of {} reached. Testing!"\
.format(valid_cost),
test_cost, test_time = monitor(test_feeder)
print "Done!"
# Report last one which is the lowest on validation set:
print ">>> test cost:{}\ttotal time:{}".format(test_cost, test_time)
corresponding_test_cost = test_cost
new_lowest_cost = True
# Stdout the training progress
print_info = "epoch:{}\ttotal iters:{}\twall clock time:{:.2f}h\n"
print_info += ">>> Lowest valid cost:{}\t Corresponding test cost:{}\n"
print_info += "\ttrain cost:{:.4f}\ttotal time:{:.2f}h\tper iter:{:.3f}s\n"
print_info += "\tvalid cost:{:.4f}\ttotal time:{:.2f}h\n"
print_info += "\ttest cost:{:.4f}\ttotal time:{:.2f}h"
print_info = print_info.format(epoch,
total_iters,
(time.time()-exp_start)/3600,
lowest_valid_cost,
corresponding_test_cost,
numpy.mean(costs),
total_time/3600,
total_time/total_iters,
valid_cost,
valid_time/3600,
test_cost,
test_time/3600)
print print_info
# Save and graph training progress
x_axis_str = 'iter'
train_nll_str, valid_nll_str, test_nll_str = \
'train NLL (bits)', 'valid NLL (bits)', 'test NLL (bits)'
training_info = {'epoch' : epoch,
x_axis_str : total_iters,
train_nll_str : numpy.mean(costs),
valid_nll_str : valid_cost,
test_nll_str : test_cost,
'lowest valid cost' : lowest_valid_cost,
'correponding test cost' : corresponding_test_cost,
'train time' : total_time,
'valid time' : valid_time,
'test time' : test_time,
'wall clock time' : time.time()-exp_start}
lib.save_training_info(training_info, FOLDER_PREFIX)
print "Train info saved!",
y_axis_strs = [train_nll_str, valid_nll_str, test_nll_str]
lib.plot_traing_info(x_axis_str, y_axis_strs, FOLDER_PREFIX)
print "Plotted!"
# Generate and save samples
print "Sampling!",
tag = "e{}_i{}_t{:.2f}_tr{:.4f}_v{:.4f}"
tag = tag.format(epoch,
total_iters,
total_time/3600,
numpy.mean(cost),
valid_cost)
tag += ("_best" if new_lowest_cost else "")
# Generate samples
generate_and_save_samples(tag)
print "Done!"
# Save params of model
lib.save_params(
os.path.join(PARAMS_PATH, 'params_{}.pkl'.format(tag))
)
print "Params saved!"
if total_iters-last_print_iters == PRINT_ITERS \
or total_time-last_print_time >= PRINT_TIME:
# If we are here b/c of onom_end_of_batch, we shouldn't mess
# with costs and last_print_iters
costs = []
last_print_time += PRINT_TIME
last_print_iters += PRINT_ITERS
end_of_batch = False
new_lowest_cost = False
print "Validation Done!\nBack to Training..."
if (TRAIN_MODE=='iters' and total_iters == STOP_ITERS) or \
(TRAIN_MODE=='time' and total_time >= STOP_TIME) or \
((TRAIN_MODE=='time-iters' or TRAIN_MODE=='iters-time') and \
(total_iters == STOP_ITERS or total_time >= STOP_TIME)):
print "Done! Total iters:", total_iters, "Total time: ", total_time
print "Experiment ended at:", datetime.strftime(datetime.now(), '%Y-%m-%d %H:%M')
print "Wall clock time spent: {:.2f}h"\
.format((time.time()-exp_start)/3600)
sys.exit()
| fvalue = float(value)
if fvalue < 0 or fvalue > 1:
raise argparse.ArgumentTypeError("%s is not in [0, 1] interval!" % value)
return fvalue | identifier_body |
wavent.py | #!/usr/bin/env python
"""
WaveNets Audio Generation Model
How-to-run example:
sampleRNN$
THEANO_FLAGS=mode=FAST_RUN,device=gpu1,floatX=float32,lib.cnmem=.95 python models/one_tier/wavent.py --dim 64 --q_levels 256 --q_type linear --which_set MUSIC --batch_size 8 --wavenet_blocks 4 --dilation_layers_per_block 10 --sequence_len_to_train 1600
"""
import time
from datetime import datetime
print "Experiment started at:", datetime.strftime(datetime.now(), '%Y-%m-%d %H:%M')
exp_start = time.time()
import os, sys
sys.path.insert(1, os.getcwd())
import argparse
import numpy
numpy.random.seed(123)
np = numpy
import random
random.seed(123)
import theano
import theano.tensor as T
import theano.ifelse
import lasagne
import scipy.io.wavfile
import lib
### Parsing passed args/hyperparameters ###
def get_args():
def t_or_f(arg):
ua = str(arg).upper()
if 'TRUE'.startswith(ua):
return True
elif 'FALSE'.startswith(ua):
return False
else:
raise ValueError('Arg is neither `True` nor `False`')
def check_non_negative(value):
ivalue = int(value)
if ivalue < 0:
raise argparse.ArgumentTypeError("%s is not non-negative!" % value)
return ivalue
def check_positive(value):
ivalue = int(value)
if ivalue < 1:
raise argparse.ArgumentTypeError("%s is not positive!" % value)
return ivalue
def check_unit_interval(value):
fvalue = float(value)
if fvalue < 0 or fvalue > 1:
raise argparse.ArgumentTypeError("%s is not in [0, 1] interval!" % value)
return fvalue
# No default value here. Indicate every single arguement.
parser = argparse.ArgumentParser(
description='two_tier.py\nNo default value! Indicate every argument.')
# Hyperparameter arguements:
parser.add_argument('--exp', help='Experiment name',
type=str, required=False, default='_')
parser.add_argument('--dim', help='Dimension of RNN and MLPs',\
type=check_positive, required=True)
parser.add_argument('--q_levels', help='Number of bins for quantization of audio samples. Should be 256 for mu-law.',\
type=check_positive, required=True)
parser.add_argument('--q_type', help='Quantization in linear-scale, a-law-companding, or mu-law compandig. With mu-/a-law quantization level shoud be set as 256',\
choices=['linear', 'a-law', 'mu-law'], required=True)
#parser.add_argument('--nll_coeff', help='Value of alpha in [0, 1] for cost=alpha*NLL+(1-alpha)*FFT_cost',\
# type=check_unit_interval, required=True)
parser.add_argument('--which_set', help='ONOM, BLIZZ, or MUSIC',
choices=['ONOM', 'BLIZZ', 'MUSIC', 'HUCK'], required=True)
parser.add_argument('--batch_size', help='size of mini-batch',
type=check_positive, choices=[8, 16, 32, 64, 128, 256], required=True)
parser.add_argument('--wavenet_blocks', help='Number of wavnet blocks to use',
type=check_positive, required=True)
parser.add_argument('--dilation_layers_per_block', help='number of dilation layers per block',
type=check_positive, required=True)
parser.add_argument('--sequence_len_to_train', help='size of output map',
type=check_positive, required=True)
parser.add_argument('--debug', help='debug mode', required=False, default=False, action='store_true')
parser.add_argument('--resume', help='Resume the same model from the last checkpoint. Order of params are important. [for now]',\
required=False, default=False, action='store_true')
args = parser.parse_args()
# Create tag for this experiment based on passed args
tag = reduce(lambda a, b: a+b, sys.argv).replace('--resume', '').replace('/', '-').replace('--', '-').replace('True', 'T').replace('False', 'F')
print "Created experiment tag for these args:"
print tag
return args, tag
args, tag = get_args()
# N_FRAMES = args.n_frames # How many 'frames' to include in each truncated BPTT pass
OVERLAP = (2**args.dilation_layers_per_block - 1)*args.wavenet_blocks + 1# How many samples per frame
#GLOBAL_NORM = args.global_norm
DIM = args.dim # Model dimensionality.
Q_LEVELS = args.q_levels # How many levels to use when discretizing samples. e.g. 256 = 8-bit scalar quantization
Q_TYPE = args.q_type # log- or linear-scale
#NLL_COEFF = args.nll_coeff
WHICH_SET = args.which_set
BATCH_SIZE = args.batch_size
#DATA_PATH = args.data_path
if Q_TYPE == 'mu-law' and Q_LEVELS != 256:
raise ValueError('For mu-law Quantization levels should be exactly 256!')
# Fixed hyperparams
GRAD_CLIP = 1 # Elementwise grad clip threshold
BITRATE = 16000
# Other constants
#TRAIN_MODE = 'iters' # To use PRINT_ITERS and STOP_ITERS
TRAIN_MODE = 'time' # To use PRINT_TIME and STOP_TIME
#TRAIN_MODE = 'time-iters'
# To use PRINT_TIME for validation,
# and (STOP_ITERS, STOP_TIME), whichever happened first, for stopping exp.
#TRAIN_MODE = 'iters-time'
# To use PRINT_ITERS for validation,
# and (STOP_ITERS, STOP_TIME), whichever happened first, for stopping exp.
PRINT_ITERS = 10000 # Print cost, generate samples, save model checkpoint every N iterations.
STOP_ITERS = 100000 # Stop after this many iterations
PRINT_TIME = 90*60 # Print cost, generate samples, save model checkpoint every N seconds.
STOP_TIME = 60*60*60 # Stop after this many seconds of actual training (not including time req'd to generate samples etc.)
N_SEQS = 10 # Number of samples to generate every time monitoring.
FOLDER_PREFIX = os.path.join('results_wavenets', tag)
SEQ_LEN = args.sequence_len_to_train # Total length (# of samples) of each truncated BPTT sequence
Q_ZERO = numpy.int32(Q_LEVELS//2) # Discrete value correponding to zero amplitude
LEARNING_RATE = lib.floatX(numpy.float32(0.0001))
RESUME = args.resume
epoch_str = 'epoch'
iter_str = 'iter'
lowest_valid_str = 'lowest valid cost'
corresp_test_str = 'correponding test cost'
train_nll_str, valid_nll_str, test_nll_str = \
'train NLL (bits)', 'valid NLL (bits)', 'test NLL (bits)'
if args.debug:
import warnings
warnings.warn('----------RUNNING IN DEBUG MODE----------')
TRAIN_MODE = 'time-iters'
PRINT_TIME = 100
STOP_TIME = 300
STOP_ITERS = 1000
### Create directories ###
# FOLDER_PREFIX: root, contains:
# log.txt, __note.txt, train_log.pkl, train_log.png [, model_settings.txt]
# FOLDER_PREFIX/params: saves all checkpoint params as pkl
# FOLDER_PREFIX/samples: keeps all checkpoint samples as wav
# FOLDER_PREFIX/best: keeps the best parameters, samples, ...
if not os.path.exists(FOLDER_PREFIX):
os.makedirs(FOLDER_PREFIX)
PARAMS_PATH = os.path.join(FOLDER_PREFIX, 'params')
if not os.path.exists(PARAMS_PATH):
os.makedirs(PARAMS_PATH)
SAMPLES_PATH = os.path.join(FOLDER_PREFIX, 'samples')
if not os.path.exists(SAMPLES_PATH):
os.makedirs(SAMPLES_PATH)
BEST_PATH = os.path.join(FOLDER_PREFIX, 'best')
if not os.path.exists(BEST_PATH):
os.makedirs(BEST_PATH)
lib.print_model_settings(locals(), path=FOLDER_PREFIX, sys_arg=True)
### Creating computation graph ###
def create_wavenet_block(inp, num_dilation_layer, input_dim, output_dim, name =None):
assert name is not None
layer_out = inp
skip_contrib = []
skip_weights = lib.param(name+".parametrized_weights", lib.floatX(numpy.ones((num_dilation_layer,))))
for i in range(num_dilation_layer):
layer_out, skip_c = lib.ops.dil_conv_1D(
layer_out,
output_dim,
input_dim if i == 0 else output_dim,
2,
dilation = 2**i,
non_linearity = 'gated',
name = name+".dilation_{}".format(i+1)
)
skip_c = skip_c*skip_weights[i]
skip_contrib.append(skip_c)
skip_out = skip_contrib[-1]
j = 0
for i in range(num_dilation_layer-1):
j += 2**(num_dilation_layer-i-1)
skip_out = skip_out + skip_contrib[num_dilation_layer-2 - i][:,j:]
return layer_out, skip_out
def create_model(inp):
out = (inp.astype(theano.config.floatX)/lib.floatX(Q_LEVELS-1) - lib.floatX(0.5))
l_out = out.dimshuffle(0,1,'x')
skips = []
for i in range(args.wavenet_blocks):
l_out, skip_out = create_wavenet_block(l_out, args.dilation_layers_per_block, 1 if i == 0 else args.dim, args.dim, name = "block_{}".format(i+1))
skips.append(skip_out)
out = skips[-1]
for i in range(args.wavenet_blocks - 1):
out = out + skips[args.wavenet_blocks - 2 - i][:,(2**args.dilation_layers_per_block - 1)*(i+1):]
for i in range(3):
out = lib.ops.conv1d("out_{}".format(i+1), out, args.dim, args.dim, 1, non_linearity='relu')
out = lib.ops.conv1d("final", out, args.dim, args.q_levels, 1, non_linearity='identity')
return out
sequences = T.imatrix('sequences')
h0 = T.tensor3('h0')
reset = T.iscalar('reset')
mask = T.matrix('mask')
if args.debug:
# Solely for debugging purposes.
# Maybe I should set the compute_test_value=warn from here.
sequences.tag.test_value = numpy.zeros((BATCH_SIZE, SEQ_LEN), dtype='int32')
input_sequences = sequences[:, :-1]
target_sequences = sequences[:, (2**args.dilation_layers_per_block - 1)*args.wavenet_blocks + 1:]
target_mask = mask[:, (2**args.dilation_layers_per_block - 1)*args.wavenet_blocks + 1:]
output = create_model(input_sequences)
cost = T.nnet.categorical_crossentropy(
T.nnet.softmax(output.reshape((-1, Q_LEVELS))),
target_sequences.flatten()
)
cost = cost.reshape(target_sequences.shape)
cost = cost * target_mask
# Don't use these lines; could end up with NaN
# Specially at the end of audio files where mask is
# all zero for some of the shorter files in mini-batch.
#cost = cost.sum(axis=1) / target_mask.sum(axis=1)
#cost = cost.mean(axis=0)
# Use this one instead.
cost = cost.sum() | cost = cost * lib.floatX(numpy.log2(numpy.e))
### Getting the params, grads, updates, and Theano functions ###
params = lib.get_params(cost, lambda x: hasattr(x, 'param') and x.param==True)
lib.print_params_info(params, path=FOLDER_PREFIX)
grads = T.grad(cost, wrt=params, disconnected_inputs='warn')
grads = [T.clip(g, lib.floatX(-GRAD_CLIP), lib.floatX(GRAD_CLIP)) for g in grads]
updates = lasagne.updates.adam(grads, params, learning_rate=LEARNING_RATE)
# Training function
train_fn = theano.function(
[sequences, mask],
cost,
updates=updates,
on_unused_input='warn'
)
# Validation and Test function
test_fn = theano.function(
[sequences, mask],
cost,
on_unused_input='warn'
)
# Sampling at frame level
generate_fn = theano.function(
[sequences],
lib.ops.softmax_and_sample(output),
on_unused_input='warn'
)
def generate_and_save_samples(tag):
def write_audio_file(name, data):
data = data.astype('float32')
data -= data.min()
data /= data.max()
data -= 0.5
data *= 0.95
scipy.io.wavfile.write(
os.path.join(SAMPLES_PATH, name+'.wav'),
BITRATE,
data)
total_time = time.time()
# Generate N_SEQS' sample files, each 5 seconds long
N_SECS = 5
LENGTH = N_SECS*BITRATE
if args.debug:
LENGTH = 1024
num_prev_samples_to_use = (2**args.dilation_layers_per_block - 1)*args.wavenet_blocks + 1
samples = numpy.zeros((N_SEQS, LENGTH + num_prev_samples_to_use), dtype='int32')
samples[:, :num_prev_samples_to_use] = Q_ZERO
for t in range(LENGTH):
samples[:,num_prev_samples_to_use+t:num_prev_samples_to_use+t+1] = generate_fn(samples[:, t:t + num_prev_samples_to_use+1])
if (t > 2*BITRATE) and( t < 3*BITRATE):
samples[:,num_prev_samples_to_use+t:num_prev_samples_to_use+t+1] = Q_ZERO
total_time = time.time() - total_time
log = "{} samples of {} seconds length generated in {} seconds."
log = log.format(N_SEQS, N_SECS, total_time)
print log,
for i in xrange(N_SEQS):
samp = samples[i, num_prev_samples_to_use: ]
if Q_TYPE == 'mu-law':
from datasets.dataset import mu2linear
samp = mu2linear(samp)
elif Q_TYPE == 'a-law':
raise NotImplementedError('a-law is not implemented')
write_audio_file("sample_{}_{}".format(tag, i), samp)
### Import the data_feeder ###
# Handling WHICH_SET
if WHICH_SET == 'ONOM':
from datasets.dataset import onom_train_feed_epoch as train_feeder
from datasets.dataset import onom_valid_feed_epoch as valid_feeder
from datasets.dataset import onom_test_feed_epoch as test_feeder
elif WHICH_SET == 'BLIZZ':
from datasets.dataset import blizz_train_feed_epoch as train_feeder
from datasets.dataset import blizz_valid_feed_epoch as valid_feeder
from datasets.dataset import blizz_test_feed_epoch as test_feeder
elif WHICH_SET == 'MUSIC':
from datasets.dataset import music_train_feed_epoch as train_feeder
from datasets.dataset import music_valid_feed_epoch as valid_feeder
from datasets.dataset import music_test_feed_epoch as test_feeder
elif WHICH_SET == 'HUCK':
from datasets.dataset import huck_train_feed_epoch as train_feeder
from datasets.dataset import huck_valid_feed_epoch as valid_feeder
from datasets.dataset import huck_test_feed_epoch as test_feeder
def monitor(data_feeder):
"""
Cost and time of test_fn on a given dataset section.
Pass only one of `valid_feeder` or `test_feeder`.
Don't pass `train_feed`.
:returns:
Mean cost over the input dataset (data_feeder)
Total time spent
"""
_total_time = 0.
_costs = []
_data_feeder = data_feeder(BATCH_SIZE,
SEQ_LEN,
OVERLAP,
Q_LEVELS,
Q_ZERO,
Q_TYPE)
for _seqs, _reset, _mask in _data_feeder:
_start_time = time.time()
_cost = test_fn(_seqs, _mask)
_total_time += time.time() - _start_time
_costs.append(_cost)
return numpy.mean(_costs), _total_time
print "Wall clock time spent before training started: {:.2f}h"\
.format((time.time()-exp_start)/3600.)
print "Training!"
total_iters = 0
total_time = 0.
last_print_time = 0.
last_print_iters = 0
costs = []
lowest_valid_cost = numpy.finfo(numpy.float32).max
corresponding_test_cost = numpy.finfo(numpy.float32).max
new_lowest_cost = False
end_of_batch = False
epoch = 0 # Important for mostly other datasets rather than Blizz
# Initial load train dataset
tr_feeder = train_feeder(BATCH_SIZE,
SEQ_LEN,
OVERLAP,
Q_LEVELS,
Q_ZERO,
Q_TYPE)
if RESUME:
# Check if checkpoint from previous run is not corrupted.
# Then overwrite some of the variables above.
iters_to_consume, res_path, epoch, total_iters,\
[lowest_valid_cost, corresponding_test_cost, test_cost] = \
lib.resumable(path=FOLDER_PREFIX,
iter_key=iter_str,
epoch_key=epoch_str,
add_resume_counter=True,
other_keys=[lowest_valid_str,
corresp_test_str,
test_nll_str])
# At this point we saved the pkl file.
last_print_iters = total_iters
print "### RESUMING JOB FROM EPOCH {}, ITER {}".format(epoch, total_iters)
# Consumes this much iters to get to the last point in training data.
consume_time = time.time()
for i in xrange(iters_to_consume):
tr_feeder.next()
consume_time = time.time() - consume_time
print "Train data ready in {:.2f}secs after consuming {} minibatches.".\
format(consume_time, iters_to_consume)
lib.load_params(res_path)
print "Parameters from last available checkpoint loaded from path {}".format(res_path)
test_time = 0.0
while True:
# THIS IS ONE ITERATION
if total_iters % 500 == 0:
print total_iters,
total_iters += 1
try:
# Take as many mini-batches as possible from train set
mini_batch = tr_feeder.next()
except StopIteration:
# Mini-batches are finished. Load it again.
# Basically, one epoch.
tr_feeder = train_feeder(BATCH_SIZE,
SEQ_LEN,
OVERLAP,
Q_LEVELS,
Q_ZERO,
Q_TYPE)
# and start taking new mini-batches again.
mini_batch = tr_feeder.next()
epoch += 1
end_of_batch = True
print "[Another epoch]",
seqs, reset, mask = mini_batch
##Remove this
# print seqs.shape
# targ = generate_fn(seqs)
# print targ.shape
#####
start_time = time.time()
cost = train_fn(seqs, mask)
total_time += time.time() - start_time
#print "This cost:", cost, "This h0.mean()", h0.mean()
costs.append(cost)
if (TRAIN_MODE=='iters' and total_iters-last_print_iters == PRINT_ITERS) or \
(TRAIN_MODE=='time' and total_time-last_print_time >= PRINT_TIME) or \
(TRAIN_MODE=='time-iters' and total_time-last_print_time >= PRINT_TIME) or \
(TRAIN_MODE=='iters-time' and total_iters-last_print_iters >= PRINT_ITERS) or \
end_of_batch:
print "\nValidation!",
valid_cost, valid_time = monitor(valid_feeder)
print "Done!"
# Only when the validation cost is improved get the cost for test set.
if valid_cost < lowest_valid_cost:
lowest_valid_cost = valid_cost
print "\n>>> Best validation cost of {} reached. Testing!"\
.format(valid_cost),
test_cost, test_time = monitor(test_feeder)
print "Done!"
# Report last one which is the lowest on validation set:
print ">>> test cost:{}\ttotal time:{}".format(test_cost, test_time)
corresponding_test_cost = test_cost
new_lowest_cost = True
# Stdout the training progress
print_info = "epoch:{}\ttotal iters:{}\twall clock time:{:.2f}h\n"
print_info += ">>> Lowest valid cost:{}\t Corresponding test cost:{}\n"
print_info += "\ttrain cost:{:.4f}\ttotal time:{:.2f}h\tper iter:{:.3f}s\n"
print_info += "\tvalid cost:{:.4f}\ttotal time:{:.2f}h\n"
print_info += "\ttest cost:{:.4f}\ttotal time:{:.2f}h"
print_info = print_info.format(epoch,
total_iters,
(time.time()-exp_start)/3600,
lowest_valid_cost,
corresponding_test_cost,
numpy.mean(costs),
total_time/3600,
total_time/total_iters,
valid_cost,
valid_time/3600,
test_cost,
test_time/3600)
print print_info
# Save and graph training progress
x_axis_str = 'iter'
train_nll_str, valid_nll_str, test_nll_str = \
'train NLL (bits)', 'valid NLL (bits)', 'test NLL (bits)'
training_info = {'epoch' : epoch,
x_axis_str : total_iters,
train_nll_str : numpy.mean(costs),
valid_nll_str : valid_cost,
test_nll_str : test_cost,
'lowest valid cost' : lowest_valid_cost,
'correponding test cost' : corresponding_test_cost,
'train time' : total_time,
'valid time' : valid_time,
'test time' : test_time,
'wall clock time' : time.time()-exp_start}
lib.save_training_info(training_info, FOLDER_PREFIX)
print "Train info saved!",
y_axis_strs = [train_nll_str, valid_nll_str, test_nll_str]
lib.plot_traing_info(x_axis_str, y_axis_strs, FOLDER_PREFIX)
print "Plotted!"
# Generate and save samples
print "Sampling!",
tag = "e{}_i{}_t{:.2f}_tr{:.4f}_v{:.4f}"
tag = tag.format(epoch,
total_iters,
total_time/3600,
numpy.mean(cost),
valid_cost)
tag += ("_best" if new_lowest_cost else "")
# Generate samples
generate_and_save_samples(tag)
print "Done!"
# Save params of model
lib.save_params(
os.path.join(PARAMS_PATH, 'params_{}.pkl'.format(tag))
)
print "Params saved!"
if total_iters-last_print_iters == PRINT_ITERS \
or total_time-last_print_time >= PRINT_TIME:
# If we are here b/c of onom_end_of_batch, we shouldn't mess
# with costs and last_print_iters
costs = []
last_print_time += PRINT_TIME
last_print_iters += PRINT_ITERS
end_of_batch = False
new_lowest_cost = False
print "Validation Done!\nBack to Training..."
if (TRAIN_MODE=='iters' and total_iters == STOP_ITERS) or \
(TRAIN_MODE=='time' and total_time >= STOP_TIME) or \
((TRAIN_MODE=='time-iters' or TRAIN_MODE=='iters-time') and \
(total_iters == STOP_ITERS or total_time >= STOP_TIME)):
print "Done! Total iters:", total_iters, "Total time: ", total_time
print "Experiment ended at:", datetime.strftime(datetime.now(), '%Y-%m-%d %H:%M')
print "Wall clock time spent: {:.2f}h"\
.format((time.time()-exp_start)/3600)
sys.exit() | cost = cost / target_mask.sum()
# By default we report cross-entropy cost in bits.
# Switch to nats by commenting out this line:
# log_2(e) = 1.44269504089 | random_line_split |
wavent.py | #!/usr/bin/env python
"""
WaveNets Audio Generation Model
How-to-run example:
sampleRNN$
THEANO_FLAGS=mode=FAST_RUN,device=gpu1,floatX=float32,lib.cnmem=.95 python models/one_tier/wavent.py --dim 64 --q_levels 256 --q_type linear --which_set MUSIC --batch_size 8 --wavenet_blocks 4 --dilation_layers_per_block 10 --sequence_len_to_train 1600
"""
import time
from datetime import datetime
print "Experiment started at:", datetime.strftime(datetime.now(), '%Y-%m-%d %H:%M')
exp_start = time.time()
import os, sys
sys.path.insert(1, os.getcwd())
import argparse
import numpy
numpy.random.seed(123)
np = numpy
import random
random.seed(123)
import theano
import theano.tensor as T
import theano.ifelse
import lasagne
import scipy.io.wavfile
import lib
### Parsing passed args/hyperparameters ###
def get_args():
def t_or_f(arg):
ua = str(arg).upper()
if 'TRUE'.startswith(ua):
return True
elif 'FALSE'.startswith(ua):
return False
else:
raise ValueError('Arg is neither `True` nor `False`')
def check_non_negative(value):
ivalue = int(value)
if ivalue < 0:
raise argparse.ArgumentTypeError("%s is not non-negative!" % value)
return ivalue
def check_positive(value):
ivalue = int(value)
if ivalue < 1:
raise argparse.ArgumentTypeError("%s is not positive!" % value)
return ivalue
def | (value):
fvalue = float(value)
if fvalue < 0 or fvalue > 1:
raise argparse.ArgumentTypeError("%s is not in [0, 1] interval!" % value)
return fvalue
# No default values here. Specify every single argument.
parser = argparse.ArgumentParser(
description='two_tier.py\nNo default value! Indicate every argument.')
# Hyperparameter arguments:
parser.add_argument('--exp', help='Experiment name',
type=str, required=False, default='_')
parser.add_argument('--dim', help='Dimension of RNN and MLPs',\
type=check_positive, required=True)
parser.add_argument('--q_levels', help='Number of bins for quantization of audio samples. Should be 256 for mu-law.',\
type=check_positive, required=True)
parser.add_argument('--q_type', help='Quantization in linear-scale, a-law companding, or mu-law companding. With mu-/a-law quantization the level should be set to 256',\
choices=['linear', 'a-law', 'mu-law'], required=True)
#parser.add_argument('--nll_coeff', help='Value of alpha in [0, 1] for cost=alpha*NLL+(1-alpha)*FFT_cost',\
# type=check_unit_interval, required=True)
parser.add_argument('--which_set', help='ONOM, BLIZZ, or MUSIC',
choices=['ONOM', 'BLIZZ', 'MUSIC', 'HUCK'], required=True)
parser.add_argument('--batch_size', help='size of mini-batch',
type=check_positive, choices=[8, 16, 32, 64, 128, 256], required=True)
parser.add_argument('--wavenet_blocks', help='Number of wavenet blocks to use',
type=check_positive, required=True)
parser.add_argument('--dilation_layers_per_block', help='number of dilation layers per block',
type=check_positive, required=True)
parser.add_argument('--sequence_len_to_train', help='Total length (# of samples) of each truncated BPTT training sequence',
type=check_positive, required=True)
parser.add_argument('--debug', help='debug mode', required=False, default=False, action='store_true')
parser.add_argument('--resume', help='Resume the same model from the last checkpoint. Order of params are important. [for now]',\
required=False, default=False, action='store_true')
args = parser.parse_args()
# Create tag for this experiment based on passed args
tag = reduce(lambda a, b: a+b, sys.argv).replace('--resume', '').replace('/', '-').replace('--', '-').replace('True', 'T').replace('False', 'F')
print "Created experiment tag for these args:"
print tag
return args, tag
args, tag = get_args()
# N_FRAMES = args.n_frames # How many 'frames' to include in each truncated BPTT pass
OVERLAP = (2**args.dilation_layers_per_block - 1)*args.wavenet_blocks + 1  # Receptive field: samples of past context needed per output sample
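# Worked example (informal): with --wavenet_blocks 4 and
# --dilation_layers_per_block 10 (as in the how-to-run line above), each
# block stacks kernel-2 convs with dilations 1, 2, ..., 2**9, adding
# 2**10 - 1 = 1023 samples of context, so OVERLAP = 1023*4 + 1 = 4093.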
#GLOBAL_NORM = args.global_norm
DIM = args.dim # Model dimensionality.
Q_LEVELS = args.q_levels # How many levels to use when discretizing samples. e.g. 256 = 8-bit scalar quantization
Q_TYPE = args.q_type # log- or linear-scale
#NLL_COEFF = args.nll_coeff
WHICH_SET = args.which_set
BATCH_SIZE = args.batch_size
#DATA_PATH = args.data_path
if Q_TYPE == 'mu-law' and Q_LEVELS != 256:
raise ValueError('For mu-law Quantization levels should be exactly 256!')
# Fixed hyperparams
GRAD_CLIP = 1 # Elementwise grad clip threshold
BITRATE = 16000
# Other constants
#TRAIN_MODE = 'iters' # To use PRINT_ITERS and STOP_ITERS
TRAIN_MODE = 'time' # To use PRINT_TIME and STOP_TIME
#TRAIN_MODE = 'time-iters'
# To use PRINT_TIME for validation,
# and (STOP_ITERS, STOP_TIME), whichever happened first, for stopping exp.
#TRAIN_MODE = 'iters-time'
# To use PRINT_ITERS for validation,
# and (STOP_ITERS, STOP_TIME), whichever happened first, for stopping exp.
PRINT_ITERS = 10000 # Print cost, generate samples, save model checkpoint every N iterations.
STOP_ITERS = 100000 # Stop after this many iterations
PRINT_TIME = 90*60 # Print cost, generate samples, save model checkpoint every N seconds.
STOP_TIME = 60*60*60 # Stop after this many seconds of actual training (not including time req'd to generate samples etc.)
N_SEQS = 10 # Number of samples to generate at each monitoring step.
FOLDER_PREFIX = os.path.join('results_wavenets', tag)
SEQ_LEN = args.sequence_len_to_train # Total length (# of samples) of each truncated BPTT sequence
Q_ZERO = numpy.int32(Q_LEVELS//2) # Discrete value corresponding to zero amplitude
LEARNING_RATE = lib.floatX(numpy.float32(0.0001))
RESUME = args.resume
epoch_str = 'epoch'
iter_str = 'iter'
lowest_valid_str = 'lowest valid cost'
corresp_test_str = 'corresponding test cost'
train_nll_str, valid_nll_str, test_nll_str = \
'train NLL (bits)', 'valid NLL (bits)', 'test NLL (bits)'
if args.debug:
import warnings
warnings.warn('----------RUNNING IN DEBUG MODE----------')
TRAIN_MODE = 'time-iters'
PRINT_TIME = 100
STOP_TIME = 300
STOP_ITERS = 1000
### Create directories ###
# FOLDER_PREFIX: root, contains:
# log.txt, __note.txt, train_log.pkl, train_log.png [, model_settings.txt]
# FOLDER_PREFIX/params: saves all checkpoint params as pkl
# FOLDER_PREFIX/samples: keeps all checkpoint samples as wav
# FOLDER_PREFIX/best: keeps the best parameters, samples, ...
if not os.path.exists(FOLDER_PREFIX):
os.makedirs(FOLDER_PREFIX)
PARAMS_PATH = os.path.join(FOLDER_PREFIX, 'params')
if not os.path.exists(PARAMS_PATH):
os.makedirs(PARAMS_PATH)
SAMPLES_PATH = os.path.join(FOLDER_PREFIX, 'samples')
if not os.path.exists(SAMPLES_PATH):
os.makedirs(SAMPLES_PATH)
BEST_PATH = os.path.join(FOLDER_PREFIX, 'best')
if not os.path.exists(BEST_PATH):
os.makedirs(BEST_PATH)
lib.print_model_settings(locals(), path=FOLDER_PREFIX, sys_arg=True)
### Creating computation graph ###
def create_wavenet_block(inp, num_dilation_layer, input_dim, output_dim, name =None):
assert name is not None
layer_out = inp
skip_contrib = []
skip_weights = lib.param(name+".parametrized_weights", lib.floatX(numpy.ones((num_dilation_layer,))))
for i in range(num_dilation_layer):
layer_out, skip_c = lib.ops.dil_conv_1D(
layer_out,
output_dim,
input_dim if i == 0 else output_dim,
2,
dilation = 2**i,
non_linearity = 'gated',
name = name+".dilation_{}".format(i+1)
)
skip_c = skip_c*skip_weights[i]
skip_contrib.append(skip_c)
skip_out = skip_contrib[-1]
j = 0
for i in range(num_dilation_layer-1):
j += 2**(num_dilation_layer-i-1)
skip_out = skip_out + skip_contrib[num_dilation_layer-2 - i][:,j:]
return layer_out, skip_out
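# Alignment note (informal): a kernel-2 conv with dilation d shortens the
# time axis by d samples, so the skip output of dilation layer k (0-based)
# is 2**num_dilation_layer - 2**(k+1) samples longer than the block output.
# E.g. with num_dilation_layer=3 the loop above crops the dilation-2 layer's
# skip by j=4 and the dilation-1 layer's skip by j=6 before summing.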
def create_model(inp):
out = (inp.astype(theano.config.floatX)/lib.floatX(Q_LEVELS-1) - lib.floatX(0.5))
l_out = out.dimshuffle(0,1,'x')
skips = []
for i in range(args.wavenet_blocks):
l_out, skip_out = create_wavenet_block(l_out, args.dilation_layers_per_block, 1 if i == 0 else args.dim, args.dim, name = "block_{}".format(i+1))
skips.append(skip_out)
out = skips[-1]
for i in range(args.wavenet_blocks - 1):
out = out + skips[args.wavenet_blocks - 2 - i][:,(2**args.dilation_layers_per_block - 1)*(i+1):]
for i in range(3):
out = lib.ops.conv1d("out_{}".format(i+1), out, args.dim, args.dim, 1, non_linearity='relu')
out = lib.ops.conv1d("final", out, args.dim, args.q_levels, 1, non_linearity='identity')
return out
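# Shape walkthrough (informal): for input of length SEQ_LEN - 1, each block
# trims 2**dilation_layers_per_block - 1 timesteps, so the final output is
# (batch, SEQ_LEN - OVERLAP, q_levels) -- exactly matching
# target_sequences = sequences[:, OVERLAP:] below.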
sequences = T.imatrix('sequences')
h0 = T.tensor3('h0')
reset = T.iscalar('reset')
mask = T.matrix('mask')
if args.debug:
# Solely for debugging purposes.
# Maybe I should set the compute_test_value=warn from here.
sequences.tag.test_value = numpy.zeros((BATCH_SIZE, SEQ_LEN), dtype='int32')
input_sequences = sequences[:, :-1]
target_sequences = sequences[:, (2**args.dilation_layers_per_block - 1)*args.wavenet_blocks + 1:]
target_mask = mask[:, (2**args.dilation_layers_per_block - 1)*args.wavenet_blocks + 1:]
output = create_model(input_sequences)
cost = T.nnet.categorical_crossentropy(
T.nnet.softmax(output.reshape((-1, Q_LEVELS))),
target_sequences.flatten()
)
cost = cost.reshape(target_sequences.shape)
cost = cost * target_mask
# Don't use these lines; could end up with NaN
# Especially at the end of audio files, where the mask is
# all zero for some of the shorter files in mini-batch.
#cost = cost.sum(axis=1) / target_mask.sum(axis=1)
#cost = cost.mean(axis=0)
# Use this one instead.
cost = cost.sum()
cost = cost / target_mask.sum()
# By default we report cross-entropy cost in bits.
# Switch to nats by commenting out this line:
# log_2(e) = 1.44269504089
cost = cost * lib.floatX(numpy.log2(numpy.e))
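# Sanity check (informal): a uniform prediction over Q_LEVELS=256 bins costs
# ln(256) ~= 5.545 nats, which the line above reports as
# 5.545 * 1.4427 ~= 8.0 bits -- one bit per quantization bit, as expected.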
### Getting the params, grads, updates, and Theano functions ###
params = lib.get_params(cost, lambda x: hasattr(x, 'param') and x.param==True)
lib.print_params_info(params, path=FOLDER_PREFIX)
grads = T.grad(cost, wrt=params, disconnected_inputs='warn')
grads = [T.clip(g, lib.floatX(-GRAD_CLIP), lib.floatX(GRAD_CLIP)) for g in grads]
updates = lasagne.updates.adam(grads, params, learning_rate=LEARNING_RATE)
# Training function
train_fn = theano.function(
[sequences, mask],
cost,
updates=updates,
on_unused_input='warn'
)
# Validation and Test function
test_fn = theano.function(
[sequences, mask],
cost,
on_unused_input='warn'
)
# Sampling at frame level
generate_fn = theano.function(
[sequences],
lib.ops.softmax_and_sample(output),
on_unused_input='warn'
)
def generate_and_save_samples(tag):
def write_audio_file(name, data):
data = data.astype('float32')
data -= data.min()
data /= data.max()
data -= 0.5
data *= 0.95
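# data now lies in [-0.475, 0.475]; float32 WAV data is read as [-1, 1),
# so this headroom avoids clipping on playback.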
scipy.io.wavfile.write(
os.path.join(SAMPLES_PATH, name+'.wav'),
BITRATE,
data)
total_time = time.time()
# Generate N_SEQS' sample files, each 5 seconds long
N_SECS = 5
LENGTH = N_SECS*BITRATE
if args.debug:
LENGTH = 1024
num_prev_samples_to_use = (2**args.dilation_layers_per_block - 1)*args.wavenet_blocks + 1
samples = numpy.zeros((N_SEQS, LENGTH + num_prev_samples_to_use), dtype='int32')
samples[:, :num_prev_samples_to_use] = Q_ZERO
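# Autoregressive decoding sketch (informal): each generate_fn call below is
# fed a window of num_prev_samples_to_use + 1 samples; the graph drops the
# last one (input_sequences = sequences[:, :-1]) and the dilations reduce
# the remaining OVERLAP samples to one timestep, so the returned array has
# shape (N_SEQS, 1): the next sample for position t.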
for t in range(LENGTH):
samples[:,num_prev_samples_to_use+t:num_prev_samples_to_use+t+1] = generate_fn(samples[:, t:t + num_prev_samples_to_use+1])
# Force silence (Q_ZERO) between seconds 2 and 3 of every generated sample.
if (t > 2*BITRATE) and (t < 3*BITRATE):
samples[:,num_prev_samples_to_use+t:num_prev_samples_to_use+t+1] = Q_ZERO
total_time = time.time() - total_time
log = "{} samples of {} seconds length generated in {} seconds."
log = log.format(N_SEQS, N_SECS, total_time)
print log,
for i in xrange(N_SEQS):
samp = samples[i, num_prev_samples_to_use: ]
if Q_TYPE == 'mu-law':
from datasets.dataset import mu2linear
samp = mu2linear(samp)
elif Q_TYPE == 'a-law':
raise NotImplementedError('a-law is not implemented')
write_audio_file("sample_{}_{}".format(tag, i), samp)
### Import the data_feeder ###
# Handling WHICH_SET
if WHICH_SET == 'ONOM':
from datasets.dataset import onom_train_feed_epoch as train_feeder
from datasets.dataset import onom_valid_feed_epoch as valid_feeder
from datasets.dataset import onom_test_feed_epoch as test_feeder
elif WHICH_SET == 'BLIZZ':
from datasets.dataset import blizz_train_feed_epoch as train_feeder
from datasets.dataset import blizz_valid_feed_epoch as valid_feeder
from datasets.dataset import blizz_test_feed_epoch as test_feeder
elif WHICH_SET == 'MUSIC':
from datasets.dataset import music_train_feed_epoch as train_feeder
from datasets.dataset import music_valid_feed_epoch as valid_feeder
from datasets.dataset import music_test_feed_epoch as test_feeder
elif WHICH_SET == 'HUCK':
from datasets.dataset import huck_train_feed_epoch as train_feeder
from datasets.dataset import huck_valid_feed_epoch as valid_feeder
from datasets.dataset import huck_test_feed_epoch as test_feeder
def monitor(data_feeder):
"""
Cost and time of test_fn on a given dataset section.
Pass only one of `valid_feeder` or `test_feeder`.
Do not pass `train_feeder`.
:returns:
Mean cost over the input dataset (data_feeder)
Total time spent
"""
_total_time = 0.
_costs = []
_data_feeder = data_feeder(BATCH_SIZE,
SEQ_LEN,
OVERLAP,
Q_LEVELS,
Q_ZERO,
Q_TYPE)
for _seqs, _reset, _mask in _data_feeder:
_start_time = time.time()
_cost = test_fn(_seqs, _mask)
_total_time += time.time() - _start_time
_costs.append(_cost)
return numpy.mean(_costs), _total_time
print "Wall clock time spent before training started: {:.2f}h"\
.format((time.time()-exp_start)/3600.)
print "Training!"
total_iters = 0
total_time = 0.
last_print_time = 0.
last_print_iters = 0
costs = []
lowest_valid_cost = numpy.finfo(numpy.float32).max
corresponding_test_cost = numpy.finfo(numpy.float32).max
new_lowest_cost = False
end_of_batch = False
epoch = 0 # Mainly relevant for datasets other than BLIZZ
# Initial load train dataset
tr_feeder = train_feeder(BATCH_SIZE,
SEQ_LEN,
OVERLAP,
Q_LEVELS,
Q_ZERO,
Q_TYPE)
if RESUME:
# Check if checkpoint from previous run is not corrupted.
# Then overwrite some of the variables above.
iters_to_consume, res_path, epoch, total_iters,\
[lowest_valid_cost, corresponding_test_cost, test_cost] = \
lib.resumable(path=FOLDER_PREFIX,
iter_key=iter_str,
epoch_key=epoch_str,
add_resume_counter=True,
other_keys=[lowest_valid_str,
corresp_test_str,
test_nll_str])
# At this point we saved the pkl file.
last_print_iters = total_iters
print "### RESUMING JOB FROM EPOCH {}, ITER {}".format(epoch, total_iters)
# Consume this many minibatches to reach the last position in the training data.
consume_time = time.time()
for i in xrange(iters_to_consume):
tr_feeder.next()
consume_time = time.time() - consume_time
print "Train data ready in {:.2f}secs after consuming {} minibatches.".\
format(consume_time, iters_to_consume)
lib.load_params(res_path)
print "Parameters from last available checkpoint loaded from path {}".format(res_path)
test_time = 0.0
while True:
# THIS IS ONE ITERATION
if total_iters % 500 == 0:
print total_iters,
total_iters += 1
try:
# Take as many mini-batches as possible from train set
mini_batch = tr_feeder.next()
except StopIteration:
# Mini-batches are finished. Load it again.
# Basically, one epoch.
tr_feeder = train_feeder(BATCH_SIZE,
SEQ_LEN,
OVERLAP,
Q_LEVELS,
Q_ZERO,
Q_TYPE)
# and start taking new mini-batches again.
mini_batch = tr_feeder.next()
epoch += 1
end_of_batch = True
print "[Another epoch]",
seqs, reset, mask = mini_batch
start_time = time.time()
cost = train_fn(seqs, mask)
total_time += time.time() - start_time
#print "This cost:", cost, "This h0.mean()", h0.mean()
costs.append(cost)
if (TRAIN_MODE=='iters' and total_iters-last_print_iters == PRINT_ITERS) or \
(TRAIN_MODE=='time' and total_time-last_print_time >= PRINT_TIME) or \
(TRAIN_MODE=='time-iters' and total_time-last_print_time >= PRINT_TIME) or \
(TRAIN_MODE=='iters-time' and total_iters-last_print_iters >= PRINT_ITERS) or \
end_of_batch:
print "\nValidation!",
valid_cost, valid_time = monitor(valid_feeder)
print "Done!"
# Only when the validation cost is improved get the cost for test set.
if valid_cost < lowest_valid_cost:
lowest_valid_cost = valid_cost
print "\n>>> Best validation cost of {} reached. Testing!"\
.format(valid_cost),
test_cost, test_time = monitor(test_feeder)
print "Done!"
# Report last one which is the lowest on validation set:
print ">>> test cost:{}\ttotal time:{}".format(test_cost, test_time)
corresponding_test_cost = test_cost
new_lowest_cost = True
# Stdout the training progress
print_info = "epoch:{}\ttotal iters:{}\twall clock time:{:.2f}h\n"
print_info += ">>> Lowest valid cost:{}\t Corresponding test cost:{}\n"
print_info += "\ttrain cost:{:.4f}\ttotal time:{:.2f}h\tper iter:{:.3f}s\n"
print_info += "\tvalid cost:{:.4f}\ttotal time:{:.2f}h\n"
print_info += "\ttest cost:{:.4f}\ttotal time:{:.2f}h"
print_info = print_info.format(epoch,
total_iters,
(time.time()-exp_start)/3600,
lowest_valid_cost,
corresponding_test_cost,
numpy.mean(costs),
total_time/3600,
total_time/total_iters,
valid_cost,
valid_time/3600,
test_cost,
test_time/3600)
print print_info
# Save and graph training progress
x_axis_str = 'iter'
train_nll_str, valid_nll_str, test_nll_str = \
'train NLL (bits)', 'valid NLL (bits)', 'test NLL (bits)'
training_info = {'epoch' : epoch,
x_axis_str : total_iters,
train_nll_str : numpy.mean(costs),
valid_nll_str : valid_cost,
test_nll_str : test_cost,
'lowest valid cost' : lowest_valid_cost,
'corresponding test cost' : corresponding_test_cost,
'train time' : total_time,
'valid time' : valid_time,
'test time' : test_time,
'wall clock time' : time.time()-exp_start}
lib.save_training_info(training_info, FOLDER_PREFIX)
print "Train info saved!",
y_axis_strs = [train_nll_str, valid_nll_str, test_nll_str]
lib.plot_traing_info(x_axis_str, y_axis_strs, FOLDER_PREFIX)
print "Plotted!"
# Generate and save samples
print "Sampling!",
tag = "e{}_i{}_t{:.2f}_tr{:.4f}_v{:.4f}"
tag = tag.format(epoch,
total_iters,
total_time/3600,
numpy.mean(costs),
valid_cost)
tag += ("_best" if new_lowest_cost else "")
# Generate samples
generate_and_save_samples(tag)
print "Done!"
# Save params of model
lib.save_params(
os.path.join(PARAMS_PATH, 'params_{}.pkl'.format(tag))
)
print "Params saved!"
if total_iters-last_print_iters == PRINT_ITERS \
or total_time-last_print_time >= PRINT_TIME:
# If we are here b/c of onom_end_of_batch, we shouldn't mess
# with costs and last_print_iters
costs = []
last_print_time += PRINT_TIME
last_print_iters += PRINT_ITERS
end_of_batch = False
new_lowest_cost = False
print "Validation Done!\nBack to Training..."
if (TRAIN_MODE=='iters' and total_iters == STOP_ITERS) or \
(TRAIN_MODE=='time' and total_time >= STOP_TIME) or \
((TRAIN_MODE=='time-iters' or TRAIN_MODE=='iters-time') and \
(total_iters == STOP_ITERS or total_time >= STOP_TIME)):
print "Done! Total iters:", total_iters, "Total time: ", total_time
print "Experiment ended at:", datetime.strftime(datetime.now(), '%Y-%m-%d %H:%M')
print "Wall clock time spent: {:.2f}h"\
.format((time.time()-exp_start)/3600)
sys.exit()
| check_unit_interval | identifier_name |
utils.js | "use strict";
var _ = require('lodash');
var DirectiveFactory = require('./directive').factory;
var METHODS_EXP = /^GET|PUT|POST|DELETE|OPTIONS|HEAD|CONNECT|TRACE$/i;
var VALIDATION_REQUIRED_DEFVAL = true;
var mkobj = function (k, v) {
var o = {};
o[k] = v;
return o;
};
var tryCascadeFuncCall = function (name, func, obj) {
return function () {
try {
func.apply(obj, arguments);
} catch (e) {
var errorObj = new Error(name + (e._origError ? ' >>> ' : '.') + e.message);
errorObj._origError = true;
throw errorObj;
}
};
};
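// Usage sketch (hypothetical names): wrapping nested parser calls threads
// the failing path into one message; the first failure yields
// "outer.inner message" and each enclosing wrapper prepends with " >>> ",
// e.g. "users >>> items.unavailable rule \"foo\"".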
module.exports = function (options) {
var nestedTypes = {
object: {
filters: [],
limit: false
},
array: {
routeParamInfinite: true,
filters: [],
limit: true
}
};
var verifyStringName = function (string, format) {
format = format || options.stringNameFormat;
if (format === 'camel') {
if (!/^[a-z][a-zA-Z0-9]*$/.test(string)) {
throw new Error('string "' + string + '" has invalid stringNameFormat, must be in camel case');
}
} else if (format === 'underscore') {
if (!/^[a-z][a-z0-9_]*$/.test(string)) {
throw new Error('string "' + string + '" has invalid stringNameFormat, must be in underscore case');
}
} else if (format) {
throw new Error('options has invalid stringNameFormat name "' + format +'" ');
}
return string;
};
var optionsTypesNames = _.keys(options.types);
var nestedTypesNames = _.keys(nestedTypes);
var availableTypes = _.extend({}, nestedTypes, options.types);
var availableTypesNames = optionsTypesNames.concat(nestedTypesNames);
if (_.intersection(optionsTypesNames, nestedTypesNames).length) {
throw new Error('you can\'t use [' + nestedTypesNames.join(',') + '] types in options');
}
var parseParamsJSON = function (paramString){
var params = [];
if (paramString != null) {
params = paramString;
if (_.isString(paramString)) {
try {
params = JSON.parse(paramString);
} catch (e) {
throw new Error('has invalid JSON format in params "' + paramString + '"');
}
}
if (!_.isArray(params)) {
params = [params];
}
}
return params;
};
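// Examples (informal): parseParamsJSON('[3, 10]') -> [3, 10];
// parseParamsJSON('5') -> [5]; parseParamsJSON(null) -> [];
// a non-JSON string such as 'foo' throws 'has invalid JSON format ...'.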
var addValidationRule = function (object, ruleName, params, toStart) {
ruleName = ruleName.trim();
if (!_.contains(options.rules, ruleName)) {
throw new Error('unavailable rule "' + ruleName + '"');
}
if (ruleName === 'required' || ruleName === 'optional') {
if (object.validation.required !== VALIDATION_REQUIRED_DEFVAL) {
throw new Error('required/optional rules conflict');
}
object.validation.required = ruleName === 'required';
} else {
object.validation.rules[toStart ? 'unshift' : 'push'](mkobj(ruleName, parseParamsJSON(params || null) || []));
}
};
var hasValidationRule = function (object, ruleName) {
return object.validation && _.any(object.validation.rules, function (rule) {
return rule[ruleName] != null;
});
};
var addFilter = function (object, toEnd, name, params) {
if (!_.contains(options.filters, name)) {
throw new Error('unavailable filter "' + name + '"');
}
object.filters[toEnd ? 'push' : 'unshift'](mkobj(name, params));
};
var parseTypedItemString = function (str) {
var parsed = {}, range, min, max;
str.replace(/^([a-zA-Z][a-zA-Z_0-9]*)(?:\:?([a-zA-Z0-9_]*))(?:\{?([^\}]*)\}?)(\|?.*)$/, function (word, nameString, typeString, rangeString, filtersString) {
parsed.length = {};
parsed.filters = [];
parsed.validation = { required: VALIDATION_REQUIRED_DEFVAL, rules: [] };
// name
parsed.name = verifyStringName(nameString.trim());
// type
parsed.type = verifyStringName(typeString.trim() || options.defaultType);
if (!_.contains(availableTypesNames, parsed.type)) {
throw new Error('invalid type "' + typeString + '", must be [' + availableTypesNames.join(',') + ']');
}
// parse range string
rangeString = rangeString.replace(/\s*/g, '');
if (rangeString) {
range = rangeString.split(',');
if (range.length === 1) {
max = min = +range[0];
} else if (range.length === 2) {
min = range[0].length ? +range[0] : undefined;
max = range[1].length ? +range[1] : undefined;
}
if (!range.length
|| range.length > 2
|| (max != null && !_.isNumber(max))
|| (min != null && !_.isNumber(min))
|| (/^[0-9]+,[0-9]+$/.test(rangeString) && min > max)
) {
throw new Error('invalid range format in item string "' + str + '" {' + rangeString + '}');
}
parsed.length.min = min;
parsed.length.max = max;
}
// parse filters string
var FILTER_FORMAT_EXP = /^([a-zA-Z_]+)(.*)$/i;
if (filtersString) {
var filterSegments = filtersString.replace(/^\|/, '').split(/\s*\|\s*/g);
_.each(filterSegments, function (part) {
var name = part.replace(FILTER_FORMAT_EXP, '$1');
var params = parseParamsJSON(part.replace(FILTER_FORMAT_EXP, '$2') || null);
addFilter(parsed, true, name, params);
});
}
});
if (_.isEmpty(parsed)){
throw new Error('has invalid format');
}
return parsed;
};
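// Worked example (informal, assuming 'integer' is declared in options.types
// and 'trim' in options.filters): parseTypedItemString('age:integer{1,3}|trim')
// returns { name: 'age', type: 'integer', length: { min: 1, max: 3 },
// filters: [{ trim: [] }], validation: { required: true, rules: [] } }.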
var applyTypeOptions = function (obj, callback) {
_.each(obj, function (objT) {
var name = objT;
var params = [];
if (_.isObject(objT)) {
name = _.keys(objT)[0];
params = _.isArray(objT[name]) ? objT[name] : [objT[name]];
}
callback(name, params, objT);
});
};
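// Example (informal): applyTypeOptions(['trim', { max_length: [255] }], cb)
// calls cb('trim', []) then cb('max_length', [255]).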
var directiveFactory = new DirectiveFactory();
directiveFactory.directive('response', {
constructor: function () {
this.nested.directive('statuses', {
need: true,
verify: function (directiveKey, directiveValue, directives) {
if (!_.isArray(directiveValue)) {
throw new Error('"' + directiveKey +'" not array');
}
if (!directiveValue.length) {
throw new Error('empty');
}
if (directiveValue.length < 2) {
throw new Error('must have >= 2 items [' + directiveValue.join(',') + ']');
}
_.each(directiveValue, function (status) {
if (!options.statuses[status]) {
throw new Error('unavailable status "' + status + '"');
}
});
}
});
},
default: {},
process: function (directive, directiveData, directives) {
var response = {};
if (!_.isObject(directiveData)) {
throw new Error('Invalid type, must be object');
}
var limit = directiveData.limit;
if (limit != null) {
limit = parseInt(directiveData.limit + '', 10);
if (limit < 1 || limit !== directiveData.limit) {
throw new Error('invalid limit format');
}
}
response.output = {
data: {},
meta: {},
limit: limit || null
};
_.each(['data', 'meta'], function (name) {
this._reqParseItems(response.output[name], directiveData[name]);
}, this);
return response;
},
_reqParseItems: function (result, items) {
_.each(items, function (optionName, optionValue) {
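// lodash iterates as (value, key); for plain objects the swap below puts
// the key (the typed item string) in optionName and the nested spec in
// optionValue, matching the array case where each item is already a string.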
if (!_.isArray(items)) {
var _optionName = optionName;
optionName = optionValue;
optionValue = _optionName;
}
tryCascadeFuncCall(optionName, function () {
var item = _.omit(parseTypedItemString(optionName), ['length']);
if (!_.isEmpty(result[item.name])) {
throw new Error('duplicate name "' + item.name + '"');
}
result[item.name] = item;
if (_.contains(nestedTypesNames, item.type)) {
result[item.name].nested = {};
this._reqParseItems(result[item.name].nested, optionValue);
}
}, this)();
}, this);
}
});
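// Hypothetical response spec consumed by the directive above (all names
// illustrative, not defined in this file):
// { statuses: [200, 404], limit: 50,
//   data: { 'items:array': ['id:integer', 'title:string'] }, meta: {} }
// statuses must all be declared in options.statuses; limit, when given,
// must be a positive integer.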
directiveFactory.directive('request', {
default: {},
process: function (directive, directiveData, apiData) {
var input = {
file: false,
params: {},
query: {},
body: {}
};
var categories = _.keys(input);
_.each(directiveData, function (inputData, inputCategory) {
if (!_.contains(categories, inputCategory)) {
throw new Error('Undefined type "' + inputCategory + '"');
}
if (inputCategory === 'file') {
if (!_.isBoolean(inputData)) {
throw new Error('file type must be boolean');
}
input[inputCategory] = inputData;
} else {
this._reqInput(input[inputCategory], inputData);
}
}, this);
return {
input: input
};
},
_reqInput: function (result, inputData) {
_.each(inputData, function(optionData, optionName){
var inputItem = parseTypedItemString(optionName);
if (_.isEmpty(inputItem.name)) {
throw new Error('undefined input item name');
}
tryCascadeFuncCall(inputItem.name, function () {
optionData = _.isString(optionData) ? {validation: optionData.split(/\s*\|\s*/)} : optionData;
optionData = _.isArray(optionData) ? {validation: optionData} : optionData;
var available = { validation: [], nested: {}, filters: [], limit: null };
var availableNames = _.keys(available);
_.defaults(optionData, available);
if (_.any(optionData, function (v, k) { return !_.contains(availableNames, k); })) {
throw new Error('has invalid keys, must be [' + availableNames.join(',') + ']');
}
if (!_.isArray(optionData.validation)) {
throw new Error('has invalid string-array format');
}
var FORMAT_EXP = /^([a-z0-9_]+)(.*)$/i;
_.each(optionData.validation, function(rule){
addValidationRule(inputItem, rule.replace(FORMAT_EXP, '$1'), rule.replace(FORMAT_EXP, '$2'), false);
});
var typeOption = options.types[inputItem.type] || nestedTypes[inputItem.type];
if (typeOption.filters == null) {
typeOption.filters = [];
}
if (inputItem.length.min) {
addValidationRule(inputItem, 'min_length', [inputItem.length.min], true);
}
if (inputItem.length.max) {
addValidationRule(inputItem, 'max_length', [inputItem.length.max], true);
}
delete inputItem.length;
applyTypeOptions(typeOption.filters.concat(optionData.filters), function (filterName, filterParams) {
addFilter(inputItem, false, filterName, filterParams);
});
applyTypeOptions(typeOption.validation, function (ruleName, ruleParams) {
!hasValidationRule(inputItem, ruleName) && addValidationRule(inputItem, ruleName, ruleParams, true);
});
if (!_.isEmpty(result[inputItem.name])) {
throw new Error('duplicate item name "' + inputItem.name +'"');
}
result[inputItem.name] = inputItem;
if (_.contains(nestedTypesNames, inputItem.type)) {
if (_.isEmpty(optionData.nested)) {
throw new Error('empty nested field of nested type');
}
if (nestedTypes[inputItem.type].limit) {
result[inputItem.name].limit = optionData.limit == null ? null : optionData.limit;
} else if (optionData.limit) {
throw new Error('limit field is excess for type "' + inputItem.type + '"');
}
result[inputItem.name].nested = {};
this._reqInput(result[inputItem.name].nested, optionData.nested);
}
}, this)();
}, this);
}
});
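// Hypothetical request spec (illustrative names): { params: { 'id:integer':
// ['required'] }, query: { 'page:integer': { validation: ['optional'] } } }
// Every rule must be declared in options.rules, and each params key must
// also appear in a route url pattern (see the routes directive below).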
directiveFactory.directive('routes', {
default: [],
need: true,
verify: function (directiveKey, directiveValue, directives) {
if (!_.isArray(directiveValue)) {
throw new Error('"' + directiveKey +'" not array');
}
if (!directiveValue.length) {
throw new Error('empty');
}
},
process: function (directiveKey, directiveValue, directives) {
return _.map(directiveValue, function (route) {
if (_.isString(route)) {
var segments = route.split(/\s+/);
if (!(segments.length === 2 || segments.length === 1)) { | throw new Error('has invalid format "' + route +'", must be "METHOD URI_PATTERN"');
}
route = {
method: segments[0],
url: segments[1] == null ? '' : segments[1]
};
}
if (!METHODS_EXP.test(route.method)) {
throw new Error('has invalid method name "' + route.method + '"');
}
if (route.url == null || (_.isEmpty(route.url) && !directives.routeRootUrl) ) {
throw new Error('has empty url pattern');
}
route.method = route.method.toUpperCase();
route.name = directives.name;
if (directives.routeRootUrl != null) {
if (directives.routeRootUrl && !/^\//.test(route.url)) {
route.url = directives.routeRootUrl.replace(/\/$/, '') + '/' + route.url;
}
delete directives.routeRootUrl;
}
var requestParams = ((directives.request||{}).input||{}).params||{};
var requestParamNames = _.keys(requestParams);
var paramNames = [];
route.url = route.url.replace(/\((.*?):([a-zA-Z0-9_]*)\)/g, function (word, pattern, name) {
if (!requestParams[name]) {
throw new Error('undefined input param "' + name + '" in request spec [' + requestParamNames.join(',') + '], invalid url pattern "' + route.url + '"');
}
if (!availableTypes[requestParams[name].type].routeMask) {
throw new Error('invalid used type "' + requestParams[name].type +'" in request params. this type hasn\'t routeMask');
}
if (!pattern) {
pattern = availableTypes[requestParams[name].type].routeMask;
}
paramNames.push(name);
return '(' + pattern + ':' + name + ')';
});
// route.url = route.url.replace(options.router.arrayPattern, function (word, name) {
// if (!requestParams[name]) {
// throw new Error('undefined input param "' + name + '" in request spec [' + requestParamNames.join(',') + '], invalid url pattern "' + route.url + '"');
// }
//
// if (!availableTypes[requestParams[name].type].routeMask) {
// throw new Error('invalid used type "' + requestParams[name].type +'" in request params. this type hasn\'t routeMask');
// }
// });
// if (directives.routeUrlStrictTrailing != null) {
// if (!directives.routeUrlStrictTrailing) {
// route.url = route.url.replace(/\/?$/, '/?');
// }
// delete directives.routeUrlStrictTrailing;
// }
var diffNames = _.difference(requestParamNames, paramNames);
if (diffNames.length) {
throw new Error('conflict request/routes params [' + diffNames.join(',') +']');
}
return route;
});
}
});
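// Example (informal): given request param 'id:integer' and route
// 'GET /users/(:id)', the replace above fills the empty pattern from
// options.types.integer.routeMask (an assumed value such as '[0-9]+'),
// yielding '/users/([0-9]+:id)'.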
return function (sourceJSON, fpath) {
var resultApi = {};
var actions = {};
var mainActionDirectives = {};
_.each(sourceJSON, function (v, k) {
if (/^\.[a-zA-Z_]+/.test(k)) {
actions[k] = v;
} else {
mainActionDirectives[k] = v;
}
});
mainActionDirectives.controller = mainActionDirectives.controller || fpath.replace(/^.+\/([^\.]+)\.spec\.js$/, '$1');
mainActionDirectives.fpath = fpath;
if (_.isEmpty(mainActionDirectives.controller)) {
throw new Error('undefined controller name at ' + fpath);
}
_.each(actions, function (actionDirectives, actionName) {
actionDirectives.action = actionName.replace(/^\./, '');
if (_.isEmpty(actionDirectives.action)) {
throw new Error('undefined action name "' + actionName + '"');
}
actionDirectives.name = mainActionDirectives.controller + '.' + actionDirectives.action;
actionDirectives.title = actionDirectives.title == null ? actionDirectives.name : actionDirectives.title;
var _mainActionDirectives = _.cloneDeep(mainActionDirectives);
actionDirectives = _.merge(_mainActionDirectives, actionDirectives);
tryCascadeFuncCall(actionDirectives.name, function () {
directiveFactory.processAll(actionDirectives, _mainActionDirectives);
})();
// RESULT
resultApi[actionDirectives.name] = actionDirectives;
});
return resultApi;
};
}; | random_line_split |
|
utils.js | "use strict";
var _ = require('lodash');
var DirectiveFactory = require('./directive').factory;
var METHODS_EXP = /^GET|PUT|POST|DELETE|OPTIONS|HEAD|CONNECT|TRACE$/i;
var VALIDATION_REQUIRED_DEFVAL = true;
var mkobj = function (k, v) {
var o = {};
o[k] = v;
return o;
};
var tryCascadeFuncCall = function (name, func, obj) {
return function () {
try {
func.apply(obj, arguments);
} catch (e) {
var errorObj = new Error(name + (e._origError ? ' >>> ' : '.') + e.message);
errorObj._origError = true;
throw errorObj;
}
};
};
module.exports = function (options) {
var nestedTypes = {
object: {
filters: [],
limit: false
},
array: {
routeParamInfinite: true,
filters: [],
limit: true
}
};
var verifyStringName = function (string, format) {
format = format || options.stringNameFormat;
if (format === 'camel') {
if (!/^[a-z][a-zA-Z0-9]*$/.test(string)) {
throw new Error('string "' + string + '" has invalid stringNameFormat, must be in camel case');
}
} else if (format === 'underscore') {
if (!/^[a-z][a-z0-9_]*$/.test(string)) {
throw new Error('string "' + string + '" has invalid stringNameFormat, must be in underscore case');
}
} else if (format) {
throw new Error('options has invalid stringNameFormat name "' + format +'" ');
}
return string;
};
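// Example (informal): with options.stringNameFormat = 'camel',
// verifyStringName('userName') returns it unchanged, while
// verifyStringName('user_name') throws.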
var optionsTypesNames = _.keys(options.types);
var nestedTypesNames = _.keys(nestedTypes);
var availableTypes = _.extend({}, nestedTypes, options.types);
var availableTypesNames = optionsTypesNames.concat(nestedTypesNames);
if (_.intersection(optionsTypesNames, nestedTypesNames).length) {
throw new Error('you can\'t use [' + nestedTypesNames.join(',') + '] types in options');
}
var parseParamsJSON = function (paramString){
var params = [];
if (paramString != null) {
params = paramString;
if (_.isString(paramString)) {
try {
params = JSON.parse(paramString);
} catch (e) {
throw new Error('has invalid JSON format in params "' + paramString + '"');
}
}
if (!_.isArray(params)) {
params = [params];
}
}
return params;
};
var addValidationRule = function (object, ruleName, params, toStart) {
ruleName = ruleName.trim();
if (!_.contains(options.rules, ruleName)) {
throw new Error('unavailable rule "' + ruleName + '"');
}
if (ruleName === 'required' || ruleName === 'optional') {
if (object.validation.required !== VALIDATION_REQUIRED_DEFVAL) {
throw new Error('required/optional rules conflict');
}
object.validation.required = ruleName === 'required';
} else {
object.validation.rules[toStart ? 'unshift' : 'push'](mkobj(ruleName, parseParamsJSON(params || null) || []));
}
};
var hasValidationRule = function (object, ruleName) {
return object.validation && _.any(object.validation.rules, function (rule) {
return rule[ruleName] != null;
});
};
var addFilter = function (object, toEnd, name, params) {
if (!_.contains(options.filters, name)) {
throw new Error('unavailable filter "' + name + '"');
}
object.filters[toEnd ? 'push' : 'unshift'](mkobj(name, params));
};
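// Example (informal): addFilter(item, true, 'trim', []) pushes { trim: [] }
// onto item.filters; names missing from options.filters throw.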
var parseTypedItemString = function (str) {
var parsed = {}, range, min, max;
str.replace(/^([a-zA-Z][a-zA-Z_0-9]*)(?:\:?([a-zA-Z0-9_]*))(?:\{?([^\}]*)\}?)(\|?.*)$/, function (word, nameString, typeString, rangeString, filtersString) {
parsed.length = {};
parsed.filters = [];
parsed.validation = { required: VALIDATION_REQUIRED_DEFVAL, rules: [] };
// name
parsed.name = verifyStringName(nameString.trim());
// type
parsed.type = verifyStringName(typeString.trim() || options.defaultType);
if (!_.contains(availableTypesNames, parsed.type)) {
throw new Error('invalid type "' + typeString + '", must be [' + availableTypesNames.join(',') + ']');
}
// parse range string
rangeString = rangeString.replace(/\s*/g, '');
if (rangeString) |
// parse filters string
var FILTER_FORMAT_EXP = /^([a-zA-Z_]+)(.*)$/i;
if (filtersString) {
var filterSegments = filtersString.replace(/^\|/, '').split(/\s*\|\s*/g);
_.each(filterSegments, function (part) {
var name = part.replace(FILTER_FORMAT_EXP, '$1');
var params = parseParamsJSON(part.replace(FILTER_FORMAT_EXP, '$2') || null);
addFilter(parsed, true, name, params);
});
}
});
if (_.isEmpty(parsed)){
throw new Error('has invalid format');
}
return parsed;
};
var applyTypeOptions = function (obj, callback) {
_.each(obj, function (objT) {
var name = objT;
var params = [];
if (_.isObject(objT)) {
name = _.keys(objT)[0];
params = _.isArray(objT[name]) ? objT[name] : [objT[name]];
}
callback(name, params, objT);
});
};
var directiveFactory = new DirectiveFactory();
directiveFactory.directive('response', {
constructor: function () {
this.nested.directive('statuses', {
need: true,
verify: function (directiveKey, directiveValue, directives) {
if (!_.isArray(directiveValue)) {
throw new Error('"' + directiveKey +'" not array');
}
if (!directiveValue.length) {
throw new Error('empty');
}
if (directiveValue.length < 2) {
throw new Error('must have >= 2 items [' + directiveValue.join(',') + ']');
}
_.each(directiveValue, function (status) {
if (!options.statuses[status]) {
throw new Error('unavailable status "' + status + '"');
}
});
}
});
},
default: {},
process: function (directive, directiveData, directives) {
var response = {};
if (!_.isObject(directiveData)) {
throw new Error('Invalid type, must be object');
}
var limit = directiveData.limit;
if (limit != null) {
limit = parseInt(directiveData.limit + '', 10);
if (limit < 1 || limit !== directiveData.limit) {
throw new Error('invalid limit format');
}
}
response.output = {
data: {},
meta: {},
limit: limit || null
};
_.each(['data', 'meta'], function (name) {
this._reqParseItems(response.output[name], directiveData[name]);
}, this);
return response;
},
_reqParseItems: function (result, items) {
_.each(items, function (optionName, optionValue) {
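// lodash iterates as (value, key); for plain objects the swap below puts
// the key (the typed item string) in optionName and the nested spec in
// optionValue, matching the array case where each item is already a string.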
if (!_.isArray(items)) {
var _optionName = optionName;
optionName = optionValue;
optionValue = _optionName;
}
tryCascadeFuncCall(optionName, function () {
var item = _.omit(parseTypedItemString(optionName), ['length']);
if (!_.isEmpty(result[item.name])) {
throw new Error('duplicate name "' + item.name + '"');
}
result[item.name] = item;
if (_.contains(nestedTypesNames, item.type)) {
result[item.name].nested = {};
this._reqParseItems(result[item.name].nested, optionValue);
}
}, this)();
}, this);
}
});
directiveFactory.directive('request', {
default: {},
process: function (directive, directiveData, apiData) {
var input = {
file: false,
params: {},
query: {},
body: {}
};
var categories = _.keys(input);
_.each(directiveData, function (inputData, inputCategory) {
if (!_.contains(categories, inputCategory)) {
throw new Error('Undefined type "' + inputCategory + '"');
}
if (inputCategory === 'file') {
if (!_.isBoolean(inputData)) {
throw new Error('file type must be boolean');
}
input[inputCategory] = inputData;
} else {
this._reqInput(input[inputCategory], inputData);
}
}, this);
return {
input: input
};
},
_reqInput: function (result, inputData) {
_.each(inputData, function(optionData, optionName){
var inputItem = parseTypedItemString(optionName);
if (_.isEmpty(inputItem.name)) {
throw new Error('undefined input item name');
}
tryCascadeFuncCall(inputItem.name, function () {
optionData = _.isString(optionData) ? {validation: optionData.split(/\s*\|\s*/)} : optionData;
optionData = _.isArray(optionData) ? {validation: optionData} : optionData;
var available = { validation: [], nested: {}, filters: [], limit: null };
var availableNames = _.keys(available);
_.defaults(optionData, available);
if (_.any(optionData, function (v, k) { return !_.contains(availableNames, k); })) {
throw new Error('has invalid keys, must be [' + availableNames.join(',') + ']');
}
if (!_.isArray(optionData.validation)) {
throw new Error('has invalid string-array format');
}
var FORMAT_EXP = /^([a-z0-9_]+)(.*)$/i;
_.each(optionData.validation, function(rule){
addValidationRule(inputItem, rule.replace(FORMAT_EXP, '$1'), rule.replace(FORMAT_EXP, '$2'), false);
});
var typeOption = options.types[inputItem.type] || nestedTypes[inputItem.type];
if (typeOption.filters == null) {
typeOption.filters = [];
}
if (inputItem.length.min) {
addValidationRule(inputItem, 'min_length', [inputItem.length.min], true);
}
if (inputItem.length.max) {
addValidationRule(inputItem, 'max_length', [inputItem.length.max], true);
}
delete inputItem.length;
applyTypeOptions(typeOption.filters.concat(optionData.filters), function (filterName, filterParams) {
addFilter(inputItem, false, filterName, filterParams);
});
applyTypeOptions(typeOption.validation, function (ruleName, ruleParams) {
!hasValidationRule(inputItem, ruleName) && addValidationRule(inputItem, ruleName, ruleParams, true);
});
if (!_.isEmpty(result[inputItem.name])) {
throw new Error('duplicate item name "' + inputItem.name +'"');
}
result[inputItem.name] = inputItem;
if (_.contains(nestedTypesNames, inputItem.type)) {
if (_.isEmpty(optionData.nested)) {
throw new Error('empty nested field of nested type');
}
if (nestedTypes[inputItem.type].limit) {
result[inputItem.name].limit = optionData.limit == null ? null : optionData.limit;
} else if (optionData.limit) {
throw new Error('limit field is excess for type "' + inputItem.type + '"');
}
result[inputItem.name].nested = {};
this._reqInput(result[inputItem.name].nested, optionData.nested);
}
}, this)();
}, this);
}
});
directiveFactory.directive('routes', {
default: [],
need: true,
verify: function (directiveKey, directiveValue, directives) {
if (!_.isArray(directiveValue)) {
throw new Error('"' + directiveKey +'" not array');
}
if (!directiveValue.length) {
throw new Error('empty');
}
},
process: function (directiveKey, directiveValue, directives) {
return _.map(directiveValue, function (route) {
if (_.isString(route)) {
var segments = route.split(/\s+/);
if (!(segments.length === 2 || segments.length === 1)) {
throw new Error('has invalid format "' + route +'", must be "METHOD URI_PATTERN"');
}
route = {
method: segments[0],
url: segments[1] == null ? '' : segments[1]
};
}
if (!METHODS_EXP.test(route.method)) {
throw new Error('has invalid method name "' + route.method + '"');
}
if (route.url == null || (_.isEmpty(route.url) && !directives.routeRootUrl) ) {
throw new Error('has empty url pattern');
}
route.method = route.method.toUpperCase();
route.name = directives.name;
if (directives.routeRootUrl != null) {
if (directives.routeRootUrl && !/^\//.test(route.url)) {
route.url = directives.routeRootUrl.replace(/\/$/, '') + '/' + route.url;
}
delete directives.routeRootUrl;
}
var requestParams = ((directives.request||{}).input||{}).params||{};
var requestParamNames = _.keys(requestParams);
var paramNames = [];
route.url = route.url.replace(/\((.*?):([a-zA-Z0-9_]*)\)/g, function (word, pattern, name) {
if (!requestParams[name]) {
throw new Error('undefined input param "' + name + '" in request spec [' + requestParamNames.join(',') + '], invalid url pattern "' + route.url + '"');
}
if (!availableTypes[requestParams[name].type].routeMask) {
throw new Error('invalid used type "' + requestParams[name].type +'" in request params. this type hasn\'t routeMask');
}
if (!pattern) {
pattern = availableTypes[requestParams[name].type].routeMask;
}
paramNames.push(name);
return '(' + pattern + ':' + name + ')';
});
// route.url = route.url.replace(options.router.arrayPattern, function (word, name) {
// if (!requestParams[name]) {
// throw new Error('undefined input param "' + name + '" in request spec [' + requestParamNames.join(',') + '], invalid url pattern "' + route.url + '"');
// }
//
// if (!availableTypes[requestParams[name].type].routeMask) {
// throw new Error('invalid used type "' + requestParams[name].type +'" in request params. this type hasn\'t routeMask');
// }
// });
// if (directives.routeUrlStrictTrailing != null) {
// if (!directives.routeUrlStrictTrailing) {
// route.url = route.url.replace(/\/?$/, '/?');
// }
// delete directives.routeUrlStrictTrailing;
// }
var diffNames = _.difference(requestParamNames, paramNames);
if (diffNames.length) {
throw new Error('conflict request/routes params [' + diffNames.join(',') +']');
}
return route;
});
}
});
return function (sourceJSON, fpath) {
var resultApi = {};
var actions = {};
var mainActionDirectives = {};
_.each(sourceJSON, function (v, k) {
if (/^\.[a-zA-Z_]+/.test(k)) {
actions[k] = v;
} else {
mainActionDirectives[k] = v;
}
});
mainActionDirectives.controller = mainActionDirectives.controller || fpath.replace(/^.+\/([^\.]+)\.spec\.js$/, '$1');
mainActionDirectives.fpath = fpath;
if (_.isEmpty(mainActionDirectives.controller)) {
throw new Error('undefined controller name at ' + fpath);
}
_.each(actions, function (actionDirectives, actionName) {
actionDirectives.action = actionName.replace(/^\./, '');
if (_.isEmpty(actionDirectives.action)) {
throw new Error('undefined action name "' + actionName + '"');
}
actionDirectives.name = mainActionDirectives.controller + '.' + actionDirectives.action;
actionDirectives.title = actionDirectives.title == null ? actionDirectives.name : actionDirectives.title;
var _mainActionDirectives = _.cloneDeep(mainActionDirectives);
actionDirectives = _.merge(_mainActionDirectives, actionDirectives);
tryCascadeFuncCall(actionDirectives.name, function () {
directiveFactory.processAll(actionDirectives, _mainActionDirectives);
})();
// RESULT
resultApi[actionDirectives.name] = actionDirectives;
});
return resultApi;
};
}; | {
range = rangeString.split(',');
if (range.length === 1) {
max = min = +range[0];
} else if (range.length === 2) {
min = range[0].length ? +range[0] : undefined;
max = range[1].length ? +range[1] : undefined;
}
if (!range.length
|| range.length > 2
|| (max != null && !_.isNumber(max))
|| (min != null && !_.isNumber(min))
|| (/^[0-9]+,[0-9]+$/.test(rangeString) && min > max)
) {
throw new Error('invalid range format in item string "' + str + '" {' + rangeString + '}');
}
parsed.length.min = min;
parsed.length.max = max;
} | conditional_block |
main.rs | /*
* Copyright (c) Meta Platforms, Inc. and affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*/
mod disk;
mod isolation;
mod net;
mod runtime;
mod share;
mod ssh;
mod types;
mod utils;
mod vm;
use std::env;
use std::ffi::OsString;
use std::path::PathBuf;
use std::process::Command;
use anyhow::anyhow;
use anyhow::Context;
use clap::Args;
use clap::Parser;
use clap::Subcommand;
use image_test_lib::KvPair;
use image_test_lib::Test;
use json_arg::JsonFile;
use tempfile::tempdir;
use tracing::debug;
use tracing_subscriber::filter::LevelFilter;
use tracing_subscriber::prelude::*;
use crate::isolation::default_passthrough_envs;
use crate::isolation::is_isolated;
use crate::isolation::isolated;
use crate::isolation::Platform;
use crate::runtime::set_runtime;
use crate::types::MachineOpts;
use crate::types::RuntimeOpts;
use crate::types::VMArgs;
use crate::utils::console_output_path_for_tpx;
use crate::utils::log_command;
use crate::vm::VM;
type Result<T> = std::result::Result<T, anyhow::Error>;
#[derive(Debug, Parser)]
struct Cli {
#[command(subcommand)]
command: Commands,
}
#[derive(Debug, Subcommand)]
enum Commands {
/// Run the VM. Must be executed inside the container.
Run(RunCmdArgs),
/// Respawn inside isolated image and execute `Run` command.
Isolate(IsolateCmdArgs),
/// Run VM tests inside container.
Test(IsolateCmdArgs),
}
/// Execute the VM
#[derive(Debug, Args)]
struct | {
/// Json-encoded file for VM machine configuration
#[arg(long)]
machine_spec: JsonFile<MachineOpts>,
/// Json-encoded file describing paths of binaries required by VM
#[arg(long)]
runtime_spec: JsonFile<RuntimeOpts>,
#[clap(flatten)]
vm_args: VMArgs,
}
/// Spawn a container and execute the VM inside.
#[derive(Debug, Args)]
struct IsolateCmdArgs {
/// Path to container image.
#[arg(long)]
image: PathBuf,
/// Set these env vars in the container. If VM executes a command, these
/// env vars will also be prepended to the command.
#[arg(long)]
setenv: Vec<KvPair>,
/// Args for run command
#[clap(flatten)]
run_cmd_args: RunCmdArgs,
}
/// Actually starts the VM. This needs to run inside an ephemeral container,
/// as many resources rely on the container for cleanup.
fn run(args: &RunCmdArgs) -> Result<()> {
if !is_isolated()? {
return Err(anyhow!("run must be called from inside container"));
}
debug!("RuntimeOpts: {:?}", args.runtime_spec);
debug!("MachineOpts: {:?}", args.machine_spec);
debug!("VMArgs: {:?}", args.vm_args);
set_runtime(args.runtime_spec.clone().into_inner())
.map_err(|_| anyhow!("Failed to set runtime"))?;
Ok(VM::new(args.machine_spec.clone().into_inner(), args.vm_args.clone())?.run()?)
}
/// Enter isolated container and then respawn itself inside it with `run`
/// command and its parameters.
fn respawn(args: &IsolateCmdArgs) -> Result<()> {
let mut envs = default_passthrough_envs();
envs.extend(args.setenv.clone());
let mut vm_args = args.run_cmd_args.vm_args.clone();
vm_args.command_envs = envs.clone();
// Let's always capture console output unless it's console mode
let _console_dir;
if !vm_args.mode.console && vm_args.console_output_file.is_none() {
let dir = tempdir().context("Failed to create temp dir for console output")?;
vm_args.console_output_file = Some(dir.path().join("console.txt"));
_console_dir = dir;
}
let isolated = isolated(&args.image, envs, vm_args.get_container_output_dirs())?;
let exe = env::current_exe().context("while getting argv[0]")?;
let mut command = isolated.command(exe)?;
command
.arg("run")
.arg("--machine-spec")
.arg(args.run_cmd_args.machine_spec.path())
.arg("--runtime-spec")
.arg(args.run_cmd_args.runtime_spec.path())
.args(vm_args.to_args());
log_command(&mut command).status()?;
Ok(())
}
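// Illustrative invocation built above (paths are placeholders):
//   <current_exe> run --machine-spec machine.json --runtime-spec runtime.json <vm args...>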
/// Merge all sources of our envs into the final list of env vars we should use
/// everywhere for tests. Dedup is handled by functions that use the result.
fn get_test_envs(from_cli: &[KvPair]) -> Vec<KvPair> {
// This handles common envs like RUST_LOG
let mut envs = default_passthrough_envs();
envs.extend_from_slice(from_cli);
// forward test runner env vars to the inner test
for (key, val) in std::env::vars() {
if key.starts_with("TEST_PILOT") {
envs.push((key, OsString::from(val)).into());
}
}
envs
}
/// Validated `VMArgs` and other necessary metadata for tests.
struct ValidatedVMArgs {
/// VMArgs that will be passed into the VM with modified fields
inner: VMArgs,
/// True if the test command is listing tests
is_list: bool,
}
/// Further validate `VMArgs` parsed by clap and generate a new `VMArgs` with
/// content specific to test execution.
fn get_test_vm_args(orig_args: &VMArgs, cli_envs: &[KvPair]) -> Result<ValidatedVMArgs> {
if orig_args.timeout_secs.is_none() {
return Err(anyhow!("Test command must specify --timeout-secs."));
}
if !orig_args.output_dirs.is_empty() {
return Err(anyhow!(
"Test command must not specify --output-dirs. \
This will be parsed from env and test command parameters instead."
));
}
let envs = get_test_envs(cli_envs);
#[derive(Debug, Parser)]
struct TestArgsParser {
#[clap(subcommand)]
test: Test,
}
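// clap always expects an argv[0]; "bogus_exec" below is a throwaway
// program name so that the real test command parses as the subcommand.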
let mut orig_command = vec![OsString::from("bogus_exec")];
orig_command.extend_from_slice(
&orig_args
.mode
.command
.clone()
.ok_or(anyhow!("Test command must not be empty"))?,
);
let test_args = TestArgsParser::try_parse_from(orig_command)
.context("Test command does not match expected format of `<type> <command>`")?;
let is_list = test_args.test.is_list_tests();
let mut vm_args = orig_args.clone();
vm_args.output_dirs = test_args.test.output_dirs().into_iter().collect();
vm_args.mode.command = Some(test_args.test.into_inner_cmd());
vm_args.command_envs = envs;
vm_args.console_output_file = console_output_path_for_tpx()?;
Ok(ValidatedVMArgs {
inner: vm_args,
is_list,
})
}
/// For some tests, an explicit "list test" step is run against the test binary
/// to discover the tests to run. This command is not our intended test to
/// execute, so it's unnecessarily wasteful to execute it inside the VM. We
/// directly run it inside the container without booting VM.
fn list_test_command(args: &IsolateCmdArgs, validated_args: &ValidatedVMArgs) -> Result<Command> {
let mut output_dirs = validated_args.inner.get_container_output_dirs();
// RW bind-mount /dev/fuse for running XAR.
// More details in antlir/antlir2/testing/image_test/src/main.rs.
output_dirs.insert(PathBuf::from("/dev/fuse"));
let isolated = isolated(
&args.image,
validated_args.inner.command_envs.clone(),
output_dirs,
)?;
let mut inner_cmd = validated_args
.inner
.mode
.command
.as_ref()
.expect("command must exist here")
.iter();
let mut command = isolated.command(inner_cmd.next().expect("must have program arg"))?;
command.args(inner_cmd);
Ok(command)
}
/// For actual test command, we spawn the VM and run it.
fn vm_test_command(args: &IsolateCmdArgs, validated_args: &ValidatedVMArgs) -> Result<Command> {
let isolated = isolated(
&args.image,
validated_args.inner.command_envs.clone(),
validated_args.inner.get_container_output_dirs(),
)?;
let exe = env::current_exe().context("while getting argv[0]")?;
let mut command = isolated.command(exe)?;
command
.arg("run")
.arg("--machine-spec")
.arg(args.run_cmd_args.machine_spec.path())
.arg("--runtime-spec")
.arg(args.run_cmd_args.runtime_spec.path())
.args(validated_args.inner.to_args());
Ok(command)
}
/// `test` is similar to `respawn`, except that we assume control for some
/// inputs instead of allowing caller to pass them in. Some inputs are parsed
/// from the test command.
fn test(args: &IsolateCmdArgs) -> Result<()> {
let validated_args = get_test_vm_args(&args.run_cmd_args.vm_args, &args.setenv)?;
let mut command = if validated_args.is_list {
list_test_command(args, &validated_args)
} else {
vm_test_command(args, &validated_args)
}?;
log_command(&mut command).status()?;
Ok(())
}
fn main() -> Result<()> {
tracing_subscriber::registry()
.with(tracing_subscriber::fmt::Layer::default())
.with(
tracing_subscriber::EnvFilter::builder()
.with_default_directive(LevelFilter::INFO.into())
.from_env()
.expect("Invalid logging level set by env"),
)
.init();
Platform::set()?;
debug!("Args: {:?}", env::args());
let cli = Cli::parse();
match &cli.command {
Commands::Isolate(args) => respawn(args),
Commands::Run(args) => run(args),
Commands::Test(args) => test(args),
}
}
#[cfg(test)]
mod test {
use super::*;
use crate::types::VMModeArgs;
#[test]
fn test_get_test_envs() {
env::set_var("RUST_LOG", "hello");
env::set_var("TEST_PILOT_A", "A");
let from_cli = vec![KvPair::from(("foo", "bar"))];
assert_eq!(
get_test_envs(&from_cli),
vec![
KvPair::from(("RUST_LOG", "hello")),
KvPair::from(("foo", "bar")),
KvPair::from(("TEST_PILOT_A", "A")),
],
)
}
#[test]
fn test_get_test_vm_args() {
let valid = VMArgs {
timeout_secs: Some(1),
mode: VMModeArgs {
command: Some(["custom", "whatever"].iter().map(OsString::from).collect()),
..Default::default()
},
..Default::default()
};
let empty_env = Vec::<KvPair>::new();
let mut expected = valid.clone();
expected.mode.command = Some(vec![OsString::from("whatever")]);
let parsed = get_test_vm_args(&valid, &empty_env).expect("Parsing should succeed");
assert_eq!(parsed.inner, expected);
assert!(!parsed.is_list);
let mut timeout = valid.clone();
timeout.timeout_secs = None;
assert!(get_test_vm_args(&timeout, &empty_env).is_err());
let mut output_dirs = valid.clone();
output_dirs.output_dirs = vec![PathBuf::from("/some")];
assert!(get_test_vm_args(&output_dirs, &empty_env).is_err());
let mut command = valid;
command.mode.command = None;
assert!(get_test_vm_args(&command, &empty_env).is_err());
command.mode.command = Some(vec![OsString::from("invalid")]);
assert!(get_test_vm_args(&command, &empty_env).is_err());
}
}
| RunCmdArgs | identifier_name |
main.rs | /*
* Copyright (c) Meta Platforms, Inc. and affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*/
mod disk;
mod isolation;
mod net;
mod runtime;
mod share;
mod ssh;
mod types;
mod utils;
mod vm;
use std::env;
use std::ffi::OsString;
use std::path::PathBuf;
use std::process::Command;
use anyhow::anyhow;
use anyhow::Context;
use clap::Args;
use clap::Parser;
use clap::Subcommand;
use image_test_lib::KvPair;
use image_test_lib::Test;
use json_arg::JsonFile;
use tempfile::tempdir;
use tracing::debug;
use tracing_subscriber::filter::LevelFilter;
use tracing_subscriber::prelude::*;
use crate::isolation::default_passthrough_envs;
use crate::isolation::is_isolated;
use crate::isolation::isolated;
use crate::isolation::Platform;
use crate::runtime::set_runtime;
use crate::types::MachineOpts;
use crate::types::RuntimeOpts;
use crate::types::VMArgs;
use crate::utils::console_output_path_for_tpx;
use crate::utils::log_command;
use crate::vm::VM;
type Result<T> = std::result::Result<T, anyhow::Error>;
#[derive(Debug, Parser)]
struct Cli {
#[command(subcommand)]
command: Commands,
}
#[derive(Debug, Subcommand)]
enum Commands {
/// Run the VM. Must be executed inside the container.
Run(RunCmdArgs),
/// Respawn inside isolated image and execute `Run` command.
Isolate(IsolateCmdArgs),
/// Run VM tests inside container.
Test(IsolateCmdArgs),
}
/// Execute the VM
#[derive(Debug, Args)]
struct RunCmdArgs {
/// Json-encoded file for VM machine configuration
#[arg(long)]
machine_spec: JsonFile<MachineOpts>,
/// Json-encoded file describing paths of binaries required by VM
#[arg(long)]
runtime_spec: JsonFile<RuntimeOpts>,
#[clap(flatten)]
vm_args: VMArgs,
}
/// Spawn a container and execute the VM inside.
#[derive(Debug, Args)]
struct IsolateCmdArgs {
/// Path to container image.
#[arg(long)]
image: PathBuf,
/// Set these env vars in the container. If VM executes a command, these
/// env vars will also be prepended to the command.
#[arg(long)]
setenv: Vec<KvPair>,
/// Args for run command
#[clap(flatten)]
run_cmd_args: RunCmdArgs,
}
/// Actually starting the VM. This needs to be inside an ephemeral container as
/// lots of resources relies on container for clean up.
fn run(args: &RunCmdArgs) -> Result<()> {
if !is_isolated()? {
return Err(anyhow!("run must be called from inside container"));
}
debug!("RuntimeOpts: {:?}", args.runtime_spec);
debug!("MachineOpts: {:?}", args.machine_spec);
debug!("VMArgs: {:?}", args.vm_args);
set_runtime(args.runtime_spec.clone().into_inner())
.map_err(|_| anyhow!("Failed to set runtime"))?;
Ok(VM::new(args.machine_spec.clone().into_inner(), args.vm_args.clone())?.run()?)
}
/// Enter isolated container and then respawn itself inside it with `run`
/// command and its parameters.
fn respawn(args: &IsolateCmdArgs) -> Result<()> {
let mut envs = default_passthrough_envs();
envs.extend(args.setenv.clone());
let mut vm_args = args.run_cmd_args.vm_args.clone();
vm_args.command_envs = envs.clone();
// Let's always capture console output unless it's console mode
let _console_dir;
if !vm_args.mode.console && vm_args.console_output_file.is_none() {
let dir = tempdir().context("Failed to create temp dir for console output")?;
vm_args.console_output_file = Some(dir.path().join("console.txt"));
_console_dir = dir;
}
let isolated = isolated(&args.image, envs, vm_args.get_container_output_dirs())?;
let exe = env::current_exe().context("while getting argv[0]")?;
let mut command = isolated.command(exe)?;
command
.arg("run")
.arg("--machine-spec")
.arg(args.run_cmd_args.machine_spec.path())
.arg("--runtime-spec")
.arg(args.run_cmd_args.runtime_spec.path())
.args(vm_args.to_args());
log_command(&mut command).status()?;
Ok(())
}
/// Merge all sources of our envs into final list of env vars we should use
/// everywhere for tests. Dedup is handled by functions that use the result.
fn get_test_envs(from_cli: &[KvPair]) -> Vec<KvPair> {
// This handles common envs like RUST_LOG
let mut envs = default_passthrough_envs();
envs.extend_from_slice(from_cli);
// forward test runner env vars to the inner test
for (key, val) in std::env::vars() {
if key.starts_with("TEST_PILOT") {
envs.push((key, OsString::from(val)).into());
}
}
envs
}
/// Validated `VMArgs` and other necessary metadata for tests.
struct ValidatedVMArgs {
/// VMArgs that will be passed into the VM with modified fields
inner: VMArgs,
/// True if the test command is listing tests
is_list: bool,
}
/// Further validate `VMArgs` parsed by clap and generate a new `VMArgs` with
/// content specific to test execution.
fn get_test_vm_args(orig_args: &VMArgs, cli_envs: &[KvPair]) -> Result<ValidatedVMArgs> {
if orig_args.timeout_secs.is_none() |
if !orig_args.output_dirs.is_empty() {
return Err(anyhow!(
"Test command must not specify --output-dirs. \
This will be parsed from env and test command parameters instead."
));
}
let envs = get_test_envs(cli_envs);
#[derive(Debug, Parser)]
struct TestArgsParser {
#[clap(subcommand)]
test: Test,
}
let mut orig_command = vec![OsString::from("bogus_exec")];
orig_command.extend_from_slice(
&orig_args
.mode
.command
.clone()
.ok_or(anyhow!("Test command must not be empty"))?,
);
let test_args = TestArgsParser::try_parse_from(orig_command)
.context("Test command does not match expected format of `<type> <command>`")?;
let is_list = test_args.test.is_list_tests();
let mut vm_args = orig_args.clone();
vm_args.output_dirs = test_args.test.output_dirs().into_iter().collect();
vm_args.mode.command = Some(test_args.test.into_inner_cmd());
vm_args.command_envs = envs;
vm_args.console_output_file = console_output_path_for_tpx()?;
Ok(ValidatedVMArgs {
inner: vm_args,
is_list,
})
}
/// For some tests, an explicit "list test" step is run against the test binary
/// to discover the tests to run. This command is not our intended test to
/// execute, so it's unnecessarily wasteful to execute it inside the VM. We
/// directly run it inside the container without booting VM.
fn list_test_command(args: &IsolateCmdArgs, validated_args: &ValidatedVMArgs) -> Result<Command> {
let mut output_dirs = validated_args.inner.get_container_output_dirs();
// RW bind-mount /dev/fuse for running XAR.
// More details in antlir/antlir2/testing/image_test/src/main.rs.
output_dirs.insert(PathBuf::from("/dev/fuse"));
let isolated = isolated(
&args.image,
validated_args.inner.command_envs.clone(),
output_dirs,
)?;
let mut inner_cmd = validated_args
.inner
.mode
.command
.as_ref()
.expect("command must exist here")
.iter();
let mut command = isolated.command(inner_cmd.next().expect("must have program arg"))?;
command.args(inner_cmd);
Ok(command)
}
/// For actual test command, we spawn the VM and run it.
fn vm_test_command(args: &IsolateCmdArgs, validated_args: &ValidatedVMArgs) -> Result<Command> {
let isolated = isolated(
&args.image,
validated_args.inner.command_envs.clone(),
validated_args.inner.get_container_output_dirs(),
)?;
let exe = env::current_exe().context("while getting argv[0]")?;
let mut command = isolated.command(exe)?;
command
.arg("run")
.arg("--machine-spec")
.arg(args.run_cmd_args.machine_spec.path())
.arg("--runtime-spec")
.arg(args.run_cmd_args.runtime_spec.path())
.args(validated_args.inner.to_args());
Ok(command)
}
/// `test` is similar to `respawn`, except that we assume control for some
/// inputs instead of allowing caller to pass them in. Some inputs are parsed
/// from the test command.
fn test(args: &IsolateCmdArgs) -> Result<()> {
let validated_args = get_test_vm_args(&args.run_cmd_args.vm_args, &args.setenv)?;
let mut command = if validated_args.is_list {
list_test_command(args, &validated_args)
} else {
vm_test_command(args, &validated_args)
}?;
log_command(&mut command).status()?;
Ok(())
}
fn main() -> Result<()> {
tracing_subscriber::registry()
.with(tracing_subscriber::fmt::Layer::default())
.with(
tracing_subscriber::EnvFilter::builder()
.with_default_directive(LevelFilter::INFO.into())
.from_env()
.expect("Invalid logging level set by env"),
)
.init();
Platform::set()?;
debug!("Args: {:?}", env::args());
let cli = Cli::parse();
match &cli.command {
Commands::Isolate(args) => respawn(args),
Commands::Run(args) => run(args),
Commands::Test(args) => test(args),
}
}
#[cfg(test)]
mod test {
use super::*;
use crate::types::VMModeArgs;
#[test]
fn test_get_test_envs() {
env::set_var("RUST_LOG", "hello");
env::set_var("TEST_PILOT_A", "A");
let from_cli = vec![KvPair::from(("foo", "bar"))];
assert_eq!(
get_test_envs(&from_cli),
vec![
KvPair::from(("RUST_LOG", "hello")),
KvPair::from(("foo", "bar")),
KvPair::from(("TEST_PILOT_A", "A")),
],
)
}
#[test]
fn test_get_test_vm_args() {
let valid = VMArgs {
timeout_secs: Some(1),
mode: VMModeArgs {
command: Some(["custom", "whatever"].iter().map(OsString::from).collect()),
..Default::default()
},
..Default::default()
};
let empty_env = Vec::<KvPair>::new();
let mut expected = valid.clone();
expected.mode.command = Some(vec![OsString::from("whatever")]);
let parsed = get_test_vm_args(&valid, &empty_env).expect("Parsing should succeed");
assert_eq!(parsed.inner, expected);
assert!(!parsed.is_list);
let mut timeout = valid.clone();
timeout.timeout_secs = None;
assert!(get_test_vm_args(&timeout, &empty_env).is_err());
let mut output_dirs = valid.clone();
output_dirs.output_dirs = vec![PathBuf::from("/some")];
assert!(get_test_vm_args(&output_dirs, &empty_env).is_err());
let mut command = valid;
command.mode.command = None;
assert!(get_test_vm_args(&command, &empty_env).is_err());
command.mode.command = Some(vec![OsString::from("invalid")]);
assert!(get_test_vm_args(&command, &empty_env).is_err());
}
}
| {
return Err(anyhow!("Test command must specify --timeout-secs."));
} | conditional_block |
main.rs | /*
* Copyright (c) Meta Platforms, Inc. and affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*/
mod disk;
mod isolation;
mod net;
mod runtime;
mod share;
mod ssh;
mod types;
mod utils;
mod vm;
use std::env;
use std::ffi::OsString;
use std::path::PathBuf;
use std::process::Command;
use anyhow::anyhow;
use anyhow::Context;
use clap::Args;
use clap::Parser;
use clap::Subcommand;
use image_test_lib::KvPair;
use image_test_lib::Test;
use json_arg::JsonFile;
use tempfile::tempdir;
use tracing::debug;
use tracing_subscriber::filter::LevelFilter;
use tracing_subscriber::prelude::*;
use crate::isolation::default_passthrough_envs;
use crate::isolation::is_isolated;
use crate::isolation::isolated;
use crate::isolation::Platform;
use crate::runtime::set_runtime;
use crate::types::MachineOpts;
use crate::types::RuntimeOpts;
use crate::types::VMArgs;
use crate::utils::console_output_path_for_tpx;
use crate::utils::log_command;
use crate::vm::VM;
type Result<T> = std::result::Result<T, anyhow::Error>;
#[derive(Debug, Parser)]
struct Cli {
#[command(subcommand)]
command: Commands,
}
#[derive(Debug, Subcommand)]
enum Commands {
/// Run the VM. Must be executed inside container.
Run(RunCmdArgs),
/// Respawn inside isolated image and execute `Run` command.
Isolate(IsolateCmdArgs),
/// Run VM tests inside container.
Test(IsolateCmdArgs),
}
/// Execute the VM
#[derive(Debug, Args)]
struct RunCmdArgs {
/// Json-encoded file for VM machine configuration
#[arg(long)]
machine_spec: JsonFile<MachineOpts>,
/// Json-encoded file describing paths of binaries required by VM
#[arg(long)]
runtime_spec: JsonFile<RuntimeOpts>,
#[clap(flatten)]
vm_args: VMArgs,
}
/// Spawn a container and execute the VM inside.
#[derive(Debug, Args)]
struct IsolateCmdArgs {
/// Path to container image.
#[arg(long)]
image: PathBuf,
/// Set these env vars in the container. If VM executes a command, these
/// env vars will also be prepended to the command.
#[arg(long)]
setenv: Vec<KvPair>,
/// Args for run command
#[clap(flatten)]
run_cmd_args: RunCmdArgs,
}
/// Actually starting the VM. This needs to be inside an ephemeral container as
/// lots of resources relies on container for clean up.
fn run(args: &RunCmdArgs) -> Result<()> {
if !is_isolated()? {
return Err(anyhow!("run must be called from inside container"));
}
debug!("RuntimeOpts: {:?}", args.runtime_spec);
debug!("MachineOpts: {:?}", args.machine_spec);
debug!("VMArgs: {:?}", args.vm_args);
set_runtime(args.runtime_spec.clone().into_inner())
.map_err(|_| anyhow!("Failed to set runtime"))?;
Ok(VM::new(args.machine_spec.clone().into_inner(), args.vm_args.clone())?.run()?)
}
/// Enter isolated container and then respawn itself inside it with `run`
/// command and its parameters.
fn respawn(args: &IsolateCmdArgs) -> Result<()> {
let mut envs = default_passthrough_envs();
envs.extend(args.setenv.clone());
let mut vm_args = args.run_cmd_args.vm_args.clone();
vm_args.command_envs = envs.clone();
// Let's always capture console output unless it's console mode
let _console_dir;
if !vm_args.mode.console && vm_args.console_output_file.is_none() {
let dir = tempdir().context("Failed to create temp dir for console output")?;
vm_args.console_output_file = Some(dir.path().join("console.txt"));
_console_dir = dir;
}
let isolated = isolated(&args.image, envs, vm_args.get_container_output_dirs())?;
let exe = env::current_exe().context("while getting argv[0]")?;
let mut command = isolated.command(exe)?;
command
.arg("run")
.arg("--machine-spec")
.arg(args.run_cmd_args.machine_spec.path())
.arg("--runtime-spec")
.arg(args.run_cmd_args.runtime_spec.path())
.args(vm_args.to_args());
log_command(&mut command).status()?;
Ok(())
}
/// Merge all sources of our envs into final list of env vars we should use
/// everywhere for tests. Dedup is handled by functions that use the result.
fn get_test_envs(from_cli: &[KvPair]) -> Vec<KvPair> {
// This handles common envs like RUST_LOG
let mut envs = default_passthrough_envs();
envs.extend_from_slice(from_cli);
// forward test runner env vars to the inner test
for (key, val) in std::env::vars() {
if key.starts_with("TEST_PILOT") {
envs.push((key, OsString::from(val)).into());
}
}
envs
}
/// Validated `VMArgs` and other necessary metadata for tests.
struct ValidatedVMArgs {
/// VMArgs that will be passed into the VM with modified fields
inner: VMArgs,
/// True if the test command is listing tests
is_list: bool,
}
/// Further validate `VMArgs` parsed by clap and generate a new `VMArgs` with
/// content specific to test execution.
fn get_test_vm_args(orig_args: &VMArgs, cli_envs: &[KvPair]) -> Result<ValidatedVMArgs> {
if orig_args.timeout_secs.is_none() {
return Err(anyhow!("Test command must specify --timeout-secs."));
}
if !orig_args.output_dirs.is_empty() {
return Err(anyhow!(
"Test command must not specify --output-dirs. \
This will be parsed from env and test command parameters instead."
));
}
let envs = get_test_envs(cli_envs);
#[derive(Debug, Parser)]
struct TestArgsParser {
#[clap(subcommand)]
test: Test,
}
let mut orig_command = vec![OsString::from("bogus_exec")];
orig_command.extend_from_slice(
&orig_args
.mode
.command
.clone()
.ok_or(anyhow!("Test command must not be empty"))?,
);
let test_args = TestArgsParser::try_parse_from(orig_command)
.context("Test command does not match expected format of `<type> <command>`")?;
let is_list = test_args.test.is_list_tests();
let mut vm_args = orig_args.clone();
vm_args.output_dirs = test_args.test.output_dirs().into_iter().collect();
vm_args.mode.command = Some(test_args.test.into_inner_cmd());
vm_args.command_envs = envs;
vm_args.console_output_file = console_output_path_for_tpx()?;
Ok(ValidatedVMArgs {
inner: vm_args,
is_list,
})
}
/// For some tests, an explicit "list test" step is run against the test binary
/// to discover the tests to run. This command is not our intended test to
/// execute, so it's unnecessarily wasteful to execute it inside the VM. We
/// directly run it inside the container without booting VM.
fn list_test_command(args: &IsolateCmdArgs, validated_args: &ValidatedVMArgs) -> Result<Command> {
let mut output_dirs = validated_args.inner.get_container_output_dirs();
// RW bind-mount /dev/fuse for running XAR.
// More details in antlir/antlir2/testing/image_test/src/main.rs.
output_dirs.insert(PathBuf::from("/dev/fuse"));
let isolated = isolated(
&args.image,
validated_args.inner.command_envs.clone(),
output_dirs,
)?;
let mut inner_cmd = validated_args
.inner
.mode
.command
.as_ref()
.expect("command must exist here")
.iter();
let mut command = isolated.command(inner_cmd.next().expect("must have program arg"))?;
command.args(inner_cmd);
Ok(command)
}
/// For actual test command, we spawn the VM and run it.
fn vm_test_command(args: &IsolateCmdArgs, validated_args: &ValidatedVMArgs) -> Result<Command> {
let isolated = isolated(
&args.image,
validated_args.inner.command_envs.clone(),
validated_args.inner.get_container_output_dirs(),
)?;
let exe = env::current_exe().context("while getting argv[0]")?;
let mut command = isolated.command(exe)?;
command
.arg("run")
.arg("--machine-spec")
.arg(args.run_cmd_args.machine_spec.path())
.arg("--runtime-spec")
.arg(args.run_cmd_args.runtime_spec.path())
.args(validated_args.inner.to_args());
Ok(command)
}
/// `test` is similar to `respawn`, except that we assume control for some
/// inputs instead of allowing caller to pass them in. Some inputs are parsed
/// from the test command.
fn test(args: &IsolateCmdArgs) -> Result<()> {
let validated_args = get_test_vm_args(&args.run_cmd_args.vm_args, &args.setenv)?;
let mut command = if validated_args.is_list {
list_test_command(args, &validated_args)
} else {
vm_test_command(args, &validated_args)
}?;
log_command(&mut command).status()?;
Ok(())
}
fn main() -> Result<()> {
tracing_subscriber::registry()
.with(tracing_subscriber::fmt::Layer::default()) | )
.init();
Platform::set()?;
debug!("Args: {:?}", env::args());
let cli = Cli::parse();
match &cli.command {
Commands::Isolate(args) => respawn(args),
Commands::Run(args) => run(args),
Commands::Test(args) => test(args),
}
}
#[cfg(test)]
mod test {
use super::*;
use crate::types::VMModeArgs;
#[test]
fn test_get_test_envs() {
env::set_var("RUST_LOG", "hello");
env::set_var("TEST_PILOT_A", "A");
let from_cli = vec![KvPair::from(("foo", "bar"))];
assert_eq!(
get_test_envs(&from_cli),
vec![
KvPair::from(("RUST_LOG", "hello")),
KvPair::from(("foo", "bar")),
KvPair::from(("TEST_PILOT_A", "A")),
],
)
}
#[test]
fn test_get_test_vm_args() {
let valid = VMArgs {
timeout_secs: Some(1),
mode: VMModeArgs {
command: Some(["custom", "whatever"].iter().map(OsString::from).collect()),
..Default::default()
},
..Default::default()
};
let empty_env = Vec::<KvPair>::new();
let mut expected = valid.clone();
expected.mode.command = Some(vec![OsString::from("whatever")]);
let parsed = get_test_vm_args(&valid, &empty_env).expect("Parsing should succeed");
assert_eq!(parsed.inner, expected);
assert!(!parsed.is_list);
let mut timeout = valid.clone();
timeout.timeout_secs = None;
assert!(get_test_vm_args(&timeout, &empty_env).is_err());
let mut output_dirs = valid.clone();
output_dirs.output_dirs = vec![PathBuf::from("/some")];
assert!(get_test_vm_args(&output_dirs, &empty_env).is_err());
let mut command = valid;
command.mode.command = None;
assert!(get_test_vm_args(&command, &empty_env).is_err());
command.mode.command = Some(vec![OsString::from("invalid")]);
assert!(get_test_vm_args(&command, &empty_env).is_err());
}
} | .with(
tracing_subscriber::EnvFilter::builder()
.with_default_directive(LevelFilter::INFO.into())
.from_env()
.expect("Invalid logging level set by env"), | random_line_split |
4167.user.js
// ==UserScript==
// @name Amazon-Hennepin County Library Lookup
// @version 1.3
// @description slightly modified version of - v1.3 Search the Seattle Public Library Catalog from Amazon book listings by fatknowledge.
// @include http://*.amazon.*
// ==/UserScript==
// revision history:
// Version 1.2 - Status now displayed when all copies being held. (3/22/07)
// Version 1.2.1 -Added Transit Request status. (3/22/07)
// Version 1.2.2 - Edited var origTitle to match change in Amazon CSS (4/14/08)
// Version 1.2.3 - Edited new index pallti to utl (9/1/2009)
// Version 1.3 - Updated for 2013 Amazon and NYPL layout changes by Gareth Price (12-30-2013)
// Thanks to Dale Brayden for his assistance
(function(){
var libraryIsbnUrlPattern = 'https://catalog.hclib.org/ipac20/ipac.jsp?index=ISBN&term='
var libraryURLPatternForLink = 'http://hzapps.hclib.org/pub/ipac/link2ipac.cfm?LinkyVersion=1.3&index=ISBN&term='
var libraryURLPatternForNoMatch = 'http://hzapps.hclib.org/pub/ipac/link2ipac.cfm?LinkyVersion=1.3&index=UTL&term='
var libraryName = 'Hennepin County';
var isbn = getIsbn(window.location.href);
var isbns = new Array();
var isbnsIndex = -1;
var foundCount = 0;
// Set to true to output additional debug info
var DEBUG = true;
if(DEBUG) GM_log('Hennepin County Library Linky');
// Output status text while searching. If false, just shows final status.
var VERBOSE = false;
if (isbn!=0){
createStatusAndLibraryHTML();
if(VERBOSE) updateStatusHTML('Searching ' + libraryName + '...');
getStatusAllISBNs(isbn);
}
return;
//get all ISBNs for this book and write to global var isbns
//then call getBookStatuses
function getStatusAllISBNs(isbn) {
var wbUrl = 'http://labs.oclc.org/xisbn/' + isbn;
GM_xmlhttpRequest({
method: 'GET',
url: wbUrl,
headers: {
'User-agent': 'Mozilla/4.0 (compatible) Greasemonkey/0.3',
'Accept': 'application/atom+xml,application/xml,text/xml',
},
onload: function(responseDetails) {
if(DEBUG) GM_log(responseDetails.responseText);
var parser = new DOMParser();
var dom = parser.parseFromString(responseDetails.responseText,
"application/xml");
var isbnsDom = dom.getElementsByTagName('isbn');
// old line... limiting to only three for (var i = 0; i < isbnsDom.length; i++){
// if (isbnsDom.length > 3) {isbnsDom.length = 3;}
for (var i = 0; i < isbnsDom.length; i++){
isbns[i] = isbnsDom[i].textContent;
}
getBookStatuses();
}
});
}
//loop through all the isbns
//this gets called back after each search to do next isbn
function getBookStatuses(){
isbnsIndex++;
if(DEBUG) GM_log("getBookStatuses"+isbnsIndex+ " " + isbns.length);
if (isbnsIndex < isbns.length){
if(VERBOSE) updateStatusHTML("Searching for ISBN "+ isbns[isbnsIndex] + " in " + libraryName + '...');
getBookStatus(libraryIsbnUrlPattern, isbns[isbnsIndex]);
//when done going through isbns, update the status
} else {
if (foundCount==0){
setStatusNoneFound();
} else if (foundCount==1){
removeStatus();
} else {
setStatusColor("black");
updateStatusHTML(foundCount+ ' versions found:');
}
}
}
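// Note on the pattern: GM_xmlhttpRequest is asynchronous, so the ISBN list is
// walked one request at a time -- every terminal branch of getBookStatus()
// calls back into getBookStatuses() to advance isbnsIndex, a hand-rolled
// async loop over the array.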
//connect to library server to get book status for isbn and then insert result under the book title
//call getBookStatuses when done
function getBookStatus(libraryUrlPattern, isbn){
if(DEBUG) GM_log('Searching: '+libraryUrlPattern + isbn);
var libraryAvailability = /Checked In/;
var libraryOnOrder = /(\d+) Copies On Order/;
var libraryInProcess = /Pending/;
var libraryTransitRequest = /Transit Request/;
var libraryBeingHeld = /Being held/;
var libraryHolds = /Current Requests: (\d+)/;
var libraryCopies = /Reservable copies: (\d+)/;
var libraryDueBack = /(\d{2}\/\d{2}\/\d{4})/;
var notFound = /Sorry, could not find anything matching/;
var libraryElectronic = /(online|electronic) resource/;
var libraryUseIn = /USE IN LIBRARY/;
var libraryMultipleVersions = /(\d+) Found/;
// formats
var cd = /sound disc/;
var largeprint = /large print/;
var audiobook = /Audiobook\s/;
var ebook = /eBook\sdownload/;
var eaudiobook = /Audiobook\sdownload/;
GM_xmlhttpRequest
({
method:'GET',
url: libraryUrlPattern + isbn,
onload:function(results) {
page = results.responseText;
var libraryFormat = "Books";
if ( eaudiobook.test(page) )
{
var libraryFormat = "Audiobook downloads"
}
else if ( audiobook.test(page) )
{
var libraryFormat = "Audiobooks";
}
else if ( cd.test(page) )
{
var libraryFormat = "Compact Disc"
}
else if ( largeprint.test(page) )
{
var libraryFormat = "Large Print Books"
}
else if ( ebook.test(page) )
{
var libraryFormat = "eBook downloads"
};
if ( notFound.test(page) ){
getBookStatuses();
}
else if ( libraryAvailability.test(page) )
{
var copies = page.match(libraryCopies)[1]
setLibraryHTML(
libraryUrlPattern, isbn,
"On the shelf now!",
libraryFormat + " available now at " + libraryName + " Library (owns " + copies + " copies)",
"green"
// "#2bff81" //light green
);
foundCount++;
getBookStatuses();
}
else if ( libraryUseIn.test(page) )
{
setLibraryHTML(
libraryUrlPattern, isbn,
"On the shelf now!",
"Available in reference section at "+ libraryName,
"green"
// "#2bff81" //light green
);
foundCount++;
getBookStatuses();
}
else if ( libraryOnOrder.test(page) )
{
var CopiesOnOrder = page.match(libraryOnOrder)[1]
var holds = page.match(libraryHolds)[1]
setLibraryHTML(
libraryUrlPattern, isbn,
"On order!",
libraryFormat + " on order. Request from " + libraryName + " Library (" + CopiesOnOrder + " copies on order, " + holds + " requests)",
"#AA7700" // dark yellow
);
foundCount++;
getBookStatuses();
}
else if ( libraryHolds.test(page) ) {
var holds = page.match(libraryHolds)[1]
var copies = page.match(libraryCopies)[1]
//var holdsStr = page.match(holds);
//if(!holdsStr) {
// holdsStr = 'On hold';
//}
if (holds != 0) {var howmany = holds} else {var howmany = "no"};
setLibraryHTML(
libraryUrlPattern, isbn,
holds + " Requests",
"Currently " + howmay + " requests on " + copies + " " + libraryFormat + ". Request from " + libraryName + " Library",
"#AA7700" // dark yellow
);
foundCount++;
getBookStatuses();
}
else if ( libraryBeingHeld.test(page) )
{
var holds = page.match(libraryHolds)[1]
var copies = page.match(libraryCopies)[1]
setLibraryHTML(
libraryUrlPattern, isbn,
"All copies on request shelf",
"All " + libraryFormat + " on request shelf. Request from " + libraryName + " Library (currently " + holds + " requests on " + copies + " copies)",
"#AA7700" // dark yellow
);
foundCount++;
getBookStatuses();
}
else if ( libraryInProcess.test(page) )
{
// `copies` was never populated on this path in the original; fall back to "?"
var copies = (page.match(libraryCopies) || [0, "?"])[1];
setLibraryHTML(
libraryUrlPattern, isbn,
"In process!",
libraryFormat + " available soon at " + libraryName + " Library! (" + copies + " copies pending)",
"#AA7700" // dark yellow
);
foundCount++;
getBookStatuses();
}
else if ( libraryDueBack.test(page) )
{
var due = page.match(libraryDueBack)[1];
setLibraryHTML(
libraryUrlPattern, isbn,
"Due back " + due,
libraryFormat + " due back on " + due + " at "+ libraryName,
"#AA7700" // dark yellow
);
foundCount++;
getBookStatuses();
}
else if ( libraryMultipleVersions.test(page) )
{
var versions = page.match(libraryMultipleVersions)[1];
setLibraryHTML(
libraryUrlPattern, isbn,
"Multiple versions",
versions + " version(s) listed at " + libraryName,
"green" // dark yellow
);
foundCount++;
getBookStatuses();
}
else if ( libraryElectronic.test(page) )
{
if (libraryFormat != "Book")
{ var digital = libraryFormat}
else
{ var digital = "Digital"};
setLibraryHTML(
libraryUrlPattern, isbn,
"On the e-shelf now!",
digital + " available now at "+ libraryName,
"green"
);
foundCount++;
getBookStatuses();
}
else if ( foundCount == 0)
{
setLibraryHTML(
libraryUrlPattern, isbn,
"Check for other editions",
"This edition not in " + libraryName + " Library. Click to check for other editions.",
"red"
);
getBookStatuses(); // keep walking the remaining ISBNs
}
}
});
}
function createStatusAndLibraryHTML() {
var title_node = getTitleNode();
if(!title_node) {
if(DEBUG) GM_log("can't find title node");
return null;
}
var h1_node = title_node.parentNode;
var br = document.createElement('br');
//the div for library status when found
var splLinkyDiv = document.createElement('div');
splLinkyDiv.id = 'splLinkyLibraryHTML';
//resize to 12px to get out of the enlarged h1 size and return back to normal
//splLinkyDiv.style.fontSize = '12px';
//splLinkyDiv.style.color = 'black';
splLinkyDiv.setAttribute('style','color:black\;' + 'background-color:#FFFF99\;' + 'font-size:12px;' + 'padding:3px;');
//How lame is this javascript DOM syntax? Instead of having an insertAfter function, you have an insertBefore
//and then pass in the .nextSibling attribute of the element. Really inuitive guys.
h1_node.insertBefore(splLinkyDiv, title_node.nextSibling);
h1_node.insertBefore(br, title_node.nextSibling);
//the div for status as checks are occuring
var splStatusDiv = document.createElement('div');
splStatusDiv.id = 'splLinkyStatusHTML';
//resize to 12px to get out of the enlarged h1 size and return back to normal
splStatusDiv.style.fontSize = '12px';
splStatusDiv.style.color = 'brown';
h1_node.insertBefore(splStatusDiv, splLinkyDiv);
// h1_node.insertBefore(br, title_node.nextSibling);
}
function updateStatusHTML(text) {
var splStatusDiv = document.getElementById('splLinkyStatusHTML');
if (splStatusDiv == null) { return; }
if (splStatusDiv.firstChild){
splStatusDiv.removeChild(splStatusDiv.firstChild);
}
splStatusDiv.appendChild(document.createTextNode(text));
}
//add status of book below previous ones
function setLibraryHTML(libraryUrlPattern, isbn, title, linktext, color) {
var splLinkyDiv = document.getElementById('splLinkyLibraryHTML');
if (splLinkyDiv == null) { return; }
var link = document.createElement('a');
link.setAttribute('title', title );
link.setAttribute('href', libraryUrlPattern+isbn);
link.setAttribute('target', "_blank");
link.style.color = color;
var label = document.createTextNode( linktext );
link.appendChild(label);
//append to existing content
splLinkyDiv.appendChild(link);
splLinkyDiv.appendChild(document.createElement('br'));
}
//none found
//add link to search by title
function setStatusNoneFound() {
var title = getTitle();
var splStatusDiv = document.getElementById('splLinkyStatusHTML');
if (splStatusDiv == null) { return; }
var link = document.createElement('a');
link.setAttribute('title', title );
link.setAttribute('href', libraryURLPatternForNoMatch + encodeURIComponent(title));
link.setAttribute('target', "_blank");
link.style.color = "red";
var label = document.createTextNode('Not found. Search by title at ' + libraryName );
link.appendChild(label);
//remove existing content
if (splStatusDiv.firstChild){
splStatusDiv.removeChild(splStatusDiv.firstChild);
}
splStatusDiv.appendChild(link);
}
function setStatusColor(color){
var splStatusDiv = document.getElementById('splLinkyStatusHTML');
if (splStatusDiv == null) { return; }
splStatusDiv.style.color = color;
}
function removeStatus(){
var splStatusDiv = document.getElementById('splLinkyStatusHTML');
splStatusDiv.removeChild(splStatusDiv.firstChild);
}
//check if there is a ISBN in the URL
//URL looks like http://www.amazon.com/Liseys-Story-Stephen-King/dp/0743289412/ref=xarw/002-5799652-4968811
function getIsbn(url){
try {
//match if there is a / followed by a 7-9 digit number followed by either another number or an x
//followed by a / or end of url
var isbn = url.match(/\/(\d{7,9}[\d|X])(\/|$)/)[1];
} catch (e) { return 0; }
return isbn;
}
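// For example (URL shape from the comment above):
// getIsbn("http://www.amazon.com/Liseys-Story-Stephen-King/dp/0743289412/ref=xarw")
// returns "0743289412"; URLs without an ISBN-shaped segment return 0.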
function getTitle(){
var title = getTitleNode();
if (title==null) { return null; }
//remove words in parentheses and subtitles (anything after a colon)
return title.textContent.replace(/\(.+\)/, '').replace(/:.*/, '');
}
// Find the node containing the book title
function getTitleNode()
{
// Amazon has a number of different page layouts that put the title in different tags
// This is an array of xpaths that can contain an item's title
var titlePaths = [
"//span[@id='btAsinTitle']/node()[not(self::span)]",
"//h1[@id='title']/node()[not(self::span)]"
];
for(var i in titlePaths) {
var nodes = document.evaluate(titlePaths[ i ], document, null, XPathResult.UNORDERED_NODE_ITERATOR_TYPE, null);
var thisNode = nodes.iterateNext();
var titleNode;
// Take the first matching title node found
while(thisNode){
if(DEBUG) GM_log( thisNode.textContent );
titleNode = thisNode;
if(titleNode) {
break;
}
thisNode = nodes.iterateNext();
}
}
if (titleNode == null || !nodes) {
GM_log("can't find title node");
return null;
} else {
if(DEBUG) GM_log("Found title node: " + titleNode.textContent);
}
return titleNode;
}
}
)();
error.rs
//! 9P error representations.
//!
//! In 9P2000 errors are represented as strings.
//! All the error strings in this module are imported from include/net/9p/error.c of the Linux kernel.
//!
//! By contrast, in 9P2000.L, errors are represented as numbers (errno).
//! Using the Linux system errno numbers is the expected behaviour.
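//!
//! A minimal usage sketch (names follow the definitions below in this module):
//!
//! ```ignore
//! // Map an errno into the error type carried by a 9P2000.L Rlerror reply.
//! let err = Error::from(errno::ENOENT);
//! assert_eq!(err.errno(), errno::ENOENT);
//! ```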
extern crate nix;
use error::errno::*;
use std::error as stderror;
use std::io::ErrorKind::*;
use std::{fmt, io};
fn errno_from_ioerror(e: &io::Error) -> nix::errno::Errno {
e.raw_os_error()
.map(nix::errno::from_i32)
.unwrap_or_else(|| match e.kind() {
NotFound => ENOENT,
PermissionDenied => EPERM,
ConnectionRefused => ECONNREFUSED,
ConnectionReset => ECONNRESET,
ConnectionAborted => ECONNABORTED,
NotConnected => ENOTCONN,
AddrInUse => EADDRINUSE,
AddrNotAvailable => EADDRNOTAVAIL,
BrokenPipe => EPIPE,
AlreadyExists => EALREADY,
WouldBlock => EAGAIN,
InvalidInput => EINVAL,
InvalidData => EINVAL,
TimedOut => ETIMEDOUT,
WriteZero => EAGAIN,
Interrupted => EINTR,
Other | _ => EIO,
})
}
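// A minimal sketch (not in the original source): exercising the ErrorKind
// fallback mapping above for errors that carry no raw OS errno. Assumes only
// the imports already in scope at the top of this module.
#[cfg(test)]
mod errno_fallback_tests {
    use super::*;

    #[test]
    fn kind_maps_to_errno_when_no_raw_code() {
        // io::Error::new() has no raw_os_error(), so the kind mapping applies.
        let e = io::Error::new(NotFound, "missing");
        assert_eq!(errno_from_ioerror(&e), ENOENT);
    }
}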
/// 9P error type which is convertible to an errno.
///
/// The value of `Error::errno()` will be used for Rlerror.
///
/// # Protocol
/// 9P2000.L
#[derive(Debug)]
pub enum Error {
/// System error containing an errno.
No(nix::errno::Errno),
/// I/O error.
Io(io::Error),
}
impl Error {
/// Get an errno representation.
pub fn errno(&self) -> nix::errno::Errno {
match *self {
Error::No(ref e) => e.clone(),
Error::Io(ref e) => errno_from_ioerror(e),
}
} | }
impl fmt::Display for Error {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
match *self {
Error::No(ref e) => write!(f, "System error: {}", e.desc()),
Error::Io(ref e) => write!(f, "I/O error: {}", e),
}
}
}
impl stderror::Error for Error {
fn description(&self) -> &str {
match *self {
Error::No(ref e) => e.desc(),
Error::Io(ref e) => e.description(),
}
}
fn cause(&self) -> Option<&stderror::Error> {
match *self {
Error::No(_) => None,
Error::Io(ref e) => Some(e),
}
}
}
impl From<io::Error> for Error {
fn from(e: io::Error) -> Self {
Error::Io(e)
}
}
impl<'a> From<&'a io::Error> for Error {
fn from(e: &'a io::Error) -> Self {
Error::No(errno_from_ioerror(e))
}
}
impl From<nix::errno::Errno> for Error {
fn from(e: nix::errno::Errno) -> Self {
Error::No(e)
}
}
impl From<nix::Error> for Error {
fn from(e: nix::Error) -> Self {
Error::No(e.errno())
}
}
/// The system errno definitions.
///
/// # Protocol
/// 9P2000.L
pub mod errno {
extern crate nix;
pub use self::nix::errno::Errno::*;
}
/// 9P error strings imported from Linux.
///
/// # Protocol
/// 9P2000
pub mod string {
pub const EPERM: &'static str = "Operation not permitted";
pub const EPERM_WSTAT: &'static str = "wstat prohibited";
pub const ENOENT: &'static str = "No such file or directory";
pub const ENOENT_DIR: &'static str = "directory entry not found";
pub const ENOENT_FILE: &'static str = "file not found";
pub const EINTR: &'static str = "Interrupted system call";
pub const EIO: &'static str = "Input/output error";
pub const ENXIO: &'static str = "No such device or address";
pub const E2BIG: &'static str = "Argument list too long";
pub const EBADF: &'static str = "Bad file descriptor";
pub const EAGAIN: &'static str = "Resource temporarily unavailable";
pub const ENOMEM: &'static str = "Cannot allocate memory";
pub const EACCES: &'static str = "Permission denied";
pub const EFAULT: &'static str = "Bad address";
pub const ENOTBLK: &'static str = "Block device required";
pub const EBUSY: &'static str = "Device or resource busy";
pub const EEXIST: &'static str = "File exists";
pub const EXDEV: &'static str = "Invalid cross-device link";
pub const ENODEV: &'static str = "No such device";
pub const ENOTDIR: &'static str = "Not a directory";
pub const EISDIR: &'static str = "Is a directory";
pub const EINVAL: &'static str = "Invalid argument";
pub const ENFILE: &'static str = "Too many open files in system";
pub const EMFILE: &'static str = "Too many open files";
pub const ETXTBSY: &'static str = "Text file busy";
pub const EFBIG: &'static str = "File too large";
pub const ENOSPC: &'static str = "No space left on device";
pub const ESPIPE: &'static str = "Illegal seek";
pub const EROFS: &'static str = "Read-only file system";
pub const EMLINK: &'static str = "Too many links";
pub const EPIPE: &'static str = "Broken pipe";
pub const EDOM: &'static str = "Numerical argument out of domain";
pub const ERANGE: &'static str = "Numerical result out of range";
pub const EDEADLK: &'static str = "Resource deadlock avoided";
pub const ENAMETOOLONG: &'static str = "File name too long";
pub const ENOLCK: &'static str = "No locks available";
pub const ENOSYS: &'static str = "Function not implemented";
pub const ENOTEMPTY: &'static str = "Directory not empty";
pub const ELOOP: &'static str = "Too many levels of symbolic links";
pub const ENOMSG: &'static str = "No message of desired type";
pub const EIDRM: &'static str = "Identifier removed";
pub const ENODATA: &'static str = "No data available";
pub const ENONET: &'static str = "Machine is not on the network";
pub const ENOPKG: &'static str = "Package not installed";
pub const EREMOTE: &'static str = "Object is remote";
pub const ENOLINK: &'static str = "Link has been severed";
pub const ECOMM: &'static str = "Communication error on send";
pub const EPROTO: &'static str = "Protocol error";
pub const EBADMSG: &'static str = "Bad message";
pub const EBADFD: &'static str = "File descriptor in bad state";
pub const ESTRPIPE: &'static str = "Streams pipe error";
pub const EUSERS: &'static str = "Too many users";
pub const ENOTSOCK: &'static str = "Socket operation on non-socket";
pub const EMSGSIZE: &'static str = "Message too long";
pub const ENOPROTOOPT: &'static str = "Protocol not available";
pub const EPROTONOSUPPORT: &'static str = "Protocol not supported";
pub const ESOCKTNOSUPPORT: &'static str = "Socket type not supported";
pub const EOPNOTSUPP: &'static str = "Operation not supported";
pub const EPFNOSUPPORT: &'static str = "Protocol family not supported";
pub const ENETDOWN: &'static str = "Network is down";
pub const ENETUNREACH: &'static str = "Network is unreachable";
pub const ENETRESET: &'static str = "Network dropped connection on reset";
pub const ECONNABORTED: &'static str = "Software caused connection abort";
pub const ECONNRESET: &'static str = "Connection reset by peer";
pub const ENOBUFS: &'static str = "No buffer space available";
pub const EISCONN: &'static str = "Transport endpoint is already connected";
pub const ENOTCONN: &'static str = "Transport endpoint is not connected";
pub const ESHUTDOWN: &'static str = "Cannot send after transport endpoint shutdown";
pub const ETIMEDOUT: &'static str = "Connection timed out";
pub const ECONNREFUSED: &'static str = "Connection refused";
pub const EHOSTDOWN: &'static str = "Host is down";
pub const EHOSTUNREACH: &'static str = "No route to host";
pub const EALREADY: &'static str = "Operation already in progress";
pub const EINPROGRESS: &'static str = "Operation now in progress";
pub const EISNAM: &'static str = "Is a named type file";
pub const EREMOTEIO: &'static str = "Remote I/O error";
pub const EDQUOT: &'static str = "Disk quota exceeded";
pub const EBADF2: &'static str = "fid unknown or out of range";
pub const EACCES2: &'static str = "permission denied";
pub const ENOENT_FILE2: &'static str = "file does not exist";
pub const ECONNREFUSED2: &'static str = "authentication failed";
pub const ESPIPE2: &'static str = "bad offset in directory read";
pub const EBADF3: &'static str = "bad use of fid";
pub const EPERM_CONV: &'static str = "wstat can't convert between files and directories";
pub const ENOTEMPTY2: &'static str = "directory is not empty";
pub const EEXIST2: &'static str = "file exists";
pub const EEXIST3: &'static str = "file already exists";
pub const EEXIST4: &'static str = "file or directory already exists";
pub const EBADF4: &'static str = "fid already in use";
pub const ETXTBSY2: &'static str = "file in use";
pub const EIO2: &'static str = "i/o error";
pub const ETXTBSY3: &'static str = "file already open for I/O";
pub const EINVAL2: &'static str = "illegal mode";
pub const ENAMETOOLONG2: &'static str = "illegal name";
pub const ENOTDIR2: &'static str = "not a directory";
pub const EPERM_GRP: &'static str = "not a member of proposed group";
pub const EACCES3: &'static str = "not owner";
pub const EACCES4: &'static str = "only owner can change group in wstat";
pub const EROFS2: &'static str = "read only file system";
pub const EPERM_SPFILE: &'static str = "no access to special file";
pub const EIO3: &'static str = "i/o count too large";
pub const EINVAL3: &'static str = "unknown group";
pub const EINVAL4: &'static str = "unknown user";
pub const EPROTO2: &'static str = "bogus wstat buffer";
pub const EAGAIN2: &'static str = "exclusive use file already open";
pub const EIO4: &'static str = "corrupted directory entry";
pub const EIO5: &'static str = "corrupted file entry";
pub const EIO6: &'static str = "corrupted block label";
pub const EIO7: &'static str = "corrupted meta data";
pub const EINVAL5: &'static str = "illegal offset";
pub const ENOENT_PATH: &'static str = "illegal path element";
pub const EIO8: &'static str = "root of file system is corrupted";
pub const EIO9: &'static str = "corrupted super block";
pub const EPROTO3: &'static str = "protocol botch";
pub const ENOSPC2: &'static str = "file system is full";
pub const EAGAIN3: &'static str = "file is in use";
pub const ENOENT_ALLOC: &'static str = "directory entry is not allocated";
pub const EROFS3: &'static str = "file is read only";
pub const EIDRM2: &'static str = "file has been removed";
pub const EPERM_TRUNCATE: &'static str = "only support truncation to zero length";
pub const EPERM_RMROOT: &'static str = "cannot remove root";
pub const EFBIG2: &'static str = "file too big";
pub const EIO10: &'static str = "venti i/o error";
} | random_line_split |
|
error.rs | //! 9P error representations.
//!
//! In 9P2000 errors are represented as strings.
//! All the error strings in this module are imported from include/net/9p/error.c of the Linux kernel.
//!
//! By contrast, in 9P2000.L, errors are represented as numbers (errno).
//! Using the Linux system errno numbers is the expected behaviour.
extern crate nix;
use error::errno::*;
use std::error as stderror;
use std::io::ErrorKind::*;
use std::{fmt, io};
fn errno_from_ioerror(e: &io::Error) -> nix::errno::Errno {
e.raw_os_error()
.map(nix::errno::from_i32)
.unwrap_or_else(|| match e.kind() {
NotFound => ENOENT,
PermissionDenied => EPERM,
ConnectionRefused => ECONNREFUSED,
ConnectionReset => ECONNRESET,
ConnectionAborted => ECONNABORTED,
NotConnected => ENOTCONN,
AddrInUse => EADDRINUSE,
AddrNotAvailable => EADDRNOTAVAIL,
BrokenPipe => EPIPE,
AlreadyExists => EALREADY,
WouldBlock => EAGAIN,
InvalidInput => EINVAL,
InvalidData => EINVAL,
TimedOut => ETIMEDOUT,
WriteZero => EAGAIN,
Interrupted => EINTR,
Other | _ => EIO,
})
}
/// 9P error type which is convertible to an errno.
///
/// The value of `Error::errno()` will be used for Rlerror.
///
/// # Protocol
/// 9P2000.L
#[derive(Debug)]
pub enum Error {
/// System error containing an errno.
No(nix::errno::Errno),
/// I/O error.
Io(io::Error),
}
impl Error {
/// Get an errno representation.
pub fn errno(&self) -> nix::errno::Errno {
match *self {
Error::No(ref e) => e.clone(),
Error::Io(ref e) => errno_from_ioerror(e),
}
}
}
impl fmt::Display for Error {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
match *self {
Error::No(ref e) => write!(f, "System error: {}", e.desc()),
Error::Io(ref e) => write!(f, "I/O error: {}", e),
}
}
}
impl stderror::Error for Error {
fn description(&self) -> &str {
match *self {
Error::No(ref e) => e.desc(),
Error::Io(ref e) => e.description(),
}
}
fn cause(&self) -> Option<&stderror::Error> {
match *self {
Error::No(_) => None,
Error::Io(ref e) => Some(e),
}
}
}
impl From<io::Error> for Error {
fn from(e: io::Error) -> Self {
Error::Io(e)
}
}
impl<'a> From<&'a io::Error> for Error {
fn from(e: &'a io::Error) -> Self {
Error::No(errno_from_ioerror(e))
}
}
impl From<nix::errno::Errno> for Error {
fn from(e: nix::errno::Errno) -> Self |
}
impl From<nix::Error> for Error {
fn from(e: nix::Error) -> Self {
Error::No(e.errno())
}
}
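/// A hedged usage sketch (illustrative; `_example_open` is not part of this
/// module): the `From` impls above are what let callers bubble io/nix
/// failures into this `Error` type with the `?` operator.
fn _example_open(path: &str) -> Result<std::fs::File, Error> {
    let f = std::fs::File::open(path)?; // io::Error -> Error::Io via From
    Ok(f)
}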
/// The system errno definitions.
///
/// # Protocol
/// 9P2000.L
pub mod errno {
extern crate nix;
pub use self::nix::errno::Errno::*;
}
/// 9P error strings imported from Linux.
///
/// # Protocol
/// 9P2000
pub mod string {
pub const EPERM: &'static str = "Operation not permitted";
pub const EPERM_WSTAT: &'static str = "wstat prohibited";
pub const ENOENT: &'static str = "No such file or directory";
pub const ENOENT_DIR: &'static str = "directory entry not found";
pub const ENOENT_FILE: &'static str = "file not found";
pub const EINTR: &'static str = "Interrupted system call";
pub const EIO: &'static str = "Input/output error";
pub const ENXIO: &'static str = "No such device or address";
pub const E2BIG: &'static str = "Argument list too long";
pub const EBADF: &'static str = "Bad file descriptor";
pub const EAGAIN: &'static str = "Resource temporarily unavailable";
pub const ENOMEM: &'static str = "Cannot allocate memory";
pub const EACCES: &'static str = "Permission denied";
pub const EFAULT: &'static str = "Bad address";
pub const ENOTBLK: &'static str = "Block device required";
pub const EBUSY: &'static str = "Device or resource busy";
pub const EEXIST: &'static str = "File exists";
pub const EXDEV: &'static str = "Invalid cross-device link";
pub const ENODEV: &'static str = "No such device";
pub const ENOTDIR: &'static str = "Not a directory";
pub const EISDIR: &'static str = "Is a directory";
pub const EINVAL: &'static str = "Invalid argument";
pub const ENFILE: &'static str = "Too many open files in system";
pub const EMFILE: &'static str = "Too many open files";
pub const ETXTBSY: &'static str = "Text file busy";
pub const EFBIG: &'static str = "File too large";
pub const ENOSPC: &'static str = "No space left on device";
pub const ESPIPE: &'static str = "Illegal seek";
pub const EROFS: &'static str = "Read-only file system";
pub const EMLINK: &'static str = "Too many links";
pub const EPIPE: &'static str = "Broken pipe";
pub const EDOM: &'static str = "Numerical argument out of domain";
pub const ERANGE: &'static str = "Numerical result out of range";
pub const EDEADLK: &'static str = "Resource deadlock avoided";
pub const ENAMETOOLONG: &'static str = "File name too long";
pub const ENOLCK: &'static str = "No locks available";
pub const ENOSYS: &'static str = "Function not implemented";
pub const ENOTEMPTY: &'static str = "Directory not empty";
pub const ELOOP: &'static str = "Too many levels of symbolic links";
pub const ENOMSG: &'static str = "No message of desired type";
pub const EIDRM: &'static str = "Identifier removed";
pub const ENODATA: &'static str = "No data available";
pub const ENONET: &'static str = "Machine is not on the network";
pub const ENOPKG: &'static str = "Package not installed";
pub const EREMOTE: &'static str = "Object is remote";
pub const ENOLINK: &'static str = "Link has been severed";
pub const ECOMM: &'static str = "Communication error on send";
pub const EPROTO: &'static str = "Protocol error";
pub const EBADMSG: &'static str = "Bad message";
pub const EBADFD: &'static str = "File descriptor in bad state";
pub const ESTRPIPE: &'static str = "Streams pipe error";
pub const EUSERS: &'static str = "Too many users";
pub const ENOTSOCK: &'static str = "Socket operation on non-socket";
pub const EMSGSIZE: &'static str = "Message too long";
pub const ENOPROTOOPT: &'static str = "Protocol not available";
pub const EPROTONOSUPPORT: &'static str = "Protocol not supported";
pub const ESOCKTNOSUPPORT: &'static str = "Socket type not supported";
pub const EOPNOTSUPP: &'static str = "Operation not supported";
pub const EPFNOSUPPORT: &'static str = "Protocol family not supported";
pub const ENETDOWN: &'static str = "Network is down";
pub const ENETUNREACH: &'static str = "Network is unreachable";
pub const ENETRESET: &'static str = "Network dropped connection on reset";
pub const ECONNABORTED: &'static str = "Software caused connection abort";
pub const ECONNRESET: &'static str = "Connection reset by peer";
pub const ENOBUFS: &'static str = "No buffer space available";
pub const EISCONN: &'static str = "Transport endpoint is already connected";
pub const ENOTCONN: &'static str = "Transport endpoint is not connected";
pub const ESHUTDOWN: &'static str = "Cannot send after transport endpoint shutdown";
pub const ETIMEDOUT: &'static str = "Connection timed out";
pub const ECONNREFUSED: &'static str = "Connection refused";
pub const EHOSTDOWN: &'static str = "Host is down";
pub const EHOSTUNREACH: &'static str = "No route to host";
pub const EALREADY: &'static str = "Operation already in progress";
pub const EINPROGRESS: &'static str = "Operation now in progress";
pub const EISNAM: &'static str = "Is a named type file";
pub const EREMOTEIO: &'static str = "Remote I/O error";
pub const EDQUOT: &'static str = "Disk quota exceeded";
pub const EBADF2: &'static str = "fid unknown or out of range";
pub const EACCES2: &'static str = "permission denied";
pub const ENOENT_FILE2: &'static str = "file does not exist";
pub const ECONNREFUSED2: &'static str = "authentication failed";
pub const ESPIPE2: &'static str = "bad offset in directory read";
pub const EBADF3: &'static str = "bad use of fid";
pub const EPERM_CONV: &'static str = "wstat can't convert between files and directories";
pub const ENOTEMPTY2: &'static str = "directory is not empty";
pub const EEXIST2: &'static str = "file exists";
pub const EEXIST3: &'static str = "file already exists";
pub const EEXIST4: &'static str = "file or directory already exists";
pub const EBADF4: &'static str = "fid already in use";
pub const ETXTBSY2: &'static str = "file in use";
pub const EIO2: &'static str = "i/o error";
pub const ETXTBSY3: &'static str = "file already open for I/O";
pub const EINVAL2: &'static str = "illegal mode";
pub const ENAMETOOLONG2: &'static str = "illegal name";
pub const ENOTDIR2: &'static str = "not a directory";
pub const EPERM_GRP: &'static str = "not a member of proposed group";
pub const EACCES3: &'static str = "not owner";
pub const EACCES4: &'static str = "only owner can change group in wstat";
pub const EROFS2: &'static str = "read only file system";
pub const EPERM_SPFILE: &'static str = "no access to special file";
pub const EIO3: &'static str = "i/o count too large";
pub const EINVAL3: &'static str = "unknown group";
pub const EINVAL4: &'static str = "unknown user";
pub const EPROTO2: &'static str = "bogus wstat buffer";
pub const EAGAIN2: &'static str = "exclusive use file already open";
pub const EIO4: &'static str = "corrupted directory entry";
pub const EIO5: &'static str = "corrupted file entry";
pub const EIO6: &'static str = "corrupted block label";
pub const EIO7: &'static str = "corrupted meta data";
pub const EINVAL5: &'static str = "illegal offset";
pub const ENOENT_PATH: &'static str = "illegal path element";
pub const EIO8: &'static str = "root of file system is corrupted";
pub const EIO9: &'static str = "corrupted super block";
pub const EPROTO3: &'static str = "protocol botch";
pub const ENOSPC2: &'static str = "file system is full";
pub const EAGAIN3: &'static str = "file is in use";
pub const ENOENT_ALLOC: &'static str = "directory entry is not allocated";
pub const EROFS3: &'static str = "file is read only";
pub const EIDRM2: &'static str = "file has been removed";
pub const EPERM_TRUNCATE: &'static str = "only support truncation to zero length";
pub const EPERM_RMROOT: &'static str = "cannot remove root";
pub const EFBIG2: &'static str = "file too big";
pub const EIO10: &'static str = "venti i/o error";
}
| {
Error::No(e)
} | identifier_body |
error.rs | //! 9P error representations.
//!
//! In 9P2000 errors are represented as strings.
//! All the error strings in this module are imported from include/net/9p/error.c of the Linux kernel.
//!
//! By contrast, in 9P2000.L, errors are represented as numbers (errno).
//! Using the Linux system errno numbers is the expected behaviour.
extern crate nix;
use error::errno::*;
use std::error as stderror;
use std::io::ErrorKind::*;
use std::{fmt, io};
fn | (e: &io::Error) -> nix::errno::Errno {
e.raw_os_error()
.map(nix::errno::from_i32)
.unwrap_or_else(|| match e.kind() {
NotFound => ENOENT,
PermissionDenied => EPERM,
ConnectionRefused => ECONNREFUSED,
ConnectionReset => ECONNRESET,
ConnectionAborted => ECONNABORTED,
NotConnected => ENOTCONN,
AddrInUse => EADDRINUSE,
AddrNotAvailable => EADDRNOTAVAIL,
BrokenPipe => EPIPE,
AlreadyExists => EALREADY,
WouldBlock => EAGAIN,
InvalidInput => EINVAL,
InvalidData => EINVAL,
TimedOut => ETIMEDOUT,
WriteZero => EAGAIN,
Interrupted => EINTR,
Other | _ => EIO,
})
}
/// 9P error type which is convertible to an errno.
///
/// The value of `Error::errno()` will be used for Rlerror.
///
/// # Protocol
/// 9P2000.L
#[derive(Debug)]
pub enum Error {
/// System error containing an errno.
No(nix::errno::Errno),
/// I/O error.
Io(io::Error),
}
impl Error {
/// Get an errno representation.
pub fn errno(&self) -> nix::errno::Errno {
match *self {
Error::No(ref e) => e.clone(),
Error::Io(ref e) => errno_from_ioerror(e),
}
}
}
impl fmt::Display for Error {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
match *self {
Error::No(ref e) => write!(f, "System error: {}", e.desc()),
Error::Io(ref e) => write!(f, "I/O error: {}", e),
}
}
}
impl stderror::Error for Error {
fn description(&self) -> &str {
match *self {
Error::No(ref e) => e.desc(),
Error::Io(ref e) => e.description(),
}
}
fn cause(&self) -> Option<&stderror::Error> {
match *self {
Error::No(_) => None,
Error::Io(ref e) => Some(e),
}
}
}
impl From<io::Error> for Error {
fn from(e: io::Error) -> Self {
Error::Io(e)
}
}
impl<'a> From<&'a io::Error> for Error {
fn from(e: &'a io::Error) -> Self {
Error::No(errno_from_ioerror(e))
}
}
impl From<nix::errno::Errno> for Error {
fn from(e: nix::errno::Errno) -> Self {
Error::No(e)
}
}
impl From<nix::Error> for Error {
fn from(e: nix::Error) -> Self {
Error::No(e.errno())
}
}
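// A hedged sketch (not in the original source): a 9P2000.L server would
// typically put `Error::errno()` on the wire for Rlerror. The nix `Errno`
// enum is a C-like enum, so it casts directly to the i32 errno code.
fn _errno_code_for_rlerror(err: &Error) -> i32 {
    err.errno() as i32
}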
/// The system errno definitions.
///
/// # Protocol
/// 9P2000.L
pub mod errno {
extern crate nix;
pub use self::nix::errno::Errno::*;
}
/// 9P error strings imported from Linux.
///
/// # Protocol
/// 9P2000
pub mod string {
pub const EPERM: &'static str = "Operation not permitted";
pub const EPERM_WSTAT: &'static str = "wstat prohibited";
pub const ENOENT: &'static str = "No such file or directory";
pub const ENOENT_DIR: &'static str = "directory entry not found";
pub const ENOENT_FILE: &'static str = "file not found";
pub const EINTR: &'static str = "Interrupted system call";
pub const EIO: &'static str = "Input/output error";
pub const ENXIO: &'static str = "No such device or address";
pub const E2BIG: &'static str = "Argument list too long";
pub const EBADF: &'static str = "Bad file descriptor";
pub const EAGAIN: &'static str = "Resource temporarily unavailable";
pub const ENOMEM: &'static str = "Cannot allocate memory";
pub const EACCES: &'static str = "Permission denied";
pub const EFAULT: &'static str = "Bad address";
pub const ENOTBLK: &'static str = "Block device required";
pub const EBUSY: &'static str = "Device or resource busy";
pub const EEXIST: &'static str = "File exists";
pub const EXDEV: &'static str = "Invalid cross-device link";
pub const ENODEV: &'static str = "No such device";
pub const ENOTDIR: &'static str = "Not a directory";
pub const EISDIR: &'static str = "Is a directory";
pub const EINVAL: &'static str = "Invalid argument";
pub const ENFILE: &'static str = "Too many open files in system";
pub const EMFILE: &'static str = "Too many open files";
pub const ETXTBSY: &'static str = "Text file busy";
pub const EFBIG: &'static str = "File too large";
pub const ENOSPC: &'static str = "No space left on device";
pub const ESPIPE: &'static str = "Illegal seek";
pub const EROFS: &'static str = "Read-only file system";
pub const EMLINK: &'static str = "Too many links";
pub const EPIPE: &'static str = "Broken pipe";
pub const EDOM: &'static str = "Numerical argument out of domain";
pub const ERANGE: &'static str = "Numerical result out of range";
pub const EDEADLK: &'static str = "Resource deadlock avoided";
pub const ENAMETOOLONG: &'static str = "File name too long";
pub const ENOLCK: &'static str = "No locks available";
pub const ENOSYS: &'static str = "Function not implemented";
pub const ENOTEMPTY: &'static str = "Directory not empty";
pub const ELOOP: &'static str = "Too many levels of symbolic links";
pub const ENOMSG: &'static str = "No message of desired type";
pub const EIDRM: &'static str = "Identifier removed";
pub const ENODATA: &'static str = "No data available";
pub const ENONET: &'static str = "Machine is not on the network";
pub const ENOPKG: &'static str = "Package not installed";
pub const EREMOTE: &'static str = "Object is remote";
pub const ENOLINK: &'static str = "Link has been severed";
pub const ECOMM: &'static str = "Communication error on send";
pub const EPROTO: &'static str = "Protocol error";
pub const EBADMSG: &'static str = "Bad message";
pub const EBADFD: &'static str = "File descriptor in bad state";
pub const ESTRPIPE: &'static str = "Streams pipe error";
pub const EUSERS: &'static str = "Too many users";
pub const ENOTSOCK: &'static str = "Socket operation on non-socket";
pub const EMSGSIZE: &'static str = "Message too long";
pub const ENOPROTOOPT: &'static str = "Protocol not available";
pub const EPROTONOSUPPORT: &'static str = "Protocol not supported";
pub const ESOCKTNOSUPPORT: &'static str = "Socket type not supported";
pub const EOPNOTSUPP: &'static str = "Operation not supported";
pub const EPFNOSUPPORT: &'static str = "Protocol family not supported";
pub const ENETDOWN: &'static str = "Network is down";
pub const ENETUNREACH: &'static str = "Network is unreachable";
pub const ENETRESET: &'static str = "Network dropped connection on reset";
pub const ECONNABORTED: &'static str = "Software caused connection abort";
pub const ECONNRESET: &'static str = "Connection reset by peer";
pub const ENOBUFS: &'static str = "No buffer space available";
pub const EISCONN: &'static str = "Transport endpoint is already connected";
pub const ENOTCONN: &'static str = "Transport endpoint is not connected";
pub const ESHUTDOWN: &'static str = "Cannot send after transport endpoint shutdown";
pub const ETIMEDOUT: &'static str = "Connection timed out";
pub const ECONNREFUSED: &'static str = "Connection refused";
pub const EHOSTDOWN: &'static str = "Host is down";
pub const EHOSTUNREACH: &'static str = "No route to host";
pub const EALREADY: &'static str = "Operation already in progress";
pub const EINPROGRESS: &'static str = "Operation now in progress";
pub const EISNAM: &'static str = "Is a named type file";
pub const EREMOTEIO: &'static str = "Remote I/O error";
pub const EDQUOT: &'static str = "Disk quota exceeded";
pub const EBADF2: &'static str = "fid unknown or out of range";
pub const EACCES2: &'static str = "permission denied";
pub const ENOENT_FILE2: &'static str = "file does not exist";
pub const ECONNREFUSED2: &'static str = "authentication failed";
pub const ESPIPE2: &'static str = "bad offset in directory read";
pub const EBADF3: &'static str = "bad use of fid";
pub const EPERM_CONV: &'static str = "wstat can't convert between files and directories";
pub const ENOTEMPTY2: &'static str = "directory is not empty";
pub const EEXIST2: &'static str = "file exists";
pub const EEXIST3: &'static str = "file already exists";
pub const EEXIST4: &'static str = "file or directory already exists";
pub const EBADF4: &'static str = "fid already in use";
pub const ETXTBSY2: &'static str = "file in use";
pub const EIO2: &'static str = "i/o error";
pub const ETXTBSY3: &'static str = "file already open for I/O";
pub const EINVAL2: &'static str = "illegal mode";
pub const ENAMETOOLONG2: &'static str = "illegal name";
pub const ENOTDIR2: &'static str = "not a directory";
pub const EPERM_GRP: &'static str = "not a member of proposed group";
pub const EACCES3: &'static str = "not owner";
pub const EACCES4: &'static str = "only owner can change group in wstat";
pub const EROFS2: &'static str = "read only file system";
pub const EPERM_SPFILE: &'static str = "no access to special file";
pub const EIO3: &'static str = "i/o count too large";
pub const EINVAL3: &'static str = "unknown group";
pub const EINVAL4: &'static str = "unknown user";
pub const EPROTO2: &'static str = "bogus wstat buffer";
pub const EAGAIN2: &'static str = "exclusive use file already open";
pub const EIO4: &'static str = "corrupted directory entry";
pub const EIO5: &'static str = "corrupted file entry";
pub const EIO6: &'static str = "corrupted block label";
pub const EIO7: &'static str = "corrupted meta data";
pub const EINVAL5: &'static str = "illegal offset";
pub const ENOENT_PATH: &'static str = "illegal path element";
pub const EIO8: &'static str = "root of file system is corrupted";
pub const EIO9: &'static str = "corrupted super block";
pub const EPROTO3: &'static str = "protocol botch";
pub const ENOSPC2: &'static str = "file system is full";
pub const EAGAIN3: &'static str = "file is in use";
pub const ENOENT_ALLOC: &'static str = "directory entry is not allocated";
pub const EROFS3: &'static str = "file is read only";
pub const EIDRM2: &'static str = "file has been removed";
pub const EPERM_TRUNCATE: &'static str = "only support truncation to zero length";
pub const EPERM_RMROOT: &'static str = "cannot remove root";
pub const EFBIG2: &'static str = "file too big";
pub const EIO10: &'static str = "venti i/o error";
}
| errno_from_ioerror | identifier_name |
test.container.ts | import {Component, OnInit} from "@angular/core";
import {select, Store} from "@ngrx/store";
import { Observable } from "rxjs";
import { SubTest, defaultSubTest, subTests, Test } from "../../../core/domain/tests.model";
import * as fromState from "../../../core/state";
import * as TestAction from "../../../core/state/tests/tests.action";
import { fileType, recType } from './test.component';
import * as recordRTC from 'recordrtc';
import { HORIZONTAL_TRANSCRIPT, VERTICAL_A_TRANSCRIPT, VERTICAL_B_TRANSCRIPT, Distance, lev_dem_distance, cleanString } from '../../../util/lev-dem.util';
import { IoService } from 'src/app/core/service/io.service';
import { FileService } from "src/app/core/service/file.service";
import { SubTestsService } from "src/app/core/service/sub-tests.service";
import { TestService } from 'src/app/core/service/test.service';
@Component({
selector: "app-test-container",
template: `
<app-test-component
[subTests$]="subTests$"
[currentLangCode] = "currentLangCode"
[currentText] = "final_transcript"
[triggerChange] = "triggerChange"
[totalScore] = "totalScore"
[overall_comment] = "overall_comment"
(recognize) = toggleRecording($event)
(file_context) = recognizeRecording($event)
(calculate_total) = calculateTotal($event)>
</app-test-component>
<app-loader *ngIf = "isLoading" [message] = "message"></app-loader>
`
})
export class TestContainer implements OnInit {
/**
* The sub-tests of the currently selected test.
*/
public subTests$: SubTest[];
public final_transcript: string = '';
private transcript_array: any = {};
private current_seq: number = 0;
public interim_transcript: string = '';
public triggerChange: number = 0;
private recognizing: boolean = false;
private ignore_onend: boolean;
private start_timestamp: Date;
private end_timestamp: Date;
private duration: number;
private totalScore: number;
private overall_comment: string;
private recognition;
private currentTest: number;
private currentTranscriptTemplate: string;
/** Recording data */
private isRecording:boolean = false;
public recordAudio: any;
currentPatientId: number;
currentTestId: number;
currentTestObj: Test;
currentLangCode: string;
/** Distance function */
public distanceFnc = lev_dem_distance;
public cleanString = cleanString;
public distance: Distance;
public isLoading: boolean = false;
public message: string = "";
/**
* Constructor.
*/
public constructor(private store$: Store<any>,
private ioService:IoService,
private fileService: FileService,
private testService: TestService,
private subTestsService: SubTestsService
) {
}
/**
* Initialize the component.
*/
public ngOnInit() {
this.store$.pipe(select(fromState.selectCurrentPatient)).subscribe(
(patient) => {
if (patient) {
this.currentPatientId = patient.patient_id;
this.currentLangCode = patient.lang_code;
}
}
);
this.store$.pipe(select(fromState.selectCurrentTest)).subscribe(
(test) => {
//Selecting current test Id
if (test) {
this.currentTestObj = test;
this.currentTestId = test.test_id;
this.totalScore = test.score;
this.overall_comment = test.comments;
this.subTestsService.getSubTests(this.currentTestId).subscribe(
(data) => {
console.log('Got the following subtests:', data);
if (data && data.length > 0){
this.subTests$ = data;
//this.subTests$ = [{...defaultSubTest}, {...defaultSubTest}, {...defaultSubTest}];
} else {
console.log('Filling the default data');
let subTest: SubTest = {...defaultSubTest};
subTest.test_id = this.currentTestId;
this.subTests$ = [{...subTest}, {...subTest}, {...subTest}];
}
}
);
}
}
);
//this.initializeRecognitionSettings();
this.ioService.receiveStream('transcript', this, function(transcript, scope) {
console.log(transcript, scope);
let i:number = 0;
let ind: number = 1;
/** Clean the transcript string: apply substitutions for known words, then strip all remaining non-digit chars */
if (!!transcript.transcript) {
transcript.transcript = scope.cleanString(transcript.transcript);
}
console.log('Received: ', !transcript.transcript && !scope.transcript_array[transcript.seq_num_prev], transcript.transcript);
/** Update the transcript array object (holds all chunks received so far), then try to construct the interim transcript */
if ((!transcript.transcript && !scope.transcript_array[transcript.seq_num_prev]) || transcript.transcript) {
scope.transcript_array[transcript.seq_num_prev] = transcript.transcript ? transcript.transcript : '';
console.log('Update transcript array: ', transcript.seq_num_prev, scope.transcript_array[transcript.seq_num_prev]);
/** special treatment for the first chunk */
if (transcript.seq_num_prev == 0){
scope.interim_transcript = scope.transcript_array[transcript.seq_num_prev];
scope.current_seq++;
}
console.log('Set current sequence: ', scope.current_seq);
console.log('Current sequence arr value: ', scope.transcript_array[scope.current_seq]);
/** Combine the current transcript string: take the maximal contiguous sequence of chunk transcripts and join them together */
while ( scope.transcript_array[scope.current_seq] || (scope.transcript_array[scope.current_seq] == '')) {
if ( scope.transcript_array[scope.current_seq] != '' ) {
i = 0;
while ( i >= 0 && (scope.transcript_array[scope.current_seq].length + 1) >= ind
&& ((i == 0) || (i >= (scope.transcript_array[scope.current_seq-1].length - scope.transcript_array[scope.current_seq].length)))) {
i = scope.transcript_array[scope.current_seq - 1].indexOf(scope.transcript_array[scope.current_seq].substring(0, ind++));
}
scope.interim_transcript = scope.interim_transcript + scope.transcript_array[scope.current_seq].substring(ind-2, scope.transcript_array[scope.current_seq].length+1);
}
scope.current_seq++;
}
console.log(scope.currentTranscriptTemplate.substr(0, scope.interim_transcript.length), scope.interim_transcript);
scope.distance = scope.distanceFnc(scope.currentTranscriptTemplate.substr(0, scope.interim_transcript.length), scope.interim_transcript);
scope.updateCurrentTest();
}
});
}
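/**
 * A minimal sketch (hypothetical helper, not in the original source): the
 * overlap merge performed inline in the receiveStream handler above,
 * extracted as a pure method. It drops the longest prefix of `next` that
 * already appears at the tail of `prev` before appending, so overlapping
 * chunk transcripts are not duplicated in the accumulated text.
 */
private mergeChunk(acc: string, prev: string, next: string): string {
  let overlap = Math.min(prev.length, next.length);
  while (overlap > 0 && !prev.endsWith(next.substring(0, overlap))) {
    overlap--;
  }
  return acc + next.substring(overlap);
}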
updateCurrentTest(){
let me = this;
let current:SubTest = me.subTests$[me.currentTest];
current.sub_test_type_id = me.currentTest;
if (me.duration) {
current.time = me.duration * 1000;
me.duration = null;
} else {
current.time = (+(me.end_timestamp ? me.end_timestamp : new Date()) - +me.start_timestamp);
}
current.audio_text = me.distance.text;
current.additions = me.distance.additions;
current.deletions = me.distance.deletions;
current.substitutions = me.distance.substitutions;
current.transpositions = me.distance.transpositions;
current.mistakes_list = me.distance.ops;
me.triggerChange++;
}
/** Use RecordRTC to stream the speech to the server and get the transcribed string */
onStart() {
let me = this;
me.isRecording = true;
me.ioService.setDefaultLanguage(me.currentLangCode);
/**
* Initialize the current subtest recording
*/
console.log('Setting language to: ', this.currentLangCode);
me.final_transcript = '';
this.interim_transcript = '';
let stream_seq: number = 0;
navigator.mediaDevices.getUserMedia({
audio: true
}).then(function(stream: MediaStream) {
me.recordAudio = recordRTC(stream, {
type: 'audio',
mimeType: 'audio/webm',
sampleRate: 44100,
recorderType: recordRTC.StereoAudioRecorder,
numberOfAudioChannels: 1,
timeSlice: 1000,
desiredSampRate: 16000,
async ondataavailable(blob) {
let file_name = me.currentTestId + '_' + me.currentTest;
console.log('File name: ', file_name);
me.ioService.sendBinaryStream(blob, file_name, (stream_seq++));
if (!me.isRecording) {
me.stopRecording();
}
}
});
me.recordAudio.startRecording();
}).catch(function(error) {
console.error(JSON.stringify(error));
});
}
onStop() {
// recording stopped
this.isRecording = false;
// stop audio recorder
}
| (){
this.recordAudio.stopRecording();
this.transcript_array = {};
this.current_seq = 0;
this.final_transcript = this.interim_transcript;
let me = this;
let file_name:string = me.currentTestId + '_' + me.currentTest;
this.isLoading = true;
/** Combine all chunks into a single file and transcribe it */
this.fileService.combineAudio( me.currentLangCode, file_name).subscribe(
transcript => {
console.log('Transcript: ', transcript['transcript']);
console.log('Duration: ', transcript['duration']);
this.subTests$[this.currentTest].audio_text = me.currentTranscriptTemplate;
me.distance = me.distanceFnc(me.currentTranscriptTemplate, transcript['transcript']);
me.duration = transcript['duration'];
me.updateCurrentTest();
this.isLoading = false;
}
);
}
public toggleRecording(event: recType){
if (this.isRecording) {
this.onStop();
return;
} else {
console.log('Setting language to: ', this.currentLangCode);
this.final_transcript = '';
this.onStart();
this.ignore_onend = false;
this.start_timestamp = new Date();
this.currentTest = event.selectedTest;
console.log('Setting template by : ', this.currentTest);
switch (this.currentTest) {
case 0:
this.currentTranscriptTemplate = VERTICAL_A_TRANSCRIPT;
break;
case 1:
this.currentTranscriptTemplate = VERTICAL_B_TRANSCRIPT;
break;
default:
this.currentTranscriptTemplate = HORIZONTAL_TRANSCRIPT;
break;
}
console.log('Setting template to : ', this.currentTranscriptTemplate);
}
}
public recognizeRecording(event: fileType){
let me = this;
let file_name:string = me.currentTestId + '_' + event.subTest;
me.currentTest = event.subTest;
this.isLoading = true;
this.message = 'Transcribing, please be patient!';
switch (event.subTest) {
case 0:
me.currentTranscriptTemplate = VERTICAL_A_TRANSCRIPT;
break;
case 1:
me.currentTranscriptTemplate = VERTICAL_B_TRANSCRIPT;
break;
default:
me.currentTranscriptTemplate = HORIZONTAL_TRANSCRIPT;
break;
}
this.subTests$[this.currentTest].audio = file_name;
this.fileService.uploadAudio(event.file, me.currentLangCode, file_name).subscribe(
transcript => {
console.log('Transcript: ', transcript['transcript']);
console.log('Duration: ', transcript['duration']);
this.subTests$[this.currentTest].audio_text = me.currentTranscriptTemplate;
me.distance = me.distanceFnc(me.currentTranscriptTemplate, transcript['transcript']);
me.duration = transcript['duration'];
this.isLoading = false;
me.updateCurrentTest();
}
);
}
public calculateTotal(event: string){
this.isLoading = true;
this.message = 'Saving tests data, please wait!';
if (event){
this.totalScore = this.subTests$[2].time * (80/(80 - this.subTests$[2].deletions + this.subTests$[2].additions));
/**
* Update all sub tests data once recognition is finished
*/
this.subTestsService.updateSubTests(this.subTests$).subscribe(data => {
/**
* Update total of the test
*/
this.currentTestObj.score = this.totalScore;
console.log(this.overall_comment);
this.currentTestObj.comments = event;
this.testService.update(this.currentTestObj).subscribe(data => this.isLoading = false);
});
}
}
}
| stopRecording | identifier_name |
test.container.ts | import {Component, OnInit} from "@angular/core";
import {select, Store} from "@ngrx/store";
import { Observable } from "rxjs";
import { SubTest, defaultSubTest, subTests, Test } from "../../../core/domain/tests.model";
import * as fromState from "../../../core/state";
import * as TestAction from "../../../core/state/tests/tests.action";
import { fileType, recType } from './test.component';
import * as recordRTC from 'recordrtc';
import { HORIZONTAL_TRANSCRIPT, VERTICAL_A_TRANSCRIPT, VERTICAL_B_TRANSCRIPT, Distance, lev_dem_distance, cleanString } from '../../../util/lev-dem.util';
import { IoService } from 'src/app/core/service/io.service';
import { FileService } from "src/app/core/service/file.service";
import { SubTestsService } from "src/app/core/service/sub-tests.service";
import { TestService } from 'src/app/core/service/test.service';
@Component({
selector: "app-test-container",
template: `
<app-test-component
[subTests$]="subTests$"
[currentLangCode] = "currentLangCode"
[currentText] = "final_transcript"
[triggerChange] = "triggerChange"
[totalScore] = "totalScore"
[overall_comment] = "overall_comment"
(recognize) = toggleRecording($event)
(file_context) = recognizeRecording($event)
(calculate_total) = calculateTotal($event)>
</app-test-component>
<app-loader *ngIf = "isLoading" [message] = "message"></app-loader>
`
})
export class TestContainer implements OnInit {
/**
* The sub-tests of the currently selected test.
*/
public subTests$: SubTest[];
public final_transcript: string = '';
private transcript_array: any = {};
private current_seq: number = 0;
public interim_transcript: string = '';
public triggerChange: number = 0;
private recognizing: boolean = false;
private ignore_onend: boolean;
private start_timestamp: Date;
private end_timestamp: Date;
private duration: number;
private totalScore: number;
private overall_comment: string;
private recognition;
private currentTest: number;
private currentTranscriptTemplate: string;
/** Recording data */
private isRecording:boolean = false;
public recordAudio: any;
currentPatientId: number;
currentTestId: number;
currentTestObj: Test;
currentLangCode: string;
/** Distance function */
public distanceFnc = lev_dem_distance;
public cleanString = cleanString;
public distance: Distance;
public isLoading: boolean = false;
public message: string = "";
/**
* Constructor.
*/
public constructor(private store$: Store<any>,
private ioService:IoService,
private fileService: FileService,
private testService: TestService,
private subTestsService: SubTestsService
) {
}
/**
* Initialize the component.
*/
public ngOnInit() {
this.store$.pipe(select(fromState.selectCurrentPatient)).subscribe(
(patient) => {
if (patient) {
this.currentPatientId = patient.patient_id;
this.currentLangCode = patient.lang_code;
}
}
);
this.store$.pipe(select(fromState.selectCurrentTest)).subscribe(
(test) => {
//Selecting current test Id
if (test) {
this.currentTestObj = test;
this.currentTestId = test.test_id;
this.totalScore = test.score;
this.overall_comment = test.comments;
this.subTestsService.getSubTests(this.currentTestId).subscribe(
(data) => {
console.log('Got the following subtests:', data);
if (data && data.length > 0){
this.subTests$ = data;
//this.subTests$ = [{...defaultSubTest}, {...defaultSubTest}, {...defaultSubTest}];
} else {
console.log('Filling the default data');
let subTest: SubTest = {...defaultSubTest};
subTest.test_id = this.currentTestId;
this.subTests$ = [{...subTest}, {...subTest}, {...subTest}];
}
}
);
}
}
);
//this.initializeRecognitionSettings();
this.ioService.receiveStream('transcript', this, function(transcript, scope) {
console.log(transcript, scope);
let i:number = 0;
let ind: number = 1;
/** Clean the transcript string: apply substitutions for known words, then strip all remaining non-digit chars */
if (!!transcript.transcript) {
transcript.transcript = scope.cleanString(transcript.transcript);
}
console.log('Received: ', !transcript.transcript && !scope.transcript_array[transcript.seq_num_prev], transcript.transcript);
/** Update the transcript array object (holds all chunks received so far), then try to construct the interim transcript */
if ((!transcript.transcript && !scope.transcript_array[transcript.seq_num_prev]) || transcript.transcript) {
scope.transcript_array[transcript.seq_num_prev] = transcript.transcript ? transcript.transcript : '';
console.log('Update transcript array: ', transcript.seq_num_prev, scope.transcript_array[transcript.seq_num_prev]);
/** special treatment for the first chunk */
if (transcript.seq_num_prev == 0){
scope.interim_transcript = scope.transcript_array[transcript.seq_num_prev];
scope.current_seq++;
}
console.log('Set current sequence: ', scope.current_seq);
console.log('Current sequence arr value: ', scope.transcript_array[scope.current_seq]);
/** Combine the current transcript string: take the maximal contiguous sequence of chunk transcripts and join them together */
while ( scope.transcript_array[scope.current_seq] || (scope.transcript_array[scope.current_seq] == '')) {
if ( scope.transcript_array[scope.current_seq] != '' ) {
i = 0;
while ( i >= 0 && (scope.transcript_array[scope.current_seq].length + 1) >= ind
&& ((i == 0) || (i >= (scope.transcript_array[scope.current_seq-1].length - scope.transcript_array[scope.current_seq].length)))) {
i = scope.transcript_array[scope.current_seq - 1].indexOf(scope.transcript_array[scope.current_seq].substring(0, ind++));
}
scope.interim_transcript = scope.interim_transcript + scope.transcript_array[scope.current_seq].substring(ind-2, scope.transcript_array[scope.current_seq].length+1);
}
scope.current_seq++;
}
console.log(scope.currentTranscriptTemplate.substr(0, scope.interim_transcript.length), scope.interim_transcript);
scope.distance = scope.distanceFnc(scope.currentTranscriptTemplate.substr(0, scope.interim_transcript.length), scope.interim_transcript);
scope.updateCurrentTest();
}
});
}
updateCurrentTest(){
let me = this;
let current:SubTest = me.subTests$[me.currentTest];
current.sub_test_type_id = me.currentTest;
if (me.duration) {
current.time = me.duration * 1000;
me.duration = null;
} else {
current.time = (+(me.end_timestamp ? me.end_timestamp : new Date()) - +me.start_timestamp);
}
current.audio_text = me.distance.text;
current.additions = me.distance.additions;
current.deletions = me.distance.deletions;
current.substitutions = me.distance.substitutions;
current.transpositions = me.distance.transpositions;
current.mistakes_list = me.distance.ops;
me.triggerChange++;
}
/** Use RecordRTC to stream the speech to the server and get the transcribed string */
onStart() {
let me = this;
me.isRecording = true;
me.ioService.setDefaultLanguage(me.currentLangCode);
/**
* Initialize the current subtest recording
*/
console.log('Setting language to: ', this.currentLangCode);
me.final_transcript = '';
this.interim_transcript = '';
let stream_seq: number = 0;
navigator.mediaDevices.getUserMedia({
audio: true
}).then(function(stream: MediaStream) {
me.recordAudio = recordRTC(stream, {
type: 'audio',
mimeType: 'audio/webm',
sampleRate: 44100,
recorderType: recordRTC.StereoAudioRecorder,
numberOfAudioChannels: 1,
timeSlice: 1000,
desiredSampRate: 16000,
async ondataavailable(blob) {
let file_name = me.currentTestId + '_' + me.currentTest;
console.log('File name: ', file_name);
me.ioService.sendBinaryStream(blob, file_name, (stream_seq++));
if (!me.isRecording) {
me.stopRecording();
}
}
});
me.recordAudio.startRecording();
}).catch(function(error) {
console.error(JSON.stringify(error));
});
}
onStop() {
// recording stopped
this.isRecording = false;
// stop audio recorder
}
stopRecording(){
this.recordAudio.stopRecording();
this.transcript_array = {};
this.current_seq = 0;
this.final_transcript = this.interim_transcript;
let me = this;
let file_name:string = me.currentTestId + '_' + me.currentTest;
this.isLoading = true;
/** Combine all chunks into a single file and transcribe it */
this.fileService.combineAudio( me.currentLangCode, file_name).subscribe(
transcript => {
console.log('Transcript: ', transcript['transcript']);
console.log('Duration: ', transcript['duration']);
this.subTests$[this.currentTest].audio_text = me.currentTranscriptTemplate;
me.distance = me.distanceFnc(me.currentTranscriptTemplate, transcript['transcript']);
me.duration = transcript['duration'];
me.updateCurrentTest();
this.isLoading = false;
}
);
}
public toggleRecording(event: recType){
if (this.isRecording) {
this.onStop();
return;
} else {
console.log('Setting language to: ', this.currentLangCode);
this.final_transcript = '';
this.onStart();
this.ignore_onend = false;
this.start_timestamp = new Date();
this.currentTest = event.selectedTest;
console.log('Setting template by : ', this.currentTest);
switch (this.currentTest) {
case 0:
this.currentTranscriptTemplate = VERTICAL_A_TRANSCRIPT;
break;
case 1:
this.currentTranscriptTemplate = VERTICAL_B_TRANSCRIPT;
break;
default:
this.currentTranscriptTemplate = HORIZONTAL_TRANSCRIPT;
break;
}
console.log('Setting template to : ', this.currentTranscriptTemplate);
}
}
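/**
 * A hedged alternative sketch (not in the original source): the template
 * switch above expressed as a lookup table, with the horizontal template as
 * the fallback for sub-test index 2 and any unexpected value. Uses only the
 * transcript constants already imported at the top of this file.
 */
private templateFor(subTest: number): string {
  const templates: { [index: number]: string } = {
    0: VERTICAL_A_TRANSCRIPT,
    1: VERTICAL_B_TRANSCRIPT,
  };
  return templates[subTest] || HORIZONTAL_TRANSCRIPT;
}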
public recognizeRecording(event: fileType){
let me = this;
let file_name:string = me.currentTestId + '_' + event.subTest;
me.currentTest = event.subTest;
this.isLoading = true;
this.message = 'Transcribing, please be patient!';
switch (event.subTest) {
case 0:
me.currentTranscriptTemplate = VERTICAL_A_TRANSCRIPT;
break;
case 1:
me.currentTranscriptTemplate = VERTICAL_B_TRANSCRIPT;
break;
default:
me.currentTranscriptTemplate = HORIZONTAL_TRANSCRIPT;
break;
}
this.subTests$[this.currentTest].audio = file_name;
this.fileService.uploadAudio(event.file, me.currentLangCode, file_name).subscribe(
transcript => {
console.log('Transcript: ', transcript['transcript']);
console.log('Duration: ', transcript['duration']);
this.subTests$[this.currentTest].audio_text = me.currentTranscriptTemplate;
me.distance = me.distanceFnc(me.currentTranscriptTemplate, transcript['transcript']);
me.duration = transcript['duration'];
this.isLoading = false;
me.updateCurrentTest();
}
);
} | this.isLoading = true;
this.message = 'Saving tests data, please wait!';
if (event){
this.totalScore = this.subTests$[2].time * (80/(80 - this.subTests$[2].deletions + this.subTests$[2].additions));
/**
* Update all sub tests data once recognition is finished
*/
this.subTestsService.updateSubTests(this.subTests$).subscribe(data => {
/**
* Update total of the test
*/
this.currentTestObj.score = this.totalScore;
console.log(this.overall_comment);
this.currentTestObj.comments = event;
this.testService.update(this.currentTestObj).subscribe(data => this.isLoading = false);
});
}
}
} |
public calculateTotal(event: string){ | random_line_split |
test.container.ts | import {Component, OnInit} from "@angular/core";
import {select, Store} from "@ngrx/store";
import { Observable } from "rxjs";
import { SubTest, defaultSubTest, subTests, Test } from "../../../core/domain/tests.model";
import * as fromState from "../../../core/state";
import * as TestAction from "../../../core/state/tests/tests.action";
import { fileType, recType } from './test.component';
import * as recordRTC from 'recordrtc';
import { HORIZONTAL_TRANSCRIPT, VERTICAL_A_TRANSCRIPT, VERTICAL_B_TRANSCRIPT, Distance, lev_dem_distance, cleanString } from '../../../util/lev-dem.util';
import { IoService } from 'src/app/core/service/io.service';
import { FileService } from "src/app/core/service/file.service";
import { SubTestsService } from "src/app/core/service/sub-tests.service";
import { TestService } from 'src/app/core/service/test.service';
@Component({
selector: "app-test-container",
template: `
<app-test-component
[subTests$]="subTests$"
[currentLangCode] = "currentLangCode"
[currentText] = "final_transcript"
[triggerChange] = "triggerChange"
[totalScore] = "totalScore"
[overall_comment] = "overall_comment"
(recognize) = toggleRecording($event)
(file_context) = recognizeRecording($event)
(calculate_total) = calculateTotal($event)>
</app-test-component>
<app-loader *ngIf = "isLoading" [message] = "message"></app-loader>
`
})
export class TestContainer implements OnInit {
/**
* The sub-tests of the currently selected test.
*/
public subTests$: SubTest[];
public final_transcript: string = '';
private transcript_array: any = {};
private current_seq: number = 0;
public interim_transcript: string = '';
public triggerChange: number = 0;
private recognizing: boolean = false;
private ignore_onend: boolean;
private start_timestamp: Date;
private end_timestamp: Date;
private duration: number;
private totalScore: number;
private overall_comment: string;
private recognition;
private currentTest: number;
private currentTranscriptTemplate: string;
/** Recording data */
private isRecording:boolean = false;
public recordAudio: any;
currentPatientId: number;
currentTestId: number;
currentTestObj: Test;
currentLangCode: string;
/** Distance function */
public distanceFnc = lev_dem_distance;
public cleanString = cleanString;
public distance: Distance;
public isLoading: boolean = false;
public message: string = "";
/**
* Constructor.
*/
public constructor(private store$: Store<any>,
private ioService:IoService,
private fileService: FileService,
private testService: TestService,
private subTestsService: SubTestsService
) {
}
/**
* Initialize the component.
*/
public ngOnInit() {
this.store$.pipe(select(fromState.selectCurrentPatient)).subscribe(
(patient) => {
if (patient) {
this.currentPatientId = patient.patient_id;
this.currentLangCode = patient.lang_code;
}
}
);
this.store$.pipe(select(fromState.selectCurrentTest)).subscribe(
(test) => {
//Selecting current test Id
if (test) {
this.currentTestObj = test;
this.currentTestId = test.test_id;
this.totalScore = test.score;
this.overall_comment = test.comments;
this.subTestsService.getSubTests(this.currentTestId).subscribe(
(data) => {
console.log('Got the following subtests:', data);
if (data && data.length > 0){
this.subTests$ = data;
//this.subTests$ = [{...defaultSubTest}, {...defaultSubTest}, {...defaultSubTest}];
} else {
console.log('Filling the default data');
let subTest: SubTest = {...defaultSubTest};
subTest.test_id = this.currentTestId;
this.subTests$ = [{...subTest}, {...subTest}, {...subTest}];
}
}
);
}
}
);
//this.initializeRecognitionSettings();
this.ioService.receiveStream('transcript', this, function(transcript, scope) {
console.log(transcript, scope);
let i:number = 0;
let ind: number = 1;
/** Clean the transcript string: apply substitutions for known words, then strip all remaining non-digit chars */
if (!!transcript.transcript) {
transcript.transcript = scope.cleanString(transcript.transcript);
}
console.log('Received: ', !transcript.transcript && !scope.transcript_array[transcript.seq_num_prev], transcript.transcript);
/** Update the transcript array object (holds all chunks received so far), then try to construct the interim transcript */
if ((!transcript.transcript && !scope.transcript_array[transcript.seq_num_prev]) || transcript.transcript) {
scope.transcript_array[transcript.seq_num_prev] = transcript.transcript ? transcript.transcript : '';
console.log('Update transcript array: ', transcript.seq_num_prev, scope.transcript_array[transcript.seq_num_prev]);
/** special treatment for the first chunk */
if (transcript.seq_num_prev == 0){
scope.interim_transcript = scope.transcript_array[transcript.seq_num_prev];
scope.current_seq++;
}
console.log('Set current sequence: ', scope.current_seq);
console.log('Current sequence arr value: ', scope.transcript_array[scope.current_seq]);
/** Combine the current transcript string: take the maximal contiguous sequence of chunk transcripts and join them together */
while ( scope.transcript_array[scope.current_seq] || (scope.transcript_array[scope.current_seq] == '')) {
if ( scope.transcript_array[scope.current_seq] != '' ) {
i = 0;
while ( i >= 0 && (scope.transcript_array[scope.current_seq].length + 1) >= ind
&& ((i == 0) || (i >= (scope.transcript_array[scope.current_seq-1].length - scope.transcript_array[scope.current_seq].length)))) {
i = scope.transcript_array[scope.current_seq - 1].indexOf(scope.transcript_array[scope.current_seq].substring(0, ind++));
}
scope.interim_transcript = scope.interim_transcript + scope.transcript_array[scope.current_seq].substring(ind-2, scope.transcript_array[scope.current_seq].length+1);
}
scope.current_seq++;
}
console.log(scope.currentTranscriptTemplate.substr(0, scope.interim_transcript.length), scope.interim_transcript);
scope.distance = scope.distanceFnc(scope.currentTranscriptTemplate.substr(0, scope.interim_transcript.length), scope.interim_transcript);
scope.updateCurrentTest();
}
});
}
updateCurrentTest(){
let me = this;
let current:SubTest = me.subTests$[me.currentTest];
current.sub_test_type_id = me.currentTest;
if (me.duration) {
current.time = me.duration * 1000;
me.duration = null;
} else {
current.time = (+(me.end_timestamp ? me.end_timestamp : new Date()) - +me.start_timestamp);
}
current.audio_text = me.distance.text;
current.additions = me.distance.additions;
current.deletions = me.distance.deletions;
current.substitutions = me.distance.substitutions;
current.transpositions = me.distance.transpositions;
current.mistakes_list = me.distance.ops;
me.triggerChange++;
}
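/**
 * A minimal sketch (hypothetical helper, not in the original source): the
 * duration fallback used in updateCurrentTest above, extracted. It prefers
 * the transcription service's reported duration (seconds converted to ms)
 * and falls back to the wall-clock delta between the recording timestamps.
 */
private elapsedMs(durationSec: number, start: Date, end?: Date): number {
  if (durationSec) {
    return durationSec * 1000;
  }
  return +(end ? end : new Date()) - +start;
}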
/** Use RecordRTC to stream the speech to the server and get the transcribed string */
onStart() {
let me = this;
me.isRecording = true;
me.ioService.setDefaultLanguage(me.currentLangCode);
/**
* Initialize the current subtest recording
*/
console.log('Setting language to: ', this.currentLangCode);
me.final_transcript = '';
this.interim_transcript = '';
let stream_seq: number = 0;
navigator.mediaDevices.getUserMedia({
audio: true
}).then(function(stream: MediaStream) {
me.recordAudio = recordRTC(stream, {
type: 'audio',
mimeType: 'audio/webm',
sampleRate: 44100,
recorderType: recordRTC.StereoAudioRecorder,
numberOfAudioChannels: 1,
timeSlice: 1000,
desiredSampRate: 16000,
async ondataavailable(blob) {
let file_name = me.currentTestId + '_' + me.currentTest;
console.log('File name: ', file_name);
me.ioService.sendBinaryStream(blob, file_name, (stream_seq++));
if (!me.isRecording) {
me.stopRecording();
}
}
});
me.recordAudio.startRecording();
}).catch(function(error) {
console.error(JSON.stringify(error));
});
}
onStop() {
// mark recording as stopped; the recorder itself is stopped from
// ondataavailable once the last chunk has been flushed (see stopRecording)
this.isRecording = false;
}
stopRecording(){
this.recordAudio.stopRecording();
this.transcript_array = {};
this.current_seq = 0;
this.final_transcript = this.interim_transcript;
let me = this;
let file_name:string = me.currentTestId + '_' + me.currentTest;
this.isLoading = true;
/** Combining all chunks into one single file and transcribe it */
this.fileService.combineAudio( me.currentLangCode, file_name).subscribe(
transcript => {
console.log('Transcript: ', transcript['transcript']);
console.log('Duration: ', transcript['duration']);
this.subTests$[this.currentTest].audio_text = me.currentTranscriptTemplate;
me.distance = me.distanceFnc(me.currentTranscriptTemplate, transcript['transcript']);
me.duration = transcript['duration'];
me.updateCurrentTest();
this.isLoading = false;
}
);
}
public toggleRecording(event: recType){
if (this.isRecording) {
this.onStop();
return;
} else |
}
public recognizeRecording(event: fileType){
let me = this;
let file_name:string = me.currentTestId + '_' + event.subTest;
me.currentTest = event.subTest;
this.isLoading = true;
this.message = 'Transcribing, please be patient!';
switch (event.subTest) {
case 0:
me.currentTranscriptTemplate = VERTICAL_A_TRANSCRIPT;
break;
case 1:
me.currentTranscriptTemplate = VERTICAL_B_TRANSCRIPT;
break;
default:
me.currentTranscriptTemplate = HORIZONTAL_TRANSCRIPT;
break;
}
this.subTests$[this.currentTest].audio = file_name;
this.fileService.uploadAudio(event.file, me.currentLangCode, file_name).subscribe(
transcript => {
console.log('Transcript: ', transcript['transcript']);
console.log('Duration: ', transcript['duration']);
this.subTests$[this.currentTest].audio_text = me.currentTranscriptTemplate;
me.distance = me.distanceFnc(me.currentTranscriptTemplate, transcript['transcript']);
me.duration = transcript['duration'];
this.isLoading = false;
me.updateCurrentTest();
}
);
}
public calculateTotal(event: string){
this.isLoading = true;
this.message = 'Saving tests data, please wait!';
if (event){
this.totalScore = this.subTests$[2].time * (80/(80 - this.subTests$[2].deletions + this.subTests$[2].additions));
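/**
 * Worked example (illustrative only, assuming `time` is in milliseconds):
 * with time = 60000, deletions = 4 and additions = 2, the score is
 * 60000 * 80 / (80 - 4 + 2) = 60000 * 80 / 78 ≈ 61538.
 */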
/**
* Update all sub tests data once recognition is finished
*/
this.subTestsService.updateSubTests(this.subTests$).subscribe(data => {
/**
* Update the total score of the test
*/
this.currentTestObj.score = this.totalScore;
console.log(this.overall_comment);
this.currentTestObj.comments = event;
this.testService.update(this.currentTestObj).subscribe(data => this.isLoading = false);
});
}
}
}
| {
console.log('Setting language to: ', this.currentLangCode);
this.final_transcript = '';
this.onStart();
this.ignore_onend = false;
this.start_timestamp = new Date();
this.currentTest = event.selectedTest;
console.log('Setting template by : ', this.currentTest);
switch (this.currentTest) {
case 0:
this.currentTranscriptTemplate = VERTICAL_A_TRANSCRIPT;
break;
case 1:
this.currentTranscriptTemplate = VERTICAL_B_TRANSCRIPT;
break;
default:
this.currentTranscriptTemplate = HORIZONTAL_TRANSCRIPT;
break;
}
console.log('Setting template to : ', this.currentTranscriptTemplate);
} | conditional_block |
test.container.ts | import {Component, OnInit} from "@angular/core";
import {select, Store} from "@ngrx/store";
import { Observable } from "rxjs";
import { SubTest, defaultSubTest, subTests, Test } from "../../../core/domain/tests.model";
import * as fromState from "../../../core/state";
import * as TestAction from "../../../core/state/tests/tests.action";
import { fileType, recType } from './test.component';
import * as recordRTC from 'recordrtc';
import { HORIZONTAL_TRANSCRIPT, VERTICAL_A_TRANSCRIPT, VERTICAL_B_TRANSCRIPT, Distance, lev_dem_distance, cleanString } from '../../../util/lev-dem.util';
import { IoService } from 'src/app/core/service/io.service';
import { FileService } from "src/app/core/service/file.service";
import { SubTestsService } from "src/app/core/service/sub-tests.service";
import { TestService } from 'src/app/core/service/test.service';
@Component({
selector: "app-test-container",
template: `
<app-test-component
[subTests$]="subTests$"
[currentLangCode] = "currentLangCode"
[currentText] = "final_transcript"
[triggerChange] = "triggerChange"
[totalScore] = "totalScore"
[overall_comment] = "overall_comment"
(recognize)="toggleRecording($event)"
(file_context)="recognizeRecording($event)"
(calculate_total)="calculateTotal($event)">
</app-test-component>
<app-loader *ngIf = "isLoading" [message] = "message"></app-loader>
`
})
export class TestContainer implements OnInit {
/**
* The sub-tests of the currently selected test.
*/
public subTests$: SubTest[];
public final_transcript: string = '';
private transcript_array: any = {};
private current_seq: number = 0;
public interim_transcript: string = '';
public triggerChange: number = 0;
private recognizing: boolean = false;
private ignore_onend: boolean;
private start_timestamp: Date;
private end_timestamp: Date;
private duration: number;
private totalScore: number;
private overall_comment: string;
private recognition;
private currentTest: number;
private currentTranscriptTemplate: string;
/** Recording data */
private isRecording:boolean = false;
public recordAudio: any;
currentPatientId: number;
currentTestId: number;
currentTestObj: Test;
currentLangCode: string;
/** Distance function */
public distanceFnc = lev_dem_distance;
public cleanString = cleanString;
public distance: Distance;
public isLoading: boolean = false;
public message: string = "";
/**
* Constructor.
*/
public constructor(private store$: Store<any>,
private ioService:IoService,
private fileService: FileService,
private testService: TestService,
private subTestsService: SubTestsService
) |
/**
* Initialize the component.
*/
public ngOnInit() {
this.store$.pipe(select(fromState.selectCurrentPatient)).subscribe(
(patient) => {
if (patient) {
this.currentPatientId = patient.patient_id;
this.currentLangCode = patient.lang_code;
}
}
);
this.store$.pipe(select(fromState.selectCurrentTest)).subscribe(
(test) => {
//Selecting current test Id
if (test) {
this.currentTestObj = test;
this.currentTestId = test.test_id;
this.totalScore = test.score;
this.overall_comment = test.comments;
this.subTestsService.getSubTests(this.currentTestId).subscribe(
(data) => {
console.log('Got the following subtests:', data);
if (data && data.length > 0){
this.subTests$ = data;
//this.subTests$ = [{...defaultSubTest}, {...defaultSubTest}, {...defaultSubTest}];
} else {
console.log('Filling the default data');
let subTest: SubTest = {...defaultSubTest};
subTest.test_id = this.currentTestId;
this.subTests$ = [{...subTest}, {...subTest}, {...subTest}];
}
}
);
}
}
);
//this.initializeRecognitionSettings();
this.ioService.receiveStream('transcript', this, function(transcript, scope) {
console.log(transcript, scope);
let i:number = 0;
let ind: number = 1;
/** Clean the transcript string, apply substitution of known words, and replace all remaining non-digit chars */
if (!!transcript.transcript) {
transcript.transcript = scope.cleanString(transcript.transcript);
}
console.log('Received: ', !transcript.transcript && !scope.transcript_array[transcript.seq_num_prev], transcript.transcript);
/** Update the transcript array object (holds all the chunks received so far) and then try to construct the interim transcript */
if ((!transcript.transcript && !scope.transcript_array[transcript.seq_num_prev]) || transcript.transcript) {
scope.transcript_array[transcript.seq_num_prev] = transcript.transcript ? transcript.transcript : '';
console.log('Update transcript array: ', transcript.seq_num_prev, scope.transcript_array[transcript.seq_num_prev]);
/** special treatment to start */
if (transcript.seq_num_prev == 0){
scope.interim_transcript = scope.transcript_array[transcript.seq_num_prev];
scope.current_seq++;
}
console.log('Set current sequence: ', scope.current_seq);
console.log('Current sequence arr value: ', scope.transcript_array[scope.current_seq]);
/** Combine current transcript string - take maximum continuous sequence of transcripts and join them together*/
while ( scope.transcript_array[scope.current_seq] || (scope.transcript_array[scope.current_seq] == '')) {
if ( scope.transcript_array[scope.current_seq] != '' ) {
i = 0;
while ( i >= 0 && (scope.transcript_array[scope.current_seq].length + 1) >= ind
&& ((i == 0) || (i >= (scope.transcript_array[scope.current_seq-1].length - scope.transcript_array[scope.current_seq].length)))) {
i = scope.transcript_array[scope.current_seq - 1].indexOf(scope.transcript_array[scope.current_seq].substring(0, ind++));
}
scope.interim_transcript = scope.interim_transcript + scope.transcript_array[scope.current_seq].substring(ind-2, scope.transcript_array[scope.current_seq].length+1);
}
scope.current_seq++;
}
console.log(scope.currentTranscriptTemplate.substr(0, scope.interim_transcript.length), scope.interim_transcript);
scope.distance = scope.distanceFnc(scope.currentTranscriptTemplate.substr(0, scope.interim_transcript.length), scope.interim_transcript);
scope.updateCurrentTest();
}
});
}
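/**
 * Illustrative sketch only: the overlap merge performed inside the
 * `receiveStream` handler above, rewritten as a pure helper. The name
 * `mergeOverlap` and its exact semantics are assumptions added for clarity;
 * this method is not part of the original component.
 */
private static mergeOverlap(previous: string, next: string): string {
  // Find the longest prefix of `next` that is also a suffix of `previous`.
  for (let k = Math.min(previous.length, next.length); k > 0; k--) {
    if (previous.endsWith(next.substring(0, k))) {
      // Append only the part of `next` that has not been emitted yet.
      return previous + next.substring(k);
    }
  }
  // No overlap found: simply concatenate the chunks.
  return previous + next;
}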
updateCurrentTest(){
let me = this;
let current:SubTest = me.subTests$[me.currentTest];
current.sub_test_type_id = me.currentTest;
if (me.duration) {
current.time = me.duration * 1000;
me.duration = null;
} else {
current.time = (+(me.end_timestamp ? me.end_timestamp : new Date()) - +me.start_timestamp);
}
current.audio_text = me.distance.text;
current.additions = me.distance.additions;
current.deletions = me.distance.deletions;
current.substitutions = me.distance.substitutions;
current.transpositions = me.distance.transpositions;
current.mistakes_list = me.distance.ops;
me.triggerChange++;
}
/** Uses RecordRTC to stream the speech to the server and get back the transcribed string */
onStart() {
let me = this;
me.isRecording = true;
me.ioService.setDefaultLanguage(me.currentLangCode);
/**
* Initialize the current subtest recording
*/
console.log('Setting language to: ', this.currentLangCode);
me.final_transcript = '';
this.interim_transcript = '';
let stream_seq: number = 0;
navigator.mediaDevices.getUserMedia({
audio: true
}).then(function(stream: MediaStream) {
me.recordAudio = recordRTC(stream, {
type: 'audio',
mimeType: 'audio/webm',
sampleRate: 44100,
recorderType: recordRTC.StereoAudioRecorder,
numberOfAudioChannels: 1,
timeSlice: 1000,
desiredSampRate: 16000,
async ondataavailable(blob) {
let file_name = me.currentTestId + '_' + me.currentTest;
console.log('File name: ', file_name);
me.ioService.sendBinaryStream(blob, file_name, (stream_seq++));
if (!me.isRecording) {
me.stopRecording();
}
}
});
me.recordAudio.startRecording();
}).catch(function(error) {
console.error(JSON.stringify(error));
});
}
onStop() {
// mark recording as stopped; the recorder itself is stopped from
// ondataavailable once the last chunk has been flushed (see stopRecording)
this.isRecording = false;
}
stopRecording(){
this.recordAudio.stopRecording();
this.transcript_array = {};
this.current_seq = 0;
this.final_transcript = this.interim_transcript;
let me = this;
let file_name:string = me.currentTestId + '_' + me.currentTest;
this.isLoading = true;
/** Combining all chunks into one single file and transcribe it */
this.fileService.combineAudio( me.currentLangCode, file_name).subscribe(
transcript => {
console.log('Transcript: ', transcript['transcript']);
console.log('Duration: ', transcript['duration']);
this.subTests$[this.currentTest].audio_text = me.currentTranscriptTemplate;
me.distance = me.distanceFnc(me.currentTranscriptTemplate, transcript['transcript']);
me.duration = transcript['duration'];
me.updateCurrentTest();
this.isLoading = false;
}
);
}
public toggleRecording(event: recType){
if (this.isRecording) {
this.onStop();
return;
} else {
console.log('Setting language to: ', this.currentLangCode);
this.final_transcript = '';
this.onStart();
this.ignore_onend = false;
this.start_timestamp = new Date();
this.currentTest = event.selectedTest;
console.log('Setting template by : ', this.currentTest);
switch (this.currentTest) {
case 0:
this.currentTranscriptTemplate = VERTICAL_A_TRANSCRIPT;
break;
case 1:
this.currentTranscriptTemplate = VERTICAL_B_TRANSCRIPT;
break;
default:
this.currentTranscriptTemplate = HORIZONTAL_TRANSCRIPT;
break;
}
console.log('Setting template to : ', this.currentTranscriptTemplate);
}
}
public recognizeRecording(event: fileType){
let me = this;
let file_name:string = me.currentTestId + '_' + event.subTest;
me.currentTest = event.subTest;
this.isLoading = true;
this.message = 'Transcribing, please be patient!';
switch (event.subTest) {
case 0:
me.currentTranscriptTemplate = VERTICAL_A_TRANSCRIPT;
break;
case 1:
me.currentTranscriptTemplate = VERTICAL_B_TRANSCRIPT;
break;
default:
me.currentTranscriptTemplate = HORIZONTAL_TRANSCRIPT;
break;
}
this.subTests$[this.currentTest].audio = file_name;
this.fileService.uploadAudio(event.file, me.currentLangCode, file_name).subscribe(
transcript => {
console.log('Transcript: ', transcript['transcript']);
console.log('Duration: ', transcript['duration']);
this.subTests$[this.currentTest].audio_text = me.currentTranscriptTemplate;
me.distance = me.distanceFnc(me.currentTranscriptTemplate, transcript['transcript']);
me.duration = transcript['duration'];
this.isLoading = false;
me.updateCurrentTest();
}
);
}
public calculateTotal(event: string){
this.isLoading = true;
this.message = 'Saving tests data, please wait!';
if (event){
this.totalScore = this.subTests$[2].time * (80/(80 - this.subTests$[2].deletions + this.subTests$[2].additions));
/**
* Update all sub tests data once recognition is finished
*/
this.subTestsService.updateSubTests(this.subTests$).subscribe(data => {
/**
* Update the total score of the test
*/
this.currentTestObj.score = this.totalScore;
console.log(this.overall_comment);
this.currentTestObj.comments = event;
this.testService.update(this.currentTestObj).subscribe(data => this.isLoading = false);
});
}
}
}
| {
} | identifier_body |
detector.go | package light
import (
"bytes"
"context"
"errors"
"fmt"
"time"
"github.com/tendermint/tendermint/light/provider"
"github.com/tendermint/tendermint/types"
)
// The detector component of the light client detects and handles attacks on the light client.
// More info here:
// tendermint/docs/architecture/adr-047-handling-evidence-from-light-client.md
// detectDivergence is a second wall of defense for the light client.
//
// It takes the target verified header and compares it with the headers of a set of
// witness providers that the light client is connected to. If a conflicting header
// is returned it verifies and examines the conflicting header against the verified
// trace that was produced from the primary. If successful, it produces two sets of evidence
// and sends them to the opposite provider before halting.
//
// If there are no conflicting headers, the light client deems the verified target header
// trusted and saves it to the trusted store.
func (c *Client) detectDivergence(ctx context.Context, primaryTrace []*types.LightBlock, now time.Time) error {
if primaryTrace == nil || len(primaryTrace) < 2 {
return errors.New("nil or single block primary trace")
}
var (
headerMatched bool
lastVerifiedHeader = primaryTrace[len(primaryTrace)-1].SignedHeader
witnessesToRemove = make([]int, 0)
)
c.logger.Debug("Running detector against trace", "endBlockHeight", lastVerifiedHeader.Height,
"endBlockHash", lastVerifiedHeader.Hash, "length", len(primaryTrace))
c.providerMutex.Lock()
defer c.providerMutex.Unlock()
if len(c.witnesses) == 0 {
return ErrNoWitnesses
}
// launch one goroutine per witness to retrieve the light block of the target height
// and compare it with the header from the primary
errc := make(chan error, len(c.witnesses))
for i, witness := range c.witnesses {
go c.compareNewHeaderWithWitness(ctx, errc, lastVerifiedHeader, witness, i)
}
// handle errors from the header comparisons as they come in
for i := 0; i < cap(errc); i++ {
err := <-errc
switch e := err.(type) {
case nil: // at least one header matched
headerMatched = true
case errConflictingHeaders:
// We have conflicting headers. This could possibly imply an attack on the light client.
// First we need to verify the witness's header using the same skipping verification and then we
// need to find the point that the headers diverge and examine this for any evidence of an attack.
//
// We combine these actions together, verifying the witnesses headers and outputting the trace
// which captures the bifurcation point and if successful provides the information to create valid evidence.
err := c.handleConflictingHeaders(ctx, primaryTrace, e.Block, e.WitnessIndex, now)
if err != nil {
// return information of the attack
return err
}
// if attempt to generate conflicting headers failed then remove witness
witnessesToRemove = append(witnessesToRemove, e.WitnessIndex)
case errBadWitness:
// these are all malevolent errors and should result in removing the
// witness
c.logger.Info("witness returned an error during header comparison, removing...",
"witness", c.witnesses[e.WitnessIndex], "err", err)
witnessesToRemove = append(witnessesToRemove, e.WitnessIndex)
default:
// Benign errors which can be ignored unless there was a context
// canceled
if errors.Is(e, context.Canceled) || errors.Is(e, context.DeadlineExceeded) {
return e
}
c.logger.Info("error in light block request to witness", "err", err)
}
}
// remove witnesses that have misbehaved
if err := c.removeWitnesses(witnessesToRemove); err != nil {
return err
}
// 1. If we had at least one witness that returned the same header then we
// conclude that we can trust the header
if headerMatched {
return nil
}
// 2. Else all witnesses have either not responded, don't have the block or sent invalid blocks.
return ErrFailedHeaderCrossReferencing
}
// compareNewHeaderWithWitness takes the verified header from the primary and compares it with a
// header from a specified witness. The function can return one of three errors:
//
// 1: errConflictingHeaders -> there may have been an attack on this light client
// 2: errBadWitness -> the witness has either not responded, doesn't have the header or has given us an invalid one
//
// Note: In the case of an invalid header we remove the witness
//
// 3: nil -> the hashes of the two headers match
func (c *Client) compareNewHeaderWithWitness(ctx context.Context, errc chan error, h *types.SignedHeader,
witness provider.Provider, witnessIndex int) {
lightBlock, err := witness.LightBlock(ctx, h.Height)
switch err {
// no error means we move on to checking the hash of the two headers
case nil:
break
// the witness hasn't been helpful in comparing headers, we mark the response and continue
// comparing with the rest of the witnesses
case provider.ErrNoResponse, provider.ErrLightBlockNotFound, context.DeadlineExceeded, context.Canceled:
errc <- err
return
// the witness' head of the blockchain is lower than the height of the primary. This could be one of
// two things:
// 1) The witness is lagging behind
// 2) The primary may be performing a lunatic attack with a height and time in the future
case provider.ErrHeightTooHigh:
// The light client now asks for the latest header that the witness has
var isTargetHeight bool
isTargetHeight, lightBlock, err = c.getTargetBlockOrLatest(ctx, h.Height, witness)
if err != nil {
errc <- err
return
}
// if the witness caught up and has returned a block of the target height then we can
// break from this switch case and continue to verify the hashes
if isTargetHeight {
break
}
// witness' last header is below the primary's header. We check the times to see if the blocks
// have conflicting times
if !lightBlock.Time.Before(h.Time) {
errc <- errConflictingHeaders{Block: lightBlock, WitnessIndex: witnessIndex}
return
}
// the witness is behind. We wait for a period WAITING = 2 * DRIFT + LAG.
// This should give the witness ample time if it is a participating member
// of consensus to produce a block that has a time that is after the primary's
// block time. If not the witness is too far behind and the light client removes it
time.Sleep(2*c.maxClockDrift + c.maxBlockLag)
isTargetHeight, lightBlock, err = c.getTargetBlockOrLatest(ctx, h.Height, witness)
if err != nil {
if errors.Is(err, context.Canceled) || errors.Is(err, context.DeadlineExceeded) {
errc <- err
} else {
errc <- errBadWitness{Reason: err, WitnessIndex: witnessIndex}
}
return
}
if isTargetHeight {
break
}
// the witness still doesn't have a block at the height of the primary.
// Check if there is a conflicting time
if !lightBlock.Time.Before(h.Time) {
errc <- errConflictingHeaders{Block: lightBlock, WitnessIndex: witnessIndex}
return
}
// Following this request response procedure, the witness has been unable to produce a block
// that can somehow conflict with the primary's block. We thus conclude that the witness
// is too far behind and thus we return a no response error.
//
// NOTE: If the clock drift / lag has been miscalibrated it is feasible that the light client has
// drifted too far ahead for any witness to be able to provide a comparable block and thus may allow
// for a malicious primary to attack it
errc <- provider.ErrNoResponse
return
default:
// all other errors (i.e. invalid block, closed connection or unreliable provider) we mark the
// witness as bad and remove it
errc <- errBadWitness{Reason: err, WitnessIndex: witnessIndex}
return
}
if !bytes.Equal(h.Hash(), lightBlock.Hash()) {
errc <- errConflictingHeaders{Block: lightBlock, WitnessIndex: witnessIndex}
}
c.logger.Debug("Matching header received by witness", "height", h.Height, "witness", witnessIndex)
errc <- nil
}
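// The comment above describes the waiting period as WAITING = 2 * DRIFT + LAG.
// Below is a minimal illustrative sketch of that computation as a standalone
// helper; the function and parameter names are assumptions added for clarity
// and are not part of the original file.
func witnessWaitingPeriod(maxClockDrift, maxBlockLag time.Duration) time.Duration {
	// Twice the permitted clock drift plus the permitted block lag gives a
	// lagging witness ample time to produce a comparable block.
	return 2*maxClockDrift + maxBlockLag
}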
// sendEvidence sends evidence to a provider on a best effort basis.
func (c *Client) | (ctx context.Context, ev *types.LightClientAttackEvidence, receiver provider.Provider) {
err := receiver.ReportEvidence(ctx, ev)
if err != nil {
c.logger.Error("Failed to report evidence to provider", "ev", ev, "provider", receiver)
}
}
// handleConflictingHeaders handles the primary style of attack, which is where a primary and witness have
// two headers of the same height but with different hashes
func (c *Client) handleConflictingHeaders(
ctx context.Context,
primaryTrace []*types.LightBlock,
challengingBlock *types.LightBlock,
witnessIndex int,
now time.Time,
) error {
supportingWitness := c.witnesses[witnessIndex]
witnessTrace, primaryBlock, err := c.examineConflictingHeaderAgainstTrace(
ctx,
primaryTrace,
challengingBlock,
supportingWitness,
now,
)
if err != nil {
c.logger.Info("error validating witness's divergent header", "witness", supportingWitness, "err", err)
return nil
}
// We are suspecting that the primary is faulty, hence we hold the witness as the source of truth
// and generate evidence against the primary that we can send to the witness
commonBlock, trustedBlock := witnessTrace[0], witnessTrace[len(witnessTrace)-1]
evidenceAgainstPrimary := newLightClientAttackEvidence(primaryBlock, trustedBlock, commonBlock)
c.logger.Error("ATTEMPTED ATTACK DETECTED. Sending evidence againt primary by witness", "ev", evidenceAgainstPrimary,
"primary", c.primary, "witness", supportingWitness)
c.sendEvidence(ctx, evidenceAgainstPrimary, supportingWitness)
if primaryBlock.Commit.Round != witnessTrace[len(witnessTrace)-1].Commit.Round {
c.logger.Info("The light client has detected, and prevented, an attempted amnesia attack." +
" We think this attack is pretty unlikely, so if you see it, that's interesting to us." +
" Can you let us know by opening an issue through https://github.com/tendermint/tendermint/issues/new?")
}
// This may not be valid because the witness itself is at fault. So now we reverse it, examining the
// trace provided by the witness and holding the primary as the source of truth. Note: primary may not
// respond but this is okay as we will halt anyway.
primaryTrace, witnessBlock, err := c.examineConflictingHeaderAgainstTrace(
ctx,
witnessTrace,
primaryBlock,
c.primary,
now,
)
if err != nil {
c.logger.Info("Error validating primary's divergent header", "primary", c.primary, "err", err)
return ErrLightClientAttack
}
// We now use the primary trace to create evidence against the witness and send it to the primary
commonBlock, trustedBlock = primaryTrace[0], primaryTrace[len(primaryTrace)-1]
evidenceAgainstWitness := newLightClientAttackEvidence(witnessBlock, trustedBlock, commonBlock)
c.logger.Error("Sending evidence against witness by primary", "ev", evidenceAgainstWitness,
"primary", c.primary, "witness", supportingWitness)
c.sendEvidence(ctx, evidenceAgainstWitness, c.primary)
// We return the error and don't process anymore witnesses
return ErrLightClientAttack
}
// examineConflictingHeaderAgainstTrace takes a trace from one provider and a divergent header that
// it has received from another and performs verifySkipping at the heights of each of the intermediate
// headers in the trace until it reaches the divergentHeader. 1 of 2 things can happen.
//
// 1. The light client verifies a header that is different to the intermediate header in the trace. This
// is the bifurcation point and the light client can create evidence from it
// 2. The source stops responding, doesn't have the block or sends an invalid header in which case we
// return the error and remove the witness
//
// CONTRACT:
// 1. Trace can not be empty len(trace) > 0
// 2. The last block in the trace can not be of a lower height than the target block
// trace[len(trace)-1].Height >= targetBlock.Height
// 3. The target block can not be of a lower height than the first block in the trace,
// targetBlock.Height >= trace[0].Height
func (c *Client) examineConflictingHeaderAgainstTrace(
ctx context.Context,
trace []*types.LightBlock,
targetBlock *types.LightBlock,
source provider.Provider, now time.Time,
) ([]*types.LightBlock, *types.LightBlock, error) {
var (
previouslyVerifiedBlock, sourceBlock *types.LightBlock
sourceTrace []*types.LightBlock
err error
)
if targetBlock.Height < trace[0].Height {
return nil, nil, fmt.Errorf("target block has a height lower than the trusted height (%d < %d)",
targetBlock.Height, trace[0].Height)
}
for idx, traceBlock := range trace {
// this case only happens in a forward lunatic attack. We treat the block with the
// height directly after the targetBlock as the divergent block
if traceBlock.Height > targetBlock.Height {
// sanity check that the time of the traceBlock is indeed less than that of the targetBlock. If the trace
// was correctly verified we should expect monotonically increasing time. This means that if the block at
// the end of the trace has a lesser time than the target block then all blocks in the trace should have a
// lesser time
if traceBlock.Time.After(targetBlock.Time) {
return nil, nil,
errors.New("sanity check failed: expected traceblock to have a lesser time than the target block")
}
// before sending back the divergent block and trace we need to ensure we have verified
// the final gap between the previouslyVerifiedBlock and the targetBlock
if previouslyVerifiedBlock.Height != targetBlock.Height {
sourceTrace, err = c.verifySkipping(ctx, source, previouslyVerifiedBlock, targetBlock, now)
if err != nil {
return nil, nil, fmt.Errorf("verifySkipping of conflicting header failed: %w", err)
}
}
return sourceTrace, traceBlock, nil
}
// get the corresponding block from the source to verify and match up against the traceBlock
if traceBlock.Height == targetBlock.Height {
sourceBlock = targetBlock
} else {
sourceBlock, err = source.LightBlock(ctx, traceBlock.Height)
if err != nil {
return nil, nil, fmt.Errorf("failed to examine trace: %w", err)
}
}
// The first block in the trace MUST be the same to the light block that the source produces
// else we cannot continue with verification.
if idx == 0 {
if shash, thash := sourceBlock.Hash(), traceBlock.Hash(); !bytes.Equal(shash, thash) {
return nil, nil, fmt.Errorf("trusted block is different to the source's first block (%X = %X)",
thash, shash)
}
previouslyVerifiedBlock = sourceBlock
continue
}
// we check that the source provider can verify a block at the same height of the
// intermediate height
sourceTrace, err = c.verifySkipping(ctx, source, previouslyVerifiedBlock, sourceBlock, now)
if err != nil {
return nil, nil, fmt.Errorf("verifySkipping of conflicting header failed: %w", err)
}
// check if the headers verified by the source has diverged from the trace
if shash, thash := sourceBlock.Hash(), traceBlock.Hash(); !bytes.Equal(shash, thash) {
// Bifurcation point found!
return sourceTrace, traceBlock, nil
}
// headers are still the same. update the previouslyVerifiedBlock
previouslyVerifiedBlock = sourceBlock
}
// We have reached the end of the trace. This should never happen. This can only happen if one of the stated
// prerequisites to this function were not met. Namely that either trace[len(trace)-1].Height < targetBlock.Height
// or that trace[i].Hash() != targetBlock.Hash()
return nil, nil, errNoDivergence
}
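// A minimal sketch of the bifurcation test used in the loop above: two blocks
// at the same height diverge exactly when their header hashes differ. The
// helper name is an assumption added for illustration and does not exist in
// the original file.
func isBifurcationPoint(sourceBlock, traceBlock *types.LightBlock) bool {
	// Differing hashes at the same height mean the primary and the source
	// verified different headers, i.e. the traces diverge here.
	return !bytes.Equal(sourceBlock.Hash(), traceBlock.Hash())
}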
// getTargetBlockOrLatest gets the latest block; if its height is greater than the target
// height it queries the target height, else it returns the latest. Returns true if it
// successfully managed to acquire the block at the target height.
func (c *Client) getTargetBlockOrLatest(
ctx context.Context,
height int64,
witness provider.Provider,
) (bool, *types.LightBlock, error) {
lightBlock, err := witness.LightBlock(ctx, 0)
if err != nil {
return false, nil, err
}
if lightBlock.Height == height {
// the witness has caught up to the height of the provider's signed header. We
// can resume with checking the hashes.
return true, lightBlock, nil
}
if lightBlock.Height > height {
// the witness is ahead of the target. We request the exact target height once more. However in order
// to avoid a wild goose chase where the witness sends us one header below and one header
// above the height we set a timeout to the context
lightBlock, err := witness.LightBlock(ctx, height)
return true, lightBlock, err
}
return false, lightBlock, nil
}
// newLightClientAttackEvidence determines the type of attack and then forms the evidence filling out
// all the fields such that it is ready to be sent to a full node.
func newLightClientAttackEvidence(conflicted, trusted, common *types.LightBlock) *types.LightClientAttackEvidence {
ev := &types.LightClientAttackEvidence{ConflictingBlock: conflicted}
// if this is an equivocation or amnesia attack, i.e. the validator sets are the same, then we
// return the height of the conflicting block else if it is a lunatic attack and the validator sets
// are not the same then we send the height of the common header.
if ev.ConflictingHeaderIsInvalid(trusted.Header) {
ev.CommonHeight = common.Height
ev.Timestamp = common.Time
ev.TotalVotingPower = common.ValidatorSet.TotalVotingPower()
} else {
ev.CommonHeight = trusted.Height
ev.Timestamp = trusted.Time
ev.TotalVotingPower = trusted.ValidatorSet.TotalVotingPower()
}
ev.ByzantineValidators = ev.GetByzantineValidators(common.ValidatorSet, trusted.SignedHeader)
return ev
}
| sendEvidence | identifier_name |
detector.go | package light
import (
"bytes"
"context"
"errors"
"fmt"
"time"
"github.com/tendermint/tendermint/light/provider"
"github.com/tendermint/tendermint/types"
)
// The detector component of the light client detects and handles attacks on the light client.
// More info here:
// tendermint/docs/architecture/adr-047-handling-evidence-from-light-client.md
// detectDivergence is a second wall of defense for the light client.
//
// It takes the target verified header and compares it with the headers of a set of
// witness providers that the light client is connected to. If a conflicting header
// is returned it verifies and examines the conflicting header against the verified
// trace that was produced from the primary. If successful, it produces two sets of evidence
// and sends them to the opposite provider before halting.
//
// If there are no conflicting headers, the light client deems the verified target header
// trusted and saves it to the trusted store.
func (c *Client) detectDivergence(ctx context.Context, primaryTrace []*types.LightBlock, now time.Time) error {
if primaryTrace == nil || len(primaryTrace) < 2 {
return errors.New("nil or single block primary trace")
}
var (
headerMatched bool
lastVerifiedHeader = primaryTrace[len(primaryTrace)-1].SignedHeader
witnessesToRemove = make([]int, 0)
)
c.logger.Debug("Running detector against trace", "endBlockHeight", lastVerifiedHeader.Height,
"endBlockHash", lastVerifiedHeader.Hash, "length", len(primaryTrace))
c.providerMutex.Lock()
defer c.providerMutex.Unlock()
if len(c.witnesses) == 0 {
return ErrNoWitnesses
}
// launch one goroutine per witness to retrieve the light block of the target height
// and compare it with the header from the primary
errc := make(chan error, len(c.witnesses))
for i, witness := range c.witnesses {
go c.compareNewHeaderWithWitness(ctx, errc, lastVerifiedHeader, witness, i)
}
// handle errors from the header comparisons as they come in
for i := 0; i < cap(errc); i++ {
err := <-errc
switch e := err.(type) {
case nil: // at least one header matched
headerMatched = true
case errConflictingHeaders:
// We have conflicting headers. This could possibly imply an attack on the light client.
// First we need to verify the witness's header using the same skipping verification and then we
// need to find the point that the headers diverge and examine this for any evidence of an attack.
//
// We combine these actions together, verifying the witnesses headers and outputting the trace
// which captures the bifurcation point and if successful provides the information to create valid evidence.
err := c.handleConflictingHeaders(ctx, primaryTrace, e.Block, e.WitnessIndex, now)
if err != nil {
// return information of the attack
return err
}
// if attempt to generate conflicting headers failed then remove witness
witnessesToRemove = append(witnessesToRemove, e.WitnessIndex)
case errBadWitness:
// these are all malevolent errors and should result in removing the
// witness
c.logger.Info("witness returned an error during header comparison, removing...",
"witness", c.witnesses[e.WitnessIndex], "err", err)
witnessesToRemove = append(witnessesToRemove, e.WitnessIndex)
default:
// Benign errors which can be ignored unless there was a context
// canceled
if errors.Is(e, context.Canceled) || errors.Is(e, context.DeadlineExceeded) {
return e
}
c.logger.Info("error in light block request to witness", "err", err)
}
}
// remove witnesses that have misbehaved
if err := c.removeWitnesses(witnessesToRemove); err != nil {
return err
}
// 1. If we had at least one witness that returned the same header then we
// conclude that we can trust the header
if headerMatched {
return nil
}
// 2. Else all witnesses have either not responded, don't have the block or sent invalid blocks.
return ErrFailedHeaderCrossReferencing
}
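// detectDivergence above fans out one goroutine per witness and drains exactly
// cap(errc) results. The sketch below shows the same fan-out/fan-in pattern in
// miniature; it is illustrative only and the helper name is an assumption, not
// part of the original file.
func fanInErrors(workers []func() error) []error {
	// Buffer the channel to the number of workers so no sender ever blocks.
	errc := make(chan error, len(workers))
	for _, w := range workers {
		go func(f func() error) { errc <- f() }(w)
	}
	// Drain exactly cap(errc) results, mirroring the loop in detectDivergence.
	out := make([]error, 0, cap(errc))
	for i := 0; i < cap(errc); i++ {
		out = append(out, <-errc)
	}
	return out
}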
// compareNewHeaderWithWitness takes the verified header from the primary and compares it with a
// header from a specified witness. The function can return one of three errors:
//
// 1: errConflictingHeaders -> there may have been an attack on this light client
// 2: errBadWitness -> the witness has either not responded, doesn't have the header or has given us an invalid one
//
// Note: In the case of an invalid header we remove the witness
//
// 3: nil -> the hashes of the two headers match
func (c *Client) compareNewHeaderWithWitness(ctx context.Context, errc chan error, h *types.SignedHeader,
witness provider.Provider, witnessIndex int) {
lightBlock, err := witness.LightBlock(ctx, h.Height)
switch err {
// no error means we move on to checking the hash of the two headers
case nil:
break
// the witness hasn't been helpful in comparing headers, we mark the response and continue
// comparing with the rest of the witnesses
case provider.ErrNoResponse, provider.ErrLightBlockNotFound, context.DeadlineExceeded, context.Canceled:
errc <- err
return
// the witness' head of the blockchain is lower than the height of the primary. This could be one of
// two things:
// 1) The witness is lagging behind
// 2) The primary may be performing a lunatic attack with a height and time in the future
case provider.ErrHeightTooHigh:
// The light client now asks for the latest header that the witness has
var isTargetHeight bool
isTargetHeight, lightBlock, err = c.getTargetBlockOrLatest(ctx, h.Height, witness)
if err != nil {
errc <- err
return
}
// if the witness caught up and has returned a block of the target height then we can
// break from this switch case and continue to verify the hashes
if isTargetHeight {
break
}
// witness' last header is below the primary's header. We check the times to see if the blocks
// have conflicting times
if !lightBlock.Time.Before(h.Time) {
errc <- errConflictingHeaders{Block: lightBlock, WitnessIndex: witnessIndex}
return
}
// the witness is behind. We wait for a period WAITING = 2 * DRIFT + LAG.
// This should give the witness ample time if it is a participating member
// of consensus to produce a block that has a time that is after the primary's
// block time. If not the witness is too far behind and the light client removes it
time.Sleep(2*c.maxClockDrift + c.maxBlockLag)
isTargetHeight, lightBlock, err = c.getTargetBlockOrLatest(ctx, h.Height, witness)
if err != nil {
if errors.Is(err, context.Canceled) || errors.Is(err, context.DeadlineExceeded) {
errc <- err
} else {
errc <- errBadWitness{Reason: err, WitnessIndex: witnessIndex}
}
return
}
if isTargetHeight {
break
}
// the witness still doesn't have a block at the height of the primary.
// Check if there is a conflicting time
if !lightBlock.Time.Before(h.Time) {
errc <- errConflictingHeaders{Block: lightBlock, WitnessIndex: witnessIndex}
return
}
// Following this request response procedure, the witness has been unable to produce a block
// that can somehow conflict with the primary's block. We thus conclude that the witness
// is too far behind and thus we return a no response error.
//
// NOTE: If the clock drift / lag has been miscalibrated it is feasible that the light client has
// drifted too far ahead for any witness to be able to provide a comparable block and thus may allow
// for a malicious primary to attack it
errc <- provider.ErrNoResponse
return
default:
// all other errors (i.e. invalid block, closed connection or unreliable provider) we mark the
// witness as bad and remove it
errc <- errBadWitness{Reason: err, WitnessIndex: witnessIndex}
return
}
if !bytes.Equal(h.Hash(), lightBlock.Hash()) {
errc <- errConflictingHeaders{Block: lightBlock, WitnessIndex: witnessIndex}
}
c.logger.Debug("Matching header received by witness", "height", h.Height, "witness", witnessIndex)
errc <- nil
}
// sendEvidence sends evidence to a provider on a best effort basis.
func (c *Client) sendEvidence(ctx context.Context, ev *types.LightClientAttackEvidence, receiver provider.Provider) {
err := receiver.ReportEvidence(ctx, ev)
if err != nil {
c.logger.Error("Failed to report evidence to provider", "ev", ev, "provider", receiver)
}
}
// handleConflictingHeaders handles the primary style of attack, which is where a primary and witness have
// two headers of the same height but with different hashes
func (c *Client) handleConflictingHeaders(
ctx context.Context,
primaryTrace []*types.LightBlock,
challengingBlock *types.LightBlock,
witnessIndex int,
now time.Time,
) error {
supportingWitness := c.witnesses[witnessIndex]
witnessTrace, primaryBlock, err := c.examineConflictingHeaderAgainstTrace(
ctx,
primaryTrace,
challengingBlock,
supportingWitness,
now,
)
if err != nil {
c.logger.Info("error validating witness's divergent header", "witness", supportingWitness, "err", err)
return nil
} | // We are suspecting that the primary is faulty, hence we hold the witness as the source of truth
// and generate evidence against the primary that we can send to the witness
commonBlock, trustedBlock := witnessTrace[0], witnessTrace[len(witnessTrace)-1]
evidenceAgainstPrimary := newLightClientAttackEvidence(primaryBlock, trustedBlock, commonBlock)
c.logger.Error("ATTEMPTED ATTACK DETECTED. Sending evidence againt primary by witness", "ev", evidenceAgainstPrimary,
"primary", c.primary, "witness", supportingWitness)
c.sendEvidence(ctx, evidenceAgainstPrimary, supportingWitness)
if primaryBlock.Commit.Round != witnessTrace[len(witnessTrace)-1].Commit.Round {
c.logger.Info("The light client has detected, and prevented, an attempted amnesia attack." +
" We think this attack is pretty unlikely, so if you see it, that's interesting to us." +
" Can you let us know by opening an issue through https://github.com/tendermint/tendermint/issues/new?")
}
// This may not be valid because the witness itself is at fault. So now we reverse it, examining the
// trace provided by the witness and holding the primary as the source of truth. Note: primary may not
// respond but this is okay as we will halt anyway.
primaryTrace, witnessBlock, err := c.examineConflictingHeaderAgainstTrace(
ctx,
witnessTrace,
primaryBlock,
c.primary,
now,
)
if err != nil {
c.logger.Info("Error validating primary's divergent header", "primary", c.primary, "err", err)
return ErrLightClientAttack
}
// We now use the primary trace to create evidence against the witness and send it to the primary
commonBlock, trustedBlock = primaryTrace[0], primaryTrace[len(primaryTrace)-1]
evidenceAgainstWitness := newLightClientAttackEvidence(witnessBlock, trustedBlock, commonBlock)
c.logger.Error("Sending evidence against witness by primary", "ev", evidenceAgainstWitness,
"primary", c.primary, "witness", supportingWitness)
c.sendEvidence(ctx, evidenceAgainstWitness, c.primary)
// We return the error and don't process anymore witnesses
return ErrLightClientAttack
}
// examineConflictingHeaderAgainstTrace takes a trace from one provider and a divergent header that
// it has received from another and performs verifySkipping at the heights of each of the intermediate
// headers in the trace until it reaches the divergentHeader. 1 of 2 things can happen.
//
// 1. The light client verifies a header that is different to the intermediate header in the trace. This
// is the bifurcation point and the light client can create evidence from it
// 2. The source stops responding, doesn't have the block or sends an invalid header in which case we
// return the error and remove the witness
//
// CONTRACT:
// 1. Trace can not be empty len(trace) > 0
// 2. The last block in the trace can not be of a lower height than the target block
// trace[len(trace)-1].Height >= targetBlock.Height
// 3. The target block can not be of a lower height than the first block in the trace,
// targetBlock.Height >= trace[0].Height
func (c *Client) examineConflictingHeaderAgainstTrace(
ctx context.Context,
trace []*types.LightBlock,
targetBlock *types.LightBlock,
source provider.Provider, now time.Time,
) ([]*types.LightBlock, *types.LightBlock, error) {
var (
previouslyVerifiedBlock, sourceBlock *types.LightBlock
sourceTrace []*types.LightBlock
err error
)
if targetBlock.Height < trace[0].Height {
return nil, nil, fmt.Errorf("target block has a height lower than the trusted height (%d < %d)",
targetBlock.Height, trace[0].Height)
}
for idx, traceBlock := range trace {
// this case only happens in a forward lunatic attack. We treat the block with the
// height directly after the targetBlock as the divergent block
if traceBlock.Height > targetBlock.Height {
// sanity check that the time of the traceBlock is indeed less than that of the targetBlock. If the trace
// was correctly verified we should expect monotonically increasing time. This means that if the block at
// the end of the trace has a lesser time than the target block then all blocks in the trace should have a
// lesser time
if traceBlock.Time.After(targetBlock.Time) {
return nil, nil,
errors.New("sanity check failed: expected traceblock to have a lesser time than the target block")
}
// before sending back the divergent block and trace we need to ensure we have verified
// the final gap between the previouslyVerifiedBlock and the targetBlock
if previouslyVerifiedBlock.Height != targetBlock.Height {
sourceTrace, err = c.verifySkipping(ctx, source, previouslyVerifiedBlock, targetBlock, now)
if err != nil {
return nil, nil, fmt.Errorf("verifySkipping of conflicting header failed: %w", err)
}
}
return sourceTrace, traceBlock, nil
}
// get the corresponding block from the source to verify and match up against the traceBlock
if traceBlock.Height == targetBlock.Height {
sourceBlock = targetBlock
} else {
sourceBlock, err = source.LightBlock(ctx, traceBlock.Height)
if err != nil {
return nil, nil, fmt.Errorf("failed to examine trace: %w", err)
}
}
// The first block in the trace MUST be the same to the light block that the source produces
// else we cannot continue with verification.
if idx == 0 {
if shash, thash := sourceBlock.Hash(), traceBlock.Hash(); !bytes.Equal(shash, thash) {
return nil, nil, fmt.Errorf("trusted block is different to the source's first block (%X = %X)",
thash, shash)
}
previouslyVerifiedBlock = sourceBlock
continue
}
// we check that the source provider can verify a block at the same height of the
// intermediate height
sourceTrace, err = c.verifySkipping(ctx, source, previouslyVerifiedBlock, sourceBlock, now)
if err != nil {
return nil, nil, fmt.Errorf("verifySkipping of conflicting header failed: %w", err)
}
// check if the headers verified by the source has diverged from the trace
if shash, thash := sourceBlock.Hash(), traceBlock.Hash(); !bytes.Equal(shash, thash) {
// Bifurcation point found!
return sourceTrace, traceBlock, nil
}
// headers are still the same. update the previouslyVerifiedBlock
previouslyVerifiedBlock = sourceBlock
}
// We have reached the end of the trace. This should never happen. This can only happen if one of the stated
// prerequisites to this function were not met. Namely that either trace[len(trace)-1].Height < targetBlock.Height
// or that trace[i].Hash() != targetBlock.Hash()
return nil, nil, errNoDivergence
}
// getTargetBlockOrLatest gets the latest block; if its height is greater than the target
// height it queries the target height, else it returns the latest. Returns true if it
// successfully managed to acquire the block at the target height.
func (c *Client) getTargetBlockOrLatest(
ctx context.Context,
height int64,
witness provider.Provider,
) (bool, *types.LightBlock, error) {
lightBlock, err := witness.LightBlock(ctx, 0)
if err != nil {
return false, nil, err
}
if lightBlock.Height == height {
// the witness has caught up to the height of the provider's signed header. We
// can resume with checking the hashes.
return true, lightBlock, nil
}
if lightBlock.Height > height {
// the witness is ahead of the target. We request the exact target height once more. However in order
// to avoid a wild goose chase where the witness sends us one header below and one header
// above the height we set a timeout to the context
lightBlock, err := witness.LightBlock(ctx, height)
return true, lightBlock, err
}
return false, lightBlock, nil
}
// newLightClientAttackEvidence determines the type of attack and then forms the evidence filling out
// all the fields such that it is ready to be sent to a full node.
func newLightClientAttackEvidence(conflicted, trusted, common *types.LightBlock) *types.LightClientAttackEvidence {
ev := &types.LightClientAttackEvidence{ConflictingBlock: conflicted}
// if this is an equivocation or amnesia attack, i.e. the validator sets are the same, then we
// return the height of the conflicting block else if it is a lunatic attack and the validator sets
// are not the same then we send the height of the common header.
if ev.ConflictingHeaderIsInvalid(trusted.Header) {
ev.CommonHeight = common.Height
ev.Timestamp = common.Time
ev.TotalVotingPower = common.ValidatorSet.TotalVotingPower()
} else {
ev.CommonHeight = trusted.Height
ev.Timestamp = trusted.Time
ev.TotalVotingPower = trusted.ValidatorSet.TotalVotingPower()
}
ev.ByzantineValidators = ev.GetByzantineValidators(common.ValidatorSet, trusted.SignedHeader)
return ev
} | random_line_split |
|
detector.go | package light
import (
"bytes"
"context"
"errors"
"fmt"
"time"
"github.com/tendermint/tendermint/light/provider"
"github.com/tendermint/tendermint/types"
)
// The detector component of the light client detects and handles attacks on the light client.
// More info here:
// tendermint/docs/architecture/adr-047-handling-evidence-from-light-client.md
// detectDivergence is a second wall of defense for the light client.
//
// It takes the target verified header and compares it with the headers of a set of
// witness providers that the light client is connected to. If a conflicting header
// is returned it verifies and examines the conflicting header against the verified
// trace that was produced from the primary. If successful, it produces two sets of evidence
// and sends them to the opposite provider before halting.
//
// If there are no conflicting headers, the light client deems the verified target header
// trusted and saves it to the trusted store.
func (c *Client) detectDivergence(ctx context.Context, primaryTrace []*types.LightBlock, now time.Time) error {
if primaryTrace == nil || len(primaryTrace) < 2 {
return errors.New("nil or single block primary trace")
}
var (
headerMatched bool
lastVerifiedHeader = primaryTrace[len(primaryTrace)-1].SignedHeader
witnessesToRemove = make([]int, 0)
)
c.logger.Debug("Running detector against trace", "endBlockHeight", lastVerifiedHeader.Height,
"endBlockHash", lastVerifiedHeader.Hash, "length", len(primaryTrace))
c.providerMutex.Lock()
defer c.providerMutex.Unlock()
if len(c.witnesses) == 0 {
return ErrNoWitnesses
}
// launch one goroutine per witness to retrieve the light block of the target height
// and compare it with the header from the primary
errc := make(chan error, len(c.witnesses))
for i, witness := range c.witnesses {
go c.compareNewHeaderWithWitness(ctx, errc, lastVerifiedHeader, witness, i)
}
// handle errors from the header comparisons as they come in
for i := 0; i < cap(errc); i++ {
err := <-errc
switch e := err.(type) {
case nil: // at least one header matched
headerMatched = true
case errConflictingHeaders:
// We have conflicting headers. This could possibly imply an attack on the light client.
// First we need to verify the witness's header using the same skipping verification and then we
// need to find the point that the headers diverge and examine this for any evidence of an attack.
//
// We combine these actions together, verifying the witnesses headers and outputting the trace
// which captures the bifurcation point and if successful provides the information to create valid evidence.
err := c.handleConflictingHeaders(ctx, primaryTrace, e.Block, e.WitnessIndex, now)
if err != nil {
// return information of the attack
return err
}
// if attempt to generate conflicting headers failed then remove witness
witnessesToRemove = append(witnessesToRemove, e.WitnessIndex)
case errBadWitness:
// these are all malevolent errors and should result in removing the
// witness
c.logger.Info("witness returned an error during header comparison, removing...",
"witness", c.witnesses[e.WitnessIndex], "err", err)
witnessesToRemove = append(witnessesToRemove, e.WitnessIndex)
default:
// Benign errors which can be ignored unless there was a context
// canceled
if errors.Is(e, context.Canceled) || errors.Is(e, context.DeadlineExceeded) {
return e
}
c.logger.Info("error in light block request to witness", "err", err)
}
}
// remove witnesses that have misbehaved
if err := c.removeWitnesses(witnessesToRemove); err != nil {
return err
}
// 1. If we had at least one witness that returned the same header then we
// conclude that we can trust the header
if headerMatched {
return nil
}
// 2. Else all witnesses have either not responded, don't have the block or sent invalid blocks.
return ErrFailedHeaderCrossReferencing
}
// compareNewHeaderWithWitness takes the verified header from the primary and compares it with a
// header from a specified witness. The function can return one of three errors:
//
// 1: errConflictingHeaders -> there may have been an attack on this light client
// 2: errBadWitness -> the witness has either not responded, doesn't have the header or has given us an invalid one
//
// Note: In the case of an invalid header we remove the witness
//
// 3: nil -> the hashes of the two headers match
func (c *Client) compareNewHeaderWithWitness(ctx context.Context, errc chan error, h *types.SignedHeader,
witness provider.Provider, witnessIndex int) {
lightBlock, err := witness.LightBlock(ctx, h.Height)
switch err {
// no error means we move on to checking the hash of the two headers
case nil:
break
// the witness hasn't been helpful in comparing headers, we mark the response and continue
// comparing with the rest of the witnesses
case provider.ErrNoResponse, provider.ErrLightBlockNotFound, context.DeadlineExceeded, context.Canceled:
errc <- err
return
// the witness' head of the blockchain is lower than the height of the primary. This could be one of
// two things:
// 1) The witness is lagging behind
// 2) The primary may be performing a lunatic attack with a height and time in the future
case provider.ErrHeightTooHigh:
// The light client now asks for the latest header that the witness has
var isTargetHeight bool
isTargetHeight, lightBlock, err = c.getTargetBlockOrLatest(ctx, h.Height, witness)
if err != nil {
errc <- err
return
}
// if the witness caught up and has returned a block of the target height then we can
// break from this switch case and continue to verify the hashes
if isTargetHeight {
break
}
// witness' last header is below the primary's header. We check the times to see if the blocks
// have conflicting times
if !lightBlock.Time.Before(h.Time) {
errc <- errConflictingHeaders{Block: lightBlock, WitnessIndex: witnessIndex}
return
}
// the witness is behind. We wait for a period WAITING = 2 * DRIFT + LAG.
// This should give the witness ample time if it is a participating member
// of consensus to produce a block that has a time that is after the primary's
// block time. If not the witness is too far behind and the light client removes it
time.Sleep(2*c.maxClockDrift + c.maxBlockLag)
isTargetHeight, lightBlock, err = c.getTargetBlockOrLatest(ctx, h.Height, witness)
if err != nil {
if errors.Is(err, context.Canceled) || errors.Is(err, context.DeadlineExceeded) {
errc <- err
} else {
errc <- errBadWitness{Reason: err, WitnessIndex: witnessIndex}
}
return
}
if isTargetHeight {
break
}
// the witness still doesn't have a block at the height of the primary.
// Check if there is a conflicting time
if !lightBlock.Time.Before(h.Time) {
errc <- errConflictingHeaders{Block: lightBlock, WitnessIndex: witnessIndex}
return
}
// Following this request response procedure, the witness has been unable to produce a block
// that can somehow conflict with the primary's block. We thus conclude that the witness
// is too far behind and thus we return a no response error.
//
// NOTE: If the clock drift / lag has been miscalibrated it is feasible that the light client has
// drifted too far ahead for any witness to be able to provide a comparable block and thus may allow
// for a malicious primary to attack it
errc <- provider.ErrNoResponse
return
default:
// all other errors (i.e. invalid block, closed connection or unreliable provider) we mark the
// witness as bad and remove it
errc <- errBadWitness{Reason: err, WitnessIndex: witnessIndex}
return
}
if !bytes.Equal(h.Hash(), lightBlock.Hash()) |
c.logger.Debug("Matching header received by witness", "height", h.Height, "witness", witnessIndex)
errc <- nil
}
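// The ErrHeightTooHigh branch above repeatedly compares the witness's height
// against the primary's target height. The sketch below summarises that
// three-way outcome; the helper name and return convention are assumptions
// added for illustration only and are not part of the original file.
func heightRelation(witnessHeight, targetHeight int64) int {
	switch {
	case witnessHeight < targetHeight:
		return -1 // witness is lagging behind the primary
	case witnessHeight == targetHeight:
		return 0 // witness holds the target block
	default:
		return 1 // witness is ahead; re-request the exact target height
	}
}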
// sendEvidence sends evidence to a provider on a best effort basis.
func (c *Client) sendEvidence(ctx context.Context, ev *types.LightClientAttackEvidence, receiver provider.Provider) {
err := receiver.ReportEvidence(ctx, ev)
if err != nil {
c.logger.Error("Failed to report evidence to provider", "ev", ev, "provider", receiver)
}
}
// handleConflictingHeaders handles the primary style of attack, which is where a primary and witness have
// two headers of the same height but with different hashes
func (c *Client) handleConflictingHeaders(
ctx context.Context,
primaryTrace []*types.LightBlock,
challengingBlock *types.LightBlock,
witnessIndex int,
now time.Time,
) error {
supportingWitness := c.witnesses[witnessIndex]
witnessTrace, primaryBlock, err := c.examineConflictingHeaderAgainstTrace(
ctx,
primaryTrace,
challendingBlock,
supportingWitness,
now,
)
if err != nil {
c.logger.Info("error validating witness's divergent header", "witness", supportingWitness, "err", err)
return nil
}
// We are suspecting that the primary is faulty, hence we hold the witness as the source of truth
// and generate evidence against the primary that we can send to the witness
commonBlock, trustedBlock := witnessTrace[0], witnessTrace[len(witnessTrace)-1]
evidenceAgainstPrimary := newLightClientAttackEvidence(primaryBlock, trustedBlock, commonBlock)
c.logger.Error("ATTEMPTED ATTACK DETECTED. Sending evidence againt primary by witness", "ev", evidenceAgainstPrimary,
"primary", c.primary, "witness", supportingWitness)
c.sendEvidence(ctx, evidenceAgainstPrimary, supportingWitness)
if primaryBlock.Commit.Round != witnessTrace[len(witnessTrace)-1].Commit.Round {
c.logger.Info("The light client has detected, and prevented, an attempted amnesia attack." +
" We think this attack is pretty unlikely, so if you see it, that's interesting to us." +
" Can you let us know by opening an issue through https://github.com/tendermint/tendermint/issues/new?")
}
// This may not be valid because the witness itself is at fault. So now we reverse it, examining the
// trace provided by the witness and holding the primary as the source of truth. Note: primary may not
// respond but this is okay as we will halt anyway.
primaryTrace, witnessBlock, err := c.examineConflictingHeaderAgainstTrace(
ctx,
witnessTrace,
primaryBlock,
c.primary,
now,
)
if err != nil {
c.logger.Info("Error validating primary's divergent header", "primary", c.primary, "err", err)
return ErrLightClientAttack
}
// We now use the primary trace to create evidence against the witness and send it to the primary
commonBlock, trustedBlock = primaryTrace[0], primaryTrace[len(primaryTrace)-1]
evidenceAgainstWitness := newLightClientAttackEvidence(witnessBlock, trustedBlock, commonBlock)
c.logger.Error("Sending evidence against witness by primary", "ev", evidenceAgainstWitness,
"primary", c.primary, "witness", supportingWitness)
c.sendEvidence(ctx, evidenceAgainstWitness, c.primary)
// We return the error and don't process anymore witnesses
return ErrLightClientAttack
}
// examineConflictingHeaderAgainstTrace takes a trace from one provider and a divergent header that
// it has received from another and performs verifySkipping at the heights of each of the intermediate
// headers in the trace until it reaches the divergentHeader. One of two things can happen.
//
// 1. The light client verifies a header that is different to the intermediate header in the trace. This
// is the bifurcation point and the light client can create evidence from it
// 2. The source stops responding, doesn't have the block or sends an invalid header in which case we
// return the error and remove the witness
//
// CONTRACT:
// 1. Trace can not be empty: len(trace) > 0
// 2. The last block in the trace can not be of a lower height than the target block
// trace[len(trace)-1].Height >= targetBlock.Height
// 3. The
func (c *Client) examineConflictingHeaderAgainstTrace(
ctx context.Context,
trace []*types.LightBlock,
targetBlock *types.LightBlock,
source provider.Provider, now time.Time,
) ([]*types.LightBlock, *types.LightBlock, error) {
var (
previouslyVerifiedBlock, sourceBlock *types.LightBlock
sourceTrace []*types.LightBlock
err error
)
if targetBlock.Height < trace[0].Height {
return nil, nil, fmt.Errorf("target block has a height lower than the trusted height (%d < %d)",
targetBlock.Height, trace[0].Height)
}
for idx, traceBlock := range trace {
// this case only happens in a forward lunatic attack. We treat the block with the
// height directly after the targetBlock as the divergent block
if traceBlock.Height > targetBlock.Height {
// sanity check that the time of the traceBlock is indeed less than that of the targetBlock. If the trace
// was correctly verified we should expect monotonically increasing time. This means that if the block at
// the end of the trace has a lesser time than the target block then all blocks in the trace should have a
// lesser time
if traceBlock.Time.After(targetBlock.Time) {
return nil, nil,
errors.New("sanity check failed: expected traceblock to have a lesser time than the target block")
}
// before sending back the divergent block and trace we need to ensure we have verified
// the final gap between the previouslyVerifiedBlock and the targetBlock
if previouslyVerifiedBlock.Height != targetBlock.Height {
sourceTrace, err = c.verifySkipping(ctx, source, previouslyVerifiedBlock, targetBlock, now)
if err != nil {
return nil, nil, fmt.Errorf("verifySkipping of conflicting header failed: %w", err)
}
}
return sourceTrace, traceBlock, nil
}
// get the corresponding block from the source to verify and match up against the traceBlock
if traceBlock.Height == targetBlock.Height {
sourceBlock = targetBlock
} else {
sourceBlock, err = source.LightBlock(ctx, traceBlock.Height)
if err != nil {
return nil, nil, fmt.Errorf("failed to examine trace: %w", err)
}
}
// The first block in the trace MUST be the same as the light block that the source produces
// else we cannot continue with verification.
if idx == 0 {
if shash, thash := sourceBlock.Hash(), traceBlock.Hash(); !bytes.Equal(shash, thash) {
return nil, nil, fmt.Errorf("trusted block is different to the source's first block (%X = %X)",
thash, shash)
}
previouslyVerifiedBlock = sourceBlock
continue
}
// we check that the source provider can verify a block at the same height of the
// intermediate height
sourceTrace, err = c.verifySkipping(ctx, source, previouslyVerifiedBlock, sourceBlock, now)
if err != nil {
return nil, nil, fmt.Errorf("verifySkipping of conflicting header failed: %w", err)
}
// check if the headers verified by the source have diverged from the trace
if shash, thash := sourceBlock.Hash(), traceBlock.Hash(); !bytes.Equal(shash, thash) {
// Bifurcation point found!
return sourceTrace, traceBlock, nil
}
// headers are still the same. update the previouslyVerifiedBlock
previouslyVerifiedBlock = sourceBlock
}
// We have reached the end of the trace. This should never happen; it can only happen if one of the stated
// prerequisites to this function was not met, namely that either trace[len(trace)-1].Height < targetBlock.Height
// or that trace[i].Hash() != targetBlock.Hash()
return nil, nil, errNoDivergence
}
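// Illustrative sketch (not part of the original file): stripped of the
// verification calls, the loop above is at its core a search for the first
// index at which two hash sequences diverge. The hypothetical helper below
// shows just that comparison, using the same bytes.Equal primitive.
func firstDivergenceIndexSketch(traceHashes, sourceHashes [][]byte) int {
	for i := range traceHashes {
		if i >= len(sourceHashes) || !bytes.Equal(traceHashes[i], sourceHashes[i]) {
			return i // bifurcation point
		}
	}
	return -1 // no divergence found
}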
// getTargetBlockOrLatest fetches the witness's latest light block; if that height is greater than the
// target height it queries the target height instead, otherwise it returns the latest block. The boolean
// return value is true if it successfully acquired a block at the target height.
func (c *Client) getTargetBlockOrLatest(
ctx context.Context,
height int64,
witness provider.Provider,
) (bool, *types.LightBlock, error) {
lightBlock, err := witness.LightBlock(ctx, 0)
if err != nil {
return false, nil, err
}
if lightBlock.Height == height {
// the witness has caught up to the height of the provider's signed header. We
// can resume with checking the hashes.
return true, lightBlock, nil
}
if lightBlock.Height > height {
// the witness is ahead of the target height, so we request the block at the target height
// directly. To avoid a wild goose chase where the witness sends us one header below and one
// header above the target height, the caller is expected to set a timeout on the context
lightBlock, err := witness.LightBlock(ctx, height)
return true, lightBlock, err
}
return false, lightBlock, nil
}
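// Illustrative sketch (not part of the original file): the comment above notes
// that the caller is expected to bound this lookup with a context timeout. One
// hedged way to do that with the standard library; the wrapper method is
// hypothetical.
func (c *Client) getTargetBlockOrLatestWithTimeoutSketch(ctx context.Context,
	height int64, witness provider.Provider, timeout time.Duration,
) (bool, *types.LightBlock, error) {
	subCtx, cancel := context.WithTimeout(ctx, timeout)
	defer cancel() // release the timer even on the happy path
	return c.getTargetBlockOrLatest(subCtx, height, witness)
}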
// newLightClientAttackEvidence determines the type of attack and then forms the evidence filling out
// all the fields such that it is ready to be sent to a full node.
func newLightClientAttackEvidence(conflicted, trusted, common *types.LightBlock) *types.LightClientAttackEvidence {
ev := &types.LightClientAttackEvidence{ConflictingBlock: conflicted}
// if this is an equivocation or amnesia attack, i.e. the validator sets are the same, then we
// return the height of the conflicting block else if it is a lunatic attack and the validator sets
// are not the same then we send the height of the common header.
if ev.ConflictingHeaderIsInvalid(trusted.Header) {
ev.CommonHeight = common.Height
ev.Timestamp = common.Time
ev.TotalVotingPower = common.ValidatorSet.TotalVotingPower()
} else {
ev.CommonHeight = trusted.Height
ev.Timestamp = trusted.Time
ev.TotalVotingPower = trusted.ValidatorSet.TotalVotingPower()
}
ev.ByzantineValidators = ev.GetByzantineValidators(common.ValidatorSet, trusted.SignedHeader)
return ev
}
| {
errc <- errConflictingHeaders{Block: lightBlock, WitnessIndex: witnessIndex}
} | conditional_block |
detector.go | package light
import (
"bytes"
"context"
"errors"
"fmt"
"time"
"github.com/tendermint/tendermint/light/provider"
"github.com/tendermint/tendermint/types"
)
// The detector component of the light client detects and handles attacks on the light client.
// More info here:
// tendermint/docs/architecture/adr-047-handling-evidence-from-light-client.md
// detectDivergence is a second wall of defense for the light client.
//
// It takes the target verified header and compares it with the headers of a set of
// witness providers that the light client is connected to. If a conflicting header
// is returned it verifies and examines the conflicting header against the verified
// trace that was produced from the primary. If successful, it produces two sets of evidence
// and sends them to the opposite provider before halting.
//
// If there are no conflicting headers, the light client deems the verified target header
// trusted and saves it to the trusted store.
func (c *Client) detectDivergence(ctx context.Context, primaryTrace []*types.LightBlock, now time.Time) error {
if primaryTrace == nil || len(primaryTrace) < 2 {
return errors.New("nil or single block primary trace")
}
var (
headerMatched bool
lastVerifiedHeader = primaryTrace[len(primaryTrace)-1].SignedHeader
witnessesToRemove = make([]int, 0)
)
c.logger.Debug("Running detector against trace", "endBlockHeight", lastVerifiedHeader.Height,
"endBlockHash", lastVerifiedHeader.Hash, "length", len(primaryTrace))
c.providerMutex.Lock()
defer c.providerMutex.Unlock()
if len(c.witnesses) == 0 {
return ErrNoWitnesses
}
// launch one goroutine per witness to retrieve the light block of the target height
// and compare it with the header from the primary
errc := make(chan error, len(c.witnesses))
for i, witness := range c.witnesses {
go c.compareNewHeaderWithWitness(ctx, errc, lastVerifiedHeader, witness, i)
}
// handle errors from the header comparisons as they come in
for i := 0; i < cap(errc); i++ {
err := <-errc
switch e := err.(type) {
case nil: // at least one header matched
headerMatched = true
case errConflictingHeaders:
// We have conflicting headers. This could possibly imply an attack on the light client.
// First we need to verify the witness's header using the same skipping verification and then we
// need to find the point that the headers diverge and examine this for any evidence of an attack.
//
// We combine these actions, verifying the witness's headers and outputting the trace
// which captures the bifurcation point and if successful provides the information to create valid evidence.
err := c.handleConflictingHeaders(ctx, primaryTrace, e.Block, e.WitnessIndex, now)
if err != nil {
// return information of the attack
return err
}
// if attempt to generate conflicting headers failed then remove witness
witnessesToRemove = append(witnessesToRemove, e.WitnessIndex)
case errBadWitness:
// these are all malevolent errors and should result in removing the
// witness
c.logger.Info("witness returned an error during header comparison, removing...",
"witness", c.witnesses[e.WitnessIndex], "err", err)
witnessesToRemove = append(witnessesToRemove, e.WitnessIndex)
default:
// Benign errors which can be ignored unless the context was
// canceled or its deadline was exceeded
if errors.Is(e, context.Canceled) || errors.Is(e, context.DeadlineExceeded) {
return e
}
c.logger.Info("error in light block request to witness", "err", err)
}
}
// remove witnesses that have misbehaved
if err := c.removeWitnesses(witnessesToRemove); err != nil {
return err
}
// 1. If we had at least one witness that returned the same header then we
// conclude that we can trust the header
if headerMatched {
return nil
}
// 2. Else all witnesses have either not responded, don't have the block or sent invalid blocks.
return ErrFailedHeaderCrossReferencing
}
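// Illustrative sketch (not part of the original file): detectDivergence above
// follows a fan-out/fan-in shape -- one goroutine per witness, with results
// collected over a channel buffered to the worker count so no sender blocks.
// The hypothetical helper below shows the bare pattern.
func fanOutFanInSketch(tasks []func() error) []error {
	errc := make(chan error, len(tasks))
	for _, task := range tasks {
		task := task // capture the loop variable for the goroutine
		go func() { errc <- task() }()
	}
	errs := make([]error, 0, len(tasks))
	for i := 0; i < cap(errc); i++ {
		errs = append(errs, <-errc)
	}
	return errs
}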
// compareNewHeaderWithWitness takes the verified header from the primary and compares it with a
// header from a specified witness. The function can return one of three results:
//
// 1: errConflictingHeaders -> there may have been an attack on this light client
// 2: errBadWitness -> the witness has either not responded, doesn't have the header or has given us an invalid one
//
// Note: In the case of an invalid header we remove the witness
//
// 3: nil -> the hashes of the two headers match
func (c *Client) compareNewHeaderWithWitness(ctx context.Context, errc chan error, h *types.SignedHeader,
witness provider.Provider, witnessIndex int) |
// sendEvidence sends evidence to a provider on a best effort basis.
func (c *Client) sendEvidence(ctx context.Context, ev *types.LightClientAttackEvidence, receiver provider.Provider) {
err := receiver.ReportEvidence(ctx, ev)
if err != nil {
c.logger.Error("Failed to report evidence to provider", "ev", ev, "provider", receiver)
}
}
// handleConflictingHeaders handles the primary style of attack, which is where a primary and witness have
// two headers of the same height but with different hashes
func (c *Client) handleConflictingHeaders(
ctx context.Context,
primaryTrace []*types.LightBlock,
challendingBlock *types.LightBlock,
witnessIndex int,
now time.Time,
) error {
supportingWitness := c.witnesses[witnessIndex]
witnessTrace, primaryBlock, err := c.examineConflictingHeaderAgainstTrace(
ctx,
primaryTrace,
challendingBlock,
supportingWitness,
now,
)
if err != nil {
c.logger.Info("error validating witness's divergent header", "witness", supportingWitness, "err", err)
return nil
}
// We are suspecting that the primary is faulty, hence we hold the witness as the source of truth
// and generate evidence against the primary that we can send to the witness
commonBlock, trustedBlock := witnessTrace[0], witnessTrace[len(witnessTrace)-1]
evidenceAgainstPrimary := newLightClientAttackEvidence(primaryBlock, trustedBlock, commonBlock)
c.logger.Error("ATTEMPTED ATTACK DETECTED. Sending evidence againt primary by witness", "ev", evidenceAgainstPrimary,
"primary", c.primary, "witness", supportingWitness)
c.sendEvidence(ctx, evidenceAgainstPrimary, supportingWitness)
if primaryBlock.Commit.Round != witnessTrace[len(witnessTrace)-1].Commit.Round {
c.logger.Info("The light client has detected, and prevented, an attempted amnesia attack." +
" We think this attack is pretty unlikely, so if you see it, that's interesting to us." +
" Can you let us know by opening an issue through https://github.com/tendermint/tendermint/issues/new?")
}
// This may not be valid because the witness itself is at fault. So now we reverse it, examining the
// trace provided by the witness and holding the primary as the source of truth. Note: primary may not
// respond but this is okay as we will halt anyway.
primaryTrace, witnessBlock, err := c.examineConflictingHeaderAgainstTrace(
ctx,
witnessTrace,
primaryBlock,
c.primary,
now,
)
if err != nil {
c.logger.Info("Error validating primary's divergent header", "primary", c.primary, "err", err)
return ErrLightClientAttack
}
// We now use the primary trace to create evidence against the witness and send it to the primary
commonBlock, trustedBlock = primaryTrace[0], primaryTrace[len(primaryTrace)-1]
evidenceAgainstWitness := newLightClientAttackEvidence(witnessBlock, trustedBlock, commonBlock)
c.logger.Error("Sending evidence against witness by primary", "ev", evidenceAgainstWitness,
"primary", c.primary, "witness", supportingWitness)
c.sendEvidence(ctx, evidenceAgainstWitness, c.primary)
// We return the error and don't process anymore witnesses
return ErrLightClientAttack
}
// examineConflictingHeaderAgainstTrace takes a trace from one provider and a divergent header that
// it has received from another and performs verifySkipping at the heights of each of the intermediate
// headers in the trace until it reaches the divergentHeader. One of two things can happen.
//
// 1. The light client verifies a header that is different to the intermediate header in the trace. This
// is the bifurcation point and the light client can create evidence from it
// 2. The source stops responding, doesn't have the block or sends an invalid header in which case we
// return the error and remove the witness
//
// CONTRACT:
// 1. Trace can not be empty: len(trace) > 0
// 2. The last block in the trace can not be of a lower height than the target block
// trace[len(trace)-1].Height >= targetBlock.Height
// 3. The
func (c *Client) examineConflictingHeaderAgainstTrace(
ctx context.Context,
trace []*types.LightBlock,
targetBlock *types.LightBlock,
source provider.Provider, now time.Time,
) ([]*types.LightBlock, *types.LightBlock, error) {
var (
previouslyVerifiedBlock, sourceBlock *types.LightBlock
sourceTrace []*types.LightBlock
err error
)
if targetBlock.Height < trace[0].Height {
return nil, nil, fmt.Errorf("target block has a height lower than the trusted height (%d < %d)",
targetBlock.Height, trace[0].Height)
}
for idx, traceBlock := range trace {
// this case only happens in a forward lunatic attack. We treat the block with the
// height directly after the targetBlock as the divergent block
if traceBlock.Height > targetBlock.Height {
// sanity check that the time of the traceBlock is indeed less than that of the targetBlock. If the trace
// was correctly verified we should expect monotonically increasing time. This means that if the block at
// the end of the trace has a lesser time than the target block then all blocks in the trace should have a
// lesser time
if traceBlock.Time.After(targetBlock.Time) {
return nil, nil,
errors.New("sanity check failed: expected traceblock to have a lesser time than the target block")
}
// before sending back the divergent block and trace we need to ensure we have verified
// the final gap between the previouslyVerifiedBlock and the targetBlock
if previouslyVerifiedBlock.Height != targetBlock.Height {
sourceTrace, err = c.verifySkipping(ctx, source, previouslyVerifiedBlock, targetBlock, now)
if err != nil {
return nil, nil, fmt.Errorf("verifySkipping of conflicting header failed: %w", err)
}
}
return sourceTrace, traceBlock, nil
}
// get the corresponding block from the source to verify and match up against the traceBlock
if traceBlock.Height == targetBlock.Height {
sourceBlock = targetBlock
} else {
sourceBlock, err = source.LightBlock(ctx, traceBlock.Height)
if err != nil {
return nil, nil, fmt.Errorf("failed to examine trace: %w", err)
}
}
// The first block in the trace MUST be the same as the light block that the source produces
// else we cannot continue with verification.
if idx == 0 {
if shash, thash := sourceBlock.Hash(), traceBlock.Hash(); !bytes.Equal(shash, thash) {
return nil, nil, fmt.Errorf("trusted block is different to the source's first block (%X = %X)",
thash, shash)
}
previouslyVerifiedBlock = sourceBlock
continue
}
// we check that the source provider can verify a block at the same height of the
// intermediate height
sourceTrace, err = c.verifySkipping(ctx, source, previouslyVerifiedBlock, sourceBlock, now)
if err != nil {
return nil, nil, fmt.Errorf("verifySkipping of conflicting header failed: %w", err)
}
// check if the headers verified by the source have diverged from the trace
if shash, thash := sourceBlock.Hash(), traceBlock.Hash(); !bytes.Equal(shash, thash) {
// Bifurcation point found!
return sourceTrace, traceBlock, nil
}
// headers are still the same. update the previouslyVerifiedBlock
previouslyVerifiedBlock = sourceBlock
}
// We have reached the end of the trace. This should never happen; it can only happen if one of the stated
// prerequisites to this function was not met, namely that either trace[len(trace)-1].Height < targetBlock.Height
// or that trace[i].Hash() != targetBlock.Hash()
return nil, nil, errNoDivergence
}
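// Illustrative sketch (not part of the original file): the sanity check above
// leans on the fact that a correctly verified trace has monotonically
// increasing block times. The hypothetical helper below checks that property
// in isolation.
func timesStrictlyIncreasingSketch(times []time.Time) bool {
	for i := 1; i < len(times); i++ {
		if !times[i].After(times[i-1]) {
			return false
		}
	}
	return true
}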
// getTargetBlockOrLatest fetches the witness's latest light block; if that height is greater than the
// target height it queries the target height instead, otherwise it returns the latest block. The boolean
// return value is true if it successfully acquired a block at the target height.
func (c *Client) getTargetBlockOrLatest(
ctx context.Context,
height int64,
witness provider.Provider,
) (bool, *types.LightBlock, error) {
lightBlock, err := witness.LightBlock(ctx, 0)
if err != nil {
return false, nil, err
}
if lightBlock.Height == height {
// the witness has caught up to the height of the provider's signed header. We
// can resume with checking the hashes.
return true, lightBlock, nil
}
if lightBlock.Height > height {
// the witness is ahead of the target height, so we request the block at the target height
// directly. To avoid a wild goose chase where the witness sends us one header below and one
// header above the target height, the caller is expected to set a timeout on the context
lightBlock, err := witness.LightBlock(ctx, height)
return true, lightBlock, err
}
return false, lightBlock, nil
}
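// Illustrative sketch (not part of the original file): the three outcomes of
// the height comparison above, restated in isolation. The helper and its
// wording are hypothetical.
func heightRelationSketch(latest, target int64) string {
	switch {
	case latest == target:
		return "witness is at the target height; compare hashes"
	case latest > target:
		return "witness is ahead; fetch the target height directly"
	default:
		return "witness is behind; the caller may wait and retry"
	}
}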
// newLightClientAttackEvidence determines the type of attack and then forms the evidence filling out
// all the fields such that it is ready to be sent to a full node.
func newLightClientAttackEvidence(conflicted, trusted, common *types.LightBlock) *types.LightClientAttackEvidence {
ev := &types.LightClientAttackEvidence{ConflictingBlock: conflicted}
// if this is an equivocation or amnesia attack, i.e. the validator sets are the same, then we
// return the height of the conflicting block else if it is a lunatic attack and the validator sets
// are not the same then we send the height of the common header.
if ev.ConflictingHeaderIsInvalid(trusted.Header) {
ev.CommonHeight = common.Height
ev.Timestamp = common.Time
ev.TotalVotingPower = common.ValidatorSet.TotalVotingPower()
} else {
ev.CommonHeight = trusted.Height
ev.Timestamp = trusted.Time
ev.TotalVotingPower = trusted.ValidatorSet.TotalVotingPower()
}
ev.ByzantineValidators = ev.GetByzantineValidators(common.ValidatorSet, trusted.SignedHeader)
return ev
}
| {
lightBlock, err := witness.LightBlock(ctx, h.Height)
switch err {
// no error means we move on to checking the hash of the two headers
case nil:
break
// the witness hasn't been helpful in comparing headers, we mark the response and continue
// comparing with the rest of the witnesses
case provider.ErrNoResponse, provider.ErrLightBlockNotFound, context.DeadlineExceeded, context.Canceled:
errc <- err
return
// the witness' head of the blockchain is lower than the height of the primary. This could be one of
// two things:
// 1) The witness is lagging behind
// 2) The primary may be performing a lunatic attack with a height and time in the future
case provider.ErrHeightTooHigh:
// The light client now asks for the latest header that the witness has
var isTargetHeight bool
isTargetHeight, lightBlock, err = c.getTargetBlockOrLatest(ctx, h.Height, witness)
if err != nil {
errc <- err
return
}
// if the witness caught up and has returned a block of the target height then we can
// break from this switch case and continue to verify the hashes
if isTargetHeight {
break
}
// witness' last header is below the primary's header. We check the times to see if the blocks
// have conflicting times
if !lightBlock.Time.Before(h.Time) {
errc <- errConflictingHeaders{Block: lightBlock, WitnessIndex: witnessIndex}
return
}
// the witness is behind. We wait for a period WAITING = 2 * DRIFT + LAG.
// This should give the witness ample time if it is a participating member
// of consensus to produce a block that has a time that is after the primary's
// block time. If not the witness is too far behind and the light client removes it
time.Sleep(2*c.maxClockDrift + c.maxBlockLag)
isTargetHeight, lightBlock, err = c.getTargetBlockOrLatest(ctx, h.Height, witness)
if err != nil {
if errors.Is(err, context.Canceled) || errors.Is(err, context.DeadlineExceeded) {
errc <- err
} else {
errc <- errBadWitness{Reason: err, WitnessIndex: witnessIndex}
}
return
}
if isTargetHeight {
break
}
// the witness still doesn't have a block at the height of the primary.
// Check if there is a conflicting time
if !lightBlock.Time.Before(h.Time) {
errc <- errConflictingHeaders{Block: lightBlock, WitnessIndex: witnessIndex}
return
}
// Following this request-response procedure, the witness has been unable to produce a block
// that could conflict with the primary's block. We therefore conclude that the witness
// is too far behind and return a no-response error.
//
// NOTE: If the clock drift / lag has been miscalibrated it is feasible that the light client has
// drifted too far ahead for any witness to be able to provide a comparable block and thus may allow
// for a malicious primary to attack it
errc <- provider.ErrNoResponse
return
default:
// all other errors (i.e. invalid block, closed connection or unreliable provider) we mark the
// witness as bad and remove it
errc <- errBadWitness{Reason: err, WitnessIndex: witnessIndex}
return
}
if !bytes.Equal(h.Hash(), lightBlock.Hash()) {
errc <- errConflictingHeaders{Block: lightBlock, WitnessIndex: witnessIndex}
}
c.logger.Debug("Matching header received by witness", "height", h.Height, "witness", witnessIndex)
errc <- nil
} | identifier_body |
BeanModel.ts | /* Generated from Java with JSweet 2.2.0-SNAPSHOT - http://www.jsweet.org */
import {CollectionAndSequence} from '../../core/CollectionAndSequence';
import {_DelayedFTLTypeDescription} from '../../core/_DelayedFTLTypeDescription';
import {_DelayedJQuote} from '../../core/_DelayedJQuote';
import {_TemplateModelException} from '../../core/_TemplateModelException';
import {ModelFactory} from '../util/ModelFactory';
import {WrapperTemplateModel} from '../util/WrapperTemplateModel';
import {Logger} from '../../log/Logger';
import {AdapterTemplateModel} from '../../template/AdapterTemplateModel';
import {ObjectWrapper} from '../../template/ObjectWrapper';
import {SimpleScalar} from '../../template/SimpleScalar';
import {SimpleSequence} from '../../template/SimpleSequence';
import {TemplateCollectionModel} from '../../template/TemplateCollectionModel';
import {TemplateHashModelEx} from '../../template/TemplateHashModelEx';
import {TemplateModel} from '../../template/TemplateModel';
import {TemplateModelException} from '../../template/TemplateModelException';
import {TemplateModelIterator} from '../../template/TemplateModelIterator';
import {TemplateModelWithAPISupport} from '../../template/TemplateModelWithAPISupport';
import {TemplateScalarModel} from '../../template/TemplateScalarModel';
import {StringUtil} from '../../template/utility/StringUtil';
import {BeansWrapper} from './BeansWrapper';
import {ClassIntrospector} from './ClassIntrospector';
import {InvalidPropertyException} from './InvalidPropertyException';
import {Set} from '../../../java/util/Set';
import {Map} from "../../../java/util/Map";
/**
* Creates a new model that wraps the specified object. Note that there are
* specialized subclasses of this class for wrapping arrays, collections,
* enumeration, iterators, and maps. Note also that the superclass can be
* used to wrap String objects if only scalar functionality is needed. You
* can also choose to delegate the choice over which model class is used for
* wrapping to {link BeansWrapper#wrap(Object)}.
*
* @param {Object} object the object to wrap into a model.
* @param {BeansWrapper} wrapper the {link BeansWrapper} associated with this model.
* Every model has to have an associated {link BeansWrapper} instance. The
* model gains many attributes from its wrapper, including the caching
* behavior, method exposure level, method-over-item shadowing policy etc.
* @class
*/
export class BeanModel implements TemplateHashModelEx, AdapterTemplateModel, WrapperTemplateModel, TemplateModelWithAPISupport {
static LOG : Logger; public static LOG_$LI$() : Logger { if(BeanModel.LOG == null) BeanModel.LOG = Logger.getLogger("freemarker.beans"); return BeanModel.LOG; };
object : any;
wrapper : BeansWrapper;
static UNKNOWN : TemplateModel; public static UNKNOWN_$LI$() : TemplateModel { if(BeanModel.UNKNOWN == null) BeanModel.UNKNOWN = new SimpleScalar("UNKNOWN"); return BeanModel.UNKNOWN; };
static FACTORY : ModelFactory; public static FACTORY_$LI$() : ModelFactory { if(BeanModel.FACTORY == null) BeanModel.FACTORY = new BeanModel.BeanModel$0(); return BeanModel.FACTORY; };
/*private*/ memberCache : Map<any, any>;
public constructor(object : any, wrapper : any, introspectNow? : any) {
if(introspectNow === undefined) {
introspectNow = true;
}
this.object = object;
this.wrapper = wrapper;
// if(introspectNow && object != null) {
// this.wrapper.getClassIntrospector().get(object.constructor);
// }
}
public get$java_lang_String(key : string) : TemplateModel {
if(this.object.hasOwnProperty(key)) {
const value = this.object[key];
if(typeof value === 'function') {
return new (require('./SimpleMethodModel').SimpleMethodModel)(this.object, value, null, this.wrapper)
}
return this.wrapper.wrap(value);
}
// let clazz : any = (<any>this.object.constructor);
// let classInfo : Map<any, any> = this.wrapper.getClassIntrospector().get(clazz);
// let retval : TemplateModel = null;
// try {
// if(this.wrapper.isMethodsShadowItems()) {
// let fd : any = /* get */classInfo.get(key);
// if(fd != null) {
// retval = this.invokeThroughDescriptor(fd, classInfo);
// } else {
// retval = this.invokeGenericGet(classInfo, clazz, key);
// }
// } else {
// let model : TemplateModel = this.invokeGenericGet(classInfo, clazz, key);
// let nullModel : TemplateModel = this.wrapper.wrap$java_lang_Object(null);
// if(model !== nullModel && model !== BeanModel.UNKNOWN_$LI$()) {
// return model;
// }
// let fd : any = /* get */classInfo.get(key);
// if(fd != null) {
// retval = this.invokeThroughDescriptor(fd, classInfo);
// if(retval === BeanModel.UNKNOWN_$LI$() && model === nullModel) {
// retval = nullModel;
// }
// }
// }
// if(retval === BeanModel.UNKNOWN_$LI$()) {
// if(this.wrapper.isStrict()) {
// throw new InvalidPropertyException("No such bean property: " + key);
// } else if(BeanModel.LOG_$LI$().isDebugEnabled()) {
// this.logNoSuchKey(key, classInfo);
// }
// retval = this.wrapper.wrap$java_lang_Object(null);
// }
// return retval;
// } catch(__e) {
// if(__e != null && __e instanceof <any>TemplateModelException) {
// let e : TemplateModelException = <TemplateModelException>__e;
// throw e;
//
// }
// if(__e != null && (__e["__classes"] && __e["__classes"].indexOf("java.lang.Exception") >= 0) || __e != null && __e instanceof <any>Error) {
// let e : Error = <Error>__e;
// throw new _TemplateModelException(e, "An error has occurred when reading existing sub-variable ", new _DelayedJQuote(key), "; see cause exception! The type of the containing value was: ", new _DelayedFTLTypeDescription(this));
//
// }
// }
}
/**
* Uses Beans introspection to locate a property or method with name
* matching the key name. If a method or property is found, it's wrapped
* into {link freemarker.template.TemplateMethodModelEx} (for a method or
* indexed property), or evaluated on-the-fly and the return value wrapped
* into appropriate model (for a non-indexed property) Models for various
* properties and methods are cached on a per-class basis, so the costly
* introspection is performed only once per property or method of a class.
* (Side-note: this also implies that any class whose method has been called
* will be strongly referred to by the framework and will not become
* unloadable until this class has been unloaded first. Normally this is not
* an issue, but can be in a rare scenario where you create many classes on-
* the-fly. Also, as the cache grows with new classes and methods introduced
* to the framework, it may appear as if it were leaking memory. The
* framework does, however, detect class reloads (if you happen to be in an
* environment that does this kind of thing--servlet containers do it when
* they reload a web application) and flushes the cache.) If no method or
* property matching the key is found, the framework will try to invoke
* methods with signature
* <tt>non-void-return-type get(java.lang.String)</tt>,
* then <tt>non-void-return-type get(java.lang.Object)</tt>, or
* alternatively (if the wrapped object is a resource bundle)
* <tt>Object getObject(java.lang.String)</tt>.
*
* @throws TemplateModelException if there was no property nor method nor
* a generic <tt>get</tt> method to invoke.
* @param {String} key
* @return {*}
*/
public get(key? : any) : any {
if(((typeof key === 'string') || key === null)) {
return <any>this.get$java_lang_String(key);
} else throw new Error('invalid overload');
}
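// Illustrative sketch (not part of the original class): the javadoc above
// describes caching introspection results so the costly work runs only once
// per key. A minimal, hedged version of that memoization idea; the method
// name is hypothetical and nothing else in the class calls it.
cachedLookupSketch<K, V>(cache : Map<K, V>, key : K, compute : (k : K) => V) : V {
    let value = cache.get(key);
    if(value === undefined) {
        value = compute(key); // the costly introspection happens only once per key
        cache.set(key, value);
    }
    return value;
}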
/*private*/ logNoSuchKey(key : string, keyMap : Map<any, any>) {
BeanModel.LOG_$LI$().debug$java_lang_String("Key " + StringUtil.jQuoteNoXSS$java_lang_Object(key) + " was not found on instance of " + /* getName */(c => c["__class"]?c["__class"]:c["name"])((<any>this.object.constructor)) + ". Introspection information for the class is: " + keyMap);
}
/**
* Whether the model has a plain get(String) or get(Object) method
* @return {boolean}
*/
hasPlainGetMethod() : boolean {
return /* get */this.wrapper.getClassIntrospector().get((<any>this.object.constructor)).get(ClassIntrospector.GENERIC_GET_KEY_$LI$()) != null;
}
/*private*/ invokeThroughDescriptor(desc : any, classInfo : any) : TemplateModel {
// let cachedModel : TemplateModel;
// {
// cachedModel = this.memberCache != null?/* get */this.memberCache.get(desc):null;
// }
// if(cachedModel != null) {
// return cachedModel;
// }
// let resultModel : TemplateModel = BeanModel.UNKNOWN_$LI$();
// if(desc != null && desc instanceof <any>FastPropertyDescriptor) {
// let pd : FastPropertyDescriptor = <FastPropertyDescriptor>desc;
// let indexedReadMethod : Function = pd.getIndexedReadMethod();
// if(indexedReadMethod != null) {
// if(!this.wrapper.getPreferIndexedReadMethod() && (pd.getReadMethod()) != null) {
// resultModel = this.wrapper.invokeMethod(this.object, pd.getReadMethod(), null);
// } else {
// resultModel = cachedModel = new SimpleMethodModel(this.object, indexedReadMethod, ClassIntrospector.getArgTypes(classInfo, indexedReadMethod), this.wrapper);
// }
// } else {
// resultModel = this.wrapper.invokeMethod(this.object, pd.getReadMethod(), null);
// }
// } else if(desc != null && desc instanceof <any>Field) {
// resultModel = this.wrapper.wrap$java_lang_Object(/* get */this.object[(<Field>desc).name]);
// } else if(desc != null && (desc instanceof Function)) {
// let method : Function = <Function>desc;
// resultModel = cachedModel = new SimpleMethodModel(this.object, method, ClassIntrospector.getArgTypes(classInfo, method), this.wrapper);
// } else if(desc != null && desc instanceof <any>OverloadedMethods) {
// resultModel = cachedModel = new OverloadedMethodsModel(this.object, <OverloadedMethods>desc, this.wrapper);
// }
// if(cachedModel != null) {
// {
// if(this.memberCache == null) {
// this.memberCache = <any>(new Map<any, any>());
// }
// /* put */this.memberCache.set(desc, cachedModel);
// }
// }
// return resultModel;
throw new Error("invokeThroughDescriptor: not implemented in this port");
}
clearMemberCache() {
{
this.memberCache = null;
}
}
invokeGenericGet(classInfo : Map<any, any>, clazz : any, key : string) : TemplateModel {
let genericGet : Function = <Function>/* get */classInfo.get(ClassIntrospector.GENERIC_GET_KEY_$LI$());
if(genericGet == null) {
return BeanModel.UNKNOWN_$LI$();
}
return this.wrapper.invokeMethod(this.object, genericGet, [key]);
}
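// Illustrative sketch (not part of the original class): invokeGenericGet above
// falls back to the wrapped bean's own generic get method. The equivalent
// fallback for a plain JavaScript object might look like this; the method is
// hypothetical and callers would map `undefined` to the UNKNOWN marker.
genericGetSketch(target : any, key : string) : any {
    if(target != null && typeof target.get === 'function') {
        return target.get(key); // delegate to the object's own getter
    }
    return undefined;
}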
wrap(obj : any) : TemplateModel {
return this.wrapper.getOuterIdentity()['wrap$java_lang_Object'](obj);
}
unwrap(model : TemplateModel) : any {
return this.wrapper.unwrap$freemarker_template_TemplateModel(model);
}
/**
* Tells whether the model is empty. It is empty if the wrapped object is null, a Boolean with
* false value, an empty string, an empty array, or an empty map.
* @return {boolean}
*/
public isEmpty() : boolean {
if(typeof this.object === 'string') {
return (<string>this.object).length === 0; | }
if(this.object != null && (this.object instanceof Array)) {
return /* isEmpty */((<any>this.object).length == 0);
}
// if((this.object != null && (this.object instanceof Object)) && this.wrapper.is2324Bugfixed()) {
// return !(<Iterator><any>this.object).hasNext();
// }
if(this.object != null && (this.object instanceof Map)) {
return /* isEmpty */((m) => { if(m.entries==null) m.entries=[]; return m.entries.length == 0; })(<any>(<Map<any, any>><any>this.object));
}
return this.object == null || this.object === false;
}
/**
* Returns the same as {link #getWrappedObject()}; to ensure that, this method will be final starting from 2.4.
* This behavior of {link BeanModel} is assumed by some FreeMarker code.
* @param {*} hint
* @return {Object}
*/
public getAdaptedObject(hint : any) : any {
return this.object;
}
public getWrappedObject() : any {
return this.object;
}
public size() : number {
return this.wrapper.getClassIntrospector().keyCount((<any>this.object.constructor));
}
public keys() : TemplateCollectionModel {
return new CollectionAndSequence(new SimpleSequence(this.keySet(), this.wrapper));
}
public values() : TemplateCollectionModel {
let values : Array<any> = <any>([]);
let it : TemplateModelIterator = this.keys().iterator();
while((it.hasNext())) {
let key : string = (<TemplateScalarModel><any>it.next()).getAsString();
/* add */values.push(this.get$java_lang_String(key));
}
return new CollectionAndSequence(new SimpleSequence(values, this.wrapper));
}
/**
* Used for {@code classic_compatible} mode; don't use it for anything else.
* In FreeMarker 1.7 (and also at least in 2.1) {link BeanModel} was a {link TemplateScalarModel}. Some internal
* FreeMarker code tries to emulate FreeMarker classic by calling this method when a {link TemplateScalarModel} is
* expected.
*
* @return {String} Never {@code null}
*/
getAsClassicCompatibleString() : string {
if(this.object == null) {
return "null";
}
let s : string = this.object.toString();
return s != null?s:"null";
}
/**
*
* @return {String}
*/
public toString() : string {
return this.object.toString();
}
/**
* Helper method to support TemplateHashModelEx. Returns the Set of
* Strings which are available via the TemplateHashModel
* interface. Subclasses that override <tt>invokeGenericGet</tt> to
* provide additional hash keys should also override this method.
* @return {Set}
*/
keySet() : Set<any> {
return this.wrapper.getClassIntrospector().keySet((<any>this.object.constructor));
}
public getAPI() : TemplateModel {
return this.wrapper.wrapAsAPI(this.object);
}
}
BeanModel["__class"] = "freemarker.ext.beans.BeanModel";
BeanModel["__interfaces"] = ["freemarker.ext.util.WrapperTemplateModel","freemarker.template.TemplateModelWithAPISupport","freemarker.template.TemplateHashModel","freemarker.template.TemplateHashModelEx","freemarker.template.TemplateModel","freemarker.template.AdapterTemplateModel"];
export namespace BeanModel {
export class BeanModel$0 implements ModelFactory {
public create(object : any, wrapper : ObjectWrapper) : TemplateModel {
return new BeanModel(object, <BeansWrapper><any>wrapper);
}
constructor() {
}
}
BeanModel$0["__interfaces"] = ["freemarker.ext.util.ModelFactory"];
}
BeanModel.FACTORY_$LI$();
BeanModel.UNKNOWN_$LI$();
BeanModel.LOG_$LI$(); | random_line_split |
|
BeanModel.ts | /* Generated from Java with JSweet 2.2.0-SNAPSHOT - http://www.jsweet.org */
import {CollectionAndSequence} from '../../core/CollectionAndSequence';
import {_DelayedFTLTypeDescription} from '../../core/_DelayedFTLTypeDescription';
import {_DelayedJQuote} from '../../core/_DelayedJQuote';
import {_TemplateModelException} from '../../core/_TemplateModelException';
import {ModelFactory} from '../util/ModelFactory';
import {WrapperTemplateModel} from '../util/WrapperTemplateModel';
import {Logger} from '../../log/Logger';
import {AdapterTemplateModel} from '../../template/AdapterTemplateModel';
import {ObjectWrapper} from '../../template/ObjectWrapper';
import {SimpleScalar} from '../../template/SimpleScalar';
import {SimpleSequence} from '../../template/SimpleSequence';
import {TemplateCollectionModel} from '../../template/TemplateCollectionModel';
import {TemplateHashModelEx} from '../../template/TemplateHashModelEx';
import {TemplateModel} from '../../template/TemplateModel';
import {TemplateModelException} from '../../template/TemplateModelException';
import {TemplateModelIterator} from '../../template/TemplateModelIterator';
import {TemplateModelWithAPISupport} from '../../template/TemplateModelWithAPISupport';
import {TemplateScalarModel} from '../../template/TemplateScalarModel';
import {StringUtil} from '../../template/utility/StringUtil';
import {BeansWrapper} from './BeansWrapper';
import {ClassIntrospector} from './ClassIntrospector';
import {InvalidPropertyException} from './InvalidPropertyException';
import {Set} from '../../../java/util/Set';
import {Map} from "../../../java/util/Map";
/**
* Creates a new model that wraps the specified object. Note that there are
* specialized subclasses of this class for wrapping arrays, collections,
* enumeration, iterators, and maps. Note also that the superclass can be
* used to wrap String objects if only scalar functionality is needed. You
* can also choose to delegate the choice over which model class is used for
* wrapping to {link BeansWrapper#wrap(Object)}.
*
* @param {Object} object the object to wrap into a model.
* @param {BeansWrapper} wrapper the {link BeansWrapper} associated with this model.
* Every model has to have an associated {link BeansWrapper} instance. The
* model gains many attributes from its wrapper, including the caching
* behavior, method exposure level, method-over-item shadowing policy etc.
* @class
*/
export class BeanModel implements TemplateHashModelEx, AdapterTemplateModel, WrapperTemplateModel, TemplateModelWithAPISupport {
static LOG : Logger; public static LOG_$LI$() : Logger { if(BeanModel.LOG == null) BeanModel.LOG = Logger.getLogger("freemarker.beans"); return BeanModel.LOG; };
object : any;
wrapper : BeansWrapper;
static UNKNOWN : TemplateModel; public static UNKNOWN_$LI$() : TemplateModel { if(BeanModel.UNKNOWN == null) BeanModel.UNKNOWN = new SimpleScalar("UNKNOWN"); return BeanModel.UNKNOWN; };
static FACTORY : ModelFactory; public static FACTORY_$LI$() : ModelFactory { if(BeanModel.FACTORY == null) BeanModel.FACTORY = new BeanModel.BeanModel$0(); return BeanModel.FACTORY; };
/*private*/ memberCache : Map<any, any>;
public constructor(object : any, wrapper : any, introspectNow? : any) {
if(introspectNow === undefined) {
introspectNow = true;
}
this.object = object;
this.wrapper = wrapper;
// if(introspectNow && object != null) {
// this.wrapper.getClassIntrospector().get(object.constructor);
// }
}
public get$java_lang_String(key : string) : TemplateModel {
if(this.object.hasOwnProperty(key)) {
const value = this.object[key];
if(typeof value === 'function') {
return new (require('./SimpleMethodModel').SimpleMethodModel)(this.object, value, null, this.wrapper)
}
return this.wrapper.wrap(value);
}
// let clazz : any = (<any>this.object.constructor);
// let classInfo : Map<any, any> = this.wrapper.getClassIntrospector().get(clazz);
// let retval : TemplateModel = null;
// try {
// if(this.wrapper.isMethodsShadowItems()) {
// let fd : any = /* get */classInfo.get(key);
// if(fd != null) {
// retval = this.invokeThroughDescriptor(fd, classInfo);
// } else {
// retval = this.invokeGenericGet(classInfo, clazz, key);
// }
// } else {
// let model : TemplateModel = this.invokeGenericGet(classInfo, clazz, key);
// let nullModel : TemplateModel = this.wrapper.wrap$java_lang_Object(null);
// if(model !== nullModel && model !== BeanModel.UNKNOWN_$LI$()) {
// return model;
// }
// let fd : any = /* get */classInfo.get(key);
// if(fd != null) {
// retval = this.invokeThroughDescriptor(fd, classInfo);
// if(retval === BeanModel.UNKNOWN_$LI$() && model === nullModel) {
// retval = nullModel;
// }
// }
// }
// if(retval === BeanModel.UNKNOWN_$LI$()) {
// if(this.wrapper.isStrict()) {
// throw new InvalidPropertyException("No such bean property: " + key);
// } else if(BeanModel.LOG_$LI$().isDebugEnabled()) {
// this.logNoSuchKey(key, classInfo);
// }
// retval = this.wrapper.wrap$java_lang_Object(null);
// }
// return retval;
// } catch(__e) {
// if(__e != null && __e instanceof <any>TemplateModelException) {
// let e : TemplateModelException = <TemplateModelException>__e;
// throw e;
//
// }
// if(__e != null && (__e["__classes"] && __e["__classes"].indexOf("java.lang.Exception") >= 0) || __e != null && __e instanceof <any>Error) {
// let e : Error = <Error>__e;
// throw new _TemplateModelException(e, "An error has occurred when reading existing sub-variable ", new _DelayedJQuote(key), "; see cause exception! The type of the containing value was: ", new _DelayedFTLTypeDescription(this));
//
// }
// }
}
/**
* Uses Beans introspection to locate a property or method with name
* matching the key name. If a method or property is found, it's wrapped
* into {link freemarker.template.TemplateMethodModelEx} (for a method or
* indexed property), or evaluated on-the-fly and the return value wrapped
* into appropriate model (for a non-indexed property) Models for various
* properties and methods are cached on a per-class basis, so the costly
* introspection is performed only once per property or method of a class.
* (Side-note: this also implies that any class whose method has been called
* will be strongly referred to by the framework and will not become
* unloadable until this class has been unloaded first. Normally this is not
* an issue, but can be in a rare scenario where you create many classes on-
* the-fly. Also, as the cache grows with new classes and methods introduced
* to the framework, it may appear as if it were leaking memory. The
* framework does, however, detect class reloads (if you happen to be in an
* environment that does this kind of thing--servlet containers do it when
* they reload a web application) and flushes the cache.) If no method or
* property matching the key is found, the framework will try to invoke
* methods with signature
* <tt>non-void-return-type get(java.lang.String)</tt>,
* then <tt>non-void-return-type get(java.lang.Object)</tt>, or
* alternatively (if the wrapped object is a resource bundle)
* <tt>Object getObject(java.lang.String)</tt>.
*
* @throws TemplateModelException if there was no property nor method nor
* a generic <tt>get</tt> method to invoke.
* @param {String} key
* @return {*}
*/
public get(key? : any) : any {
if(((typeof key === 'string') || key === null)) {
return <any>this.get$java_lang_String(key);
} else throw new Error('invalid overload');
}
/*private*/ logNoSuchKey(key : string, keyMap : Map<any, any>) {
BeanModel.LOG_$LI$().debug$java_lang_String("Key " + StringUtil.jQuoteNoXSS$java_lang_Object(key) + " was not found on instance of " + /* getName */(c => c["__class"]?c["__class"]:c["name"])((<any>this.object.constructor)) + ". Introspection information for the class is: " + keyMap);
}
/**
* Whether the model has a plain get(String) or get(Object) method
* @return {boolean}
*/
hasPlainGetMethod() : boolean {
return /* get */this.wrapper.getClassIntrospector().get((<any>this.object.constructor)).get(ClassIntrospector.GENERIC_GET_KEY_$LI$()) != null;
}
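// Illustrative sketch (not part of the original class): outside the
// introspector machinery, "has a plain get method" reduces to a capability
// check on the wrapped object itself. The method below is hypothetical.
hasPlainGetSketch() : boolean {
    return this.object != null && typeof this.object.get === 'function';
}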
/*private*/ invokeThroughDescriptor(desc : any, classInfo : any) : TemplateModel {
// let cachedModel : TemplateModel;
// {
// cachedModel = this.memberCache != null?/* get */this.memberCache.get(desc):null;
// }
// if(cachedModel != null) {
// return cachedModel;
// }
// let resultModel : TemplateModel = BeanModel.UNKNOWN_$LI$();
// if(desc != null && desc instanceof <any>FastPropertyDescriptor) {
// let pd : FastPropertyDescriptor = <FastPropertyDescriptor>desc;
// let indexedReadMethod : Function = pd.getIndexedReadMethod();
// if(indexedReadMethod != null) {
// if(!this.wrapper.getPreferIndexedReadMethod() && (pd.getReadMethod()) != null) {
// resultModel = this.wrapper.invokeMethod(this.object, pd.getReadMethod(), null);
// } else {
// resultModel = cachedModel = new SimpleMethodModel(this.object, indexedReadMethod, ClassIntrospector.getArgTypes(classInfo, indexedReadMethod), this.wrapper);
// }
// } else {
// resultModel = this.wrapper.invokeMethod(this.object, pd.getReadMethod(), null);
// }
// } else if(desc != null && desc instanceof <any>Field) {
// resultModel = this.wrapper.wrap$java_lang_Object(/* get */this.object[(<Field>desc).name]);
// } else if(desc != null && (desc instanceof Function)) {
// let method : Function = <Function>desc;
// resultModel = cachedModel = new SimpleMethodModel(this.object, method, ClassIntrospector.getArgTypes(classInfo, method), this.wrapper);
// } else if(desc != null && desc instanceof <any>OverloadedMethods) {
// resultModel = cachedModel = new OverloadedMethodsModel(this.object, <OverloadedMethods>desc, this.wrapper);
// }
// if(cachedModel != null) {
// {
// if(this.memberCache == null) {
// this.memberCache = <any>(new Map<any, any>());
// }
// /* put */this.memberCache.set(desc, cachedModel);
// }
// }
// return resultModel;
throw new Error("invokeThroughDescriptor: not implemented in this port");
}
clearMemberCache() {
{
this.memberCache = null;
}
}
invokeGenericGet(classInfo : Map<any, any>, clazz : any, key : string) : TemplateModel {
let genericGet : Function = <Function>/* get */classInfo.get(ClassIntrospector.GENERIC_GET_KEY_$LI$());
if(genericGet == null) {
return BeanModel.UNKNOWN_$LI$();
}
return this.wrapper.invokeMethod(this.object, genericGet, [key]);
}
wrap(obj : any) : TemplateModel {
return this.wrapper.getOuterIdentity()['wrap$java_lang_Object'](obj);
}
unwrap(model : TemplateModel) : any {
return this.wrapper.unwrap$freemarker_template_TemplateModel(model);
}
/**
* Tells whether the model is empty. It is empty if the wrapped object is null, a Boolean with
* false value, an empty string, an empty array, or an empty map.
* @return {boolean}
*/
public isEmpty() : boolean {
if(typeof this.object === 'string') {
return (<string>this.object).length === 0;
}
if(this.object != null && (this.object instanceof Array)) {
return /* isEmpty */((<any>this.object).length == 0);
}
// if((this.object != null && (this.object instanceof Object)) && this.wrapper.is2324Bugfixed()) {
// return !(<Iterator><any>this.object).hasNext();
// }
if(this.object != null && (this.object instanceof Map)) {
return /* isEmpty */((m) => { if(m.entries==null) m.entries=[]; return m.entries.length == 0; })(<any>(<Map<any, any>><any>this.object));
}
return this.object == null || this.object === false;
}
/**
* Returns the same as {link #getWrappedObject()}; to ensure that, this method will be final starting from 2.4.
* This behavior of {link BeanModel} is assumed by some FreeMarker code.
* @param {*} hint
* @return {Object}
*/
public getAdaptedObject(hint : any) : any {
return this.object;
}
public getWrappedObject() : any {
return this.object;
}
public size() : number {
return this.wrapper.getClassIntrospector().keyCount((<any>this.object.constructor));
}
public | () : TemplateCollectionModel {
return new CollectionAndSequence(new SimpleSequence(this.keySet(), this.wrapper));
}
public values() : TemplateCollectionModel {
let values : Array<any> = <any>([]);
let it : TemplateModelIterator = this.keys().iterator();
while((it.hasNext())) {
let key : string = (<TemplateScalarModel><any>it.next()).getAsString();
/* add */values.push(this.get$java_lang_String(key));
}
return new CollectionAndSequence(new SimpleSequence(values, this.wrapper));
}
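// Illustrative sketch (not part of the original class): consuming keys()
// through the iterator protocol used above. The method is hypothetical; the
// matching value for each key would come from this.get(key).
listKeysSketch() : string[] {
    const result : string[] = [];
    const it = this.keys().iterator();
    while((it.hasNext())) {
        result.push((<TemplateScalarModel><any>it.next()).getAsString());
    }
    return result;
}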
/**
* Used for {@code classic_compatible} mode; don't use it for anything else.
* In FreeMarker 1.7 (and also at least in 2.1) {link BeanModel} was a {link TemplateScalarModel}. Some internal
* FreeMarker code tries to emulate FreeMarker classic by calling this method when a {link TemplateScalarModel} is
* expected.
*
* @return {String} Never {@code null}
*/
getAsClassicCompatibleString() : string {
if(this.object == null) {
return "null";
}
let s : string = this.object.toString();
return s != null?s:"null";
}
/**
*
* @return {String}
*/
public toString() : string {
return this.object.toString();
}
/**
* Helper method to support TemplateHashModelEx. Returns the Set of
* Strings which are available via the TemplateHashModel
* interface. Subclasses that override <tt>invokeGenericGet</tt> to
* provide additional hash keys should also override this method.
* @return {Set}
*/
keySet() : Set<any> {
return this.wrapper.getClassIntrospector().keySet((<any>this.object.constructor));
}
public getAPI() : TemplateModel {
return this.wrapper.wrapAsAPI(this.object);
}
}
BeanModel["__class"] = "freemarker.ext.beans.BeanModel";
BeanModel["__interfaces"] = ["freemarker.ext.util.WrapperTemplateModel","freemarker.template.TemplateModelWithAPISupport","freemarker.template.TemplateHashModel","freemarker.template.TemplateHashModelEx","freemarker.template.TemplateModel","freemarker.template.AdapterTemplateModel"];
export namespace BeanModel {
export class BeanModel$0 implements ModelFactory {
public create(object : any, wrapper : ObjectWrapper) : TemplateModel {
return new BeanModel(object, <BeansWrapper><any>wrapper);
}
constructor() {
}
}
BeanModel$0["__interfaces"] = ["freemarker.ext.util.ModelFactory"];
}
BeanModel.FACTORY_$LI$();
BeanModel.UNKNOWN_$LI$();
BeanModel.LOG_$LI$();
| keys | identifier_name |
BeanModel.ts | /* Generated from Java with JSweet 2.2.0-SNAPSHOT - http://www.jsweet.org */
import {CollectionAndSequence} from '../../core/CollectionAndSequence';
import {_DelayedFTLTypeDescription} from '../../core/_DelayedFTLTypeDescription';
import {_DelayedJQuote} from '../../core/_DelayedJQuote';
import {_TemplateModelException} from '../../core/_TemplateModelException';
import {ModelFactory} from '../util/ModelFactory';
import {WrapperTemplateModel} from '../util/WrapperTemplateModel';
import {Logger} from '../../log/Logger';
import {AdapterTemplateModel} from '../../template/AdapterTemplateModel';
import {ObjectWrapper} from '../../template/ObjectWrapper';
import {SimpleScalar} from '../../template/SimpleScalar';
import {SimpleSequence} from '../../template/SimpleSequence';
import {TemplateCollectionModel} from '../../template/TemplateCollectionModel';
import {TemplateHashModelEx} from '../../template/TemplateHashModelEx';
import {TemplateModel} from '../../template/TemplateModel';
import {TemplateModelException} from '../../template/TemplateModelException';
import {TemplateModelIterator} from '../../template/TemplateModelIterator';
import {TemplateModelWithAPISupport} from '../../template/TemplateModelWithAPISupport';
import {TemplateScalarModel} from '../../template/TemplateScalarModel';
import {StringUtil} from '../../template/utility/StringUtil';
import {BeansWrapper} from './BeansWrapper';
import {ClassIntrospector} from './ClassIntrospector';
import {InvalidPropertyException} from './InvalidPropertyException';
import {Set} from '../../../java/util/Set';
import {Map} from "../../../java/util/Map";
/**
* Creates a new model that wraps the specified object. Note that there are
* specialized subclasses of this class for wrapping arrays, collections,
* enumeration, iterators, and maps. Note also that the superclass can be
* used to wrap String objects if only scalar functionality is needed. You
* can also choose to delegate the choice over which model class is used for
* wrapping to {link BeansWrapper#wrap(Object)}.
*
* @param {Object} object the object to wrap into a model.
* @param {BeansWrapper} wrapper the {link BeansWrapper} associated with this model.
* Every model has to have an associated {link BeansWrapper} instance. The
* model gains many attributes from its wrapper, including the caching
* behavior, method exposure level, method-over-item shadowing policy etc.
* @class
*/
export class BeanModel implements TemplateHashModelEx, AdapterTemplateModel, WrapperTemplateModel, TemplateModelWithAPISupport {
static LOG : Logger; public static LOG_$LI$() : Logger { if(BeanModel.LOG == null) BeanModel.LOG = Logger.getLogger("freemarker.beans"); return BeanModel.LOG; };
object : any;
wrapper : BeansWrapper;
static UNKNOWN : TemplateModel; public static UNKNOWN_$LI$() : TemplateModel { if(BeanModel.UNKNOWN == null) BeanModel.UNKNOWN = new SimpleScalar("UNKNOWN"); return BeanModel.UNKNOWN; };
static FACTORY : ModelFactory; public static FACTORY_$LI$() : ModelFactory { if(BeanModel.FACTORY == null) BeanModel.FACTORY = new BeanModel.BeanModel$0(); return BeanModel.FACTORY; };
/*private*/ memberCache : Map<any, any>;
public constructor(object : any, wrapper : any, introspectNow? : any) {
if(introspectNow === undefined) {
introspectNow = true;
}
this.object = object;
this.wrapper = wrapper;
// if(introspectNow && object != null) {
// this.wrapper.getClassIntrospector().get(object.constructor);
// }
}
public get$java_lang_String(key : string) : TemplateModel {
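// Editor's note: only own properties of the wrapped object are resolved here; the
// Java-style ClassIntrospector path below is stubbed out, so unknown keys fall
// through and currently yield undefined rather than a wrapped null or UNKNOWN.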
if(this.object.hasOwnProperty(key)) {
const value = this.object[key];
if(typeof value === 'function') {
return new (require('./SimpleMethodModel').SimpleMethodModel)(this.object, value, null, this.wrapper)
}
return this.wrapper.wrap(value);
}
// let clazz : any = (<any>this.object.constructor);
// let classInfo : Map<any, any> = this.wrapper.getClassIntrospector().get(clazz);
// let retval : TemplateModel = null;
// try {
// if(this.wrapper.isMethodsShadowItems()) {
// let fd : any = /* get */classInfo.get(key);
// if(fd != null) {
// retval = this.invokeThroughDescriptor(fd, classInfo);
// } else {
// retval = this.invokeGenericGet(classInfo, clazz, key);
// }
// } else {
// let model : TemplateModel = this.invokeGenericGet(classInfo, clazz, key);
// let nullModel : TemplateModel = this.wrapper.wrap$java_lang_Object(null);
// if(model !== nullModel && model !== BeanModel.UNKNOWN_$LI$()) {
// return model;
// }
// let fd : any = /* get */classInfo.get(key);
// if(fd != null) {
// retval = this.invokeThroughDescriptor(fd, classInfo);
// if(retval === BeanModel.UNKNOWN_$LI$() && model === nullModel) {
// retval = nullModel;
// }
// }
// }
// if(retval === BeanModel.UNKNOWN_$LI$()) {
// if(this.wrapper.isStrict()) {
// throw new InvalidPropertyException("No such bean property: " + key);
// } else if(BeanModel.LOG_$LI$().isDebugEnabled()) {
// this.logNoSuchKey(key, classInfo);
// }
// retval = this.wrapper.wrap$java_lang_Object(null);
// }
// return retval;
// } catch(__e) {
// if(__e != null && __e instanceof <any>TemplateModelException) {
// let e : TemplateModelException = <TemplateModelException>__e;
// throw e;
//
// }
// if(__e != null && (__e["__classes"] && __e["__classes"].indexOf("java.lang.Exception") >= 0) || __e != null && __e instanceof <any>Error) {
// let e : Error = <Error>__e;
// throw new _TemplateModelException(e, "An error has occurred when reading existing sub-variable ", new _DelayedJQuote(key), "; see cause exception! The type of the containing value was: ", new _DelayedFTLTypeDescription(this));
//
// }
// }
}
/**
* Uses Beans introspection to locate a property or method with name
* matching the key name. If a method or property is found, it's wrapped
* into {@link freemarker.template.TemplateMethodModelEx} (for a method or
* indexed property), or evaluated on-the-fly and the return value wrapped
* into an appropriate model (for a non-indexed property). Models for various
* properties and methods are cached on a per-class basis, so the costly
* introspection is performed only once per property or method of a class.
* (Side-note: this also implies that any class whose method has been called
* will be strongly referred to by the framework and will not become
* unloadable until this class has been unloaded first. Normally this is not
* an issue, but can be in a rare scenario where you create many classes on-
* the-fly. Also, as the cache grows with new classes and methods introduced
* to the framework, it may appear as if it were leaking memory. The
* framework does, however, detect class reloads (if you happen to be in an
* environment that does this kind of thing--servlet containers do it when
* they reload a web application) and flushes the cache. If no method or
* property matching the key is found, the framework will try to invoke
* methods with signature
* <tt>non-void-return-type get(java.lang.String)</tt>,
* then <tt>non-void-return-type get(java.lang.Object)</tt>, or
* alternatively (if the wrapped object is a resource bundle)
* <tt>Object getObject(java.lang.String)</tt>.
*
* @throws TemplateModelException if there was no property nor method nor
* a generic <tt>get</tt> method to invoke.
* @param {String} key
* @return {*}
*/
public get(key? : any) : any {
if(((typeof key === 'string') || key === null)) {
return <any>this.get$java_lang_String(key);
} else throw new Error('invalid overload');
}
/*private*/ logNoSuchKey(key : string, keyMap : Map<any, any>) {
BeanModel.LOG_$LI$().debug$java_lang_String("Key " + StringUtil.jQuoteNoXSS$java_lang_Object(key) + " was not found on instance of " + /* getName */(c => c["__class"]?c["__class"]:c["name"])((<any>this.object.constructor)) + ". Introspection information for the class is: " + keyMap);
}
/**
* Whether the model has a plain get(String) or get(Object) method
* @return {boolean}
*/
hasPlainGetMethod() : boolean {
return /* get */this.wrapper.getClassIntrospector().get((<any>this.object.constructor)).get(ClassIntrospector.GENERIC_GET_KEY_$LI$()) != null;
}
/*private*/ invokeThroughDescriptor(desc : any, classInfo : any) : TemplateModel {
// let cachedModel : TemplateModel;
// {
// cachedModel = this.memberCache != null?/* get */this.memberCache.get(desc):null;
// }
// if(cachedModel != null) {
// return cachedModel;
// }
// let resultModel : TemplateModel = BeanModel.UNKNOWN_$LI$();
// if(desc != null && desc instanceof <any>FastPropertyDescriptor) {
// let pd : FastPropertyDescriptor = <FastPropertyDescriptor>desc;
// let indexedReadMethod : Function = pd.getIndexedReadMethod();
// if(indexedReadMethod != null) {
// if(!this.wrapper.getPreferIndexedReadMethod() && (pd.getReadMethod()) != null) {
// resultModel = this.wrapper.invokeMethod(this.object, pd.getReadMethod(), null);
// } else {
// resultModel = cachedModel = new SimpleMethodModel(this.object, indexedReadMethod, ClassIntrospector.getArgTypes(classInfo, indexedReadMethod), this.wrapper);
// }
// } else {
// resultModel = this.wrapper.invokeMethod(this.object, pd.getReadMethod(), null);
// }
// } else if(desc != null && desc instanceof <any>Field) {
// resultModel = this.wrapper.wrap$java_lang_Object(/* get */this.object[(<Field>desc).name]);
// } else if(desc != null && (desc instanceof Function)) {
// let method : Function = <Function>desc;
// resultModel = cachedModel = new SimpleMethodModel(this.object, method, ClassIntrospector.getArgTypes(classInfo, method), this.wrapper);
// } else if(desc != null && desc instanceof <any>OverloadedMethods) {
// resultModel = cachedModel = new OverloadedMethodsModel(this.object, <OverloadedMethods>desc, this.wrapper);
// }
// if(cachedModel != null) {
// {
// if(this.memberCache == null) {
// this.memberCache = <any>(new Map<any, any>());
// }
// /* put */this.memberCache.set(desc, cachedModel);
// }
// }
// return resultModel;
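// Editor's note: descriptor-based member resolution has not been ported from Java;
// the original logic is kept above (commented out) for reference, hence the
// unconditional throw below.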
throw new Error();
}
clearMemberCache() {
{
this.memberCache = null;
}
}
invokeGenericGet(classInfo : Map<any, any>, clazz : any, key : string) : TemplateModel {
let genericGet : Function = <Function>/* get */classInfo.get(ClassIntrospector.GENERIC_GET_KEY_$LI$());
if(genericGet == null) {
return BeanModel.UNKNOWN_$LI$();
}
return this.wrapper.invokeMethod(this.object, genericGet, [key]);
}
wrap(obj : any) : TemplateModel {
return this.wrapper.getOuterIdentity()['wrap$java_lang_Object'](obj);
}
unwrap(model : TemplateModel) : any {
return this.wrapper.unwrap$freemarker_template_TemplateModel(model);
}
/**
* Tells whether the model is empty. It is empty if either the wrapped
* object is null, or it's a Boolean with false value.
* @return {boolean}
*/
public isEmpty() : boolean |
/**
* Returns the same as {@link #getWrappedObject()}; to ensure that, this method will be final starting from 2.4.
* This behavior of {@link BeanModel} is assumed by some FreeMarker code.
* @param {*} hint
* @return {Object}
*/
public getAdaptedObject(hint : any) : any {
return this.object;
}
public getWrappedObject() : any {
return this.object;
}
public size() : number {
return this.wrapper.getClassIntrospector().keyCount((<any>this.object.constructor));
}
public keys() : TemplateCollectionModel {
return new CollectionAndSequence(new SimpleSequence(this.keySet(), this.wrapper));
}
public values() : TemplateCollectionModel {
let values : Array<any> = <any>([]);
let it : TemplateModelIterator = this.keys().iterator();
while((it.hasNext())) {
let key : string = (<TemplateScalarModel><any>it.next()).getAsString();
/* add */values.push(this.get$java_lang_String(key));
}
return new CollectionAndSequence(new SimpleSequence(values, this.wrapper));
}
/**
* Used for {@code classic_compatible} mode; don't use it for anything else.
* In FreeMarker 1.7 (and also at least in 2.1) {@link BeanModel} was a {@link TemplateScalarModel}. Some internal
* FreeMarker code tries to emulate FreeMarker classic by calling this method when a {@link TemplateScalarModel} is
* expected.
*
* @return {String} Never {@code null}
*/
getAsClassicCompatibleString() : string {
if(this.object == null) {
return "null";
}
let s : string = this.object.toString();
return s != null?s:"null";
}
/**
*
* @return {String}
*/
public toString() : string {
return this.object.toString();
}
/**
* Helper method to support TemplateHashModelEx. Returns the Set of
* Strings which are available via the TemplateHashModel
* interface. Subclasses that override <tt>invokeGenericGet</tt> to
* provide additional hash keys should also override this method.
* @return {Set}
*/
keySet() : Set<any> {
return this.wrapper.getClassIntrospector().keySet((<any>this.object.constructor));
}
public getAPI() : TemplateModel {
return this.wrapper.wrapAsAPI(this.object);
}
}
BeanModel["__class"] = "freemarker.ext.beans.BeanModel";
BeanModel["__interfaces"] = ["freemarker.ext.util.WrapperTemplateModel","freemarker.template.TemplateModelWithAPISupport","freemarker.template.TemplateHashModel","freemarker.template.TemplateHashModelEx","freemarker.template.TemplateModel","freemarker.template.AdapterTemplateModel"];
export namespace BeanModel {
export class BeanModel$0 implements ModelFactory {
public create(object : any, wrapper : ObjectWrapper) : TemplateModel {
return new BeanModel(object, <BeansWrapper><any>wrapper);
}
constructor() {
}
}
BeanModel$0["__interfaces"] = ["freemarker.ext.util.ModelFactory"];
}
BeanModel.FACTORY_$LI$();
BeanModel.UNKNOWN_$LI$();
BeanModel.LOG_$LI$();
| {
if(typeof this.object === 'string') {
return (<string>this.object).length === 0;
}
if(this.object != null && (this.object instanceof Array)) {
return /* isEmpty */((<any>this.object).length == 0);
}
// if((this.object != null && (this.object instanceof Object)) && this.wrapper.is2324Bugfixed()) {
// return !(<Iterator><any>this.object).hasNext();
// }
if(this.object != null && (this.object instanceof Map)) {
return /* isEmpty */((m) => { if(m.entries==null) m.entries=[]; return m.entries.length == 0; })(<any>(<Map<any, any>><any>this.object));
}
return this.object == null || this.object === false;
} | identifier_body |
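// --- Editor's sketch (not a dataset row): a minimal, standalone restatement of the
// emptiness rules implemented by the generated isEmpty() above. It uses the native
// ES Map instead of the JSweet java.util.Map shim, and skips the commented-out
// iterator branch; the names below are illustrative only.
function isEmptyLike(value) {
    if (typeof value === 'string') return value.length === 0;
    if (Array.isArray(value)) return value.length === 0;
    if (value instanceof Map) return value.size === 0;
    return value == null || value === false;
}
console.assert(isEmptyLike("") === true);      // empty string
console.assert(isEmptyLike([1]) === false);    // non-empty array
console.assert(isEmptyLike(false) === true);   // Boolean false counts as empty
console.assert(isEmptyLike({x: 1}) === false); // plain object is non-empty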
users.js | // needs dom.js, selector.js and ajax.js (all included in main)
var groupSelector = null;
// Using window.onload because an onclick="..." handler doesn't give the
// handler a this-variable
var old_load = window.onload;
window.onload = function() {
if (old_load) old_load();
$('password_button').parentNode.onsubmit = verifyPassword;
$('email').focus();
$('is_admin').onclick = setIsAdmin;
var ssh_key_add_link = $('ssh_key_add_link');
if (ssh_key_add_link) {
ssh_key_add_link.onclick = toggleSSHKeyAddForm;
}
var content = document.getElementById('content');
setupCollapsables(content, "usershowhide", users_collapse, users_expand);
// Initialize the select-dropdowns
groupSelectorInit();
reloadNotifications();
if (ssh_key_add_link) {
reloadSSHKeys();
}
$('savenotifications').parentNode.onsubmit = saveNotifications;
}
function users_collapse(me) {
}
function users_expand(me) {
}
function sendEmail() {
var eemail = escape_plus($('email').value);
AjaxAsyncPostRequest(document.location, "email=" + eemail, sendEmailCB);
}
function sendEmailCB(response) {
// TODO: Before email is set, notifications are disabled
LogResponse(response);
reloadNotifications();
}
function sendSendPasswordMail() {
AjaxAsyncPostLog(document.location, "sendPasswordMail=1");
}
function sendFullName() {
AjaxAsyncPostLog(document.location, "fullname=" + escape_plus($('fullname').value));
}
// global variable to temporarily store the password to be verified
var enteredPassword;
function verifyPassword() {
var input = $('password');
enteredPassword = input.value;
if (!enteredPassword) {
Log('Please enter a password before pressing "save password"', 'error');
return false;
}
// Some cleaning up.
input.value = '';
this.firstChild.innerHTML = 'Verify';
// change the onclick handler
input.focus();
addClassName(input, 'hint_bg');
$('password_button').parentNode.onsubmit = checkPasswords;
Log('Please verify your password', 'hint');
return false;
}
function setIsAdmin() {
// this function is called when the checkbox is already changed.
// the checkbox reflects the desired (new) value. Don't negate!
var newvalue = $('is_admin').checked;
AjaxAsyncPostRequest(document.location, "setIsAdmin=" + newvalue, setIsAdminCB);
}
function setIsAdminCB(response) {
LogResponse(response);
var setIsAdmin = FindResponse(response, 'setIsAdmin');
if (!setIsAdmin.success) {
$('is_admin').checked = ! $('is_admin').checked;
}
}
function checkPasswords() {
// Again some cleaning up.
this.firstChild.innerHTML = 'Password';
// Make sure the change-button isn't highlighted anymore
this.blur();
// change the onclick handler back to the verifyPassword function
$('password_button').parentNode.onsubmit = verifyPassword;
var input = $('password');
// Do the check after cleaning up.
if (input.value != enteredPassword) {
Log('Verification of password failed', 'error');
enteredPassword = '';
input.value = '';
return false;
}
removeClassName(input, 'hint_bg');
input.value = '';
// Send the entered password to the server.
sendPassword(enteredPassword);
enteredPassword = '';
var change_password_hint = $('change_password_hint');
if (change_password_hint) {
change_password_hint.style.display = 'none';
}
return false;
}
function sendPassword(password) {
AjaxAsyncPostLog(document.location, "password=" + escape_plus(password));
}
/*
* Adding users to groups and removing them from groups
*/
function addMemberToGroupAjax(group) {
var egroup = escape_plus(group);
return AjaxAsyncPostRequest(document.location, "addToGroup=" + egroup, groupRefreshAndLog);
}
function removeMemberFromGroupAjax(group) {
var egroup = escape_plus(group);
return AjaxAsyncPostRequest(document.location, "removeFromGroup=" + egroup, groupRefreshAndLog);
}
/* Requests the groups via ajax, and forms two lists to be used by Selector */
function initGroups() {
var added = [];
var addable = [];
var response = AjaxSyncPostRequest(document.location, 'listUserGroups');
// log if something went wrong
LogResponse(response);
var usergroups = FindResponse(response, 'listUserGroups');
if (!usergroups)
return {"added": [], "addable": []};
var groups = usergroups.xml.getElementsByTagName("group");
for (var group_idx=0; group_idx < groups.length; ++group_idx) |
return {"added": added, "addable": addable};
}
function groupSelectorInit() {
groupSelector = new Selector({
"selectorId": "memberof",
"urlPrefix": base_url + "groups/show/",
"initCallback": initGroups,
"addCallback": addMemberToGroupAjax,
"removeCallback": removeMemberFromGroupAjax,
"canLink": function(user) { return true; },
"canEdit": function() { return is_admin; }
});
}
function enableRepositoryAjax(repository) {
var erepository = escape_plus(repository);
return AjaxAsyncPostRequest(document.location, "setNotification=true&repository=" + erepository);
}
function disableRepositoryAjax(repository) {
var erepository = escape_plus(repository);
return AjaxAsyncPostRequest(document.location, "setNotification=false&repository=" + erepository);
}
function groupRefreshAndLog(response) {
groupSelector.reInit();
LogResponse(response);
}
function reloadNotifications() {
AjaxAsyncPostRequest(document.location, "listNotifications", reloadNotificationsCB);
return false;
}
function reloadNotificationsCB(response) {
list = FindResponse(response, "listNotifications");
LogResponse(response);
var notifications = list.xml.getElementsByTagName("notification");
notifications = Array.prototype.slice.call(notifications, 0);
notifications.sort(xmlSortByName);
var n = [];
for (var i = 0; i < notifications.length; ++i) {
name = notifications[i].getAttribute('name');
vcs = notifications[i].getAttribute('vcs');
enabled = notifications[i].getAttribute('enabled').toLowerCase() == "true";
n[n.length] = {"name": name, "vcs": vcs, "enabled": enabled};
}
redrawNotifications(n);
}
function redrawNotifications(notifications) {
var table = document.getElementById('notifications');
var email = document.getElementById('email');
email = email.value;
var tbodies = table.getElementsByTagName('tbody');
if (tbodies.length != 1)
return;
var tbody = tbodies[0];
for (var item_idx = tbody.childNodes.length - 1; item_idx > 0; --item_idx)
tbody.removeChild(tbody.childNodes[item_idx]);
for (var i = 0; i < notifications.length; ++i) {
var tr = $c("tr");
var td_name = $c("td");
var reposname = notifications[i].name;
var vcs = notifications[i].vcs;
var a_name = $c("a");
a_name.appendChild(document.createTextNode(reposname));
a_name.setAttribute("href", base_url + "repositories/show/" + vcs + "/" + reposname);
td_name.appendChild(a_name);
tr.appendChild(td_name);
// We need to store the repository type somewhere, we use a custom attribute
tr.setAttribute("data-vcstype", vcs);
var input = $c("input", {type: "checkbox"});
input.value = reposname + "_" + vcs + "_enabled";
input.checked = notifications[i].enabled;
input.defaultChecked = input.checked; // IE7 quirk
if (!email || email == "") {
input.disabled = "disabled";
input.title = "Please fill in an email address to enable this control";
input.setAttribute("class", "disabled")
}
var td_enabled = $c("td");
td_enabled.appendChild(input);
tr.appendChild(td_enabled);
tbody.appendChild(tr);
}
}
function saveNotifications() {
var table = document.getElementById('notifications');
var tbodies = table.getElementsByTagName('tbody');
if (tbodies.length != 1)
return;
var tbody = tbodies[0];
var str = "";
for (var item_idx = tbody.childNodes.length - 1; item_idx > 0; --item_idx) {
// every childnode is a tr. Layout as follows:
// <tr><td><a href="...">repository name</a></td><td>enabled</td></tr>
var tr = tbody.childNodes[item_idx];
var name = tr.childNodes[0].childNodes[0].innerHTML;
var vcstype = tr.getAttribute("data-vcstype");
var enabled = tr.childNodes[1].childNodes[0].checked;
if (str != "")
str += "|";
var ename = escape_plus(name);
str += vcstype + ":" + ename + "," + enabled;
}
// XXX maybe we should use XML here, or even better: JSON
AjaxAsyncPostRequest(document.location, "saveNotifications=" + str, saveNotificationsCB);
return false;
}
function saveNotificationsCB(response) {
list = FindResponse(response, "saveNotifications");
LogResponse(response);
reloadNotifications();
}
function toggleSSHKeyAddForm() {
var ssh_key_parent = $('ssh_key_add_link').parentNode;
var ssh_key_add_form = $('ssh_key_add_form');
ssh_key_parent.style.display = ssh_key_parent.style.display == "none" ? "block" : "none";
ssh_key_add_form.style.display = ssh_key_add_form.style.display == "none" ? "block" : "none";
if (ssh_key_add_form.style.display != "none")
$("ssh_key").focus();
return false;
}
function reloadSSHKeys() {
AjaxAsyncPostRequest(document.location, "listSSHKeys", reloadSSHKeysCB);
return false;
}
function reloadSSHKeysCB(response) {
list = FindResponse(response, "listSSHKeys");
LogResponse(response);
var ssh_keys = list.xml.getElementsByTagName("ssh_key");
var n = [];
for (var i = 0; i < ssh_keys.length; ++i) {
id = ssh_keys[i].getAttribute("id");
title = ssh_keys[i].getAttribute("title");
key = ssh_keys[i].getAttribute("key");
n[n.length] = {"id": id, "title": title, "key": key};
}
redrawSSHKeys(n);
}
function redrawSSHKeys(ssh_keys) {
var list = document.getElementById('ssh_keys');
for (var item_idx = list.childNodes.length - 1; item_idx > 0; --item_idx)
list.removeChild(list.childNodes[item_idx]);
for (var i = 0; i < ssh_keys.length; ++i) {
var li = $c("li");
li.ssh_key_id = ssh_keys[i].id;
var a = $c("a", {href: "#"});
a.appendChild(document.createTextNode(ssh_keys[i].title));
a.ssh_key = ssh_keys[i].key;
a.ssh_title = ssh_keys[i].title;
a.onclick = function() {
alert("SSH Key `" + this.ssh_title + "':\n" + this.ssh_key);
return false;
}
li.appendChild(a);
var minus = $c("div", {className: "remover c_icon minus"});
minus.setAttribute("title", "Delete " + ssh_keys[i].title);
minus.onclick = removeSSHKey;
li.appendChild(minus);
list.appendChild(li);
}
}
function addSSHKey() {
post_vars = "addSSHKey=true&title=" + escape_plus($("title").value);
post_vars += "&ssh_key=" + escape_plus($("ssh_key").value);
AjaxAsyncPostRequest(document.location, post_vars, addSSHKeyCB);
}
function addSSHKeyCB(response) {
LogResponse(response);
if (response[0].success)
{
toggleSSHKeyAddForm();
$("title").value = "";
$("ssh_key").value = "";
reloadSSHKeys();
}
}
function removeSSHKey() {
AjaxAsyncPostRequest(document.location,
"removeSSHKey=" + this.parentNode.ssh_key_id,
removeSSHKeyCB);
}
function removeSSHKeyCB(response) {
LogResponse(response);
reloadSSHKeys();
}
| {
var group = groups[group_idx];
if (group.getAttribute("member") == "true")
added[added.length] = group.getAttribute("name");
else
addable[addable.length] = group.getAttribute("name");
} | conditional_block |
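// --- Editor's sketch (not a dataset row): the added/addable partition performed by
// initGroups() above, shown standalone with plain objects in place of the XML DOM
// so that it runs on its own.
function partitionGroups(groups) {
    var added = [], addable = [];
    for (var i = 0; i < groups.length; ++i) {
        (groups[i].member ? added : addable).push(groups[i].name);
    }
    return {"added": added, "addable": addable};
}
var parts = partitionGroups([{name: "admins", member: true}, {name: "guests", member: false}]);
console.assert(parts.added[0] === "admins" && parts.addable[0] === "guests");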
users.js | // needs dom.js, selector.js and ajax.js (all included in main)
var groupSelector = null;
// Using window.onload because an onclick="..." handler doesn't give the
// handler a this-variable
var old_load = window.onload;
window.onload = function() {
if (old_load) old_load();
$('password_button').parentNode.onsubmit = verifyPassword;
$('email').focus();
$('is_admin').onclick = setIsAdmin;
var ssh_key_add_link = $('ssh_key_add_link');
if (ssh_key_add_link) {
ssh_key_add_link.onclick = toggleSSHKeyAddForm;
}
var content = document.getElementById('content');
setupCollapsables(content, "usershowhide", users_collapse, users_expand);
// Initialize the select-dropdowns
groupSelectorInit();
reloadNotifications();
if (ssh_key_add_link) {
reloadSSHKeys();
}
$('savenotifications').parentNode.onsubmit = saveNotifications;
}
function users_collapse(me) {
}
function users_expand(me) {
}
function sendEmail() {
var eemail = escape_plus($('email').value);
AjaxAsyncPostRequest(document.location, "email=" + eemail, sendEmailCB);
}
function sendEmailCB(response) {
// TODO: Before email is set, notifications are disabled
LogResponse(response);
reloadNotifications();
}
function sendSendPasswordMail() {
AjaxAsyncPostLog(document.location, "sendPasswordMail=1");
}
function sendFullName() {
AjaxAsyncPostLog(document.location, "fullname=" + escape_plus($('fullname').value));
}
// global variable to temporarily store the password to be verified
var enteredPassword;
function verifyPassword() {
var input = $('password');
enteredPassword = input.value;
if (!enteredPassword) {
Log('Please enter a password before pressing "save password"', 'error');
return false;
}
// Some cleaning up.
input.value = '';
this.firstChild.innerHTML = 'Verify';
// change the onclick handler
input.focus();
addClassName(input, 'hint_bg');
$('password_button').parentNode.onsubmit = checkPasswords;
Log('Please verify your password', 'hint');
return false;
}
function setIsAdmin() |
function setIsAdminCB(response) {
LogResponse(response);
var setIsAdmin = FindResponse(response, 'setIsAdmin');
if (!setIsAdmin.success) {
$('is_admin').checked = ! $('is_admin').checked;
}
}
function checkPasswords() {
// Again some cleaning up.
this.firstChild.innerHTML = 'Password';
// Make sure the change-button isn't highlighted anymore
this.blur();
// change the onclick handler back to the verifyPassword function
$('password_button').parentNode.onsubmit = verifyPassword;
var input = $('password');
// Do the check after cleaning up.
if (input.value != enteredPassword) {
Log('Verification of password failed', 'error');
enteredPassword = '';
input.value = '';
return false;
}
removeClassName(input, 'hint_bg');
input.value = '';
// Send the entered password to the server.
sendPassword(enteredPassword);
enteredPassword = '';
var change_password_hint = $('change_password_hint');
if (change_password_hint) {
change_password_hint.style.display = 'none';
}
return false;
}
function sendPassword(password) {
AjaxAsyncPostLog(document.location, "password=" + escape_plus(password));
}
/*
* Adding users to groups and removing them from groups
*/
function addMemberToGroupAjax(group) {
var egroup = escape_plus(group);
return AjaxAsyncPostRequest(document.location, "addToGroup=" + egroup, groupRefreshAndLog);
}
function removeMemberFromGroupAjax(group) {
var egroup = escape_plus(group);
return AjaxAsyncPostRequest(document.location, "removeFromGroup=" + egroup, groupRefreshAndLog);
}
/* Requests the groups via ajax, and forms two lists to be used by Selector */
function initGroups() {
var added = [];
var addable = [];
var response = AjaxSyncPostRequest(document.location, 'listUserGroups');
// log if something went wrong
LogResponse(response);
var usergroups = FindResponse(response, 'listUserGroups');
if (!usergroups)
return {"added": [], "addable": []};
var groups = usergroups.xml.getElementsByTagName("group");
for (var group_idx=0; group_idx < groups.length; ++group_idx) {
var group = groups[group_idx];
if (group.getAttribute("member") == "true")
added[added.length] = group.getAttribute("name");
else
addable[addable.length] = group.getAttribute("name");
}
return {"added": added, "addable": addable};
}
function groupSelectorInit() {
groupSelector = new Selector({
"selectorId": "memberof",
"urlPrefix": base_url + "groups/show/",
"initCallback": initGroups,
"addCallback": addMemberToGroupAjax,
"removeCallback": removeMemberFromGroupAjax,
"canLink": function(user) { return true; },
"canEdit": function() { return is_admin; }
});
}
function enableRepositoryAjax(repository) {
var erepository = escape_plus(repository);
return AjaxAsyncPostRequest(document.location, "setNotification=true&repository=" + erepository);
}
function disableRepositoryAjax(repository) {
var erepository = escape_plus(repository);
return AjaxAsyncPostRequest(document.location, "setNotification=false&repository=" + erepository);
}
function groupRefreshAndLog(response) {
groupSelector.reInit();
LogResponse(response);
}
function reloadNotifications() {
AjaxAsyncPostRequest(document.location, "listNotifications", reloadNotificationsCB);
return false;
}
function reloadNotificationsCB(response) {
list = FindResponse(response, "listNotifications");
LogResponse(response);
var notifications = list.xml.getElementsByTagName("notification");
notifications = Array.prototype.slice.call(notifications, 0);
notifications.sort(xmlSortByName);
var n = [];
for (var i = 0; i < notifications.length; ++i) {
name = notifications[i].getAttribute('name');
vcs = notifications[i].getAttribute('vcs');
enabled = notifications[i].getAttribute('enabled').toLowerCase() == "true";
n[n.length] = {"name": name, "vcs": vcs, "enabled": enabled};
}
redrawNotifications(n);
}
function redrawNotifications(notifications) {
var table = document.getElementById('notifications');
var email = document.getElementById('email');
email = email.value;
var tbodies = table.getElementsByTagName('tbody');
if (tbodies.length != 1)
return;
var tbody = tbodies[0];
for (var item_idx = tbody.childNodes.length - 1; item_idx > 0; --item_idx)
tbody.removeChild(tbody.childNodes[item_idx]);
for (var i = 0; i < notifications.length; ++i) {
var tr = $c("tr");
var td_name = $c("td");
var reposname = notifications[i].name;
var vcs = notifications[i].vcs;
var a_name = $c("a");
a_name.appendChild(document.createTextNode(reposname));
a_name.setAttribute("href", base_url + "repositories/show/" + vcs + "/" + reposname);
td_name.appendChild(a_name);
tr.appendChild(td_name);
// We need to store the repository type somewhere, we use a custom attribute
tr.setAttribute("data-vcstype", vcs);
var input = $c("input", {type: "checkbox"});
input.value = reposname + "_" + vcs + "_enabled";
input.checked = notifications[i].enabled;
input.defaultChecked = input.checked; // IE7 quirk
if (!email || email == "") {
input.disabled = "disabled";
input.title = "Please fill in an email address to enable this control";
input.setAttribute("class", "disabled")
}
var td_enabled = $c("td");
td_enabled.appendChild(input);
tr.appendChild(td_enabled);
tbody.appendChild(tr);
}
}
function saveNotifications() {
var table = document.getElementById('notifications');
var tbodies = table.getElementsByTagName('tbody');
if (tbodies.length != 1)
return;
var tbody = tbodies[0];
var str = "";
for (var item_idx = tbody.childNodes.length - 1; item_idx > 0; --item_idx) {
// every childnode is a tr. Layout as follows:
// <tr><td><a href="...">repository name</a></td><td>enabled</td></tr>
var tr = tbody.childNodes[item_idx];
var name = tr.childNodes[0].childNodes[0].innerHTML;
var vcstype = tr.getAttribute("data-vcstype");
var enabled = tr.childNodes[1].childNodes[0].checked;
if (str != "")
str += "|";
var ename = escape_plus(name);
str += vcstype + ":" + ename + "," + enabled;
}
// XXX maybe we should use XML here, or even better: JSON
AjaxAsyncPostRequest(document.location, "saveNotifications=" + str, saveNotificationsCB);
return false;
}
function saveNotificationsCB(response) {
list = FindResponse(response, "saveNotifications");
LogResponse(response);
reloadNotifications();
}
function toggleSSHKeyAddForm() {
var ssh_key_parent = $('ssh_key_add_link').parentNode;
var ssh_key_add_form = $('ssh_key_add_form');
ssh_key_parent.style.display = ssh_key_parent.style.display == "none" ? "block" : "none";
ssh_key_add_form.style.display = ssh_key_add_form.style.display == "none" ? "block" : "none";
if (ssh_key_add_form.style.display != "none")
$("ssh_key").focus();
return false;
}
function reloadSSHKeys() {
AjaxAsyncPostRequest(document.location, "listSSHKeys", reloadSSHKeysCB);
return false;
}
function reloadSSHKeysCB(response) {
list = FindResponse(response, "listSSHKeys");
LogResponse(response);
var ssh_keys = list.xml.getElementsByTagName("ssh_key");
var n = [];
for (var i = 0; i < ssh_keys.length; ++i) {
id = ssh_keys[i].getAttribute("id");
title = ssh_keys[i].getAttribute("title");
key = ssh_keys[i].getAttribute("key");
n[n.length] = {"id": id, "title": title, "key": key};
}
redrawSSHKeys(n);
}
function redrawSSHKeys(ssh_keys) {
var list = document.getElementById('ssh_keys');
for (var item_idx = list.childNodes.length - 1; item_idx > 0; --item_idx)
list.removeChild(list.childNodes[item_idx]);
for (var i = 0; i < ssh_keys.length; ++i) {
var li = $c("li");
li.ssh_key_id = ssh_keys[i].id;
var a = $c("a", {href: "#"});
a.appendChild(document.createTextNode(ssh_keys[i].title));
a.ssh_key = ssh_keys[i].key;
a.ssh_title = ssh_keys[i].title;
a.onclick = function() {
alert("SSH Key `" + this.ssh_title + "':\n" + this.ssh_key);
return false;
}
li.appendChild(a);
var minus = $c("div", {className: "remover c_icon minus"});
minus.setAttribute("title", "Delete " + ssh_keys[i].title);
minus.onclick = removeSSHKey;
li.appendChild(minus);
list.appendChild(li);
}
}
function addSSHKey() {
post_vars = "addSSHKey=true&title=" + escape_plus($("title").value);
post_vars += "&ssh_key=" + escape_plus($("ssh_key").value);
AjaxAsyncPostRequest(document.location, post_vars, addSSHKeyCB);
}
function addSSHKeyCB(response) {
LogResponse(response);
if (response[0].success)
{
toggleSSHKeyAddForm();
$("title").value = "";
$("ssh_key").value = "";
reloadSSHKeys();
}
}
function removeSSHKey() {
AjaxAsyncPostRequest(document.location,
"removeSSHKey=" + this.parentNode.ssh_key_id,
removeSSHKeyCB);
}
function removeSSHKeyCB(response) {
LogResponse(response);
reloadSSHKeys();
}
| {
// this function is called when the checkbox is already changed.
// the checkbox reflects the desired (new) value. Don't negate!
var newvalue = $('is_admin').checked;
AjaxAsyncPostRequest(document.location, "setIsAdmin=" + newvalue, setIsAdminCB);
} | identifier_body |
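// --- Editor's sketch (not a dataset row): the two-phase enter/verify flow that
// verifyPassword() and checkPasswords() above implement by swapping the form's
// onsubmit handler, reduced here to a self-contained closure.
function makePasswordVerifier() {
    var firstEntry = null;
    return function submit(value) {
        if (firstEntry === null) { // phase 1: remember the password, ask to verify
            firstEntry = value;
            return "verify";
        }
        var ok = (value === firstEntry); // phase 2: compare and reset
        firstEntry = null;
        return ok ? "saved" : "mismatch";
    };
}
var verify = makePasswordVerifier();
console.assert(verify("s3cret") === "verify");
console.assert(verify("s3cret") === "saved");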
users.js | // needs dom.js, selector.js and ajax.js (all included in main)
var groupSelector = null;
// Using window.onload because an onclick="..." handler doesn't give the
// handler a this-variable
var old_load = window.onload;
window.onload = function() {
if (old_load) old_load();
$('password_button').parentNode.onsubmit = verifyPassword;
$('email').focus();
$('is_admin').onclick = setIsAdmin;
var ssh_key_add_link = $('ssh_key_add_link');
if (ssh_key_add_link) {
ssh_key_add_link.onclick = toggleSSHKeyAddForm;
}
var content = document.getElementById('content');
setupCollapsables(content, "usershowhide", users_collapse, users_expand);
// Initialize the select-dropdowns
groupSelectorInit();
reloadNotifications();
if (ssh_key_add_link) {
reloadSSHKeys();
}
$('savenotifications').parentNode.onsubmit = saveNotifications;
}
function users_collapse(me) {
}
function users_expand(me) {
}
function sendEmail() {
var eemail = escape_plus($('email').value);
AjaxAsyncPostRequest(document.location, "email=" + eemail, sendEmailCB);
}
function sendEmailCB(response) {
// TODO: Before email is set, notifications are disabled
LogResponse(response);
reloadNotifications();
}
function sendSendPasswordMail() {
AjaxAsyncPostLog(document.location, "sendPasswordMail=1");
}
function sendFullName() {
AjaxAsyncPostLog(document.location, "fullname=" + escape_plus($('fullname').value));
}
// global variable to temporarily store the password to be verified
var enteredPassword;
function verifyPassword() {
var input = $('password');
enteredPassword = input.value;
if (!enteredPassword) {
Log('Please enter a password before pressing "save password"', 'error');
return false;
}
// Some cleaning up.
input.value = '';
this.firstChild.innerHTML = 'Verify';
// change the onclick handler
input.focus();
addClassName(input, 'hint_bg');
$('password_button').parentNode.onsubmit = checkPasswords;
Log('Please verify your password', 'hint');
return false;
}
function | () {
// this function is called when the checkbox is already changed.
// the checkbox reflects the desired (new) value. Don't negate!
var newvalue = $('is_admin').checked;
AjaxAsyncPostRequest(document.location, "setIsAdmin=" + newvalue, setIsAdminCB);
}
function setIsAdminCB(response) {
LogResponse(response);
var setIsAdmin = FindResponse(response, 'setIsAdmin');
if (!setIsAdmin.success) {
$('is_admin').checked = ! $('is_admin').checked;
}
}
function checkPasswords() {
// Again some cleaning up.
this.firstChild.innerHTML = 'Password';
// Make sure the change-button isn't highlighted anymore
this.blur();
// change the onclick handler back to the verifyPassword function
$('password_button').parentNode.onsubmit = verifyPassword;
var input = $('password');
// Do the check after cleaning up.
if (input.value != enteredPassword) {
Log('Verification of password failed', 'error');
enteredPassword = '';
input.value = '';
return false;
}
removeClassName(input, 'hint_bg');
input.value = '';
// Send the entered password to the server.
sendPassword(enteredPassword);
enteredPassword = '';
var change_password_hint = $('change_password_hint');
if (change_password_hint) {
change_password_hint.style.display = 'none';
}
return false;
}
function sendPassword(password) {
AjaxAsyncPostLog(document.location, "password=" + escape_plus(password));
}
/*
* Adding users to groups and removing them from groups
*/
function addMemberToGroupAjax(group) {
var egroup = escape_plus(group);
return AjaxAsyncPostRequest(document.location, "addToGroup=" + egroup, groupRefreshAndLog);
}
function removeMemberFromGroupAjax(group) {
var egroup = escape_plus(group);
return AjaxAsyncPostRequest(document.location, "removeFromGroup=" + egroup, groupRefreshAndLog);
}
/* Requests the groups via ajax, and forms two lists to be used by Selector */
function initGroups() {
var added = [];
var addable = [];
var response = AjaxSyncPostRequest(document.location, 'listUserGroups');
// log if something went wrong
LogResponse(response);
var usergroups = FindResponse(response, 'listUserGroups');
if (!usergroups)
return {"added": [], "addable": []};
var groups = usergroups.xml.getElementsByTagName("group");
for (var group_idx=0; group_idx < groups.length; ++group_idx) {
var group = groups[group_idx];
if (group.getAttribute("member") == "true")
added[added.length] = group.getAttribute("name");
else
addable[addable.length] = group.getAttribute("name");
}
return {"added": added, "addable": addable};
}
function groupSelectorInit() {
groupSelector = new Selector({
"selectorId": "memberof",
"urlPrefix": base_url + "groups/show/",
"initCallback": initGroups,
"addCallback": addMemberToGroupAjax,
"removeCallback": removeMemberFromGroupAjax,
"canLink": function(user) { return true; },
"canEdit": function() { return is_admin; }
});
}
function enableRepositoryAjax(repository) {
var erepository = escape_plus(repository);
return AjaxAsyncPostRequest(document.location, "setNotification=true&repository=" + erepository);
}
function disableRepositoryAjax(repository) {
var erepository = escape_plus(repository);
return AjaxAsyncPostRequest(document.location, "setNotification=false&repository=" + erepository);
}
function groupRefreshAndLog(response) {
groupSelector.reInit();
LogResponse(response);
}
function reloadNotifications() {
AjaxAsyncPostRequest(document.location, "listNotifications", reloadNotificationsCB);
return false;
}
function reloadNotificationsCB(response) {
list = FindResponse(response, "listNotifications");
LogResponse(response);
var notifications = list.xml.getElementsByTagName("notification");
notifications = Array.prototype.slice.call(notifications, 0);
notifications.sort(xmlSortByName);
var n = [];
for (var i = 0; i < notifications.length; ++i) {
name = notifications[i].getAttribute('name');
vcs = notifications[i].getAttribute('vcs');
enabled = notifications[i].getAttribute('enabled').toLowerCase() == "true";
n[n.length] = {"name": name, "vcs": vcs, "enabled": enabled};
}
redrawNotifications(n);
}
function redrawNotifications(notifications) {
var table = document.getElementById('notifications');
var email = document.getElementById('email');
email = email.value;
var tbodies = table.getElementsByTagName('tbody');
if (tbodies.length != 1)
return;
var tbody = tbodies[0];
for (var item_idx = tbody.childNodes.length - 1; item_idx > 0; --item_idx)
tbody.removeChild(tbody.childNodes[item_idx]);
for (var i = 0; i < notifications.length; ++i) {
var tr = $c("tr");
var td_name = $c("td");
var reposname = notifications[i].name;
var vcs = notifications[i].vcs;
var a_name = $c("a");
a_name.appendChild(document.createTextNode(reposname));
a_name.setAttribute("href", base_url + "repositories/show/" + vcs + "/" + reposname);
td_name.appendChild(a_name);
tr.appendChild(td_name);
// We need to store the repository type somewhere, we use a custom attribute
tr.setAttribute("data-vcstype", vcs);
var input = $c("input", {type: "checkbox"});
input.value = reposname + "_" + vcs + "_enabled";
input.checked = notifications[i].enabled;
input.defaultChecked = input.checked; // IE7 quirk
if (!email || email == "") {
input.disabled = "disabled";
input.title = "Please fill in an email address to enable this control";
input.setAttribute("class", "disabled")
}
var td_enabled = $c("td");
td_enabled.appendChild(input);
tr.appendChild(td_enabled);
tbody.appendChild(tr);
}
}
function saveNotifications() {
var table = document.getElementById('notifications');
var tbodies = table.getElementsByTagName('tbody');
if (tbodies.length != 1)
return;
var tbody = tbodies[0];
var str = "";
for (var item_idx = tbody.childNodes.length - 1; item_idx > 0; --item_idx) {
// every childnode is a tr. Layout as follows:
// <tr><td><a href="...">repository name</a></td><td>enabled</td></tr>
var tr = tbody.childNodes[item_idx];
var name = tr.childNodes[0].childNodes[0].innerHTML;
var vcstype = tr.getAttribute("data-vcstype");
var enabled = tr.childNodes[1].childNodes[0].checked;
if (str != "")
str += "|";
var ename = escape_plus(name);
str += vcstype + ":" + ename + "," + enabled;
}
// XXX maybe we should use XML here, or even better: JSON
AjaxAsyncPostRequest(document.location, "saveNotifications=" + str, saveNotificationsCB);
return false;
}
function saveNotificationsCB(response) {
list = FindResponse(response, "saveNotifications");
LogResponse(response);
reloadNotifications();
}
function toggleSSHKeyAddForm() {
var ssh_key_parent = $('ssh_key_add_link').parentNode;
var ssh_key_add_form = $('ssh_key_add_form');
ssh_key_parent.style.display = ssh_key_parent.style.display == "none" ? "block" : "none";
ssh_key_add_form.style.display = ssh_key_add_form.style.display == "none" ? "block" : "none";
if (ssh_key_add_form.style.display != "none")
$("ssh_key").focus();
return false;
}
function reloadSSHKeys() {
AjaxAsyncPostRequest(document.location, "listSSHKeys", reloadSSHKeysCB);
return false;
}
function reloadSSHKeysCB(response) {
list = FindResponse(response, "listSSHKeys");
LogResponse(response);
var ssh_keys = list.xml.getElementsByTagName("ssh_key");
var n = [];
for (var i = 0; i < ssh_keys.length; ++i) {
id = ssh_keys[i].getAttribute("id");
title = ssh_keys[i].getAttribute("title");
key = ssh_keys[i].getAttribute("key");
n[n.length] = {"id": id, "title": title, "key": key};
}
redrawSSHKeys(n);
}
function redrawSSHKeys(ssh_keys) {
var list = document.getElementById('ssh_keys');
for (var item_idx = list.childNodes.length - 1; item_idx > 0; --item_idx)
list.removeChild(list.childNodes[item_idx]);
for (var i = 0; i < ssh_keys.length; ++i) {
var li = $c("li");
li.ssh_key_id = ssh_keys[i].id;
var a = $c("a", {href: "#"});
a.appendChild(document.createTextNode(ssh_keys[i].title));
a.ssh_key = ssh_keys[i].key;
a.ssh_title = ssh_keys[i].title;
a.onclick = function() {
alert("SSH Key `" + this.ssh_title + "':\n" + this.ssh_key);
return false;
}
li.appendChild(a);
var minus = $c("div", {className: "remover c_icon minus"});
minus.setAttribute("title", "Delete " + ssh_keys[i].title);
minus.onclick = removeSSHKey;
li.appendChild(minus);
list.appendChild(li);
}
}
function addSSHKey() {
post_vars = "addSSHKey=true&title=" + escape_plus($("title").value);
post_vars += "&ssh_key=" + escape_plus($("ssh_key").value);
AjaxAsyncPostRequest(document.location, post_vars, addSSHKeyCB);
}
function addSSHKeyCB(response) {
LogResponse(response);
if (response[0].success)
{
toggleSSHKeyAddForm();
$("title").value = "";
$("ssh_key").value = "";
reloadSSHKeys();
}
}
function removeSSHKey() {
AjaxAsyncPostRequest(document.location,
"removeSSHKey=" + this.parentNode.ssh_key_id,
removeSSHKeyCB);
}
function removeSSHKeyCB(response) {
LogResponse(response);
reloadSSHKeys();
}
| setIsAdmin | identifier_name |
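// --- Editor's sketch (not a dataset row): the optimistic checkbox pattern from
// setIsAdmin()/setIsAdminCB() above. The checkbox keeps its new value while the
// request is in flight and is flipped back only if the server reports failure.
function applyAdminToggle(checkbox, response) {
    if (!response.success) {
        checkbox.checked = !checkbox.checked; // roll back the optimistic change
    }
}
var box = {checked: true};             // stand-in for $('is_admin')
applyAdminToggle(box, {success: false});
console.assert(box.checked === false); // rolled back on failure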
users.js | // needs dom.js, selector.js and ajax.js (all included in main)
var groupSelector = null;
// Using window.onload because an onclick="..." handler doesn't give the
// handler a this-variable
var old_load = window.onload;
window.onload = function() {
if (old_load) old_load();
$('password_button').parentNode.onsubmit = verifyPassword;
$('email').focus();
$('is_admin').onclick = setIsAdmin;
var ssh_key_add_link = $('ssh_key_add_link');
if (ssh_key_add_link) {
ssh_key_add_link.onclick = toggleSSHKeyAddForm;
}
var content = document.getElementById('content');
setupCollapsables(content, "usershowhide", users_collapse, users_expand);
// Initialize the select-dropdowns
groupSelectorInit();
reloadNotifications();
if (ssh_key_add_link) {
reloadSSHKeys();
}
$('savenotifications').parentNode.onsubmit = saveNotifications;
}
function users_collapse(me) {
}
| AjaxAsyncPostRequest(document.location, "email=" + eemail, sendEmailCB);
}
function sendEmailCB(response) {
// TODO: Before email is set, notifications are disabled
LogResponse(response);
reloadNotifications();
}
function sendSendPasswordMail() {
AjaxAsyncPostLog(document.location, "sendPasswordMail=1");
}
function sendFullName() {
AjaxAsyncPostLog(document.location, "fullname=" + escape_plus($('fullname').value));
}
// global variable to temporarily store the password to be verified
var enteredPassword;
function verifyPassword() {
var input = $('password');
enteredPassword = input.value;
if (!enteredPassword) {
Log('Please enter a password before pressing "save password"', 'error');
return false;
}
// Some cleaning up.
input.value = '';
this.firstChild.innerHTML = 'Verify';
// change the onclick handler
input.focus();
addClassName(input, 'hint_bg');
$('password_button').parentNode.onsubmit = checkPasswords;
Log('Please verify your password', 'hint');
return false;
}
function setIsAdmin() {
// this function is called when the checkbox is already changed.
// the checkbox reflects the desired (new) value. Don't negate!
var newvalue = $('is_admin').checked;
AjaxAsyncPostRequest(document.location, "setIsAdmin=" + newvalue, setIsAdminCB);
}
function setIsAdminCB(response) {
LogResponse(response);
var setIsAdmin = FindResponse(response, 'setIsAdmin');
if (!setIsAdmin.success) {
$('is_admin').checked = ! $('is_admin').checked;
}
}
function checkPasswords() {
// Again some cleaning up.
this.firstChild.innerHTML = 'Password';
// Make sure the change-button isn't highlighted anymore
this.blur();
// change the onclick handler back to the verifyPassword function
$('password_button').parentNode.onsubmit = verifyPassword;
var input = $('password');
// Do the check after cleaning up.
if (input.value != enteredPassword) {
Log('Verification of password failed', 'error');
enteredPassword = '';
input.value = '';
return false;
}
removeClassName(input, 'hint_bg');
input.value = '';
// Send the entered password to the server.
sendPassword(enteredPassword);
enteredPassword = '';
var change_password_hint = $('change_password_hint');
if (change_password_hint) {
change_password_hint.style.display = 'none';
}
return false;
}
function sendPassword(password) {
AjaxAsyncPostLog(document.location, "password=" + escape_plus(password));
}
/*
* Adding users to groups and removing them from groups
*/
function addMemberToGroupAjax(group) {
var egroup = escape_plus(group);
return AjaxAsyncPostRequest(document.location, "addToGroup=" + egroup, groupRefreshAndLog);
}
function removeMemberFromGroupAjax(group) {
var egroup = escape_plus(group);
return AjaxAsyncPostRequest(document.location, "removeFromGroup=" + egroup, groupRefreshAndLog);
}
/* Requests the groups via ajax, and forms two lists to be used by Selector */
function initGroups() {
var added = [];
var addable = [];
var response = AjaxSyncPostRequest(document.location, 'listUserGroups');
// log if something went wrong
LogResponse(response);
var usergroups = FindResponse(response, 'listUserGroups');
if (!usergroups)
return {"added": [], "addable": []};
var groups = usergroups.xml.getElementsByTagName("group");
for (var group_idx=0; group_idx < groups.length; ++group_idx) {
var group = groups[group_idx];
if (group.getAttribute("member") == "true")
added[added.length] = group.getAttribute("name");
else
addable[addable.length] = group.getAttribute("name");
}
return {"added": added, "addable": addable};
}
function groupSelectorInit() {
groupSelector = new Selector({
"selectorId": "memberof",
"urlPrefix": base_url + "groups/show/",
"initCallback": initGroups,
"addCallback": addMemberToGroupAjax,
"removeCallback": removeMemberFromGroupAjax,
"canLink": function(user) { return true; },
"canEdit": function() { return is_admin; }
});
}
function enableRepositoryAjax(repository) {
var erepository = escape_plus(repository);
return AjaxAsyncPostRequest(document.location, "setNotification=true&repository=" + erepository);
}
function disableRepositoryAjax(repository) {
var erepository = escape_plus(repository);
return AjaxAsyncPostRequest(document.location, "setNotification=false&repository=" + erepository);
}
function groupRefreshAndLog(response) {
groupSelector.reInit();
LogResponse(response);
}
function reloadNotifications() {
AjaxAsyncPostRequest(document.location, "listNotifications", reloadNotificationsCB);
return false;
}
function reloadNotificationsCB(response) {
list = FindResponse(response, "listNotifications");
LogResponse(response);
var notifications = list.xml.getElementsByTagName("notification");
notifications = Array.prototype.slice.call(notifications, 0);
notifications.sort(xmlSortByName);
var n = [];
for (var i = 0; i < notifications.length; ++i) {
name = notifications[i].getAttribute('name');
vcs = notifications[i].getAttribute('vcs');
enabled = notifications[i].getAttribute('enabled').toLowerCase() == "true";
n[n.length] = {"name": name, "vcs": vcs, "enabled": enabled};
}
redrawNotifications(n);
}
function redrawNotifications(notifications) {
var table = document.getElementById('notifications');
var email = document.getElementById('email');
email = email.value;
var tbodies = table.getElementsByTagName('tbody');
if (tbodies.length != 1)
return;
var tbody = tbodies[0];
for (var item_idx = tbody.childNodes.length - 1; item_idx > 0; --item_idx)
tbody.removeChild(tbody.childNodes[item_idx]);
for (var i = 0; i < notifications.length; ++i) {
var tr = $c("tr");
var td_name = $c("td");
var reposname = notifications[i].name;
var vcs = notifications[i].vcs;
var a_name = $c("a");
a_name.appendChild(document.createTextNode(reposname));
a_name.setAttribute("href", base_url + "repositories/show/" + vcs + "/" + reposname);
td_name.appendChild(a_name);
tr.appendChild(td_name);
// We need to store the repository type somewhere, we use a custom attribute
tr.setAttribute("data-vcstype", vcs);
var input = $c("input", {type: "checkbox"});
input.value = reposname + "_" + vcs + "_enabled";
input.checked = notifications[i].enabled;
input.defaultChecked = input.checked; // IE7 quirk
if (!email || email == "") {
input.disabled = "disabled";
input.title = "Please fill in an email address to enable this control";
input.setAttribute("class", "disabled")
}
var td_enabled = $c("td");
td_enabled.appendChild(input);
tr.appendChild(td_enabled);
tbody.appendChild(tr);
}
}
function saveNotifications() {
var table = document.getElementById('notifications');
var tbodies = table.getElementsByTagName('tbody');
if (tbodies.length != 1)
return;
var tbody = tbodies[0];
var str = "";
for (var item_idx = tbody.childNodes.length - 1; item_idx > 0; --item_idx) {
// every childnode is a tr. Layout as follows:
// <tr><td><a href="...">repository name</a></td><td>enabled</td></tr>
var tr = tbody.childNodes[item_idx];
var name = tr.childNodes[0].childNodes[0].innerHTML;
var vcstype = tr.getAttribute("data-vcstype");
var enabled = tr.childNodes[1].childNodes[0].checked;
if (str != "")
str += "|";
var ename = escape_plus(name);
str += vcstype + ":" + ename + "," + enabled;
}
// XXX maybe we should use XML here, or even better: JSON
AjaxAsyncPostRequest(document.location, "saveNotifications=" + str, saveNotificationsCB);
return false;
}
function saveNotificationsCB(response) {
list = FindResponse(response, "saveNotifications");
LogResponse(response);
reloadNotifications();
}
function toggleSSHKeyAddForm() {
var ssh_key_parent = $('ssh_key_add_link').parentNode;
var ssh_key_add_form = $('ssh_key_add_form');
ssh_key_parent.style.display = ssh_key_parent.style.display == "none" ? "block" : "none";
ssh_key_add_form.style.display = ssh_key_add_form.style.display == "none" ? "block" : "none";
if (ssh_key_add_form.style.display != "none")
$("ssh_key").focus();
return false;
}
function reloadSSHKeys() {
AjaxAsyncPostRequest(document.location, "listSSHKeys", reloadSSHKeysCB);
return false;
}
function reloadSSHKeysCB(response) {
list = FindResponse(response, "listSSHKeys");
LogResponse(response);
var ssh_keys = list.xml.getElementsByTagName("ssh_key");
var n = [];
for (var i = 0; i < ssh_keys.length; ++i) {
id = ssh_keys[i].getAttribute("id");
title = ssh_keys[i].getAttribute("title");
key = ssh_keys[i].getAttribute("key");
n[n.length] = {"id": id, "title": title, "key": key};
}
redrawSSHKeys(n);
}
function redrawSSHKeys(ssh_keys) {
var list = document.getElementById('ssh_keys');
for (var item_idx = list.childNodes.length - 1; item_idx > 0; --item_idx)
list.removeChild(list.childNodes[item_idx]);
for (var i = 0; i < ssh_keys.length; ++i) {
var li = $c("li");
li.ssh_key_id = ssh_keys[i].id;
var a = $c("a", {href: "#"});
a.appendChild(document.createTextNode(ssh_keys[i].title));
a.ssh_key = ssh_keys[i].key;
a.ssh_title = ssh_keys[i].title;
a.onclick = function() {
alert("SSH Key `" + this.ssh_title + "':\n" + this.ssh_key);
return false;
}
li.appendChild(a);
var minus = $c("div", {className: "remover c_icon minus"});
minus.setAttribute("title", "Delete " + ssh_keys[i].title);
minus.onclick = removeSSHKey;
li.appendChild(minus);
list.appendChild(li);
}
}
function addSSHKey() {
post_vars = "addSSHKey=true&title=" + escape_plus($("title").value);
post_vars += "&ssh_key=" + escape_plus($("ssh_key").value);
AjaxAsyncPostRequest(document.location, post_vars, addSSHKeyCB);
}
function addSSHKeyCB(response) {
LogResponse(response);
if (response[0].success)
{
toggleSSHKeyAddForm();
$("title").value = "";
$("ssh_key").value = "";
reloadSSHKeys();
}
}
function removeSSHKey() {
AjaxAsyncPostRequest(document.location,
"removeSSHKey=" + this.parentNode.ssh_key_id,
removeSSHKeyCB);
}
function removeSSHKeyCB(response) {
LogResponse(response);
reloadSSHKeys();
} | function users_expand(me) {
}
function sendEmail() {
var eemail = escape_plus($('email').value); | random_line_split |
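// --- Editor's sketch (not a dataset row): the wire format built by
// saveNotifications() above: "vcs:name,enabled" entries joined with "|", with the
// repository name escaped. escape_plus is assumed to behave like
// encodeURIComponent, which stands in for it here.
function serializeNotifications(rows) {
    return rows.map(function(row) {
        return row.vcs + ":" + encodeURIComponent(row.name) + "," + row.enabled;
    }).join("|");
}
console.assert(
    serializeNotifications([{vcs: "svn", name: "my repo", enabled: true}]) ===
    "svn:my%20repo,true");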
MobileChartDashboardItemConfigGenerator.Mobile.js | /**
* @class Terrasoft.configuration.ChartDashboardItemConfigGenerator
* Config generator of chart dashboard item.
*/
Ext.define("Terrasoft.configuration.ChartDashboardItemConfigGenerator", {
extend: "Terrasoft.BaseDashboardItemConfigGenerator",
alternateClassName: "Terrasoft.ChartDashboardItemConfigGenerator",
//region Properties: Private
/**
* @private
*/
chartConfig: null,
/**
* @private
*/
chartData: null,
//endregion
//region Methods: Private
/**
* @private
*/
getGaugeChartPlotBandsConfig: function(chartConfig) {
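// Editor's note: orderDirection === 2 is assumed to correspond to descending
// order (cf. Terrasoft.OrderTypes.DESC in sortChartData below); it swaps the
// min/max band colors so the "good" end of the gauge scale is inverted.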
return [
{
from: chartConfig.min,
to: chartConfig.middleFrom,
color: chartConfig.orderDirection === 2 ?
Terrasoft.DashboardGaugeScaleColor.max : Terrasoft.DashboardGaugeScaleColor.min
},
{
from: chartConfig.middleFrom,
to: chartConfig.middleTo,
color: Terrasoft.DashboardGaugeScaleColor.middle
},
{
from: chartConfig.middleTo,
to: chartConfig.max,
color: chartConfig.orderDirection === 2 ?
Terrasoft.DashboardGaugeScaleColor.min : Terrasoft.DashboardGaugeScaleColor.max
}
];
},
/**
* @private
*/
applyGaugeChartConfig: function(config) {
var chartConfig = this.chartConfig;
var plotBands = this.getGaugeChartPlotBandsConfig(chartConfig);
var dataLabelColor;
var dataValue = config.series[0].data[0];
for (var i = 0, ln = plotBands.length; i < ln; i++) {
var plotBand = plotBands[i];
if ((dataValue >= plotBand.from && dataValue <= plotBand.to) ||
(plotBand.to === chartConfig.max && dataValue > plotBand.to) ||
(plotBand.from === chartConfig.min && dataValue < plotBand.from)) {
dataLabelColor = plotBand.color;
break;
}
}
Ext.merge(config, {
plotOptions: {
series: {
dataLabels: {
style: {
color: dataLabelColor
}
}
}
},
yAxis: {
min: chartConfig.min,
max: chartConfig.max,
tickPositions: [chartConfig.min, chartConfig.middleFrom, chartConfig.middleTo, chartConfig.max],
plotBands: plotBands
}
});
},
/**
* @private
*/
getSortableValueByItem: function(item, config) {
var sortableValue = null;
if (config.orderBy === Terrasoft.DashboardOrderBy.GroupByField) {
var formatConfig = config.formatConfig;
if (formatConfig && formatConfig.dateTime) {
var date = this.getDateByFormat(formatConfig.dateTime, item.xAxis);
sortableValue = date && date.getTime();
} else {
sortableValue = this.getSortableValue(item.xAxis, config.xAxisType);
}
} else {
sortableValue = this.getSortableValue(item.yAxis[config.orderBySerieNumber], config.yAxisType);
}
return sortableValue;
},
/**
* @private
*/
sortChartData: function(chartData, config) {
var coefficient = (config.orderDirection === Terrasoft.OrderTypes.DESC) ? -1 : 1;
chartData.sort(function(itemA, itemB) {
var valueA = this.getSortableValueByItem(itemA, config);
var valueB = this.getSortableValueByItem(itemB, config);
if (valueA === valueB) {
return 0;
} else if (valueB === null) {
return -1;
} else if (valueA === null) {
return 1;
} else if (valueA > valueB) {
return coefficient;
} else if (valueA < valueB) {
return -coefficient;
} else {
return 0;
}
}.bind(this));
},
/**
* Builds a Date from grouped date-part values according to the semicolon-separated format string (e.g. "Year;Month;Day").
* @private
*/
getDateByFormat: function(datetimeFormat, value) {
var formats = datetimeFormat.split(";");
var year = 0;
var month = 0;
var day = 1;
var hour = 0;
var extraDays = 0;
for (var i = 0, ln = formats.length; i < ln; i++) {
var format = formats[i];
var valueByFormat = parseInt(value[format], 10);
if (!Ext.isNumber(valueByFormat)) {
return null;
}
switch (format) {
case "Year":
year = valueByFormat;
break;
case "Month":
month = valueByFormat - 1;
break;
case "Day":
day = valueByFormat;
break;
case "Hour":
hour = valueByFormat;
break;
case "Week":
extraDays += 7 * valueByFormat;
break;
default:
return null;
}
}
return new Date(year, month, day + extraDays, hour, 0);
},
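// Illustrative call (hypothetical input): getDateByFormat("Year;Month",
// {Year: "2016", Month: "3"}) returns new Date(2016, 2, 1, 0, 0) -- the month
// is 1-based in the data but 0-based in the Date constructor, and a "Week"
// part contributes 7 * value extra days.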
/**
* @private
*/
getSerieData: function(config) {
var chartConfig = config.chartConfig;
var serieConfig = config.serieConfig;
var chartData = config.chartData;
var serieData = {
type: serieConfig.type,
name: (serieConfig.yAxis && serieConfig.yAxis.caption) ||
chartConfig.yAxisDefaultCaption || serieConfig.schemaCaption,
color: Terrasoft.DashboardStyleColor[serieConfig.style],
data: []
};
if (Ext.isObject(serieConfig.xAxis)) {
this.sortChartData(chartData, {
orderDirection: chartConfig.orderDirection,
orderBy: chartConfig.orderBy,
orderBySerieNumber: config.orderBySerieNumber,
xAxisType: serieConfig.xAxis.dataValueType,
yAxisType: serieConfig.yAxis.dataValueType,
formatConfig: {
dateTime: serieConfig.xAxis.dateTimeFormat
}
});
}
for (var j = 0, dataLn = chartData.length; j < dataLn; j++) {
var dataItem = chartData[j];
var dataValue = dataItem;
if (Ext.isObject(dataItem)) {
if (config.serieIndex === 0) {
config.categories.push(this.convertValue(dataItem.xAxis, serieConfig.xAxis.dataValueType,
{dateTime: serieConfig.xAxis.dateTimeFormat}));
}
dataValue = {
name: config.categories[j],
y: parseFloat(dataItem.yAxis[config.serieIndex])
};
}
serieData.data.push(dataValue);
}
return serieData;
},
/**
* @private
*/
convertDateTimeValue: function(value, formatConfig) {
if (formatConfig.dateTime === Terrasoft.DashboardDateTimeFormatType.DayMonth) {
return Ext.Date.format(this.getDateByFormat(formatConfig.dateTime, value),
Terrasoft.CurrentUserInfo.shortMonthDayFormat);
} else if (formatConfig.dateTime === Terrasoft.DashboardDateTimeFormatType.MonthYear) {
return Ext.Date.format(this.getDateByFormat(formatConfig.dateTime, value),
Terrasoft.CurrentUserInfo.shortYearMonthFormat);
} else if (formatConfig.dateTime === Terrasoft.DashboardDateTimeFormatType.DayMonthYear) {
return Terrasoft.String.getTypedValue(this.getDateByFormat(formatConfig.dateTime, value),
Terrasoft.DataValueType.Date);
} else {
var formats = formatConfig.dateTime.split(";");
var result = [];
var valueIsNotEmpty = false;
for (var k = 0, ln = formats.length; k < ln; k++) {
var format = formats[k];
if (!Ext.isEmpty(value[format])) {
valueIsNotEmpty = true;
}
result.push(value[format]);
}
if (valueIsNotEmpty) {
return result.join(Terrasoft.LS.MobileChartDashboardItemDatePartSeparator);
}
}
return null;
},
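// Formats without a dedicated branch (e.g. "Year;Week") fall through to the
// generic join above, so a value like {Year: "2016", Week: "7"} becomes
// "2016<sep>7", where <sep> is the localized
// MobileChartDashboardItemDatePartSeparator resource.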
//endregion
//region Methods: Protected
/**
* Gets default colors for dashboard.
* @protected
* @virtual
*/
getColors: function() {
var colors = [];
for (var colorName in Terrasoft.DashboardStyleColor) {
if (Terrasoft.DashboardStyleColor.hasOwnProperty(colorName)) {
colors.push(Terrasoft.DashboardStyleColor[colorName]);
}
}
return colors;
},
/**
* Gets configuration object for series.
* @protected
* @virtual
* @param {String[]} categories X axis category values (filled by the first serie).
* @returns {Object[]} Array of serie data objects.
*/
getSeriesConfig: function(categories) {
var chartConfig = this.chartConfig;
var chartData = this.chartData;
var seriesData = [];
var seriesConfig = chartConfig.seriesConfig;
var orderBySerieNumber = -1;
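// orderBy for serie-driven sorting is assumed to be serialized as
// "<ChartEntityColumn prefix>:<serieIndex>"; the index after ":" selects
// which serie's values sortChartData orders by.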
if (chartConfig.orderBy &&
Terrasoft.String.startsWith(chartConfig.orderBy, Terrasoft.DashboardOrderBy.ChartEntityColumn)) {
orderBySerieNumber = chartConfig.orderBy.split(":")[1];
}
for (var i = 0, ln = seriesConfig.length; i < ln; i++) {
var serieConfig = seriesConfig[i];
var serieData = this.getSerieData({
chartConfig: chartConfig,
serieConfig: serieConfig,
chartData: chartData,
orderBySerieNumber: orderBySerieNumber,
categories: categories,
serieIndex: i
});
seriesData.push(serieData);
}
return seriesData;
},
/**
* Applies type specific config.
* @protected
* @virtual
* @param {Object} config Chart configuration object.
*/
applyChartConfigByType: function(config) {
var seriesConfig = config.series;
for (var i = seriesConfig.length - 1; i >= 0; i--) {
var serieConfig = seriesConfig[i];
var chartType = serieConfig.type;
switch (chartType) {
case Terrasoft.ChartType.Pipeline:
var chartConfig = {
plotOptions: {},
customConfig: {
legend: {
labelFormat: "{name} ({y})"
}
}
};
chartConfig.plotOptions[chartType] = {
dataLabels: {
format: "{point.name} ({point.y})"
}
};
Ext.merge(config, chartConfig);
break;
case Terrasoft.ChartType.Gauge:
this.applyGaugeChartConfig(config);
break;
default:
break;
}
}
},
/**
* @inheritdoc
* @protected
* @overridden
*/
convertValue: function(value, type, formatConfig) {
var caption = null;
if (formatConfig && formatConfig.dateTime) {
caption = this.convertDateTimeValue(value, formatConfig);
} else {
caption = this.callParent(arguments);
}
if (Ext.isEmpty(caption)) {
caption = Terrasoft.LS.MobileChartDashboardItemEmptyValueText;
}
return caption;
},
//endregion
//region Methods: Public
/**
* Generates chart dashboard item config.
* @param {Object} config Configuration object.
* @param {Object} config.chartConfig Chart config.
* @param {Object} config.chartData Chart data.
* @return {Object} Chart dashboard item config.
*/
generate: function(config) {
var chartConfig = this.chartConfig = config.chartConfig;
this.chartData = config.chartData;
var categories = [];
var mainSerieConfig = chartConfig.seriesConfig[0];
var seriesData = this.getSeriesConfig(categories);
var result = {
colors: this.getColors(),
title: {
text: ""
},
series: seriesData,
xAxis: {
categories: categories,
labels: {
autoRotation: [-10, -20, -30, -40, -50, -60, -70, -80, -90]
},
title: {
text: chartConfig.xAxisDefaultCaption
}
},
yAxis: {
title: {
text: chartConfig.yAxisDefaultCaption || (mainSerieConfig.yAxis && mainSerieConfig.yAxis.caption)
}
}
};
this.applyChartConfigByType(result);
return result;
}
// endregion
});
robots.py
'''
Stuff related to robots.txt processing
'''
import asyncio
import time
import random
import json
import logging
import urllib.parse
import hashlib
import re
import reppy.robots
#import magic
from .urls import URL
from . import stats
from . import fetcher
from . import config
from . import post_fetch
from . import content
LOGGER = logging.getLogger(__name__)
def strip_bom(b):
if b[:3] == b'\xef\xbb\xbf': # utf-8, e.g. microsoft.com's sitemaps
return b[3:]
elif b[:2] in (b'\xfe\xff', b'\xff\xfe'): # utf-16 BE and LE, respectively
return b[2:]
else:
return b
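# Illustrative behavior (made-up inputs):
#   strip_bom(b'\xef\xbb\xbfUser-agent: *') -> b'User-agent: *'
#   strip_bom(b'\xff\xfeU\x00') -> b'U\x00'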
def robots_facets(text, robotname, json_log):
user_agents = re.findall(r'^ \s* User-Agent: \s* (.*) \s* (?:\#.*)?', text, re.X | re.I | re.M)
action_lines = len(re.findall(r'^ \s* (allow|disallow|crawl-delay):', text, re.X | re.I | re.M))
user_agents = list(set([u.lower() for u in user_agents]))
mentions_us = robotname.lower() in user_agents
if mentions_us:
json_log['mentions_us'] = True
if user_agents:
json_log['user_agents'] = len(user_agents)
if action_lines:
json_log['action_lines'] = action_lines
if text:
json_log['size'] = len(text)
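# Illustrative call (made-up body):
#   robots_facets('User-Agent: *\nDisallow: /tmp\n', 'mybot', log)
# sets log['user_agents'] = 1, log['action_lines'] = 1 and log['size'] to the
# body length; 'mentions_us' stays unset because 'mybot' is not listed.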
def is_plausible_robots(body_bytes):
'''
Did you know that some sites have a robots.txt that's a 100 megabyte video file?
checking the file magic mimetype ('text' or similar) would catch that, but it's too expensive at ~3 ms per call
'''
if body_bytes.startswith(b'<'):
return False, 'robots appears to be html or xml'
if len(body_bytes) > 1000000:
return False, 'robots is too big'
return True, ''
class Robots:
def __init__(self, robotname, session, datalayer):
self.robotname = robotname
self.session = session
self.datalayer = datalayer
self.max_tries = config.read('Robots', 'MaxTries')
self.max_robots_page_size = int(config.read('Robots', 'MaxRobotsPageSize'))
self.in_progress = set()
# magic is 3 milliseconds per call, too expensive to use
#self.magic = magic.Magic(flags=magic.MAGIC_MIME_TYPE)
self.robotslog = config.read('Logging', 'Robotslog')
if self.robotslog:
self.robotslogfd = open(self.robotslog, 'a')
else:
self.robotslogfd = None
def __del__(self):
#if self.magic is not None:
# self.magic.close()
if self.robotslogfd:
self.robotslogfd.close()
def check_cached(self, url, quiet=False):
schemenetloc = url.urlsplit.scheme + '://' + url.urlsplit.netloc
try:
robots = self.datalayer.read_robots_cache(schemenetloc)
stats.stats_sum('robots cached_only hit', 1)
except KeyError:
stats.stats_sum('robots cached_only miss', 1)
return True
return self._check(url, schemenetloc, robots, quiet=quiet)
async def check(self, url, dns_entry=None, seed_host=None, crawler=None,
get_kwargs={}):
schemenetloc = url.urlsplit.scheme + '://' + url.urlsplit.netloc
try:
robots = self.datalayer.read_robots_cache(schemenetloc)
stats.stats_sum('robots cache hit', 1)
except KeyError:
robots = await self.fetch_robots(schemenetloc, dns_entry, crawler,
seed_host=seed_host, get_kwargs=get_kwargs)
return self._check(url, schemenetloc, robots)
def _check(self, url, schemenetloc, robots, quiet=False):
if url.urlsplit.path:
pathplus = url.urlsplit.path
else:
pathplus = '/'
if url.urlsplit.query:
pathplus += '?' + url.urlsplit.query
if robots is None:
if quiet:
return 'no robots'
LOGGER.debug('no robots info known for %s, failing %s%s', schemenetloc, schemenetloc, pathplus)
self.jsonlog(schemenetloc, {'error': 'no robots info known', 'action': 'denied'})
stats.stats_sum('robots denied - robots info not known', 1)
stats.stats_sum('robots denied', 1)
return 'no robots'
me = self.robotname
with stats.record_burn('robots is_allowed', url=schemenetloc):
if pathplus.startswith('//') and ':' in pathplus:
pathplus = 'htp://' + pathplus
check = robots.allowed(pathplus, me)
if check:
check = 'allowed'
else:
check = 'denied'
google_check = robots.allowed(pathplus, 'googlebot')
if me != '*':
generic_check = robots.allowed(pathplus, '*')
else:
generic_check = None
if quiet:
return check
# just logging from here on down
if check == 'allowed':
LOGGER.debug('robots allowed for %s%s', schemenetloc, pathplus)
stats.stats_sum('robots allowed', 1)
return check
LOGGER.debug('robots denied for %s%s', schemenetloc, pathplus)
stats.stats_sum('robots denied', 1)
json_log = {'url': pathplus, 'action': 'denied'}
if google_check:
json_log['google_action'] = 'allowed'
stats.stats_sum('robots denied - but googlebot allowed', 1)
if generic_check is not None and generic_check:
json_log['generic_action'] = 'allowed'
stats.stats_sum('robots denied - but * allowed', 1)
self.jsonlog(schemenetloc, json_log)
return check
def _cache_empty_robots(self, schemenetloc, final_schemenetloc):
parsed = reppy.robots.Robots.parse('', '')
self.datalayer.cache_robots(schemenetloc, parsed)
if final_schemenetloc:
self.datalayer.cache_robots(final_schemenetloc, parsed)
self.in_progress.discard(schemenetloc)
return parsed
async def fetch_robots(self, schemenetloc, dns_entry, crawler,
seed_host=None, get_kwargs={}):
'''
https://developers.google.com/search/reference/robots_txt
3xx redir == follow up to 5 hops, then consider it a 404.
4xx errors == no crawl restrictions
5xx errors == full disallow. fast retry if 503.
if site appears to return 5xx for 404, then 5xx is treated as a 404
'''
url = URL(schemenetloc + '/robots.txt')
# We might enter this routine multiple times, so sleep if we aren't the first
if schemenetloc in self.in_progress:
while schemenetloc in self.in_progress:
LOGGER.debug('sleeping because someone beat me to the robots punch')
stats.stats_sum('robots sleep for collision', 1)
with stats.coroutine_state('robots collision sleep'):
interval = random.uniform(0.2, 0.3)
await asyncio.sleep(interval)
# at this point robots might be in the cache... or not.
try:
robots = self.datalayer.read_robots_cache(schemenetloc)
except KeyError:
robots = None
if robots is not None:
return robots
# ok, so it's not in the cache -- and the other guy's fetch failed.
# if we just fell through, there would be a big race.
# treat this as a "no data" failure.
LOGGER.debug('some other fetch of robots has failed.')
stats.stats_sum('robots sleep then cache miss', 1)
return None
self.in_progress.add(schemenetloc)
f = await fetcher.fetch(url, self.session, max_page_size=self.max_robots_page_size,
allow_redirects=True, max_redirects=5, stats_prefix='robots ',
get_kwargs=get_kwargs)
json_log = {'action': 'fetch', 'time': time.time()}
if f.ip is not None:
json_log['ip'] = f.ip
if f.last_exception:
if f.last_exception.startswith('ClientError: TooManyRedirects'):
error = 'got too many redirects, treating as empty robots'
json_log['error'] = error
self.jsonlog(schemenetloc, json_log)
return self._cache_empty_robots(schemenetloc, None)
else:
json_log['error'] = 'max tries exceeded, final exception is: ' + f.last_exception
self.jsonlog(schemenetloc, json_log)
self.in_progress.discard(schemenetloc)
return None
if f.response.history:
redir_history = [str(h.url) for h in f.response.history]
redir_history.append(str(f.response.url))
json_log['redir_history'] = redir_history
stats.stats_sum('robots fetched', 1)
# If the url was redirected to a different host/robots.txt, let's cache that final host too
final_url = str(f.response.url) # YARL object
final_schemenetloc = None
if final_url != url.url:
final_parts = urllib.parse.urlsplit(final_url)
if final_parts.path == '/robots.txt':
final_schemenetloc = final_parts.scheme + '://' + final_parts.netloc
json_log['final_host'] = final_schemenetloc
status = f.response.status
json_log['status'] = status
json_log['t_first_byte'] = f.t_first_byte
if str(status).startswith('3') or str(status).startswith('4'):
if status >= 400:
error = 'got a 4xx, treating as empty robots'
else:
error = 'too many redirects, treating as empty robots'
json_log['error'] = error
self.jsonlog(schemenetloc, json_log)
return self._cache_empty_robots(schemenetloc, final_schemenetloc)
if str(status).startswith('5'):
json_log['error'] = 'got a 5xx, treating as deny' # same as google
self.jsonlog(schemenetloc, json_log)
self.in_progress.discard(schemenetloc)
return None
# we got a 2xx, so let's use the final headers to facet the final server
if dns_entry:
host_geoip = dns_entry[3]
else:
host_geoip = {}
if final_schemenetloc:
# if the hostname is the same and only the scheme is different, that's ok
# TODO: use URL.hostname
if ((final_url.replace('https://', 'http://', 1) != url.url and
final_url.replace('http://', 'https://', 1) != url.url)):
host_geoip = {} # the passed-in one is for the initial server
post_fetch.post_robots_txt(f, final_url, host_geoip, json_log['time'], crawler, seed_host=seed_host)
body_bytes = f.body_bytes
content_encoding = f.response.headers.get('content-encoding', 'identity')
if content_encoding != 'identity':
body_bytes = content.decompress(f.body_bytes, content_encoding, url=final_url)
with stats.record_burn('robots sha1'):
sha1 = 'sha1:' + hashlib.sha1(body_bytes).hexdigest()
json_log['checksum'] = sha1
body_bytes = strip_bom(body_bytes).lstrip()
plausible, message = is_plausible_robots(body_bytes)
if not plausible:
# policy: treat as empty
json_log['error'] = 'saw an implausible robots.txt, treating as empty'
json_log['implausible'] = message
self.jsonlog(schemenetloc, json_log)
return self._cache_empty_robots(schemenetloc, final_schemenetloc)
try:
body = body_bytes.decode(encoding='utf8', errors='replace')
except asyncio.CancelledError:
raise
except Exception as e:
# log as surprising, also treat like a fetch error
json_log['error'] = 'robots body decode threw a surprising exception: ' + repr(e)
self.jsonlog(schemenetloc, json_log)
self.in_progress.discard(schemenetloc)
return None
robots_facets(body, self.robotname, json_log)
with stats.record_burn('robots parse', url=schemenetloc):
robots = reppy.robots.Robots.parse('', body)
with stats.record_burn('robots is_allowed', url=schemenetloc):
check = robots.allowed('/', '*')
if not check:  # reppy's allowed() returns a boolean, not 'allowed'/'denied'
json_log['generic_deny_slash'] = True
check = robots.allowed('/', 'googlebot')
json_log['google_deny_slash'] = not check
self.datalayer.cache_robots(schemenetloc, robots)
self.in_progress.discard(schemenetloc)
if final_schemenetloc:
self.datalayer.cache_robots(final_schemenetloc, robots)
# we did not set this but we'll discard it anyway
self.in_progress.discard(final_schemenetloc)
sitemaps = list(robots.sitemaps)
if sitemaps:
json_log['sitemap_lines'] = len(sitemaps)
self.jsonlog(schemenetloc, json_log)
return robots
def jsonlog(self, schemenetloc, json_log):
if self.robotslogfd:
json_log['host'] = schemenetloc
print(json.dumps(json_log, sort_keys=True), file=self.robotslogfd)
robots.py | '''
Stuff related to robots.txt processing
'''
import asyncio
import time
import random
import json
import logging
import urllib.parse
import hashlib
import re
import reppy.robots
#import magic
from .urls import URL
from . import stats
from . import fetcher
from . import config
from . import post_fetch
from . import content
LOGGER = logging.getLogger(__name__)
def strip_bom(b):
if b[:3] == b'\xef\xbb\xbf': # utf-8, e.g. microsoft.com's sitemaps
return b[3:]
elif b[:2] in (b'\xfe\xff', b'\xff\xfe'): # utf-16 BE and LE, respectively
return b[2:]
else:
return b
def robots_facets(text, robotname, json_log):
user_agents = re.findall(r'^ \s* User-Agent: \s* (.*) \s* (?:\#.*)?', text, re.X | re.I | re.M)
action_lines = len(re.findall(r'^ \s* (allow|disallow|crawl-delay):', text, re.X | re.I | re.M))
user_agents = list(set([u.lower() for u in user_agents]))
mentions_us = robotname.lower() in user_agents
if mentions_us:
json_log['mentions_us'] = True
if user_agents:
json_log['user_agents'] = len(user_agents)
if action_lines:
json_log['action_lines'] = action_lines
if text:
json_log['size'] = len(text)
def is_plausible_robots(body_bytes):
'''
Did you know that some sites have a robots.txt that's a 100 megabyte video file?
file magic mimetype is 'text' or similar -- too expensive, 3ms per call
'''
if body_bytes.startswith(b'<'):
return False, 'robots appears to be html or xml'
if len(body_bytes) > 1000000:
return False, 'robots is too big'
return True, ''
class Robots:
def __init__(self, robotname, session, datalayer):
|
def __del__(self):
#if self.magic is not None:
# self.magic.close()
if self.robotslogfd:
self.robotslogfd.close()
def check_cached(self, url, quiet=False):
schemenetloc = url.urlsplit.scheme + '://' + url.urlsplit.netloc
try:
robots = self.datalayer.read_robots_cache(schemenetloc)
stats.stats_sum('robots cached_only hit', 1)
except KeyError:
stats.stats_sum('robots cached_only miss', 1)
return True
return self._check(url, schemenetloc, robots, quiet=quiet)
async def check(self, url, dns_entry=None, seed_host=None, crawler=None,
get_kwargs={}):
schemenetloc = url.urlsplit.scheme + '://' + url.urlsplit.netloc
try:
robots = self.datalayer.read_robots_cache(schemenetloc)
stats.stats_sum('robots cache hit', 1)
except KeyError:
robots = await self.fetch_robots(schemenetloc, dns_entry, crawler,
seed_host=seed_host, get_kwargs=get_kwargs)
return self._check(url, schemenetloc, robots)
def _check(self, url, schemenetloc, robots, quiet=False):
if url.urlsplit.path:
pathplus = url.urlsplit.path
else:
pathplus = '/'
if url.urlsplit.query:
pathplus += '?' + url.urlsplit.query
if robots is None:
if quiet:
return 'no robots'
LOGGER.debug('no robots info known for %s, failing %s%s', schemenetloc, schemenetloc, pathplus)
self.jsonlog(schemenetloc, {'error': 'no robots info known', 'action': 'denied'})
stats.stats_sum('robots denied - robots info not known', 1)
stats.stats_sum('robots denied', 1)
return 'no robots'
me = self.robotname
with stats.record_burn('robots is_allowed', url=schemenetloc):
if pathplus.startswith('//') and ':' in pathplus:
pathplus = 'htp://' + pathplus
check = robots.allowed(pathplus, me)
if check:
check = 'allowed'
else:
check = 'denied'
google_check = robots.allowed(pathplus, 'googlebot')
if me != '*':
generic_check = robots.allowed(pathplus, '*')
else:
generic_check = None
if quiet:
return check
# just logging from here on down
if check == 'allowed':
LOGGER.debug('robots allowed for %s%s', schemenetloc, pathplus)
stats.stats_sum('robots allowed', 1)
return check
LOGGER.debug('robots denied for %s%s', schemenetloc, pathplus)
stats.stats_sum('robots denied', 1)
json_log = {'url': pathplus, 'action': 'denied'}
if google_check:
json_log['google_action'] = 'allowed'
stats.stats_sum('robots denied - but googlebot allowed', 1)
if generic_check is not None and generic_check:
json_log['generic_action'] = 'allowed'
stats.stats_sum('robots denied - but * allowed', 1)
self.jsonlog(schemenetloc, json_log)
return check
def _cache_empty_robots(self, schemenetloc, final_schemenetloc):
parsed = reppy.robots.Robots.parse('', '')
self.datalayer.cache_robots(schemenetloc, parsed)
if final_schemenetloc:
self.datalayer.cache_robots(final_schemenetloc, parsed)
self.in_progress.discard(schemenetloc)
return parsed
async def fetch_robots(self, schemenetloc, dns_entry, crawler,
seed_host=None, get_kwargs={}):
'''
https://developers.google.com/search/reference/robots_txt
3xx redir == follow up to 5 hops, then consider it a 404.
4xx errors == no crawl restrictions
5xx errors == full disallow. fast retry if 503.
if site appears to return 5xx for 404, then 5xx is treated as a 404
'''
url = URL(schemenetloc + '/robots.txt')
# We might enter this routine multiple times, so, sleep if we aren't the first
if schemenetloc in self.in_progress:
while schemenetloc in self.in_progress:
LOGGER.debug('sleeping because someone beat me to the robots punch')
stats.stats_sum('robots sleep for collision', 1)
with stats.coroutine_state('robots collision sleep'):
interval = random.uniform(0.2, 0.3)
await asyncio.sleep(interval)
# at this point robots might be in the cache... or not.
try:
robots = self.datalayer.read_robots_cache(schemenetloc)
except KeyError:
robots = None
if robots is not None:
return robots
# ok, so it's not in the cache -- and the other guy's fetch failed.
# if we just fell through, there would be a big race.
# treat this as a "no data" failure.
LOGGER.debug('some other fetch of robots has failed.')
stats.stats_sum('robots sleep then cache miss', 1)
return None
self.in_progress.add(schemenetloc)
f = await fetcher.fetch(url, self.session, max_page_size=self.max_robots_page_size,
allow_redirects=True, max_redirects=5, stats_prefix='robots ',
get_kwargs=get_kwargs)
json_log = {'action': 'fetch', 'time': time.time()}
if f.ip is not None:
json_log['ip'] = f.ip
if f.last_exception:
if f.last_exception.startswith('ClientError: TooManyRedirects'):
error = 'got too many redirects, treating as empty robots'
json_log['error'] = error
self.jsonlog(schemenetloc, json_log)
return self._cache_empty_robots(schemenetloc, None)
else:
json_log['error'] = 'max tries exceeded, final exception is: ' + f.last_exception
self.jsonlog(schemenetloc, json_log)
self.in_progress.discard(schemenetloc)
return None
if f.response.history:
redir_history = [str(h.url) for h in f.response.history]
redir_history.append(str(f.response.url))
json_log['redir_history'] = redir_history
stats.stats_sum('robots fetched', 1)
# If the url was redirected to a different host/robots.txt, let's cache that final host too
final_url = str(f.response.url) # YARL object
final_schemenetloc = None
if final_url != url.url:
final_parts = urllib.parse.urlsplit(final_url)
if final_parts.path == '/robots.txt':
final_schemenetloc = final_parts.scheme + '://' + final_parts.netloc
json_log['final_host'] = final_schemenetloc
status = f.response.status
json_log['status'] = status
json_log['t_first_byte'] = f.t_first_byte
if str(status).startswith('3') or str(status).startswith('4'):
if status >= 400:
error = 'got a 4xx, treating as empty robots'
else:
error = 'too many redirects, treating as empty robots'
json_log['error'] = error
self.jsonlog(schemenetloc, json_log)
return self._cache_empty_robots(schemenetloc, final_schemenetloc)
if str(status).startswith('5'):
json_log['error'] = 'got a 5xx, treating as deny' # same as google
self.jsonlog(schemenetloc, json_log)
self.in_progress.discard(schemenetloc)
return None
# we got a 2xx, so let's use the final headers to facet the final server
if dns_entry:
host_geoip = dns_entry[3]
else:
host_geoip = {}
if final_schemenetloc:
# if the hostname is the same and only the scheme is different, that's ok
# TODO: use URL.hostname
if ((final_url.replace('https://', 'http://', 1) != url.url and
final_url.replace('http://', 'https://', 1) != url.url)):
host_geoip = {} # the passed-in one is for the initial server
post_fetch.post_robots_txt(f, final_url, host_geoip, json_log['time'], crawler, seed_host=seed_host)
body_bytes = f.body_bytes
content_encoding = f.response.headers.get('content-encoding', 'identity')
if content_encoding != 'identity':
body_bytes = content.decompress(f.body_bytes, content_encoding, url=final_url)
with stats.record_burn('robots sha1'):
sha1 = 'sha1:' + hashlib.sha1(body_bytes).hexdigest()
json_log['checksum'] = sha1
body_bytes = strip_bom(body_bytes).lstrip()
plausible, message = is_plausible_robots(body_bytes)
if not plausible:
# policy: treat as empty
json_log['error'] = 'saw an implausible robots.txt, treating as empty'
json_log['implausible'] = message
self.jsonlog(schemenetloc, json_log)
return self._cache_empty_robots(schemenetloc, final_schemenetloc)
try:
body = body_bytes.decode(encoding='utf8', errors='replace')
except asyncio.CancelledError:
raise
except Exception as e:
# log as surprising, also treat like a fetch error
json_log['error'] = 'robots body decode threw a surprising exception: ' + repr(e)
self.jsonlog(schemenetloc, json_log)
self.in_progress.discard(schemenetloc)
return None
robots_facets(body, self.robotname, json_log)
with stats.record_burn('robots parse', url=schemenetloc):
robots = reppy.robots.Robots.parse('', body)
with stats.record_burn('robots is_allowed', url=schemenetloc):
check = robots.allowed('/', '*')
if check == 'denied':
json_log['generic_deny_slash'] = True
check = robots.allowed('/', 'googlebot')
json_log['google_deny_slash'] = check == 'denied'
self.datalayer.cache_robots(schemenetloc, robots)
self.in_progress.discard(schemenetloc)
if final_schemenetloc:
self.datalayer.cache_robots(final_schemenetloc, robots)
# we did not set this but we'll discard it anyway
self.in_progress.discard(final_schemenetloc)
sitemaps = list(robots.sitemaps)
if sitemaps:
json_log['sitemap_lines'] = len(sitemaps)
self.jsonlog(schemenetloc, json_log)
return robots
def jsonlog(self, schemenetloc, json_log):
if self.robotslogfd:
json_log['host'] = schemenetloc
print(json.dumps(json_log, sort_keys=True), file=self.robotslogfd)
| self.robotname = robotname
self.session = session
self.datalayer = datalayer
self.max_tries = config.read('Robots', 'MaxTries')
self.max_robots_page_size = int(config.read('Robots', 'MaxRobotsPageSize'))
self.in_progress = set()
# magic is 3 milliseconds per call, too expensive to use
#self.magic = magic.Magic(flags=magic.MAGIC_MIME_TYPE)
self.robotslog = config.read('Logging', 'Robotslog')
if self.robotslog:
self.robotslogfd = open(self.robotslog, 'a')
else:
self.robotslogfd = None
// config_general.go
package chainlink
import (
_ "embed"
"fmt"
"math/big"
"net/url"
"os"
"path/filepath"
"strings"
"sync"
"time"
"github.com/pkg/errors"
"go.uber.org/multierr"
"go.uber.org/zap/zapcore"
ocrnetworking "github.com/smartcontractkit/libocr/networking"
"github.com/smartcontractkit/chainlink/v2/core/chains/cosmos"
evmcfg "github.com/smartcontractkit/chainlink/v2/core/chains/evm/config/toml"
"github.com/smartcontractkit/chainlink/v2/core/chains/solana"
"github.com/smartcontractkit/chainlink/v2/core/chains/starknet"
"github.com/smartcontractkit/chainlink/v2/core/config"
coreconfig "github.com/smartcontractkit/chainlink/v2/core/config"
"github.com/smartcontractkit/chainlink/v2/core/config/env"
"github.com/smartcontractkit/chainlink/v2/core/config/parse"
v2 "github.com/smartcontractkit/chainlink/v2/core/config/toml"
"github.com/smartcontractkit/chainlink/v2/core/services/keystore/keys/p2pkey"
"github.com/smartcontractkit/chainlink/v2/core/store/models"
"github.com/smartcontractkit/chainlink/v2/core/utils"
configutils "github.com/smartcontractkit/chainlink/v2/core/utils/config"
)
// generalConfig is a wrapper to adapt Config to the config.GeneralConfig interface.
type generalConfig struct {
inputTOML string // user input, normalized via de/re-serialization
effectiveTOML string // with default values included
secretsTOML string // with env overrides included, redacted
c *Config // all fields non-nil (unless the legacy method signature returns a pointer)
secrets *Secrets
logLevelDefault zapcore.Level
appIDOnce sync.Once
logMu sync.RWMutex // for the mutable fields Log.Level & Log.SQL
passwordMu sync.RWMutex // passwords are set after initialization
}
// GeneralConfigOpts holds configuration options for creating a coreconfig.GeneralConfig via New().
//
// See ParseTOML to initialize Config and Secrets from TOML.
type GeneralConfigOpts struct {
ConfigStrings []string
SecretsStrings []string
Config
Secrets
// OverrideFn is a *test-only* hook to override effective values.
OverrideFn func(*Config, *Secrets)
SkipEnv bool
}
func (o *GeneralConfigOpts) Setup(configFiles []string, secretsFiles []string) error {
configs := []string{}
for _, fileName := range configFiles {
b, err := os.ReadFile(fileName)
if err != nil {
return errors.Wrapf(err, "failed to read config file: %s", fileName)
}
configs = append(configs, string(b))
}
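// Any TOML supplied via the env.Config environment variable is appended
// after the files, so it takes precedence when the strings are later
// merged by parseConfig's override-on-duplicate semantics.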
if configTOML := env.Config.Get(); configTOML != "" {
configs = append(configs, configTOML)
}
o.ConfigStrings = configs
secrets := []string{}
for _, fileName := range secretsFiles {
b, err := os.ReadFile(fileName)
if err != nil {
return errors.Wrapf(err, "failed to read secrets file: %s", fileName)
}
secrets = append(secrets, string(b))
}
o.SecretsStrings = secrets
return nil
}
// parseConfig sets Config from the given TOML string, overriding any existing duplicate Config fields.
func (o *GeneralConfigOpts) parseConfig(config string) error {
var c Config
if err2 := configutils.DecodeTOML(strings.NewReader(config), &c); err2 != nil {
return fmt.Errorf("failed to decode config TOML: %w", err2)
}
// Overrides duplicate fields
if err4 := o.Config.SetFrom(&c); err4 != nil {
return fmt.Errorf("invalid configuration: %w", err4)
}
return nil
}
// parseSecrets sets Secrets from the given TOML string. Errors on overrides.
func (o *GeneralConfigOpts) parseSecrets(secrets string) error {
var s Secrets
if err2 := configutils.DecodeTOML(strings.NewReader(secrets), &s); err2 != nil {
return fmt.Errorf("failed to decode secrets TOML: %w", err2)
}
// merge fields and err on overrides
if err4 := o.Secrets.SetFrom(&s); err4 != nil {
return fmt.Errorf("invalid secrets: %w", err4)
}
return nil
}
// New returns a coreconfig.GeneralConfig for the given options.
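//
// A minimal usage sketch (file names are illustrative):
//
//	var opts GeneralConfigOpts
//	if err := opts.Setup([]string{"config.toml"}, []string{"secrets.toml"}); err != nil {
//		// handle error
//	}
//	cfg, err := opts.New()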
func (o GeneralConfigOpts) New() (GeneralConfig, error) {
err := o.parse()
if err != nil {
return nil, err
}
input, err := o.Config.TOMLString()
if err != nil {
return nil, err
}
o.Config.setDefaults()
if !o.SkipEnv {
err = o.Secrets.setEnv()
if err != nil {
return nil, err
}
}
if fn := o.OverrideFn; fn != nil {
fn(&o.Config, &o.Secrets)
}
effective, err := o.Config.TOMLString()
if err != nil {
return nil, err
}
secrets, err := o.Secrets.TOMLString()
if err != nil {
return nil, err
}
cfg := &generalConfig{
inputTOML: input,
effectiveTOML: effective,
secretsTOML: secrets,
c: &o.Config,
secrets: &o.Secrets,
}
if lvl := o.Config.Log.Level; lvl != nil {
cfg.logLevelDefault = zapcore.Level(*lvl)
}
return cfg, nil
}
func (o *GeneralConfigOpts) parse() (err error) {
for _, c := range o.ConfigStrings {
err := o.parseConfig(c)
if err != nil {
return err
}
}
for _, s := range o.SecretsStrings {
err := o.parseSecrets(s)
if err != nil {
return err
}
}
o.Secrets.setDefaults()
return
}
func (g *generalConfig) EVMConfigs() evmcfg.EVMConfigs {
return g.c.EVM
}
func (g *generalConfig) CosmosConfigs() cosmos.CosmosConfigs {
return g.c.Cosmos
}
func (g *generalConfig) SolanaConfigs() solana.SolanaConfigs {
return g.c.Solana
}
func (g *generalConfig) StarknetConfigs() starknet.StarknetConfigs {
return g.c.Starknet
}
func (g *generalConfig) Validate() error {
return g.validate(g.secrets.Validate)
}
func (g *generalConfig) validate(secretsValidationFn func() error) error {
err := multierr.Combine(
validateEnv(),
g.c.Validate(),
secretsValidationFn(),
)
_, errList := utils.MultiErrorList(err)
return errList
}
func (g *generalConfig) ValidateDB() error {
return g.validate(g.secrets.ValidateDB)
}
//go:embed legacy.env
var emptyStringsEnv string
// validateEnv returns an error if any legacy environment variables are set, unless a v2 equivalent exists with the same value.
func validateEnv() (err error) {
defer func() {
if err != nil {
_, err = utils.MultiErrorList(err)
err = fmt.Errorf("invalid environment: %w", err)
}
}()
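// Each line of the embedded legacy.env is expected to look like `KEY=`
// (e.g. `ETH_URL=`, an illustrative legacy variable); any such key still
// present in the environment is reported as unsupported below.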
for _, kv := range strings.Split(emptyStringsEnv, "\n") {
if strings.TrimSpace(kv) == "" {
continue
}
i := strings.Index(kv, "=")
if i == -1 {
return errors.Errorf("malformed .env file line: %s", kv)
}
k := kv[:i]
_, ok := os.LookupEnv(k)
if ok {
err = multierr.Append(err, fmt.Errorf("environment variable %s must not be set: %v", k, v2.ErrUnsupported))
}
}
return
}
func (g *generalConfig) LogConfiguration(log coreconfig.LogfFn) {
log("# Secrets:\n%s\n", g.secretsTOML)
log("# Input Configuration:\n%s\n", g.inputTOML)
log("# Effective Configuration, with defaults applied:\n%s\n", g.effectiveTOML)
}
// ConfigTOML implements chainlink.ConfigV2
func (g *generalConfig) ConfigTOML() (user, effective string) {
return g.inputTOML, g.effectiveTOML
}
func (g *generalConfig) Feature() coreconfig.Feature {
return &featureConfig{c: g.c.Feature}
}
func (g *generalConfig) FeatureFeedsManager() bool {
return *g.c.Feature.FeedsManager
}
func (g *generalConfig) OCR() config.OCR {
return &ocrConfig{c: g.c.OCR}
}
func (g *generalConfig) OCR2Enabled() bool {
return *g.c.OCR2.Enabled
}
func (g *generalConfig) FeatureLogPoller() bool {
return *g.c.Feature.LogPoller
}
func (g *generalConfig) FeatureUICSAKeys() bool {
return *g.c.Feature.UICSAKeys
}
func (g *generalConfig) AutoPprof() config.AutoPprof {
return &autoPprofConfig{c: g.c.AutoPprof, rootDir: g.RootDir}
}
func (g *generalConfig) EVMEnabled() bool {
for _, c := range g.c.EVM {
if c.IsEnabled() {
return true
}
}
return false
}
func (g *generalConfig) EVMRPCEnabled() bool {
for _, c := range g.c.EVM {
if c.IsEnabled() {
if len(c.Nodes) > 0 {
return true
}
}
}
return false
}
func (g *generalConfig) DefaultChainID() *big.Int {
for _, c := range g.c.EVM {
if c.IsEnabled() {
return (*big.Int)(c.ChainID)
}
}
return nil
}
func (g *generalConfig) SolanaEnabled() bool {
for _, c := range g.c.Solana {
if c.IsEnabled() {
return true
}
}
return false
}
func (g *generalConfig) CosmosEnabled() bool {
for _, c := range g.c.Cosmos {
if c.IsEnabled() {
return true
}
}
return false
}
func (g *generalConfig) StarkNetEnabled() bool {
for _, c := range g.c.Starknet {
if c.IsEnabled() {
return true
}
}
return false
}
func (g *generalConfig) WebServer() config.WebServer {
return &webServerConfig{c: g.c.WebServer, rootDir: g.RootDir}
}
func (g *generalConfig) AutoPprofBlockProfileRate() int {
return int(*g.c.AutoPprof.BlockProfileRate)
}
func (g *generalConfig) AutoPprofCPUProfileRate() int {
return int(*g.c.AutoPprof.CPUProfileRate)
}
func (g *generalConfig) AutoPprofGatherDuration() models.Duration {
return models.MustMakeDuration(g.c.AutoPprof.GatherDuration.Duration())
}
func (g *generalConfig) AutoPprofGatherTraceDuration() models.Duration {
return models.MustMakeDuration(g.c.AutoPprof.GatherTraceDuration.Duration())
}
func (g *generalConfig) AutoPprofGoroutineThreshold() int {
return int(*g.c.AutoPprof.GoroutineThreshold)
}
func (g *generalConfig) AutoPprofMaxProfileSize() utils.FileSize {
return *g.c.AutoPprof.MaxProfileSize
}
func (g *generalConfig) AutoPprofMemProfileRate() int {
return int(*g.c.AutoPprof.MemProfileRate)
}
func (g *generalConfig) AutoPprofMemThreshold() utils.FileSize {
return *g.c.AutoPprof.MemThreshold
}
func (g *generalConfig) AutoPprofMutexProfileFraction() int {
return int(*g.c.AutoPprof.MutexProfileFraction)
}
func (g *generalConfig) AutoPprofPollInterval() models.Duration {
return *g.c.AutoPprof.PollInterval
}
func (g *generalConfig) AutoPprofProfileRoot() string {
s := *g.c.AutoPprof.ProfileRoot
if s == "" {
s = filepath.Join(g.RootDir(), "pprof")
}
return s
}
func (g *generalConfig) Database() coreconfig.Database {
return &databaseConfig{c: g.c.Database, s: g.secrets.Secrets.Database, logSQL: g.logSQL}
}
func (g *generalConfig) ShutdownGracePeriod() time.Duration {
return g.c.ShutdownGracePeriod.Duration()
}
func (g *generalConfig) Explorer() config.Explorer {
return &explorerConfig{s: g.secrets.Explorer, explorerURL: g.c.ExplorerURL}
}
func (g *generalConfig) ExplorerURL() *url.URL {
u := (*url.URL)(g.c.ExplorerURL)
if *u == zeroURL {
u = nil
}
return u
}
func (g *generalConfig) FluxMonitor() config.FluxMonitor {
return &fluxMonitorConfig{c: g.c.FluxMonitor}
}
func (g *generalConfig) InsecureFastScrypt() bool {
return *g.c.InsecureFastScrypt
}
func (g *generalConfig) JobPipelineReaperInterval() time.Duration {
return g.c.JobPipeline.ReaperInterval.Duration()
}
func (g *generalConfig) JobPipelineResultWriteQueueDepth() uint64 {
return uint64(*g.c.JobPipeline.ResultWriteQueueDepth)
}
func (g *generalConfig) JobPipeline() coreconfig.JobPipeline {
return &jobPipelineConfig{c: g.c.JobPipeline}
}
func (g *generalConfig) Keeper() config.Keeper {
return &keeperConfig{c: g.c.Keeper}
}
func (g *generalConfig) Log() config.Log {
return &logConfig{c: g.c.Log, rootDir: g.RootDir, level: g.logLevel, defaultLevel: g.logLevelDefault}
}
func (g *generalConfig) OCR2() config.OCR2 {
return &ocr2Config{c: g.c.OCR2}
}
func (g *generalConfig) P2P() config.P2P {
return &p2p{c: g.c.P2P}
}
func (g *generalConfig) P2PNetworkingStack() (n ocrnetworking.NetworkingStack) {
return g.c.P2P.NetworkStack()
}
func (g *generalConfig) P2PNetworkingStackRaw() string {
return g.c.P2P.NetworkStack().String()
}
func (g *generalConfig) P2PPeerID() p2pkey.PeerID {
return *g.c.P2P.PeerID
}
func (g *generalConfig) P2PPeerIDRaw() string {
return g.c.P2P.PeerID.String()
}
func (g *generalConfig) P2PIncomingMessageBufferSize() int {
return int(*g.c.P2P.IncomingMessageBufferSize)
}
func (g *generalConfig) P2POutgoingMessageBufferSize() int {
return int(*g.c.P2P.OutgoingMessageBufferSize)
}
func (g *generalConfig) Pyroscope() config.Pyroscope {
return &pyroscopeConfig{c: g.c.Pyroscope, s: g.secrets.Pyroscope}
}
func (g *generalConfig) RootDir() string {
d := *g.c.RootDir
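// e.g. a configured root of "~/.chainlink" (illustrative value) expands to
// an absolute path under the user's home directory.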
h, err := parse.HomeDir(d)
if err != nil {
panic(err) // never happens since we validate that the RootDir is expandable in config.Core.ValidateConfig().
}
return h
}
func (g *generalConfig) TelemetryIngress() coreconfig.TelemetryIngress {
return &telemetryIngressConfig{
c: g.c.TelemetryIngress,
}
}
func (g *generalConfig) AuditLogger() coreconfig.AuditLogger {
return auditLoggerConfig{c: g.c.AuditLogger}
}
func (g *generalConfig) Insecure() config.Insecure {
return &insecureConfig{c: g.c.Insecure}
}
func (g *generalConfig) Sentry() coreconfig.Sentry {
return sentryConfig{g.c.Sentry}
}
func (g *generalConfig) Password() coreconfig.Password {
return &passwordConfig{keystore: g.keystorePassword, vrf: g.vrfPassword}
}
func (g *generalConfig) Prometheus() coreconfig.Prometheus {
return &prometheusConfig{s: g.secrets.Prometheus}
}
func (g *generalConfig) Mercury() coreconfig.Mercury {
return &mercuryConfig{s: g.secrets.Mercury}
}
func (g *generalConfig) Threshold() coreconfig.Threshold {
return &thresholdConfig{s: g.secrets.Threshold}
}
var (
zeroURL = url.URL{}
zeroSha256Hash = models.Sha256Hash{}
)
// mod.rs
//! Entity handling types.
//!
//! An **entity** exclusively owns zero or more [component] instances, all of different types, and can dynamically acquire or lose them over its lifetime.
//!
//! **empty entity**: Entity with zero components.
//! **pending entity**: Entity reserved, but not flushed yet (see [`Entities::flush`] docs for reference).
//! **reserved entity**: same as **pending entity**.
//! **invalid entity**: **pending entity** flushed as invalid (see [`Entities::flush_as_invalid`] docs for reference).
//!
//! See [`Entity`] to learn more.
//!
//! [component]: crate::component::Component
//!
//! # Usage
//!
//! Operations involving entities and their components are performed either from a system by submitting commands,
//! or from the outside (or from an exclusive system) by directly using [`World`] methods:
//!
//! |Operation|Command|Method|
//! |:---:|:---:|:---:|
//! |Spawn an entity with components|[`Commands::spawn`]|[`World::spawn`]|
//! |Spawn an entity without components|[`Commands::spawn_empty`]|[`World::spawn_empty`]|
//! |Despawn an entity|[`EntityCommands::despawn`]|[`World::despawn`]|
//! |Insert a component, bundle, or tuple of components and bundles to an entity|[`EntityCommands::insert`]|[`EntityWorldMut::insert`]|
//! |Remove a component, bundle, or tuple of components and bundles from an entity|[`EntityCommands::remove`]|[`EntityWorldMut::remove`]|
//!
//! [`World`]: crate::world::World
//! [`Commands::spawn`]: crate::system::Commands::spawn
//! [`Commands::spawn_empty`]: crate::system::Commands::spawn_empty
//! [`EntityCommands::despawn`]: crate::system::EntityCommands::despawn
//! [`EntityCommands::insert`]: crate::system::EntityCommands::insert
//! [`EntityCommands::remove`]: crate::system::EntityCommands::remove
//! [`World::spawn`]: crate::world::World::spawn
//! [`World::spawn_empty`]: crate::world::World::spawn_empty
//! [`World::despawn`]: crate::world::World::despawn
//! [`EntityWorldMut::insert`]: crate::world::EntityWorldMut::insert
//! [`EntityWorldMut::remove`]: crate::world::EntityWorldMut::remove
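//!
//! A minimal sketch of the command-based flow above (the component type is illustrative):
//!
//! ```
//! # use bevy_ecs::prelude::*;
//! # #[derive(Component)]
//! # struct Health(u32);
//! fn manage(mut commands: Commands) {
//!     // Spawn an entity with a component and keep its id.
//!     let entity = commands.spawn(Health(10)).id();
//!     // Replace the component, then despawn the entity.
//!     commands.entity(entity).insert(Health(20));
//!     commands.entity(entity).despawn();
//! }
//! # bevy_ecs::system::assert_is_system(manage);
//! ```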
mod map_entities;
pub use map_entities::*;
use crate::{
archetype::{ArchetypeId, ArchetypeRow},
storage::{SparseSetIndex, TableId, TableRow},
};
use serde::{Deserialize, Serialize};
use std::{convert::TryFrom, fmt, mem, sync::atomic::Ordering};
#[cfg(target_has_atomic = "64")]
use std::sync::atomic::AtomicI64 as AtomicIdCursor;
#[cfg(target_has_atomic = "64")]
type IdCursor = i64;
/// Most modern platforms support 64-bit atomics, but some less-common platforms
/// do not. This fallback allows compilation using a 32-bit cursor instead, with
/// the caveat that some conversions may fail (and panic) at runtime.
#[cfg(not(target_has_atomic = "64"))]
use std::sync::atomic::AtomicIsize as AtomicIdCursor;
#[cfg(not(target_has_atomic = "64"))]
type IdCursor = isize;
/// Lightweight identifier of an [entity](crate::entity).
///
/// The identifier is implemented using a [generational index]: a combination of an index and a generation.
/// This allows fast insertion after data removal in an array while minimizing loss of spatial locality.
///
/// These identifiers are only valid on the [`World`] they're sourced from. Attempting to use an `Entity` to
/// fetch entity components or metadata from a different world will either fail or return unexpected results.
///
/// [generational index]: https://lucassardois.medium.com/generational-indices-guide-8e3c5f7fd594
///
/// # Usage
///
/// This data type is returned by iterating a `Query` that has `Entity` as part of its query fetch type parameter ([learn more]).
/// It can also be obtained by calling [`EntityCommands::id`] or [`EntityWorldMut::id`].
///
/// ```
/// # use bevy_ecs::prelude::*;
/// # #[derive(Component)]
/// # struct SomeComponent;
/// fn setup(mut commands: Commands) {
/// // Calling `spawn` returns `EntityCommands`.
/// let entity = commands.spawn(SomeComponent).id();
/// }
///
/// fn exclusive_system(world: &mut World) {
/// // Calling `spawn` returns `EntityWorldMut`.
/// let entity = world.spawn(SomeComponent).id();
/// }
/// #
/// # bevy_ecs::system::assert_is_system(setup);
/// # bevy_ecs::system::assert_is_system(exclusive_system);
/// ```
///
/// It can be used to refer to a specific entity to apply [`EntityCommands`], or to call [`Query::get`] (or similar methods) to access its components.
///
/// ```
/// # use bevy_ecs::prelude::*;
/// #
/// # #[derive(Component)]
/// # struct Expired;
/// #
/// fn dispose_expired_food(mut commands: Commands, query: Query<Entity, With<Expired>>) {
/// for food_entity in &query {
/// commands.entity(food_entity).despawn();
/// }
/// }
/// #
/// # bevy_ecs::system::assert_is_system(dispose_expired_food);
/// ```
///
/// [learn more]: crate::system::Query#entity-id-access
/// [`EntityCommands::id`]: crate::system::EntityCommands::id
/// [`EntityWorldMut::id`]: crate::world::EntityWorldMut::id
/// [`EntityCommands`]: crate::system::EntityCommands
/// [`Query::get`]: crate::system::Query::get
/// [`World`]: crate::world::World
#[derive(Clone, Copy, Eq, Hash, Ord, PartialEq, PartialOrd)]
pub struct Entity {
generation: u32,
index: u32,
}
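// Layout note: `to_bits` (below) packs these two fields into a single u64 as
// (generation << 32) | index, and `from_bits` reverses that packing.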
pub(crate) enum AllocAtWithoutReplacement {
Exists(EntityLocation),
DidNotExist,
ExistsWithWrongGeneration,
}
impl Entity {
#[cfg(test)]
pub(crate) const fn new(index: u32, generation: u32) -> Entity {
Entity { index, generation }
}
/// An entity ID with a placeholder value. This may or may not correspond to an actual entity,
/// and should be overwritten by a new value before being used.
///
/// ## Examples
///
/// Initializing a collection (e.g. `array` or `Vec`) with a known size:
///
/// ```no_run
/// # use bevy_ecs::prelude::*;
/// // Create a new array of size 10 filled with invalid entity ids.
/// let mut entities: [Entity; 10] = [Entity::PLACEHOLDER; 10];
///
/// // ... replace the entities with valid ones.
/// ```
///
/// Deriving [`Reflect`](bevy_reflect::Reflect) for a component that has an `Entity` field:
///
/// ```no_run
/// # use bevy_ecs::{prelude::*, component::*};
/// # use bevy_reflect::Reflect;
/// #[derive(Reflect, Component)]
/// #[reflect(Component)]
/// pub struct MyStruct {
/// pub entity: Entity,
/// }
///
/// impl FromWorld for MyStruct {
/// fn from_world(_world: &mut World) -> Self {
/// Self {
/// entity: Entity::PLACEHOLDER,
/// }
/// }
/// }
/// ```
pub const PLACEHOLDER: Self = Self::from_raw(u32::MAX);
/// Creates a new entity ID with the specified `index` and a generation of 0.
///
/// # Note
///
/// Spawning a specific `entity` value is __rarely the right choice__. Most apps should favor
/// [`Commands::spawn`](crate::system::Commands::spawn). This method should generally
/// only be used for sharing entities across apps, and only when they have a scheme
/// worked out to share an index space (which doesn't happen by default).
///
/// In general, one should not try to synchronize the ECS by attempting to ensure that
/// `Entity` lines up between instances, but instead insert a secondary identifier as
/// a component.
pub const fn from_raw(index: u32) -> Entity {
Entity {
index,
generation: 0,
}
}
/// Convert to a form convenient for passing outside of rust.
///
/// Only useful for identifying entities within the same instance of an application. Do not use
/// for serialization between runs.
///
/// No particular structure is guaranteed for the returned bits.
pub const fn to_bits(self) -> u64 {
(self.generation as u64) << 32 | self.index as u64
}
/// Reconstruct an `Entity` previously destructured with [`Entity::to_bits`].
///
/// Only useful when applied to results from `to_bits` in the same instance of an application.
pub const fn from_bits(bits: u64) -> Self {
Self {
generation: (bits >> 32) as u32,
index: bits as u32,
}
}
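    // Worked example (illustrative, not from the upstream docs): an `Entity`
    // with generation = 1 and index = 2 packs to 0x0000_0001_0000_0002, and
    // `from_bits(0x0000_0001_0000_0002)` recovers the same (1, 2) pair.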
/// Return a transiently unique identifier.
///
/// No two simultaneously-live entities share the same index, but dead entities' indices may collide
/// with both live and dead entities. Useful for compactly representing entities within a
/// specific snapshot of the world, such as when serializing.
#[inline]
pub const fn index(self) -> u32 {
self.index
}
/// Returns the generation of this Entity's index. The generation is incremented each time an
/// entity with a given index is despawned. This serves as a "count" of the number of times a
    /// given index has been reused. (index, generation) pairs uniquely identify a given Entity.
#[inline]
pub const fn generation(self) -> u32 {
self.generation
}
}
impl Serialize for Entity {
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
where
S: serde::Serializer,
{
serializer.serialize_u64(self.to_bits())
}
}
impl<'de> Deserialize<'de> for Entity {
fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
where
D: serde::Deserializer<'de>,
{
let id: u64 = serde::de::Deserialize::deserialize(deserializer)?;
Ok(Entity::from_bits(id))
}
}
impl fmt::Debug for Entity {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
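        // Renders as "<index>v<generation>", e.g. `Entity::from_raw(4)` prints as "4v0".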
write!(f, "{}v{}", self.index, self.generation)
}
}
impl SparseSetIndex for Entity {
#[inline]
fn sparse_set_index(&self) -> usize {
self.index() as usize
}
#[inline]
fn get_sparse_set_index(value: usize) -> Self {
Entity::from_raw(value as u32)
}
}
/// An [`Iterator`] returning a sequence of [`Entity`] values from
/// [`Entities::reserve_entities`](crate::entity::Entities::reserve_entities).
pub struct ReserveEntitiesIterator<'a> {
// Metas, so we can recover the current generation for anything in the freelist.
meta: &'a [EntityMeta],
// Reserved indices formerly in the freelist to hand out.
index_iter: std::slice::Iter<'a, u32>,
// New Entity indices to hand out, outside the range of meta.len().
index_range: std::ops::Range<u32>,
}
impl<'a> Iterator for ReserveEntitiesIterator<'a> {
type Item = Entity;
fn next(&mut self) -> Option<Self::Item> {
self.index_iter
.next()
.map(|&index| Entity {
generation: self.meta[index as usize].generation,
index,
})
.or_else(|| {
self.index_range.next().map(|index| Entity {
generation: 0,
index,
})
})
}
fn size_hint(&self) -> (usize, Option<usize>) {
let len = self.index_iter.len() + self.index_range.len();
(len, Some(len))
}
}
impl<'a> core::iter::ExactSizeIterator for ReserveEntitiesIterator<'a> {}
impl<'a> core::iter::FusedIterator for ReserveEntitiesIterator<'a> {}
/// A [`World`]'s internal metadata store on all of its entities.
///
/// Contains metadata on:
/// - The generation of every entity.
/// - The alive/dead status of a particular entity. (i.e. "has entity 3 been despawned?")
/// - The location of the entity's components in memory (via [`EntityLocation`])
///
/// [`World`]: crate::world::World
#[derive(Debug)]
pub struct Entities {
meta: Vec<EntityMeta>,
/// The `pending` and `free_cursor` fields describe three sets of Entity IDs
/// that have been freed or are in the process of being allocated:
///
/// - The `freelist` IDs, previously freed by `free()`. These IDs are available to any of
/// [`alloc`], [`reserve_entity`] or [`reserve_entities`]. Allocation will always prefer
/// these over brand new IDs.
///
/// - The `reserved` list of IDs that were once in the freelist, but got reserved by
/// [`reserve_entities`] or [`reserve_entity`]. They are now waiting for [`flush`] to make them
/// fully allocated.
///
/// - The count of new IDs that do not yet exist in `self.meta`, but which we have handed out
/// and reserved. [`flush`] will allocate room for them in `self.meta`.
///
/// The contents of `pending` look like this:
///
/// ```txt
/// ----------------------------
/// | freelist | reserved |
/// ----------------------------
/// ^ ^
/// free_cursor pending.len()
/// ```
///
/// As IDs are allocated, `free_cursor` is atomically decremented, moving
/// items from the freelist into the reserved list by sliding over the boundary.
///
/// Once the freelist runs out, `free_cursor` starts going negative.
/// The more negative it is, the more IDs have been reserved starting exactly at
/// the end of `meta.len()`.
///
/// This formulation allows us to reserve any number of IDs first from the freelist
/// and then from the new IDs, using only a single atomic subtract.
///
/// Once [`flush`] is done, `free_cursor` will equal `pending.len()`.
///
/// [`alloc`]: Entities::alloc
/// [`reserve_entity`]: Entities::reserve_entity
/// [`reserve_entities`]: Entities::reserve_entities
/// [`flush`]: Entities::flush
pending: Vec<u32>,
free_cursor: AtomicIdCursor,
    /// Stores the number of currently allocated entities, as returned by [`len`](Entities::len).
len: u32,
}
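// Worked example of the `pending`/`free_cursor` mechanics documented above
// (illustrative numbers only, not derived from any particular workload):
//
//   pending = [5, 9, 2], free_cursor = 3   // three freed IDs, none reserved
//   reserve_entities(2)                    // fetch_sub(2): cursor 3 -> 1;
//                                          // IDs 9 and 2 are now reserved
//   reserve_entities(3)                    // fetch_sub(3): cursor 1 -> -2;
//                                          // ID 5 reserved, plus two brand-new
//                                          // IDs at meta.len() and meta.len()+1
//   flush(...)                             // grows `meta` by two, drains
//                                          // `pending`, and leaves
//                                          // free_cursor == pending.len() == 0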
impl Entities {
pub(crate) const fn new() -> Self {
Entities {
meta: Vec::new(),
pending: Vec::new(),
free_cursor: AtomicIdCursor::new(0),
len: 0,
}
}
/// Reserve entity IDs concurrently.
///
/// Storage for entity generation and location is lazily allocated by calling [`flush`](Entities::flush).
pub fn reserve_entities(&self, count: u32) -> ReserveEntitiesIterator {
// Use one atomic subtract to grab a range of new IDs. The range might be
// entirely nonnegative, meaning all IDs come from the freelist, or entirely
// negative, meaning they are all new IDs to allocate, or a mix of both.
let range_end = self
.free_cursor
// Unwrap: these conversions can only fail on platforms that don't support 64-bit atomics
// and use AtomicIsize instead (see note on `IdCursor`).
.fetch_sub(IdCursor::try_from(count).unwrap(), Ordering::Relaxed);
let range_start = range_end - IdCursor::try_from(count).unwrap();
let freelist_range = range_start.max(0) as usize..range_end.max(0) as usize;
let (new_id_start, new_id_end) = if range_start >= 0 {
// We satisfied all requests from the freelist.
(0, 0)
} else {
// We need to allocate some new Entity IDs outside of the range of self.meta.
//
// `range_start` covers some negative territory, e.g. `-3..6`.
// Since the nonnegative values `0..6` are handled by the freelist, that
// means we need to handle the negative range here.
//
// In this example, we truncate the end to 0, leaving us with `-3..0`.
// Then we negate these values to indicate how far beyond the end of `meta.end()`
// to go, yielding `meta.len()+0 .. meta.len()+3`.
let base = self.meta.len() as IdCursor;
let new_id_end = u32::try_from(base - range_start).expect("too many entities");
// `new_id_end` is in range, so no need to check `start`.
let new_id_start = (base - range_end.min(0)) as u32;
(new_id_start, new_id_end)
};
ReserveEntitiesIterator {
meta: &self.meta[..],
index_iter: self.pending[freelist_range].iter(),
index_range: new_id_start..new_id_end,
}
}
/// Reserve one entity ID concurrently.
///
/// Equivalent to `self.reserve_entities(1).next().unwrap()`, but more efficient.
pub fn reserve_entity(&self) -> Entity {
let n = self.free_cursor.fetch_sub(1, Ordering::Relaxed);
if n > 0 {
// Allocate from the freelist.
let index = self.pending[(n - 1) as usize];
Entity {
generation: self.meta[index as usize].generation,
index,
}
} else {
// Grab a new ID, outside the range of `meta.len()`. `flush()` must
// eventually be called to make it valid.
//
// As `self.free_cursor` goes more and more negative, we return IDs farther
// and farther beyond `meta.len()`.
Entity {
generation: 0,
index: u32::try_from(self.meta.len() as IdCursor - n).expect("too many entities"),
}
}
}
/// Check that we do not have pending work requiring `flush()` to be called.
fn verify_flushed(&mut self) {
debug_assert!(
!self.needs_flush(),
"flush() needs to be called before this operation is legal"
);
}
/// Allocate an entity ID directly.
pub fn alloc(&mut self) -> Entity {
self.verify_flushed();
self.len += 1;
if let Some(index) = self.pending.pop() {
let new_free_cursor = self.pending.len() as IdCursor;
*self.free_cursor.get_mut() = new_free_cursor;
Entity {
generation: self.meta[index as usize].generation,
index,
}
} else {
let index = u32::try_from(self.meta.len()).expect("too many entities");
self.meta.push(EntityMeta::EMPTY);
Entity {
generation: 0,
index,
}
}
}
/// Allocate a specific entity ID, overwriting its generation.
///
/// Returns the location of the entity currently using the given ID, if any. Location should be
/// written immediately.
    pub fn alloc_at(&mut self, entity: Entity) -> Option<EntityLocation> {
self.verify_flushed();
let loc = if entity.index as usize >= self.meta.len() {
self.pending.extend((self.meta.len() as u32)..entity.index);
let new_free_cursor = self.pending.len() as IdCursor;
*self.free_cursor.get_mut() = new_free_cursor;
self.meta
.resize(entity.index as usize + 1, EntityMeta::EMPTY);
self.len += 1;
None
} else if let Some(index) = self.pending.iter().position(|item| *item == entity.index) {
self.pending.swap_remove(index);
let new_free_cursor = self.pending.len() as IdCursor;
*self.free_cursor.get_mut() = new_free_cursor;
self.len += 1;
None
} else {
Some(mem::replace(
&mut self.meta[entity.index as usize].location,
EntityMeta::EMPTY.location,
))
};
self.meta[entity.index as usize].generation = entity.generation;
loc
}
/// Allocate a specific entity ID, overwriting its generation.
///
/// Returns the location of the entity currently using the given ID, if any.
pub(crate) fn alloc_at_without_replacement(
&mut self,
entity: Entity,
) -> AllocAtWithoutReplacement {
self.verify_flushed();
let result = if entity.index as usize >= self.meta.len() {
self.pending.extend((self.meta.len() as u32)..entity.index);
let new_free_cursor = self.pending.len() as IdCursor;
*self.free_cursor.get_mut() = new_free_cursor;
self.meta
.resize(entity.index as usize + 1, EntityMeta::EMPTY);
self.len += 1;
AllocAtWithoutReplacement::DidNotExist
} else if let Some(index) = self.pending.iter().position(|item| *item == entity.index) {
self.pending.swap_remove(index);
let new_free_cursor = self.pending.len() as IdCursor;
*self.free_cursor.get_mut() = new_free_cursor;
self.len += 1;
AllocAtWithoutReplacement::DidNotExist
} else {
let current_meta = &self.meta[entity.index as usize];
if current_meta.location.archetype_id == ArchetypeId::INVALID {
AllocAtWithoutReplacement::DidNotExist
} else if current_meta.generation == entity.generation {
AllocAtWithoutReplacement::Exists(current_meta.location)
} else {
return AllocAtWithoutReplacement::ExistsWithWrongGeneration;
}
};
self.meta[entity.index as usize].generation = entity.generation;
result
}
/// Destroy an entity, allowing it to be reused.
///
/// Must not be called while reserved entities are awaiting `flush()`.
pub fn free(&mut self, entity: Entity) -> Option<EntityLocation> {
self.verify_flushed();
let meta = &mut self.meta[entity.index as usize];
if meta.generation != entity.generation {
return None;
}
meta.generation += 1;
let loc = mem::replace(&mut meta.location, EntityMeta::EMPTY.location);
self.pending.push(entity.index);
let new_free_cursor = self.pending.len() as IdCursor;
*self.free_cursor.get_mut() = new_free_cursor;
self.len -= 1;
Some(loc)
}
    /// Ensure at least `additional` allocations can succeed without reallocating.
pub fn reserve(&mut self, additional: u32) {
self.verify_flushed();
let freelist_size = *self.free_cursor.get_mut();
// Unwrap: these conversions can only fail on platforms that don't support 64-bit atomics
// and use AtomicIsize instead (see note on `IdCursor`).
let shortfall = IdCursor::try_from(additional).unwrap() - freelist_size;
if shortfall > 0 {
self.meta.reserve(shortfall as usize);
}
}
/// Returns true if the [`Entities`] contains [`entity`](Entity).
// This will return false for entities which have been freed, even if
    // not reallocated, since the generation is incremented in `free`.
pub fn contains(&self, entity: Entity) -> bool {
self.resolve_from_id(entity.index())
.map_or(false, |e| e.generation() == entity.generation)
}
/// Clears all [`Entity`] from the World.
pub fn clear(&mut self) {
self.meta.clear();
self.pending.clear();
*self.free_cursor.get_mut() = 0;
self.len = 0;
}
/// Returns the location of an [`Entity`].
    /// Note: for pending entities, whose location is still the invalid placeholder, this returns `None`.
#[inline]
pub fn get(&self, entity: Entity) -> Option<EntityLocation> {
if let Some(meta) = self.meta.get(entity.index as usize) {
if meta.generation != entity.generation
|| meta.location.archetype_id == ArchetypeId::INVALID
{
return None;
}
Some(meta.location)
} else {
None
}
}
/// Updates the location of an [`Entity`]. This must be called when moving the components of
/// the entity around in storage.
///
/// # Safety
/// - `index` must be a valid entity index.
/// - `location` must be valid for the entity at `index` or immediately made valid afterwards
/// before handing control to unknown code.
#[inline]
pub(crate) unsafe fn set(&mut self, index: u32, location: EntityLocation) {
        // SAFETY: Caller guarantees that `index` is a valid entity index
self.meta.get_unchecked_mut(index as usize).location = location;
}
/// Increments the `generation` of a freed [`Entity`]. The next entity ID allocated with this
/// `index` will count `generation` starting from the prior `generation` + the specified
/// value + 1.
///
/// Does nothing if no entity with this `index` has been allocated yet.
pub(crate) fn reserve_generations(&mut self, index: u32, generations: u32) -> bool {
if (index as usize) >= self.meta.len() {
return false;
}
let meta = &mut self.meta[index as usize];
if meta.location.archetype_id == ArchetypeId::INVALID {
meta.generation += generations;
true
} else {
false
}
}
    /// Get the [`Entity`] with a given id, if it exists in this [`Entities`] collection.
    /// Returns `None` if this [`Entity`] is outside the range of currently reserved Entities.
    ///
    /// Note: This method may return [`Entity`] values which are currently free.
    /// Note that [`contains`](Entities::contains) will correctly return false for freed
    /// entities, since it checks the generation.
pub fn resolve_from_id(&self, index: u32) -> Option<Entity> {
let idu = index as usize;
if let Some(&EntityMeta { generation, .. }) = self.meta.get(idu) {
Some(Entity { generation, index })
} else {
// `id` is outside of the meta list - check whether it is reserved but not yet flushed.
let free_cursor = self.free_cursor.load(Ordering::Relaxed);
// If this entity was manually created, then free_cursor might be positive
// Returning None handles that case correctly
let num_pending = usize::try_from(-free_cursor).ok()?;
(idu < self.meta.len() + num_pending).then_some(Entity {
generation: 0,
index,
})
}
}
fn needs_flush(&mut self) -> bool {
*self.free_cursor.get_mut() != self.pending.len() as IdCursor
}
/// Allocates space for entities previously reserved with [`reserve_entity`](Entities::reserve_entity) or
/// [`reserve_entities`](Entities::reserve_entities), then initializes each one using the supplied function.
///
/// # Safety
/// Flush _must_ set the entity location to the correct [`ArchetypeId`] for the given [`Entity`]
/// each time init is called. This _can_ be [`ArchetypeId::INVALID`], provided the [`Entity`]
/// has not been assigned to an [`Archetype`][crate::archetype::Archetype].
///
/// Note: freshly-allocated entities (ones which don't come from the pending list) are guaranteed
/// to be initialized with the invalid archetype.
pub unsafe fn flush(&mut self, mut init: impl FnMut(Entity, &mut EntityLocation)) {
let free_cursor = self.free_cursor.get_mut();
let current_free_cursor = *free_cursor;
let new_free_cursor = if current_free_cursor >= 0 {
current_free_cursor as usize
} else {
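            // Reservations overran the freelist: the cursor's magnitude is the
            // number of brand-new IDs handed out past the end of `meta`.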
let old_meta_len = self.meta.len();
let new_meta_len = old_meta_len + -current_free_cursor as usize;
self.meta.resize(new_meta_len, EntityMeta::EMPTY);
self.len += -current_free_cursor as u32;
for (index, meta) in self.meta.iter_mut().enumerate().skip(old_meta_len) {
init(
Entity {
index: index as u32,
generation: meta.generation,
},
&mut meta.location,
);
}
*free_cursor = 0;
0
};
self.len += (self.pending.len() - new_free_cursor) as u32;
for index in self.pending.drain(new_free_cursor..) {
let meta = &mut self.meta[index as usize];
init(
Entity {
index,
generation: meta.generation,
},
&mut meta.location,
);
}
}
/// Flushes all reserved entities to an "invalid" state. Attempting to retrieve them will return `None`
/// unless they are later populated with a valid archetype.
pub fn flush_as_invalid(&mut self) {
// SAFETY: as per `flush` safety docs, the archetype id can be set to [`ArchetypeId::INVALID`] if
// the [`Entity`] has not been assigned to an [`Archetype`][crate::archetype::Archetype], which is the case here
unsafe {
self.flush(|_entity, location| {
location.archetype_id = ArchetypeId::INVALID;
});
}
}
/// # Safety
///
/// This function is safe if and only if the world this Entities is on has no entities.
pub unsafe fn flush_and_reserve_invalid_assuming_no_entities(&mut self, count: usize) {
let free_cursor = self.free_cursor.get_mut();
*free_cursor = 0;
self.meta.reserve(count);
// the EntityMeta struct only contains integers, and it is valid to have all bytes set to u8::MAX
self.meta.as_mut_ptr().write_bytes(u8::MAX, count);
self.meta.set_len(count);
self.len = count as u32;
}
    /// The count of all entities in the [`World`] that have ever been allocated,
/// including the entities that are currently freed.
///
/// This does not include entities that have been reserved but have never been
/// allocated yet.
///
/// [`World`]: crate::world::World
#[inline]
pub fn total_count(&self) -> usize {
self.meta.len()
}
/// The count of currently allocated entities.
#[inline]
pub fn len(&self) -> u32 {
self.len
}
    /// Returns `true` if no entities are currently allocated.
#[inline]
pub fn is_empty(&self) -> bool {
self.len == 0
}
}
// This type is repr(C) to ensure that the layout and values within it can be safe to fully fill
// with u8::MAX, as required by [`Entities::flush_and_reserve_invalid_assuming_no_entities`].
// SAFETY:
// This type must not contain any pointers at any level, and be safe to fully fill with u8::MAX.
/// Metadata for an [`Entity`].
#[derive(Copy, Clone, Debug)]
#[repr(C)]
struct EntityMeta {
/// The current generation of the [`Entity`].
pub generation: u32,
/// The current location of the [`Entity`]
pub location: EntityLocation,
}
impl EntityMeta {
/// meta for **pending entity**
const EMPTY: EntityMeta = EntityMeta {
generation: 0,
location: EntityLocation::INVALID,
};
}
// This type is repr(C) to ensure that the layout and values within it can be safe to fully fill
// with u8::MAX, as required by [`Entities::flush_and_reserve_invalid_assuming_no_entities`].
// SAFETY:
// This type must not contain any pointers at any level, and be safe to fully fill with u8::MAX.
/// A location of an entity in an archetype.
#[derive(Copy, Clone, Debug, PartialEq)]
#[repr(C)]
pub struct EntityLocation {
/// The ID of the [`Archetype`] the [`Entity`] belongs to.
///
/// [`Archetype`]: crate::archetype::Archetype
pub archetype_id: ArchetypeId,
/// The index of the [`Entity`] within its [`Archetype`].
///
/// [`Archetype`]: crate::archetype::Archetype
pub archetype_row: ArchetypeRow,
/// The ID of the [`Table`] the [`Entity`] belongs to.
///
/// [`Table`]: crate::storage::Table
pub table_id: TableId,
/// The index of the [`Entity`] within its [`Table`].
///
/// [`Table`]: crate::storage::Table
pub table_row: TableRow,
}
impl EntityLocation {
/// location for **pending entity** and **invalid entity**
const INVALID: EntityLocation = EntityLocation {
archetype_id: ArchetypeId::INVALID,
archetype_row: ArchetypeRow::INVALID,
table_id: TableId::INVALID,
table_row: TableRow::INVALID,
};
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn entity_bits_roundtrip() {
let e = Entity {
generation: 0xDEADBEEF,
index: 0xBAADF00D,
};
assert_eq!(Entity::from_bits(e.to_bits()), e);
}
#[test]
fn reserve_entity_len() {
let mut e = Entities::new();
e.reserve_entity();
// SAFETY: entity_location is left invalid
unsafe { e.flush(|_, _| {}) };
assert_eq!(e.len(), 1);
}
#[test]
fn get_reserved_and_invalid() {
let mut entities = Entities::new();
let e = entities.reserve_entity();
assert!(entities.contains(e));
assert!(entities.get(e).is_none());
// SAFETY: entity_location is left invalid
unsafe {
entities.flush(|_entity, _location| {
// do nothing ... leaving entity location invalid
});
};
assert!(entities.contains(e));
assert!(entities.get(e).is_none());
}
#[test]
fn entity_const() {
const C1: Entity = Entity::from_raw(42);
assert_eq!(42, C1.index);
assert_eq!(0, C1.generation);
const C2: Entity = Entity::from_bits(0x0000_00ff_0000_00cc);
assert_eq!(0x0000_00cc, C2.index);
assert_eq!(0x0000_00ff, C2.generation);
const C3: u32 = Entity::from_raw(33).index();
assert_eq!(33, C3);
const C4: u32 = Entity::from_bits(0x00dd_00ff_0000_0000).generation();
assert_eq!(0x00dd_00ff, C4);
}
#[test]
fn reserve_generations() {
let mut entities = Entities::new();
let entity = entities.alloc();
entities.free(entity);
assert!(entities.reserve_generations(entity.index, 1));
}
#[test]
fn reserve_generations_and_alloc() {
const GENERATIONS: u32 = 10;
let mut entities = Entities::new();
let entity = entities.alloc();
entities.free(entity);
assert!(entities.reserve_generations(entity.index, GENERATIONS));
// The very next entity allocated should be a further generation on the same index
let next_entity = entities.alloc();
assert_eq!(next_entity.index(), entity.index());
assert!(next_entity.generation > entity.generation + GENERATIONS);
}
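    // The following two tests are illustrative sketches added for this
    // document (they are not part of the upstream suite); they exercise the
    // freelist/new-ID split in `reserve_entities` and the reuse semantics of
    // `alloc_at`, using only APIs defined in this module.
    #[test]
    fn reserve_entities_spans_freelist_and_new_ids() {
        let mut entities = Entities::new();
        let a = entities.alloc();
        entities.free(a);
        // One ID is reserved from the freelist (same index as `a`, bumped
        // generation); the other is brand new, past `meta.len()`.
        let reserved: Vec<Entity> = entities.reserve_entities(2).collect();
        assert_eq!(reserved.len(), 2);
        assert_eq!(reserved[0].index(), a.index());
        assert_eq!(reserved[0].generation(), a.generation() + 1);
        // SAFETY: entity locations are left invalid
        unsafe { entities.flush(|_, _| {}) };
        assert_eq!(entities.len(), 2);
    }
    #[test]
    fn alloc_at_returns_previous_location_on_reuse() {
        let mut entities = Entities::new();
        // Allocating past `meta.len()` grows the storage; there is no prior
        // occupant, so no location is returned.
        let target = Entity::new(3, 7);
        assert!(entities.alloc_at(target).is_none());
        assert!(entities.contains(target));
        // Allocating the same ID again evicts and returns the stored
        // (here still invalid) location.
        assert!(entities.alloc_at(target).is_some());
    }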
}
| alloc_at | identifier_name |
mod.rs | //! Entity handling types.
//!
//! An **entity** exclusively owns zero or more [component] instances, all of different types, and can dynamically acquire or lose them over its lifetime.
//!
//! **empty entity**: Entity with zero components.
//! **pending entity**: Entity reserved, but not flushed yet (see [`Entities::flush`] docs for reference).
//! **reserved entity**: same as **pending entity**.
//! **invalid entity**: **pending entity** flushed with invalid (see [`Entities::flush_as_invalid`] docs for reference).
//!
//! See [`Entity`] to learn more.
//!
//! [component]: crate::component::Component
//!
//! # Usage
//!
//! Operations involving entities and their components are performed either from a system by submitting commands,
//! or from the outside (or from an exclusive system) by directly using [`World`] methods:
//!
//! |Operation|Command|Method|
//! |:---:|:---:|:---:|
//! |Spawn an entity with components|[`Commands::spawn`]|[`World::spawn`]|
//! |Spawn an entity without components|[`Commands::spawn_empty`]|[`World::spawn_empty`]|
//! |Despawn an entity|[`EntityCommands::despawn`]|[`World::despawn`]|
//! |Insert a component, bundle, or tuple of components and bundles to an entity|[`EntityCommands::insert`]|[`EntityWorldMut::insert`]|
//! |Remove a component, bundle, or tuple of components and bundles from an entity|[`EntityCommands::remove`]|[`EntityWorldMut::remove`]|
//!
//! [`World`]: crate::world::World
//! [`Commands::spawn`]: crate::system::Commands::spawn
//! [`Commands::spawn_empty`]: crate::system::Commands::spawn_empty
//! [`EntityCommands::despawn`]: crate::system::EntityCommands::despawn
//! [`EntityCommands::insert`]: crate::system::EntityCommands::insert
//! [`EntityCommands::remove`]: crate::system::EntityCommands::remove
//! [`World::spawn`]: crate::world::World::spawn
//! [`World::spawn_empty`]: crate::world::World::spawn_empty
//! [`World::despawn`]: crate::world::World::despawn
//! [`EntityWorldMut::insert`]: crate::world::EntityWorldMut::insert
//! [`EntityWorldMut::remove`]: crate::world::EntityWorldMut::remove
mod map_entities;
pub use map_entities::*;
use crate::{
archetype::{ArchetypeId, ArchetypeRow},
storage::{SparseSetIndex, TableId, TableRow},
};
use serde::{Deserialize, Serialize};
use std::{convert::TryFrom, fmt, mem, sync::atomic::Ordering};
#[cfg(target_has_atomic = "64")]
use std::sync::atomic::AtomicI64 as AtomicIdCursor;
#[cfg(target_has_atomic = "64")]
type IdCursor = i64;
/// Most modern platforms support 64-bit atomics, but some less-common platforms
/// do not. This fallback allows compilation using a 32-bit cursor instead, with
/// the caveat that some conversions may fail (and panic) at runtime.
#[cfg(not(target_has_atomic = "64"))]
use std::sync::atomic::AtomicIsize as AtomicIdCursor;
#[cfg(not(target_has_atomic = "64"))]
type IdCursor = isize;
/// Lightweight identifier of an [entity](crate::entity).
///
/// The identifier is implemented using a [generational index]: a combination of an index and a generation.
/// This allows fast insertion after data removal in an array while minimizing loss of spatial locality.
///
/// These identifiers are only valid on the [`World`] it's sourced from. Attempting to use an `Entity` to
/// fetch entity components or metadata from a different world will either fail or return unexpected results.
///
/// [generational index]: https://lucassardois.medium.com/generational-indices-guide-8e3c5f7fd594
///
/// # Usage
///
/// This data type is returned by iterating a `Query` that has `Entity` as part of its query fetch type parameter ([learn more]).
/// It can also be obtained by calling [`EntityCommands::id`] or [`EntityWorldMut::id`].
///
/// ```
/// # use bevy_ecs::prelude::*;
/// # #[derive(Component)]
/// # struct SomeComponent;
/// fn setup(mut commands: Commands) {
/// // Calling `spawn` returns `EntityCommands`.
/// let entity = commands.spawn(SomeComponent).id();
/// }
///
/// fn exclusive_system(world: &mut World) {
/// // Calling `spawn` returns `EntityWorldMut`.
/// let entity = world.spawn(SomeComponent).id();
/// }
/// #
/// # bevy_ecs::system::assert_is_system(setup);
/// # bevy_ecs::system::assert_is_system(exclusive_system);
/// ```
///
/// It can be used to refer to a specific entity to apply [`EntityCommands`], or to call [`Query::get`] (or similar methods) to access its components.
///
/// ```
/// # use bevy_ecs::prelude::*;
/// #
/// # #[derive(Component)]
/// # struct Expired;
/// #
/// fn dispose_expired_food(mut commands: Commands, query: Query<Entity, With<Expired>>) {
/// for food_entity in &query {
/// commands.entity(food_entity).despawn();
/// }
/// }
/// #
/// # bevy_ecs::system::assert_is_system(dispose_expired_food);
/// ```
///
/// [learn more]: crate::system::Query#entity-id-access
/// [`EntityCommands::id`]: crate::system::EntityCommands::id
/// [`EntityWorldMut::id`]: crate::world::EntityWorldMut::id
/// [`EntityCommands`]: crate::system::EntityCommands
/// [`Query::get`]: crate::system::Query::get
/// [`World`]: crate::world::World
#[derive(Clone, Copy, Eq, Hash, Ord, PartialEq, PartialOrd)]
pub struct Entity {
generation: u32,
index: u32,
}
pub(crate) enum AllocAtWithoutReplacement {
Exists(EntityLocation),
DidNotExist,
ExistsWithWrongGeneration,
}
impl Entity {
#[cfg(test)]
pub(crate) const fn new(index: u32, generation: u32) -> Entity {
Entity { index, generation }
}
/// An entity ID with a placeholder value. This may or may not correspond to an actual entity,
/// and should be overwritten by a new value before being used.
///
/// ## Examples
///
/// Initializing a collection (e.g. `array` or `Vec`) with a known size:
///
/// ```no_run
/// # use bevy_ecs::prelude::*;
/// // Create a new array of size 10 filled with invalid entity ids.
/// let mut entities: [Entity; 10] = [Entity::PLACEHOLDER; 10];
///
/// // ... replace the entities with valid ones.
/// ```
///
/// Deriving [`Reflect`](bevy_reflect::Reflect) for a component that has an `Entity` field:
///
/// ```no_run
/// # use bevy_ecs::{prelude::*, component::*};
/// # use bevy_reflect::Reflect;
/// #[derive(Reflect, Component)]
/// #[reflect(Component)]
/// pub struct MyStruct {
/// pub entity: Entity,
/// }
///
/// impl FromWorld for MyStruct {
/// fn from_world(_world: &mut World) -> Self {
/// Self {
/// entity: Entity::PLACEHOLDER,
/// }
/// }
/// }
/// ```
pub const PLACEHOLDER: Self = Self::from_raw(u32::MAX);
/// Creates a new entity ID with the specified `index` and a generation of 0.
///
/// # Note
///
/// Spawning a specific `entity` value is __rarely the right choice__. Most apps should favor
/// [`Commands::spawn`](crate::system::Commands::spawn). This method should generally
/// only be used for sharing entities across apps, and only when they have a scheme
/// worked out to share an index space (which doesn't happen by default).
///
/// In general, one should not try to synchronize the ECS by attempting to ensure that
/// `Entity` lines up between instances, but instead insert a secondary identifier as
/// a component.
pub const fn from_raw(index: u32) -> Entity {
Entity {
index,
generation: 0,
}
}
/// Convert to a form convenient for passing outside of rust.
///
/// Only useful for identifying entities within the same instance of an application. Do not use
/// for serialization between runs.
///
/// No particular structure is guaranteed for the returned bits.
pub const fn to_bits(self) -> u64 {
(self.generation as u64) << 32 | self.index as u64
}
/// Reconstruct an `Entity` previously destructured with [`Entity::to_bits`].
///
/// Only useful when applied to results from `to_bits` in the same instance of an application.
pub const fn from_bits(bits: u64) -> Self {
Self {
generation: (bits >> 32) as u32,
index: bits as u32,
}
}
/// Return a transiently unique identifier.
///
/// No two simultaneously-live entities share the same index, but dead entities' indices may collide
/// with both live and dead entities. Useful for compactly representing entities within a
/// specific snapshot of the world, such as when serializing.
#[inline]
pub const fn index(self) -> u32 {
self.index
}
/// Returns the generation of this Entity's index. The generation is incremented each time an
/// entity with a given index is despawned. This serves as a "count" of the number of times a
/// given index has been reused (index, generation) pairs uniquely identify a given Entity.
#[inline]
pub const fn generation(self) -> u32 {
self.generation
}
}
impl Serialize for Entity {
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
where
S: serde::Serializer,
{
serializer.serialize_u64(self.to_bits())
}
}
impl<'de> Deserialize<'de> for Entity {
fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
where
D: serde::Deserializer<'de>,
{
let id: u64 = serde::de::Deserialize::deserialize(deserializer)?;
Ok(Entity::from_bits(id))
}
}
impl fmt::Debug for Entity {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
write!(f, "{}v{}", self.index, self.generation)
}
}
impl SparseSetIndex for Entity {
#[inline]
fn sparse_set_index(&self) -> usize {
self.index() as usize
}
#[inline]
fn get_sparse_set_index(value: usize) -> Self {
Entity::from_raw(value as u32)
}
}
/// An [`Iterator`] returning a sequence of [`Entity`] values from
/// [`Entities::reserve_entities`](crate::entity::Entities::reserve_entities).
pub struct ReserveEntitiesIterator<'a> {
// Metas, so we can recover the current generation for anything in the freelist.
meta: &'a [EntityMeta],
// Reserved indices formerly in the freelist to hand out.
index_iter: std::slice::Iter<'a, u32>,
// New Entity indices to hand out, outside the range of meta.len().
index_range: std::ops::Range<u32>,
}
impl<'a> Iterator for ReserveEntitiesIterator<'a> {
type Item = Entity;
fn next(&mut self) -> Option<Self::Item> {
self.index_iter
.next()
.map(|&index| Entity {
generation: self.meta[index as usize].generation,
index,
})
.or_else(|| {
self.index_range.next().map(|index| Entity {
generation: 0,
index,
})
})
}
fn size_hint(&self) -> (usize, Option<usize>) {
let len = self.index_iter.len() + self.index_range.len();
(len, Some(len))
}
}
impl<'a> core::iter::ExactSizeIterator for ReserveEntitiesIterator<'a> {}
impl<'a> core::iter::FusedIterator for ReserveEntitiesIterator<'a> {}
/// A [`World`]'s internal metadata store on all of its entities.
///
/// Contains metadata on:
/// - The generation of every entity.
/// - The alive/dead status of a particular entity. (i.e. "has entity 3 been despawned?")
/// - The location of the entity's components in memory (via [`EntityLocation`])
///
/// [`World`]: crate::world::World
#[derive(Debug)]
pub struct Entities {
meta: Vec<EntityMeta>,
/// The `pending` and `free_cursor` fields describe three sets of Entity IDs
/// that have been freed or are in the process of being allocated:
///
/// - The `freelist` IDs, previously freed by `free()`. These IDs are available to any of
/// [`alloc`], [`reserve_entity`] or [`reserve_entities`]. Allocation will always prefer
/// these over brand new IDs.
///
/// - The `reserved` list of IDs that were once in the freelist, but got reserved by
/// [`reserve_entities`] or [`reserve_entity`]. They are now waiting for [`flush`] to make them
/// fully allocated.
///
/// - The count of new IDs that do not yet exist in `self.meta`, but which we have handed out
/// and reserved. [`flush`] will allocate room for them in `self.meta`.
///
/// The contents of `pending` look like this:
///
/// ```txt
/// ----------------------------
/// | freelist | reserved |
/// ----------------------------
/// ^ ^
/// free_cursor pending.len()
/// ```
///
/// As IDs are allocated, `free_cursor` is atomically decremented, moving
/// items from the freelist into the reserved list by sliding over the boundary.
///
/// Once the freelist runs out, `free_cursor` starts going negative.
/// The more negative it is, the more IDs have been reserved starting exactly at
/// the end of `meta.len()`.
///
/// This formulation allows us to reserve any number of IDs first from the freelist
/// and then from the new IDs, using only a single atomic subtract.
///
/// Once [`flush`] is done, `free_cursor` will equal `pending.len()`.
///
/// [`alloc`]: Entities::alloc
/// [`reserve_entity`]: Entities::reserve_entity
/// [`reserve_entities`]: Entities::reserve_entities
/// [`flush`]: Entities::flush
pending: Vec<u32>,
free_cursor: AtomicIdCursor,
/// Stores the number of free entities for [`len`](Entities::len)
len: u32,
}
impl Entities {
pub(crate) const fn new() -> Self {
Entities {
meta: Vec::new(),
pending: Vec::new(),
free_cursor: AtomicIdCursor::new(0),
len: 0,
}
}
/// Reserve entity IDs concurrently.
///
/// Storage for entity generation and location is lazily allocated by calling [`flush`](Entities::flush).
pub fn reserve_entities(&self, count: u32) -> ReserveEntitiesIterator {
// Use one atomic subtract to grab a range of new IDs. The range might be
// entirely nonnegative, meaning all IDs come from the freelist, or entirely
// negative, meaning they are all new IDs to allocate, or a mix of both.
let range_end = self
.free_cursor
// Unwrap: these conversions can only fail on platforms that don't support 64-bit atomics
// and use AtomicIsize instead (see note on `IdCursor`).
.fetch_sub(IdCursor::try_from(count).unwrap(), Ordering::Relaxed);
let range_start = range_end - IdCursor::try_from(count).unwrap();
let freelist_range = range_start.max(0) as usize..range_end.max(0) as usize;
let (new_id_start, new_id_end) = if range_start >= 0 {
// We satisfied all requests from the freelist.
(0, 0)
} else {
// We need to allocate some new Entity IDs outside of the range of self.meta.
//
// `range_start` covers some negative territory, e.g. `-3..6`.
// Since the nonnegative values `0..6` are handled by the freelist, that
// means we need to handle the negative range here.
//
// In this example, we truncate the end to 0, leaving us with `-3..0`.
// Then we negate these values to indicate how far beyond the end of `meta.end()`
// to go, yielding `meta.len()+0 .. meta.len()+3`.
let base = self.meta.len() as IdCursor;
let new_id_end = u32::try_from(base - range_start).expect("too many entities");
// `new_id_end` is in range, so no need to check `start`.
let new_id_start = (base - range_end.min(0)) as u32;
(new_id_start, new_id_end)
};
ReserveEntitiesIterator {
meta: &self.meta[..],
index_iter: self.pending[freelist_range].iter(),
index_range: new_id_start..new_id_end,
}
}
/// Reserve one entity ID concurrently.
///
/// Equivalent to `self.reserve_entities(1).next().unwrap()`, but more efficient.
pub fn reserve_entity(&self) -> Entity {
let n = self.free_cursor.fetch_sub(1, Ordering::Relaxed);
if n > 0 {
// Allocate from the freelist.
let index = self.pending[(n - 1) as usize];
Entity {
generation: self.meta[index as usize].generation,
index,
}
} else {
// Grab a new ID, outside the range of `meta.len()`. `flush()` must
// eventually be called to make it valid.
//
// As `self.free_cursor` goes more and more negative, we return IDs farther
// and farther beyond `meta.len()`.
Entity {
generation: 0,
index: u32::try_from(self.meta.len() as IdCursor - n).expect("too many entities"),
}
}
}
/// Check that we do not have pending work requiring `flush()` to be called.
fn verify_flushed(&mut self) {
debug_assert!(
!self.needs_flush(),
"flush() needs to be called before this operation is legal"
);
}
/// Allocate an entity ID directly.
pub fn alloc(&mut self) -> Entity {
self.verify_flushed();
self.len += 1;
if let Some(index) = self.pending.pop() {
let new_free_cursor = self.pending.len() as IdCursor;
*self.free_cursor.get_mut() = new_free_cursor;
Entity {
generation: self.meta[index as usize].generation,
index,
}
} else {
let index = u32::try_from(self.meta.len()).expect("too many entities");
self.meta.push(EntityMeta::EMPTY);
Entity {
generation: 0,
index,
}
}
}
/// Allocate a specific entity ID, overwriting its generation.
///
/// Returns the location of the entity currently using the given ID, if any. Location should be
/// written immediately.
pub fn alloc_at(&mut self, entity: Entity) -> Option<EntityLocation> {
self.verify_flushed();
let loc = if entity.index as usize >= self.meta.len() {
self.pending.extend((self.meta.len() as u32)..entity.index);
let new_free_cursor = self.pending.len() as IdCursor;
*self.free_cursor.get_mut() = new_free_cursor;
self.meta
.resize(entity.index as usize + 1, EntityMeta::EMPTY);
self.len += 1;
None
} else if let Some(index) = self.pending.iter().position(|item| *item == entity.index) {
self.pending.swap_remove(index);
let new_free_cursor = self.pending.len() as IdCursor;
*self.free_cursor.get_mut() = new_free_cursor;
self.len += 1;
None
} else {
Some(mem::replace(
&mut self.meta[entity.index as usize].location,
EntityMeta::EMPTY.location,
))
};
self.meta[entity.index as usize].generation = entity.generation;
loc
}
/// Allocate a specific entity ID, overwriting its generation.
///
/// Returns the location of the entity currently using the given ID, if any.
pub(crate) fn alloc_at_without_replacement(
&mut self,
entity: Entity,
) -> AllocAtWithoutReplacement {
self.verify_flushed();
let result = if entity.index as usize >= self.meta.len() {
self.pending.extend((self.meta.len() as u32)..entity.index);
let new_free_cursor = self.pending.len() as IdCursor;
*self.free_cursor.get_mut() = new_free_cursor;
self.meta
.resize(entity.index as usize + 1, EntityMeta::EMPTY);
self.len += 1;
AllocAtWithoutReplacement::DidNotExist
} else if let Some(index) = self.pending.iter().position(|item| *item == entity.index) {
self.pending.swap_remove(index);
let new_free_cursor = self.pending.len() as IdCursor;
*self.free_cursor.get_mut() = new_free_cursor;
self.len += 1;
AllocAtWithoutReplacement::DidNotExist
} else {
let current_meta = &self.meta[entity.index as usize];
if current_meta.location.archetype_id == ArchetypeId::INVALID {
AllocAtWithoutReplacement::DidNotExist
} else if current_meta.generation == entity.generation {
AllocAtWithoutReplacement::Exists(current_meta.location)
} else {
return AllocAtWithoutReplacement::ExistsWithWrongGeneration;
}
};
self.meta[entity.index as usize].generation = entity.generation;
result
}
/// Destroy an entity, allowing it to be reused.
///
/// Must not be called while reserved entities are awaiting `flush()`.
pub fn free(&mut self, entity: Entity) -> Option<EntityLocation> {
self.verify_flushed();
let meta = &mut self.meta[entity.index as usize];
if meta.generation != entity.generation {
return None;
}
meta.generation += 1;
let loc = mem::replace(&mut meta.location, EntityMeta::EMPTY.location);
self.pending.push(entity.index);
let new_free_cursor = self.pending.len() as IdCursor;
*self.free_cursor.get_mut() = new_free_cursor;
self.len -= 1;
Some(loc)
}
/// Ensure at least `n` allocations can succeed without reallocating.
pub fn reserve(&mut self, additional: u32) {
self.verify_flushed();
let freelist_size = *self.free_cursor.get_mut();
// Unwrap: these conversions can only fail on platforms that don't support 64-bit atomics
// and use AtomicIsize instead (see note on `IdCursor`).
let shortfall = IdCursor::try_from(additional).unwrap() - freelist_size;
if shortfall > 0 {
self.meta.reserve(shortfall as usize);
}
}
/// Returns true if the [`Entities`] contains [`entity`](Entity).
// This will return false for entities which have been freed, even if
// not reallocated since the generation is incremented in `free`
pub fn contains(&self, entity: Entity) -> bool {
self.resolve_from_id(entity.index())
.map_or(false, |e| e.generation() == entity.generation)
}
/// Clears all [`Entity`] from the World.
pub fn clear(&mut self) {
self.meta.clear();
self.pending.clear();
*self.free_cursor.get_mut() = 0;
self.len = 0;
}
/// Returns the location of an [`Entity`].
/// Note: for pending entities, returns `Some(EntityLocation::INVALID)`.
#[inline]
pub fn get(&self, entity: Entity) -> Option<EntityLocation> {
if let Some(meta) = self.meta.get(entity.index as usize) {
if meta.generation != entity.generation
|| meta.location.archetype_id == ArchetypeId::INVALID
{
return None;
}
Some(meta.location)
} else {
None
}
}
/// Updates the location of an [`Entity`]. This must be called when moving the components of
/// the entity around in storage.
///
/// # Safety
/// - `index` must be a valid entity index.
/// - `location` must be valid for the entity at `index` or immediately made valid afterwards
/// before handing control to unknown code.
#[inline]
pub(crate) unsafe fn set(&mut self, index: u32, location: EntityLocation) {
// SAFETY: Caller guarantees that `index` a valid entity index
self.meta.get_unchecked_mut(index as usize).location = location;
}
/// Increments the `generation` of a freed [`Entity`]. The next entity ID allocated with this
/// `index` will count `generation` starting from the prior `generation` + the specified
/// value + 1.
///
/// Does nothing if no entity with this `index` has been allocated yet.
pub(crate) fn reserve_generations(&mut self, index: u32, generations: u32) -> bool {
if (index as usize) >= self.meta.len() {
return false;
}
let meta = &mut self.meta[index as usize];
if meta.location.archetype_id == ArchetypeId::INVALID {
meta.generation += generations;
true
} else {
false
}
}
/// Get the [`Entity`] with a given id, if it exists in this [`Entities`] collection
/// Returns `None` if this [`Entity`] is outside of the range of currently reserved Entities
///
/// Note: This method may return [`Entities`](Entity) which are currently free
/// Note that [`contains`](Entities::contains) will correctly return false for freed
/// entities, since it checks the generation
pub fn resolve_from_id(&self, index: u32) -> Option<Entity> {
let idu = index as usize;
if let Some(&EntityMeta { generation, .. }) = self.meta.get(idu) {
Some(Entity { generation, index })
} else {
// `id` is outside of the meta list - check whether it is reserved but not yet flushed.
let free_cursor = self.free_cursor.load(Ordering::Relaxed);
// If this entity was manually created, then free_cursor might be positive
// Returning None handles that case correctly
let num_pending = usize::try_from(-free_cursor).ok()?;
(idu < self.meta.len() + num_pending).then_some(Entity {
generation: 0,
index,
})
}
}
fn needs_flush(&mut self) -> bool {
*self.free_cursor.get_mut() != self.pending.len() as IdCursor
}
/// Allocates space for entities previously reserved with [`reserve_entity`](Entities::reserve_entity) or
/// [`reserve_entities`](Entities::reserve_entities), then initializes each one using the supplied function.
///
/// # Safety
/// Flush _must_ set the entity location to the correct [`ArchetypeId`] for the given [`Entity`]
/// each time init is called. This _can_ be [`ArchetypeId::INVALID`], provided the [`Entity`]
/// has not been assigned to an [`Archetype`][crate::archetype::Archetype].
///
/// Note: freshly-allocated entities (ones which don't come from the pending list) are guaranteed
/// to be initialized with the invalid archetype.
pub unsafe fn flush(&mut self, mut init: impl FnMut(Entity, &mut EntityLocation)) {
let free_cursor = self.free_cursor.get_mut();
let current_free_cursor = *free_cursor;
let new_free_cursor = if current_free_cursor >= 0 {
current_free_cursor as usize
} else {
let old_meta_len = self.meta.len();
let new_meta_len = old_meta_len + -current_free_cursor as usize;
self.meta.resize(new_meta_len, EntityMeta::EMPTY);
self.len += -current_free_cursor as u32;
for (index, meta) in self.meta.iter_mut().enumerate().skip(old_meta_len) {
init(
Entity {
index: index as u32,
generation: meta.generation,
},
&mut meta.location,
);
}
*free_cursor = 0;
0
};
self.len += (self.pending.len() - new_free_cursor) as u32;
for index in self.pending.drain(new_free_cursor..) {
let meta = &mut self.meta[index as usize];
init(
Entity {
index,
generation: meta.generation,
},
&mut meta.location,
);
}
}
/// Flushes all reserved entities to an "invalid" state. Attempting to retrieve them will return `None`
/// unless they are later populated with a valid archetype.
pub fn flush_as_invalid(&mut self) {
// SAFETY: as per `flush` safety docs, the archetype id can be set to [`ArchetypeId::INVALID`] if
// the [`Entity`] has not been assigned to an [`Archetype`][crate::archetype::Archetype], which is the case here
unsafe {
self.flush(|_entity, location| {
location.archetype_id = ArchetypeId::INVALID;
});
}
}
/// # Safety
///
/// This function is safe if and only if the world this Entities is on has no entities.
pub unsafe fn flush_and_reserve_invalid_assuming_no_entities(&mut self, count: usize) {
let free_cursor = self.free_cursor.get_mut();
*free_cursor = 0;
self.meta.reserve(count);
// the EntityMeta struct only contains integers, and it is valid to have all bytes set to u8::MAX
self.meta.as_mut_ptr().write_bytes(u8::MAX, count);
self.meta.set_len(count);
self.len = count as u32;
}
/// The count of all entities in the [`World`] that have ever been allocated
/// including the entities that are currently freed. | /// [`World`]: crate::world::World
#[inline]
pub fn total_count(&self) -> usize {
self.meta.len()
}
/// The count of currently allocated entities.
#[inline]
pub fn len(&self) -> u32 {
self.len
}
/// Checks if any entity is currently active.
#[inline]
pub fn is_empty(&self) -> bool {
self.len == 0
}
}
// This type is repr(C) to ensure that the layout and values within it can be safe to fully fill
// with u8::MAX, as required by [`Entities::flush_and_reserve_invalid_assuming_no_entities`].
// Safety:
// This type must not contain any pointers at any level, and be safe to fully fill with u8::MAX.
/// Metadata for an [`Entity`].
#[derive(Copy, Clone, Debug)]
#[repr(C)]
struct EntityMeta {
/// The current generation of the [`Entity`].
pub generation: u32,
/// The current location of the [`Entity`]
pub location: EntityLocation,
}
impl EntityMeta {
/// meta for **pending entity**
const EMPTY: EntityMeta = EntityMeta {
generation: 0,
location: EntityLocation::INVALID,
};
}
// This type is repr(C) to ensure that the layout and values within it can be safe to fully fill
// with u8::MAX, as required by [`Entities::flush_and_reserve_invalid_assuming_no_entities`].
// SAFETY:
// This type must not contain any pointers at any level, and be safe to fully fill with u8::MAX.
/// A location of an entity in an archetype.
#[derive(Copy, Clone, Debug, PartialEq)]
#[repr(C)]
pub struct EntityLocation {
/// The ID of the [`Archetype`] the [`Entity`] belongs to.
///
/// [`Archetype`]: crate::archetype::Archetype
pub archetype_id: ArchetypeId,
/// The index of the [`Entity`] within its [`Archetype`].
///
/// [`Archetype`]: crate::archetype::Archetype
pub archetype_row: ArchetypeRow,
/// The ID of the [`Table`] the [`Entity`] belongs to.
///
/// [`Table`]: crate::storage::Table
pub table_id: TableId,
/// The index of the [`Entity`] within its [`Table`].
///
/// [`Table`]: crate::storage::Table
pub table_row: TableRow,
}
impl EntityLocation {
/// location for **pending entity** and **invalid entity**
const INVALID: EntityLocation = EntityLocation {
archetype_id: ArchetypeId::INVALID,
archetype_row: ArchetypeRow::INVALID,
table_id: TableId::INVALID,
table_row: TableRow::INVALID,
};
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn entity_bits_roundtrip() {
let e = Entity {
generation: 0xDEADBEEF,
index: 0xBAADF00D,
};
assert_eq!(Entity::from_bits(e.to_bits()), e);
}
#[test]
fn reserve_entity_len() {
let mut e = Entities::new();
e.reserve_entity();
// SAFETY: entity_location is left invalid
unsafe { e.flush(|_, _| {}) };
assert_eq!(e.len(), 1);
}
#[test]
fn get_reserved_and_invalid() {
let mut entities = Entities::new();
let e = entities.reserve_entity();
assert!(entities.contains(e));
assert!(entities.get(e).is_none());
// SAFETY: entity_location is left invalid
unsafe {
entities.flush(|_entity, _location| {
// do nothing ... leaving entity location invalid
});
};
assert!(entities.contains(e));
assert!(entities.get(e).is_none());
}
#[test]
fn entity_const() {
const C1: Entity = Entity::from_raw(42);
assert_eq!(42, C1.index);
assert_eq!(0, C1.generation);
const C2: Entity = Entity::from_bits(0x0000_00ff_0000_00cc);
assert_eq!(0x0000_00cc, C2.index);
assert_eq!(0x0000_00ff, C2.generation);
const C3: u32 = Entity::from_raw(33).index();
assert_eq!(33, C3);
const C4: u32 = Entity::from_bits(0x00dd_00ff_0000_0000).generation();
assert_eq!(0x00dd_00ff, C4);
}
#[test]
fn reserve_generations() {
let mut entities = Entities::new();
let entity = entities.alloc();
entities.free(entity);
assert!(entities.reserve_generations(entity.index, 1));
}
#[test]
fn reserve_generations_and_alloc() {
const GENERATIONS: u32 = 10;
let mut entities = Entities::new();
let entity = entities.alloc();
entities.free(entity);
assert!(entities.reserve_generations(entity.index, GENERATIONS));
// The very next entity allocated should be a further generation on the same index
let next_entity = entities.alloc();
assert_eq!(next_entity.index(), entity.index());
assert!(next_entity.generation > entity.generation + GENERATIONS);
}
} | ///
/// This does not include entities that have been reserved but have never been
/// allocated yet.
/// | random_line_split |
mod.rs | //! Entity handling types.
//!
//! An **entity** exclusively owns zero or more [component] instances, all of different types, and can dynamically acquire or lose them over its lifetime.
//!
//! **empty entity**: Entity with zero components.
//! **pending entity**: Entity reserved, but not flushed yet (see [`Entities::flush`] docs for reference).
//! **reserved entity**: same as **pending entity**.
//! **invalid entity**: **pending entity** flushed with invalid (see [`Entities::flush_as_invalid`] docs for reference).
//!
//! See [`Entity`] to learn more.
//!
//! [component]: crate::component::Component
//!
//! # Usage
//!
//! Operations involving entities and their components are performed either from a system by submitting commands,
//! or from the outside (or from an exclusive system) by directly using [`World`] methods:
//!
//! |Operation|Command|Method|
//! |:---:|:---:|:---:|
//! |Spawn an entity with components|[`Commands::spawn`]|[`World::spawn`]|
//! |Spawn an entity without components|[`Commands::spawn_empty`]|[`World::spawn_empty`]|
//! |Despawn an entity|[`EntityCommands::despawn`]|[`World::despawn`]|
//! |Insert a component, bundle, or tuple of components and bundles to an entity|[`EntityCommands::insert`]|[`EntityWorldMut::insert`]|
//! |Remove a component, bundle, or tuple of components and bundles from an entity|[`EntityCommands::remove`]|[`EntityWorldMut::remove`]|
//!
//! [`World`]: crate::world::World
//! [`Commands::spawn`]: crate::system::Commands::spawn
//! [`Commands::spawn_empty`]: crate::system::Commands::spawn_empty
//! [`EntityCommands::despawn`]: crate::system::EntityCommands::despawn
//! [`EntityCommands::insert`]: crate::system::EntityCommands::insert
//! [`EntityCommands::remove`]: crate::system::EntityCommands::remove
//! [`World::spawn`]: crate::world::World::spawn
//! [`World::spawn_empty`]: crate::world::World::spawn_empty
//! [`World::despawn`]: crate::world::World::despawn
//! [`EntityWorldMut::insert`]: crate::world::EntityWorldMut::insert
//! [`EntityWorldMut::remove`]: crate::world::EntityWorldMut::remove
mod map_entities;
pub use map_entities::*;
use crate::{
archetype::{ArchetypeId, ArchetypeRow},
storage::{SparseSetIndex, TableId, TableRow},
};
use serde::{Deserialize, Serialize};
use std::{convert::TryFrom, fmt, mem, sync::atomic::Ordering};
#[cfg(target_has_atomic = "64")]
use std::sync::atomic::AtomicI64 as AtomicIdCursor;
#[cfg(target_has_atomic = "64")]
type IdCursor = i64;
/// Most modern platforms support 64-bit atomics, but some less-common platforms
/// do not. This fallback allows compilation using a 32-bit cursor instead, with
/// the caveat that some conversions may fail (and panic) at runtime.
#[cfg(not(target_has_atomic = "64"))]
use std::sync::atomic::AtomicIsize as AtomicIdCursor;
#[cfg(not(target_has_atomic = "64"))]
type IdCursor = isize;
/// Lightweight identifier of an [entity](crate::entity).
///
/// The identifier is implemented using a [generational index]: a combination of an index and a generation.
/// This allows fast insertion after data removal in an array while minimizing loss of spatial locality.
///
/// These identifiers are only valid on the [`World`] it's sourced from. Attempting to use an `Entity` to
/// fetch entity components or metadata from a different world will either fail or return unexpected results.
///
/// [generational index]: https://lucassardois.medium.com/generational-indices-guide-8e3c5f7fd594
///
/// # Usage
///
/// This data type is returned by iterating a `Query` that has `Entity` as part of its query fetch type parameter ([learn more]).
/// It can also be obtained by calling [`EntityCommands::id`] or [`EntityWorldMut::id`].
///
/// ```
/// # use bevy_ecs::prelude::*;
/// # #[derive(Component)]
/// # struct SomeComponent;
/// fn setup(mut commands: Commands) {
/// // Calling `spawn` returns `EntityCommands`.
/// let entity = commands.spawn(SomeComponent).id();
/// }
///
/// fn exclusive_system(world: &mut World) {
/// // Calling `spawn` returns `EntityWorldMut`.
/// let entity = world.spawn(SomeComponent).id();
/// }
/// #
/// # bevy_ecs::system::assert_is_system(setup);
/// # bevy_ecs::system::assert_is_system(exclusive_system);
/// ```
///
/// It can be used to refer to a specific entity to apply [`EntityCommands`], or to call [`Query::get`] (or similar methods) to access its components.
///
/// ```
/// # use bevy_ecs::prelude::*;
/// #
/// # #[derive(Component)]
/// # struct Expired;
/// #
/// fn dispose_expired_food(mut commands: Commands, query: Query<Entity, With<Expired>>) {
/// for food_entity in &query {
/// commands.entity(food_entity).despawn();
/// }
/// }
/// #
/// # bevy_ecs::system::assert_is_system(dispose_expired_food);
/// ```
///
/// [learn more]: crate::system::Query#entity-id-access
/// [`EntityCommands::id`]: crate::system::EntityCommands::id
/// [`EntityWorldMut::id`]: crate::world::EntityWorldMut::id
/// [`EntityCommands`]: crate::system::EntityCommands
/// [`Query::get`]: crate::system::Query::get
/// [`World`]: crate::world::World
#[derive(Clone, Copy, Eq, Hash, Ord, PartialEq, PartialOrd)]
pub struct Entity {
generation: u32,
index: u32,
}
pub(crate) enum AllocAtWithoutReplacement {
Exists(EntityLocation),
DidNotExist,
ExistsWithWrongGeneration,
}
impl Entity {
#[cfg(test)]
pub(crate) const fn new(index: u32, generation: u32) -> Entity {
Entity { index, generation }
}
/// An entity ID with a placeholder value. This may or may not correspond to an actual entity,
/// and should be overwritten by a new value before being used.
///
/// ## Examples
///
/// Initializing a collection (e.g. `array` or `Vec`) with a known size:
///
/// ```no_run
/// # use bevy_ecs::prelude::*;
/// // Create a new array of size 10 filled with invalid entity ids.
/// let mut entities: [Entity; 10] = [Entity::PLACEHOLDER; 10];
///
/// // ... replace the entities with valid ones.
/// ```
///
/// Deriving [`Reflect`](bevy_reflect::Reflect) for a component that has an `Entity` field:
///
/// ```no_run
/// # use bevy_ecs::{prelude::*, component::*};
/// # use bevy_reflect::Reflect;
/// #[derive(Reflect, Component)]
/// #[reflect(Component)]
/// pub struct MyStruct {
/// pub entity: Entity,
/// }
///
/// impl FromWorld for MyStruct {
/// fn from_world(_world: &mut World) -> Self {
/// Self {
/// entity: Entity::PLACEHOLDER,
/// }
/// }
/// }
/// ```
pub const PLACEHOLDER: Self = Self::from_raw(u32::MAX);
/// Creates a new entity ID with the specified `index` and a generation of 0.
///
/// # Note
///
/// Spawning a specific `entity` value is __rarely the right choice__. Most apps should favor
/// [`Commands::spawn`](crate::system::Commands::spawn). This method should generally
/// only be used for sharing entities across apps, and only when they have a scheme
/// worked out to share an index space (which doesn't happen by default).
///
/// In general, one should not try to synchronize the ECS by attempting to ensure that
/// `Entity` lines up between instances, but instead insert a secondary identifier as
/// a component.
pub const fn from_raw(index: u32) -> Entity {
Entity {
index,
generation: 0,
}
}
/// Convert to a form convenient for passing outside of Rust.
///
/// Only useful for identifying entities within the same instance of an application. Do not use
/// for serialization between runs.
///
/// No particular structure is guaranteed for the returned bits.
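///
/// A small round-trip sketch (hedged: the exact packing is an internal detail
/// and may change between versions):
///
/// ```
/// # use bevy_ecs::entity::Entity;
/// let entity = Entity::from_raw(42);
/// let bits = entity.to_bits();
/// assert_eq!(Entity::from_bits(bits), entity);
/// ```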
pub const fn to_bits(self) -> u64 {
(self.generation as u64) << 32 | self.index as u64
}
/// Reconstruct an `Entity` previously destructured with [`Entity::to_bits`].
///
/// Only useful when applied to results from `to_bits` in the same instance of an application.
pub const fn from_bits(bits: u64) -> Self {
Self {
generation: (bits >> 32) as u32,
index: bits as u32,
}
}
/// Return a transiently unique identifier.
///
/// No two simultaneously-live entities share the same index, but dead entities' indices may collide
/// with both live and dead entities. Useful for compactly representing entities within a
/// specific snapshot of the world, such as when serializing.
#[inline]
pub const fn index(self) -> u32 {
self.index
}
/// Returns the generation of this Entity's index. The generation is incremented each time an
/// entity with a given index is despawned. This serves as a "count" of the number of times a
/// given index has been reused; `(index, generation)` pairs uniquely identify a given Entity.
#[inline]
pub const fn generation(self) -> u32 {
self.generation
}
}
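/// A hedged round-trip sketch for the serde impls below (`serde_json` is
/// illustrative and not a dependency of this module; any format that can
/// represent a `u64` behaves the same way):
///
/// ```ignore
/// let entity = Entity::from_raw(5);
/// let json = serde_json::to_string(&entity).unwrap();
/// assert_eq!(json, "5"); // generation 0, index 5 packs to the u64 value 5
/// let back: Entity = serde_json::from_str(&json).unwrap();
/// assert_eq!(back, entity);
/// ```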
impl Serialize for Entity {
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
where
S: serde::Serializer,
{
serializer.serialize_u64(self.to_bits())
}
}
impl<'de> Deserialize<'de> for Entity {
fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
where
D: serde::Deserializer<'de>,
{
let id: u64 = serde::de::Deserialize::deserialize(deserializer)?;
Ok(Entity::from_bits(id))
}
}
impl fmt::Debug for Entity {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
write!(f, "{}v{}", self.index, self.generation)
}
}
impl SparseSetIndex for Entity {
#[inline]
fn sparse_set_index(&self) -> usize {
self.index() as usize
}
#[inline]
fn get_sparse_set_index(value: usize) -> Self {
Entity::from_raw(value as u32)
}
}
/// An [`Iterator`] returning a sequence of [`Entity`] values from
/// [`Entities::reserve_entities`](crate::entity::Entities::reserve_entities).
pub struct ReserveEntitiesIterator<'a> {
// Metas, so we can recover the current generation for anything in the freelist.
meta: &'a [EntityMeta],
// Reserved indices formerly in the freelist to hand out.
index_iter: std::slice::Iter<'a, u32>,
// New Entity indices to hand out, outside the range of meta.len().
index_range: std::ops::Range<u32>,
}
impl<'a> Iterator for ReserveEntitiesIterator<'a> {
type Item = Entity;
fn next(&mut self) -> Option<Self::Item> {
self.index_iter
.next()
.map(|&index| Entity {
generation: self.meta[index as usize].generation,
index,
})
.or_else(|| {
self.index_range.next().map(|index| Entity {
generation: 0,
index,
})
})
}
fn size_hint(&self) -> (usize, Option<usize>) {
let len = self.index_iter.len() + self.index_range.len();
(len, Some(len))
}
}
impl<'a> core::iter::ExactSizeIterator for ReserveEntitiesIterator<'a> {}
impl<'a> core::iter::FusedIterator for ReserveEntitiesIterator<'a> {}
/// A [`World`]'s internal metadata store on all of its entities.
///
/// Contains metadata on:
/// - The generation of every entity.
/// - The alive/dead status of a particular entity. (i.e. "has entity 3 been despawned?")
/// - The location of the entity's components in memory (via [`EntityLocation`])
///
/// [`World`]: crate::world::World
#[derive(Debug)]
pub struct Entities {
meta: Vec<EntityMeta>,
/// The `pending` and `free_cursor` fields describe three sets of Entity IDs
/// that have been freed or are in the process of being allocated:
///
/// - The `freelist` IDs, previously freed by `free()`. These IDs are available to any of
/// [`alloc`], [`reserve_entity`] or [`reserve_entities`]. Allocation will always prefer
/// these over brand new IDs.
///
/// - The `reserved` list of IDs that were once in the freelist, but got reserved by
/// [`reserve_entities`] or [`reserve_entity`]. They are now waiting for [`flush`] to make them
/// fully allocated.
///
/// - The count of new IDs that do not yet exist in `self.meta`, but which we have handed out
/// and reserved. [`flush`] will allocate room for them in `self.meta`.
///
/// The contents of `pending` look like this:
///
/// ```txt
/// ----------------------------
/// | freelist | reserved |
/// ----------------------------
/// ^ ^
/// free_cursor pending.len()
/// ```
///
/// As IDs are allocated, `free_cursor` is atomically decremented, moving
/// items from the freelist into the reserved list by sliding over the boundary.
///
/// Once the freelist runs out, `free_cursor` starts going negative.
/// The more negative it is, the more IDs have been reserved starting exactly at
/// the end of `meta.len()`.
///
/// This formulation allows us to reserve any number of IDs first from the freelist
/// and then from the new IDs, using only a single atomic subtract.
///
/// Once [`flush`] is done, `free_cursor` will equal `pending.len()`.
///
/// [`alloc`]: Entities::alloc
/// [`reserve_entity`]: Entities::reserve_entity
/// [`reserve_entities`]: Entities::reserve_entities
/// [`flush`]: Entities::flush
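///
/// A hedged numeric walk-through (values purely illustrative): with
/// `pending = [3, 7, 9]` and `free_cursor = 3`, reserving five IDs atomically
/// subtracts 5, leaving `free_cursor = -2`. Indices 3, 7, and 9 are served from
/// the freelist, and two brand-new indices, `meta.len()` and `meta.len() + 1`,
/// are promised; [`flush`] later grows `meta` to cover them.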
pending: Vec<u32>,
free_cursor: AtomicIdCursor,
/// Stores the number of allocated entities, as returned by [`len`](Entities::len).
len: u32,
}
impl Entities {
pub(crate) const fn new() -> Self {
Entities {
meta: Vec::new(),
pending: Vec::new(),
free_cursor: AtomicIdCursor::new(0),
len: 0,
}
}
/// Reserve entity IDs concurrently.
///
/// Storage for entity generation and location is lazily allocated by calling [`flush`](Entities::flush).
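///
/// A hedged sketch of the reserve-then-flush flow (crate-internal API, so the
/// example is not compiled; the no-op `init` leaves locations invalid, as in
/// the tests at the bottom of this module):
///
/// ```ignore
/// let mut entities = Entities::new();
/// let reserved: Vec<Entity> = entities.reserve_entities(3).collect();
/// // SAFETY: locations are deliberately left invalid here.
/// unsafe { entities.flush(|_entity, _location| {}) };
/// assert_eq!(entities.len(), 3);
/// ```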
pub fn reserve_entities(&self, count: u32) -> ReserveEntitiesIterator {
// Use one atomic subtract to grab a range of new IDs. The range might be
// entirely nonnegative, meaning all IDs come from the freelist, or entirely
// negative, meaning they are all new IDs to allocate, or a mix of both.
let range_end = self
.free_cursor
// Unwrap: these conversions can only fail on platforms that don't support 64-bit atomics
// and use AtomicIsize instead (see note on `IdCursor`).
.fetch_sub(IdCursor::try_from(count).unwrap(), Ordering::Relaxed);
let range_start = range_end - IdCursor::try_from(count).unwrap();
let freelist_range = range_start.max(0) as usize..range_end.max(0) as usize;
let (new_id_start, new_id_end) = if range_start >= 0 {
// We satisfied all requests from the freelist.
(0, 0)
} else {
// We need to allocate some new Entity IDs outside of the range of self.meta.
//
// `range_start` covers some negative territory, e.g. `-3..6`.
// Since the nonnegative values `0..6` are handled by the freelist, that
// means we need to handle the negative range here.
//
// In this example, we truncate the end to 0, leaving us with `-3..0`.
// Then we negate these values to indicate how far beyond the end of `meta`
// to go, yielding `meta.len()+0 .. meta.len()+3`.
let base = self.meta.len() as IdCursor;
let new_id_end = u32::try_from(base - range_start).expect("too many entities");
// `new_id_end` is in range, so no need to check `start`.
let new_id_start = (base - range_end.min(0)) as u32;
(new_id_start, new_id_end)
};
ReserveEntitiesIterator {
meta: &self.meta[..],
index_iter: self.pending[freelist_range].iter(),
index_range: new_id_start..new_id_end,
}
}
/// Reserve one entity ID concurrently.
///
/// Equivalent to `self.reserve_entities(1).next().unwrap()`, but more efficient.
pub fn reserve_entity(&self) -> Entity {
let n = self.free_cursor.fetch_sub(1, Ordering::Relaxed);
if n > 0 {
// Allocate from the freelist.
let index = self.pending[(n - 1) as usize];
Entity {
generation: self.meta[index as usize].generation,
index,
}
} else {
// Grab a new ID, outside the range of `meta.len()`. `flush()` must
// eventually be called to make it valid.
//
// As `self.free_cursor` goes more and more negative, we return IDs farther
// and farther beyond `meta.len()`.
Entity {
generation: 0,
index: u32::try_from(self.meta.len() as IdCursor - n).expect("too many entities"),
}
}
}
/// Check that we do not have pending work requiring `flush()` to be called.
fn verify_flushed(&mut self) {
debug_assert!(
!self.needs_flush(),
"flush() needs to be called before this operation is legal"
);
}
/// Allocate an entity ID directly.
pub fn alloc(&mut self) -> Entity {
self.verify_flushed();
self.len += 1;
if let Some(index) = self.pending.pop() {
let new_free_cursor = self.pending.len() as IdCursor;
*self.free_cursor.get_mut() = new_free_cursor;
Entity {
generation: self.meta[index as usize].generation,
index,
}
} else {
let index = u32::try_from(self.meta.len()).expect("too many entities");
self.meta.push(EntityMeta::EMPTY);
Entity {
generation: 0,
index,
}
}
}
/// Allocate a specific entity ID, overwriting its generation.
///
/// Returns the location of the entity currently using the given ID, if any. Location should be
/// written immediately.
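///
/// A hedged sketch (assuming two cooperating apps that share an index space,
/// per the note on [`Entity::from_raw`]):
///
/// ```ignore
/// let mut entities = Entities::new();
/// let shared = Entity::from_raw(40);
/// assert!(entities.alloc_at(shared).is_none()); // nothing was using ID 40
/// assert!(entities.contains(shared));
/// ```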
pub fn alloc_at(&mut self, entity: Entity) -> Option<EntityLocation> {
self.verify_flushed();
let loc = if entity.index as usize >= self.meta.len() {
self.pending.extend((self.meta.len() as u32)..entity.index);
let new_free_cursor = self.pending.len() as IdCursor;
*self.free_cursor.get_mut() = new_free_cursor;
self.meta
.resize(entity.index as usize + 1, EntityMeta::EMPTY);
self.len += 1;
None
} else if let Some(index) = self.pending.iter().position(|item| *item == entity.index) {
self.pending.swap_remove(index);
let new_free_cursor = self.pending.len() as IdCursor;
*self.free_cursor.get_mut() = new_free_cursor;
self.len += 1;
None
} else {
Some(mem::replace(
&mut self.meta[entity.index as usize].location,
EntityMeta::EMPTY.location,
))
};
self.meta[entity.index as usize].generation = entity.generation;
loc
}
/// Allocate a specific entity ID, overwriting its generation.
///
/// Returns the location of the entity currently using the given ID, if any.
pub(crate) fn alloc_at_without_replacement(
&mut self,
entity: Entity,
) -> AllocAtWithoutReplacement {
self.verify_flushed();
let result = if entity.index as usize >= self.meta.len() | else if let Some(index) = self.pending.iter().position(|item| *item == entity.index) {
self.pending.swap_remove(index);
let new_free_cursor = self.pending.len() as IdCursor;
*self.free_cursor.get_mut() = new_free_cursor;
self.len += 1;
AllocAtWithoutReplacement::DidNotExist
} else {
let current_meta = &self.meta[entity.index as usize];
if current_meta.location.archetype_id == ArchetypeId::INVALID {
AllocAtWithoutReplacement::DidNotExist
} else if current_meta.generation == entity.generation {
AllocAtWithoutReplacement::Exists(current_meta.location)
} else {
return AllocAtWithoutReplacement::ExistsWithWrongGeneration;
}
};
self.meta[entity.index as usize].generation = entity.generation;
result
}
/// Destroy an entity, allowing it to be reused.
///
/// Must not be called while reserved entities are awaiting `flush()`.
pub fn free(&mut self, entity: Entity) -> Option<EntityLocation> {
self.verify_flushed();
let meta = &mut self.meta[entity.index as usize];
if meta.generation != entity.generation {
return None;
}
meta.generation += 1;
let loc = mem::replace(&mut meta.location, EntityMeta::EMPTY.location);
self.pending.push(entity.index);
let new_free_cursor = self.pending.len() as IdCursor;
*self.free_cursor.get_mut() = new_free_cursor;
self.len -= 1;
Some(loc)
}
/// Ensure at least `n` allocations can succeed without reallocating.
pub fn reserve(&mut self, additional: u32) {
self.verify_flushed();
let freelist_size = *self.free_cursor.get_mut();
// Unwrap: these conversions can only fail on platforms that don't support 64-bit atomics
// and use AtomicIsize instead (see note on `IdCursor`).
let shortfall = IdCursor::try_from(additional).unwrap() - freelist_size;
if shortfall > 0 {
self.meta.reserve(shortfall as usize);
}
}
/// Returns true if the [`Entities`] contains the given [`entity`](Entity).
// This will return false for entities which have been freed, even if the index
// has not been reallocated, since the generation is incremented in `free`.
pub fn contains(&self, entity: Entity) -> bool {
self.resolve_from_id(entity.index())
.map_or(false, |e| e.generation() == entity.generation)
}
/// Clears all [`Entity`] from the World.
pub fn clear(&mut self) {
self.meta.clear();
self.pending.clear();
*self.free_cursor.get_mut() = 0;
self.len = 0;
}
/// Returns the location of an [`Entity`].
/// Note: for pending entities (reserved but not yet flushed), this returns `None`.
#[inline]
pub fn get(&self, entity: Entity) -> Option<EntityLocation> {
if let Some(meta) = self.meta.get(entity.index as usize) {
if meta.generation != entity.generation
|| meta.location.archetype_id == ArchetypeId::INVALID
{
return None;
}
Some(meta.location)
} else {
None
}
}
/// Updates the location of an [`Entity`]. This must be called when moving the components of
/// the entity around in storage.
///
/// # Safety
/// - `index` must be a valid entity index.
/// - `location` must be valid for the entity at `index` or immediately made valid afterwards
/// before handing control to unknown code.
#[inline]
pub(crate) unsafe fn set(&mut self, index: u32, location: EntityLocation) {
// SAFETY: Caller guarantees that `index` is a valid entity index
self.meta.get_unchecked_mut(index as usize).location = location;
}
/// Increments the `generation` of a freed [`Entity`]. The next entity ID allocated with this
/// `index` will count `generation` starting from the prior `generation` + the specified
/// value + 1.
///
/// Does nothing if no entity with this `index` has been allocated yet.
pub(crate) fn reserve_generations(&mut self, index: u32, generations: u32) -> bool {
if (index as usize) >= self.meta.len() {
return false;
}
let meta = &mut self.meta[index as usize];
if meta.location.archetype_id == ArchetypeId::INVALID {
meta.generation += generations;
true
} else {
false
}
}
/// Get the [`Entity`] with a given id, if it exists in this [`Entities`] collection.
/// Returns `None` if this [`Entity`] is outside of the range of currently reserved Entities.
///
/// Note: This method may return an [`Entity`] which is currently free.
/// [`contains`](Entities::contains) will correctly return false for freed
/// entities, since it checks the generation.
pub fn resolve_from_id(&self, index: u32) -> Option<Entity> {
let idu = index as usize;
if let Some(&EntityMeta { generation, .. }) = self.meta.get(idu) {
Some(Entity { generation, index })
} else {
// `id` is outside of the meta list - check whether it is reserved but not yet flushed.
let free_cursor = self.free_cursor.load(Ordering::Relaxed);
// If this entity was manually created, then free_cursor might be positive
// Returning None handles that case correctly
let num_pending = usize::try_from(-free_cursor).ok()?;
(idu < self.meta.len() + num_pending).then_some(Entity {
generation: 0,
index,
})
}
}
fn needs_flush(&mut self) -> bool {
*self.free_cursor.get_mut() != self.pending.len() as IdCursor
}
/// Allocates space for entities previously reserved with [`reserve_entity`](Entities::reserve_entity) or
/// [`reserve_entities`](Entities::reserve_entities), then initializes each one using the supplied function.
///
/// # Safety
/// Flush _must_ set the entity location to the correct [`ArchetypeId`] for the given [`Entity`]
/// each time init is called. This _can_ be [`ArchetypeId::INVALID`], provided the [`Entity`]
/// has not been assigned to an [`Archetype`][crate::archetype::Archetype].
///
/// Note: freshly-allocated entities (ones which don't come from the pending list) are guaranteed
/// to be initialized with the invalid archetype.
pub unsafe fn flush(&mut self, mut init: impl FnMut(Entity, &mut EntityLocation)) {
let free_cursor = self.free_cursor.get_mut();
let current_free_cursor = *free_cursor;
let new_free_cursor = if current_free_cursor >= 0 {
current_free_cursor as usize
} else {
let old_meta_len = self.meta.len();
let new_meta_len = old_meta_len + -current_free_cursor as usize;
self.meta.resize(new_meta_len, EntityMeta::EMPTY);
self.len += -current_free_cursor as u32;
for (index, meta) in self.meta.iter_mut().enumerate().skip(old_meta_len) {
init(
Entity {
index: index as u32,
generation: meta.generation,
},
&mut meta.location,
);
}
*free_cursor = 0;
0
};
self.len += (self.pending.len() - new_free_cursor) as u32;
for index in self.pending.drain(new_free_cursor..) {
let meta = &mut self.meta[index as usize];
init(
Entity {
index,
generation: meta.generation,
},
&mut meta.location,
);
}
}
/// Flushes all reserved entities to an "invalid" state. Attempting to retrieve them will return `None`
/// unless they are later populated with a valid archetype.
pub fn flush_as_invalid(&mut self) {
// SAFETY: as per `flush` safety docs, the archetype id can be set to [`ArchetypeId::INVALID`] if
// the [`Entity`] has not been assigned to an [`Archetype`][crate::archetype::Archetype], which is the case here
unsafe {
self.flush(|_entity, location| {
location.archetype_id = ArchetypeId::INVALID;
});
}
}
/// # Safety
///
/// This function is safe if and only if the world this Entities is on has no entities.
pub unsafe fn flush_and_reserve_invalid_assuming_no_entities(&mut self, count: usize) {
let free_cursor = self.free_cursor.get_mut();
*free_cursor = 0;
self.meta.reserve(count);
// the EntityMeta struct only contains integers, and it is valid to have all bytes set to u8::MAX
self.meta.as_mut_ptr().write_bytes(u8::MAX, count);
self.meta.set_len(count);
self.len = count as u32;
}
/// The count of all entities in the [`World`] that have ever been allocated,
/// including the entities that are currently freed.
///
/// This does not include entities that have been reserved but have never been
/// allocated yet.
///
/// [`World`]: crate::world::World
#[inline]
pub fn total_count(&self) -> usize {
self.meta.len()
}
/// The count of currently allocated entities.
#[inline]
pub fn len(&self) -> u32 {
self.len
}
/// Returns `true` if no entities are currently allocated.
#[inline]
pub fn is_empty(&self) -> bool {
self.len == 0
}
}
// This type is repr(C) to ensure that the layout and values within it are safe to fully fill
// with u8::MAX, as required by [`Entities::flush_and_reserve_invalid_assuming_no_entities`].
// SAFETY:
// This type must not contain any pointers at any level, and must be safe to fully fill with u8::MAX.
/// Metadata for an [`Entity`].
#[derive(Copy, Clone, Debug)]
#[repr(C)]
struct EntityMeta {
/// The current generation of the [`Entity`].
pub generation: u32,
/// The current location of the [`Entity`]
pub location: EntityLocation,
}
impl EntityMeta {
/// meta for **pending entity**
const EMPTY: EntityMeta = EntityMeta {
generation: 0,
location: EntityLocation::INVALID,
};
}
// This type is repr(C) to ensure that the layout and values within it are safe to fully fill
// with u8::MAX, as required by [`Entities::flush_and_reserve_invalid_assuming_no_entities`].
// SAFETY:
// This type must not contain any pointers at any level, and must be safe to fully fill with u8::MAX.
/// A location of an entity in an archetype.
#[derive(Copy, Clone, Debug, PartialEq)]
#[repr(C)]
pub struct EntityLocation {
/// The ID of the [`Archetype`] the [`Entity`] belongs to.
///
/// [`Archetype`]: crate::archetype::Archetype
pub archetype_id: ArchetypeId,
/// The index of the [`Entity`] within its [`Archetype`].
///
/// [`Archetype`]: crate::archetype::Archetype
pub archetype_row: ArchetypeRow,
/// The ID of the [`Table`] the [`Entity`] belongs to.
///
/// [`Table`]: crate::storage::Table
pub table_id: TableId,
/// The index of the [`Entity`] within its [`Table`].
///
/// [`Table`]: crate::storage::Table
pub table_row: TableRow,
}
impl EntityLocation {
/// location for **pending entity** and **invalid entity**
const INVALID: EntityLocation = EntityLocation {
archetype_id: ArchetypeId::INVALID,
archetype_row: ArchetypeRow::INVALID,
table_id: TableId::INVALID,
table_row: TableRow::INVALID,
};
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn entity_bits_roundtrip() {
let e = Entity {
generation: 0xDEADBEEF,
index: 0xBAADF00D,
};
assert_eq!(Entity::from_bits(e.to_bits()), e);
}
#[test]
fn reserve_entity_len() {
let mut e = Entities::new();
e.reserve_entity();
// SAFETY: entity_location is left invalid
unsafe { e.flush(|_, _| {}) };
assert_eq!(e.len(), 1);
}
#[test]
fn get_reserved_and_invalid() {
let mut entities = Entities::new();
let e = entities.reserve_entity();
assert!(entities.contains(e));
assert!(entities.get(e).is_none());
// SAFETY: entity_location is left invalid
unsafe {
entities.flush(|_entity, _location| {
// do nothing ... leaving entity location invalid
});
};
assert!(entities.contains(e));
assert!(entities.get(e).is_none());
}
#[test]
fn entity_const() {
const C1: Entity = Entity::from_raw(42);
assert_eq!(42, C1.index);
assert_eq!(0, C1.generation);
const C2: Entity = Entity::from_bits(0x0000_00ff_0000_00cc);
assert_eq!(0x0000_00cc, C2.index);
assert_eq!(0x0000_00ff, C2.generation);
const C3: u32 = Entity::from_raw(33).index();
assert_eq!(33, C3);
const C4: u32 = Entity::from_bits(0x00dd_00ff_0000_0000).generation();
assert_eq!(0x00dd_00ff, C4);
}
#[test]
fn reserve_generations() {
let mut entities = Entities::new();
let entity = entities.alloc();
entities.free(entity);
assert!(entities.reserve_generations(entity.index, 1));
}
#[test]
fn reserve_generations_and_alloc() {
const GENERATIONS: u32 = 10;
let mut entities = Entities::new();
let entity = entities.alloc();
entities.free(entity);
assert!(entities.reserve_generations(entity.index, GENERATIONS));
// The very next entity allocated should be a further generation on the same index
let next_entity = entities.alloc();
assert_eq!(next_entity.index(), entity.index());
assert!(next_entity.generation > entity.generation + GENERATIONS);
}
}
| {
self.pending.extend((self.meta.len() as u32)..entity.index);
let new_free_cursor = self.pending.len() as IdCursor;
*self.free_cursor.get_mut() = new_free_cursor;
self.meta
.resize(entity.index as usize + 1, EntityMeta::EMPTY);
self.len += 1;
AllocAtWithoutReplacement::DidNotExist
} | conditional_block |
mod.rs | //! Entity handling types.
//!
//! An **entity** exclusively owns zero or more [component] instances, all of different types, and can dynamically acquire or lose them over its lifetime.
//!
//! **empty entity**: Entity with zero components.
//! **pending entity**: Entity reserved, but not flushed yet (see [`Entities::flush`] docs for reference).
//! **reserved entity**: same as **pending entity**.
//! **invalid entity**: **pending entity** flushed as invalid (see [`Entities::flush_as_invalid`] docs for reference).
//!
//! See [`Entity`] to learn more.
//!
//! [component]: crate::component::Component
//!
//! # Usage
//!
//! Operations involving entities and their components are performed either from a system by submitting commands,
//! or from the outside (or from an exclusive system) by directly using [`World`] methods:
//!
//! |Operation|Command|Method|
//! |:---:|:---:|:---:|
//! |Spawn an entity with components|[`Commands::spawn`]|[`World::spawn`]|
//! |Spawn an entity without components|[`Commands::spawn_empty`]|[`World::spawn_empty`]|
//! |Despawn an entity|[`EntityCommands::despawn`]|[`World::despawn`]|
//! |Insert a component, bundle, or tuple of components and bundles to an entity|[`EntityCommands::insert`]|[`EntityWorldMut::insert`]|
//! |Remove a component, bundle, or tuple of components and bundles from an entity|[`EntityCommands::remove`]|[`EntityWorldMut::remove`]|
//!
//! [`World`]: crate::world::World
//! [`Commands::spawn`]: crate::system::Commands::spawn
//! [`Commands::spawn_empty`]: crate::system::Commands::spawn_empty
//! [`EntityCommands::despawn`]: crate::system::EntityCommands::despawn
//! [`EntityCommands::insert`]: crate::system::EntityCommands::insert
//! [`EntityCommands::remove`]: crate::system::EntityCommands::remove
//! [`World::spawn`]: crate::world::World::spawn
//! [`World::spawn_empty`]: crate::world::World::spawn_empty
//! [`World::despawn`]: crate::world::World::despawn
//! [`EntityWorldMut::insert`]: crate::world::EntityWorldMut::insert
//! [`EntityWorldMut::remove`]: crate::world::EntityWorldMut::remove
mod map_entities;
pub use map_entities::*;
use crate::{
archetype::{ArchetypeId, ArchetypeRow},
storage::{SparseSetIndex, TableId, TableRow},
};
use serde::{Deserialize, Serialize};
use std::{convert::TryFrom, fmt, mem, sync::atomic::Ordering};
#[cfg(target_has_atomic = "64")]
use std::sync::atomic::AtomicI64 as AtomicIdCursor;
#[cfg(target_has_atomic = "64")]
type IdCursor = i64;
/// Most modern platforms support 64-bit atomics, but some less-common platforms
/// do not. This fallback allows compilation using a 32-bit cursor instead, with
/// the caveat that some conversions may fail (and panic) at runtime.
#[cfg(not(target_has_atomic = "64"))]
use std::sync::atomic::AtomicIsize as AtomicIdCursor;
#[cfg(not(target_has_atomic = "64"))]
type IdCursor = isize;
/// Lightweight identifier of an [entity](crate::entity).
///
/// The identifier is implemented using a [generational index]: a combination of an index and a generation.
/// This allows fast insertion after data removal in an array while minimizing loss of spatial locality.
///
/// These identifiers are only valid in the [`World`] they are sourced from. Attempting to use an `Entity` to
/// fetch entity components or metadata from a different world will either fail or return unexpected results.
///
/// [generational index]: https://lucassardois.medium.com/generational-indices-guide-8e3c5f7fd594
///
/// # Usage
///
/// This data type is returned by iterating a `Query` that has `Entity` as part of its query fetch type parameter ([learn more]).
/// It can also be obtained by calling [`EntityCommands::id`] or [`EntityWorldMut::id`].
///
/// ```
/// # use bevy_ecs::prelude::*;
/// # #[derive(Component)]
/// # struct SomeComponent;
/// fn setup(mut commands: Commands) {
/// // Calling `spawn` returns `EntityCommands`.
/// let entity = commands.spawn(SomeComponent).id();
/// }
///
/// fn exclusive_system(world: &mut World) {
/// // Calling `spawn` returns `EntityWorldMut`.
/// let entity = world.spawn(SomeComponent).id();
/// }
/// #
/// # bevy_ecs::system::assert_is_system(setup);
/// # bevy_ecs::system::assert_is_system(exclusive_system);
/// ```
///
/// It can be used to refer to a specific entity to apply [`EntityCommands`], or to call [`Query::get`] (or similar methods) to access its components.
///
/// ```
/// # use bevy_ecs::prelude::*;
/// #
/// # #[derive(Component)]
/// # struct Expired;
/// #
/// fn dispose_expired_food(mut commands: Commands, query: Query<Entity, With<Expired>>) {
/// for food_entity in &query {
/// commands.entity(food_entity).despawn();
/// }
/// }
/// #
/// # bevy_ecs::system::assert_is_system(dispose_expired_food);
/// ```
///
/// [learn more]: crate::system::Query#entity-id-access
/// [`EntityCommands::id`]: crate::system::EntityCommands::id
/// [`EntityWorldMut::id`]: crate::world::EntityWorldMut::id
/// [`EntityCommands`]: crate::system::EntityCommands
/// [`Query::get`]: crate::system::Query::get
/// [`World`]: crate::world::World
#[derive(Clone, Copy, Eq, Hash, Ord, PartialEq, PartialOrd)]
pub struct Entity {
generation: u32,
index: u32,
}
pub(crate) enum AllocAtWithoutReplacement {
Exists(EntityLocation),
DidNotExist,
ExistsWithWrongGeneration,
}
impl Entity {
#[cfg(test)]
pub(crate) const fn new(index: u32, generation: u32) -> Entity {
Entity { index, generation }
}
/// An entity ID with a placeholder value. This may or may not correspond to an actual entity,
/// and should be overwritten by a new value before being used.
///
/// ## Examples
///
/// Initializing a collection (e.g. `array` or `Vec`) with a known size:
///
/// ```no_run
/// # use bevy_ecs::prelude::*;
/// // Create a new array of size 10 filled with invalid entity ids.
/// let mut entities: [Entity; 10] = [Entity::PLACEHOLDER; 10];
///
/// // ... replace the entities with valid ones.
/// ```
///
/// Deriving [`Reflect`](bevy_reflect::Reflect) for a component that has an `Entity` field:
///
/// ```no_run
/// # use bevy_ecs::{prelude::*, component::*};
/// # use bevy_reflect::Reflect;
/// #[derive(Reflect, Component)]
/// #[reflect(Component)]
/// pub struct MyStruct {
/// pub entity: Entity,
/// }
///
/// impl FromWorld for MyStruct {
/// fn from_world(_world: &mut World) -> Self {
/// Self {
/// entity: Entity::PLACEHOLDER,
/// }
/// }
/// }
/// ```
pub const PLACEHOLDER: Self = Self::from_raw(u32::MAX);
/// Creates a new entity ID with the specified `index` and a generation of 0.
///
/// # Note
///
/// Spawning a specific `entity` value is __rarely the right choice__. Most apps should favor
/// [`Commands::spawn`](crate::system::Commands::spawn). This method should generally
/// only be used for sharing entities across apps, and only when they have a scheme
/// worked out to share an index space (which doesn't happen by default).
///
/// In general, one should not try to synchronize the ECS by attempting to ensure that
/// `Entity` lines up between instances, but instead insert a secondary identifier as
/// a component.
pub const fn from_raw(index: u32) -> Entity {
Entity {
index,
generation: 0,
}
}
/// Convert to a form convenient for passing outside of Rust.
///
/// Only useful for identifying entities within the same instance of an application. Do not use
/// for serialization between runs.
///
/// No particular structure is guaranteed for the returned bits.
pub const fn to_bits(self) -> u64 {
(self.generation as u64) << 32 | self.index as u64
}
/// Reconstruct an `Entity` previously destructured with [`Entity::to_bits`].
///
/// Only useful when applied to results from `to_bits` in the same instance of an application.
pub const fn from_bits(bits: u64) -> Self {
Self {
generation: (bits >> 32) as u32,
index: bits as u32,
}
}
/// Return a transiently unique identifier.
///
/// No two simultaneously-live entities share the same index, but dead entities' indices may collide
/// with both live and dead entities. Useful for compactly representing entities within a
/// specific snapshot of the world, such as when serializing.
#[inline]
pub const fn index(self) -> u32 {
self.index
}
/// Returns the generation of this Entity's index. The generation is incremented each time an
/// entity with a given index is despawned. This serves as a "count" of the number of times a
/// given index has been reused; `(index, generation)` pairs uniquely identify a given Entity.
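///
/// A hedged illustration of the generation bump on reuse (crate-internal
/// allocator API, so not compiled):
///
/// ```ignore
/// let mut entities = Entities::new();
/// let first = entities.alloc();
/// entities.free(first);
/// let second = entities.alloc(); // reuses the index...
/// assert_eq!(second.index(), first.index());
/// assert_eq!(second.generation(), first.generation() + 1); // ...with a new generation
/// ```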
#[inline]
pub const fn generation(self) -> u32 {
self.generation
}
}
impl Serialize for Entity {
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
where
S: serde::Serializer,
{
serializer.serialize_u64(self.to_bits())
}
}
impl<'de> Deserialize<'de> for Entity {
fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
where
D: serde::Deserializer<'de>,
{
let id: u64 = serde::de::Deserialize::deserialize(deserializer)?;
Ok(Entity::from_bits(id))
}
}
impl fmt::Debug for Entity {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
write!(f, "{}v{}", self.index, self.generation)
}
}
impl SparseSetIndex for Entity {
#[inline]
fn sparse_set_index(&self) -> usize {
self.index() as usize
}
#[inline]
fn get_sparse_set_index(value: usize) -> Self {
Entity::from_raw(value as u32)
}
}
/// An [`Iterator`] returning a sequence of [`Entity`] values from
/// [`Entities::reserve_entities`](crate::entity::Entities::reserve_entities).
pub struct ReserveEntitiesIterator<'a> {
// Metas, so we can recover the current generation for anything in the freelist.
meta: &'a [EntityMeta],
// Reserved indices formerly in the freelist to hand out.
index_iter: std::slice::Iter<'a, u32>,
// New Entity indices to hand out, outside the range of meta.len().
index_range: std::ops::Range<u32>,
}
impl<'a> Iterator for ReserveEntitiesIterator<'a> {
type Item = Entity;
fn next(&mut self) -> Option<Self::Item> {
self.index_iter
.next()
.map(|&index| Entity {
generation: self.meta[index as usize].generation,
index,
})
.or_else(|| {
self.index_range.next().map(|index| Entity {
generation: 0,
index,
})
})
}
fn size_hint(&self) -> (usize, Option<usize>) {
let len = self.index_iter.len() + self.index_range.len();
(len, Some(len))
}
}
impl<'a> core::iter::ExactSizeIterator for ReserveEntitiesIterator<'a> {}
impl<'a> core::iter::FusedIterator for ReserveEntitiesIterator<'a> {}
/// A [`World`]'s internal metadata store on all of its entities.
///
/// Contains metadata on:
/// - The generation of every entity.
/// - The alive/dead status of a particular entity. (i.e. "has entity 3 been despawned?")
/// - The location of the entity's components in memory (via [`EntityLocation`])
///
/// [`World`]: crate::world::World
#[derive(Debug)]
pub struct Entities {
meta: Vec<EntityMeta>,
/// The `pending` and `free_cursor` fields describe three sets of Entity IDs
/// that have been freed or are in the process of being allocated:
///
/// - The `freelist` IDs, previously freed by `free()`. These IDs are available to any of
/// [`alloc`], [`reserve_entity`] or [`reserve_entities`]. Allocation will always prefer
/// these over brand new IDs.
///
/// - The `reserved` list of IDs that were once in the freelist, but got reserved by
/// [`reserve_entities`] or [`reserve_entity`]. They are now waiting for [`flush`] to make them
/// fully allocated.
///
/// - The count of new IDs that do not yet exist in `self.meta`, but which we have handed out
/// and reserved. [`flush`] will allocate room for them in `self.meta`.
///
/// The contents of `pending` look like this:
///
/// ```txt
/// ----------------------------
/// | freelist | reserved |
/// ----------------------------
/// ^ ^
/// free_cursor pending.len()
/// ```
///
/// As IDs are allocated, `free_cursor` is atomically decremented, moving
/// items from the freelist into the reserved list by sliding over the boundary.
///
/// Once the freelist runs out, `free_cursor` starts going negative.
/// The more negative it is, the more IDs have been reserved starting exactly at
/// the end of `meta.len()`.
///
/// This formulation allows us to reserve any number of IDs first from the freelist
/// and then from the new IDs, using only a single atomic subtract.
///
/// Once [`flush`] is done, `free_cursor` will equal `pending.len()`.
///
/// [`alloc`]: Entities::alloc
/// [`reserve_entity`]: Entities::reserve_entity
/// [`reserve_entities`]: Entities::reserve_entities
/// [`flush`]: Entities::flush
pending: Vec<u32>,
free_cursor: AtomicIdCursor,
/// Stores the number of allocated entities, as returned by [`len`](Entities::len).
len: u32,
}
impl Entities {
pub(crate) const fn new() -> Self {
Entities {
meta: Vec::new(),
pending: Vec::new(),
free_cursor: AtomicIdCursor::new(0),
len: 0,
}
}
/// Reserve entity IDs concurrently.
///
/// Storage for entity generation and location is lazily allocated by calling [`flush`](Entities::flush).
pub fn reserve_entities(&self, count: u32) -> ReserveEntitiesIterator {
// Use one atomic subtract to grab a range of new IDs. The range might be
// entirely nonnegative, meaning all IDs come from the freelist, or entirely
// negative, meaning they are all new IDs to allocate, or a mix of both.
let range_end = self
.free_cursor
// Unwrap: these conversions can only fail on platforms that don't support 64-bit atomics
// and use AtomicIsize instead (see note on `IdCursor`).
.fetch_sub(IdCursor::try_from(count).unwrap(), Ordering::Relaxed);
let range_start = range_end - IdCursor::try_from(count).unwrap();
let freelist_range = range_start.max(0) as usize..range_end.max(0) as usize;
let (new_id_start, new_id_end) = if range_start >= 0 {
// We satisfied all requests from the freelist.
(0, 0)
} else {
// We need to allocate some new Entity IDs outside of the range of self.meta.
//
// `range_start` covers some negative territory, e.g. `-3..6`.
// Since the nonnegative values `0..6` are handled by the freelist, that
// means we need to handle the negative range here.
//
// In this example, we truncate the end to 0, leaving us with `-3..0`.
// Then we negate these values to indicate how far beyond the end of `meta`
// to go, yielding `meta.len()+0 .. meta.len()+3`.
let base = self.meta.len() as IdCursor;
let new_id_end = u32::try_from(base - range_start).expect("too many entities");
// `new_id_end` is in range, so no need to check `start`.
let new_id_start = (base - range_end.min(0)) as u32;
(new_id_start, new_id_end)
};
ReserveEntitiesIterator {
meta: &self.meta[..],
index_iter: self.pending[freelist_range].iter(),
index_range: new_id_start..new_id_end,
}
}
/// Reserve one entity ID concurrently.
///
/// Equivalent to `self.reserve_entities(1).next().unwrap()`, but more efficient.
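///
/// A hedged sketch (crate-internal API; mirrors the `reserve_entity_len` and
/// `get_reserved_and_invalid` tests at the bottom of this module):
///
/// ```ignore
/// let mut entities = Entities::new();
/// let e = entities.reserve_entity(); // usable as an ID right away
/// assert!(entities.contains(e));     // ...but it has no location yet
/// assert!(entities.get(e).is_none());
/// // SAFETY: locations are deliberately left invalid here.
/// unsafe { entities.flush(|_entity, _location| {}) };
/// ```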
pub fn reserve_entity(&self) -> Entity {
let n = self.free_cursor.fetch_sub(1, Ordering::Relaxed);
if n > 0 {
// Allocate from the freelist.
let index = self.pending[(n - 1) as usize];
Entity {
generation: self.meta[index as usize].generation,
index,
}
} else {
// Grab a new ID, outside the range of `meta.len()`. `flush()` must
// eventually be called to make it valid.
//
// As `self.free_cursor` goes more and more negative, we return IDs farther
// and farther beyond `meta.len()`.
Entity {
generation: 0,
index: u32::try_from(self.meta.len() as IdCursor - n).expect("too many entities"),
}
}
}
/// Check that we do not have pending work requiring `flush()` to be called.
fn verify_flushed(&mut self) {
debug_assert!(
!self.needs_flush(),
"flush() needs to be called before this operation is legal"
);
}
/// Allocate an entity ID directly.
pub fn alloc(&mut self) -> Entity {
self.verify_flushed();
self.len += 1;
if let Some(index) = self.pending.pop() {
let new_free_cursor = self.pending.len() as IdCursor;
*self.free_cursor.get_mut() = new_free_cursor;
Entity {
generation: self.meta[index as usize].generation,
index,
}
} else {
let index = u32::try_from(self.meta.len()).expect("too many entities");
self.meta.push(EntityMeta::EMPTY);
Entity {
generation: 0,
index,
}
}
}
/// Allocate a specific entity ID, overwriting its generation.
///
/// Returns the location of the entity currently using the given ID, if any. Location should be
/// written immediately.
pub fn alloc_at(&mut self, entity: Entity) -> Option<EntityLocation> {
self.verify_flushed();
let loc = if entity.index as usize >= self.meta.len() {
self.pending.extend((self.meta.len() as u32)..entity.index);
let new_free_cursor = self.pending.len() as IdCursor;
*self.free_cursor.get_mut() = new_free_cursor;
self.meta
.resize(entity.index as usize + 1, EntityMeta::EMPTY);
self.len += 1;
None
} else if let Some(index) = self.pending.iter().position(|item| *item == entity.index) {
self.pending.swap_remove(index);
let new_free_cursor = self.pending.len() as IdCursor;
*self.free_cursor.get_mut() = new_free_cursor;
self.len += 1;
None
} else {
Some(mem::replace(
&mut self.meta[entity.index as usize].location,
EntityMeta::EMPTY.location,
))
};
self.meta[entity.index as usize].generation = entity.generation;
loc
}
/// Allocate a specific entity ID, overwriting its generation.
///
/// Returns the location of the entity currently using the given ID, if any.
pub(crate) fn alloc_at_without_replacement(
&mut self,
entity: Entity,
) -> AllocAtWithoutReplacement {
self.verify_flushed();
let result = if entity.index as usize >= self.meta.len() {
self.pending.extend((self.meta.len() as u32)..entity.index);
let new_free_cursor = self.pending.len() as IdCursor;
*self.free_cursor.get_mut() = new_free_cursor;
self.meta
.resize(entity.index as usize + 1, EntityMeta::EMPTY);
self.len += 1;
AllocAtWithoutReplacement::DidNotExist
} else if let Some(index) = self.pending.iter().position(|item| *item == entity.index) {
self.pending.swap_remove(index);
let new_free_cursor = self.pending.len() as IdCursor;
*self.free_cursor.get_mut() = new_free_cursor;
self.len += 1;
AllocAtWithoutReplacement::DidNotExist
} else {
let current_meta = &self.meta[entity.index as usize];
if current_meta.location.archetype_id == ArchetypeId::INVALID {
AllocAtWithoutReplacement::DidNotExist
} else if current_meta.generation == entity.generation {
AllocAtWithoutReplacement::Exists(current_meta.location)
} else {
return AllocAtWithoutReplacement::ExistsWithWrongGeneration;
}
};
self.meta[entity.index as usize].generation = entity.generation;
result
}
/// Destroy an entity, allowing it to be reused.
///
/// Must not be called while reserved entities are awaiting `flush()`.
pub fn free(&mut self, entity: Entity) -> Option<EntityLocation> {
self.verify_flushed();
let meta = &mut self.meta[entity.index as usize];
if meta.generation != entity.generation {
return None;
}
meta.generation += 1;
let loc = mem::replace(&mut meta.location, EntityMeta::EMPTY.location);
self.pending.push(entity.index);
let new_free_cursor = self.pending.len() as IdCursor;
*self.free_cursor.get_mut() = new_free_cursor;
self.len -= 1;
Some(loc)
}
/// Ensure at least `n` allocations can succeed without reallocating.
pub fn reserve(&mut self, additional: u32) {
self.verify_flushed();
let freelist_size = *self.free_cursor.get_mut();
// Unwrap: these conversions can only fail on platforms that don't support 64-bit atomics
// and use AtomicIsize instead (see note on `IdCursor`).
let shortfall = IdCursor::try_from(additional).unwrap() - freelist_size;
if shortfall > 0 {
self.meta.reserve(shortfall as usize);
}
}
/// Returns true if the [`Entities`] contains the given [`entity`](Entity).
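///
/// A hedged illustration of the generation check (crate-internal API, so not
/// compiled):
///
/// ```ignore
/// let mut entities = Entities::new();
/// let e = entities.alloc();
/// assert!(entities.contains(e));
/// entities.free(e); // bumps the generation stored for this index
/// assert!(!entities.contains(e));
/// ```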
// This will return false for entities which have been freed, even if the index
// has not been reallocated, since the generation is incremented in `free`.
pub fn contains(&self, entity: Entity) -> bool {
self.resolve_from_id(entity.index())
.map_or(false, |e| e.generation() == entity.generation)
}
/// Clears all [`Entity`] from the World.
pub fn clear(&mut self) {
self.meta.clear();
self.pending.clear();
*self.free_cursor.get_mut() = 0;
self.len = 0;
}
/// Returns the location of an [`Entity`].
/// Note: for pending entities (reserved but not yet flushed), this returns `None`.
#[inline]
pub fn get(&self, entity: Entity) -> Option<EntityLocation> {
if let Some(meta) = self.meta.get(entity.index as usize) {
if meta.generation != entity.generation
|| meta.location.archetype_id == ArchetypeId::INVALID
{
return None;
}
Some(meta.location)
} else {
None
}
}
/// Updates the location of an [`Entity`]. This must be called when moving the components of
/// the entity around in storage.
///
/// # Safety
/// - `index` must be a valid entity index.
/// - `location` must be valid for the entity at `index` or immediately made valid afterwards
/// before handing control to unknown code.
#[inline]
pub(crate) unsafe fn set(&mut self, index: u32, location: EntityLocation) |
/// Increments the `generation` of a freed [`Entity`]. The next entity ID allocated with this
/// `index` will count `generation` starting from the prior `generation` + the specified
/// value + 1.
///
/// Does nothing if no entity with this `index` has been allocated yet.
pub(crate) fn reserve_generations(&mut self, index: u32, generations: u32) -> bool {
if (index as usize) >= self.meta.len() {
return false;
}
let meta = &mut self.meta[index as usize];
if meta.location.archetype_id == ArchetypeId::INVALID {
meta.generation += generations;
true
} else {
false
}
}
/// Get the [`Entity`] with a given id, if it exists in this [`Entities`] collection.
/// Returns `None` if this [`Entity`] is outside of the range of currently reserved Entities.
///
/// Note: This method may return an [`Entity`] which is currently free.
/// [`contains`](Entities::contains) will correctly return false for freed
/// entities, since it checks the generation.
pub fn resolve_from_id(&self, index: u32) -> Option<Entity> {
let idu = index as usize;
if let Some(&EntityMeta { generation, .. }) = self.meta.get(idu) {
Some(Entity { generation, index })
} else {
// `id` is outside of the meta list - check whether it is reserved but not yet flushed.
let free_cursor = self.free_cursor.load(Ordering::Relaxed);
// If this entity was manually created, then free_cursor might be positive
// Returning None handles that case correctly
let num_pending = usize::try_from(-free_cursor).ok()?;
(idu < self.meta.len() + num_pending).then_some(Entity {
generation: 0,
index,
})
}
}
fn needs_flush(&mut self) -> bool {
*self.free_cursor.get_mut() != self.pending.len() as IdCursor
}
/// Allocates space for entities previously reserved with [`reserve_entity`](Entities::reserve_entity) or
/// [`reserve_entities`](Entities::reserve_entities), then initializes each one using the supplied function.
///
/// # Safety
/// Flush _must_ set the entity location to the correct [`ArchetypeId`] for the given [`Entity`]
/// each time init is called. This _can_ be [`ArchetypeId::INVALID`], provided the [`Entity`]
/// has not been assigned to an [`Archetype`][crate::archetype::Archetype].
///
/// Note: freshly-allocated entities (ones which don't come from the pending list) are guaranteed
/// to be initialized with the invalid archetype.
pub unsafe fn flush(&mut self, mut init: impl FnMut(Entity, &mut EntityLocation)) {
let free_cursor = self.free_cursor.get_mut();
let current_free_cursor = *free_cursor;
let new_free_cursor = if current_free_cursor >= 0 {
current_free_cursor as usize
} else {
let old_meta_len = self.meta.len();
let new_meta_len = old_meta_len + -current_free_cursor as usize;
self.meta.resize(new_meta_len, EntityMeta::EMPTY);
self.len += -current_free_cursor as u32;
for (index, meta) in self.meta.iter_mut().enumerate().skip(old_meta_len) {
init(
Entity {
index: index as u32,
generation: meta.generation,
},
&mut meta.location,
);
}
*free_cursor = 0;
0
};
self.len += (self.pending.len() - new_free_cursor) as u32;
for index in self.pending.drain(new_free_cursor..) {
let meta = &mut self.meta[index as usize];
init(
Entity {
index,
generation: meta.generation,
},
&mut meta.location,
);
}
}
/// Flushes all reserved entities to an "invalid" state. Attempting to retrieve them will return `None`
/// unless they are later populated with a valid archetype.
pub fn flush_as_invalid(&mut self) {
// SAFETY: as per `flush` safety docs, the archetype id can be set to [`ArchetypeId::INVALID`] if
// the [`Entity`] has not been assigned to an [`Archetype`][crate::archetype::Archetype], which is the case here
unsafe {
self.flush(|_entity, location| {
location.archetype_id = ArchetypeId::INVALID;
});
}
}
/// # Safety
///
/// This function is safe if and only if the world this Entities is on has no entities.
pub unsafe fn flush_and_reserve_invalid_assuming_no_entities(&mut self, count: usize) {
let free_cursor = self.free_cursor.get_mut();
*free_cursor = 0;
self.meta.reserve(count);
// the EntityMeta struct only contains integers, and it is valid to have all bytes set to u8::MAX
self.meta.as_mut_ptr().write_bytes(u8::MAX, count);
self.meta.set_len(count);
self.len = count as u32;
}
/// The count of all entities in the [`World`] that have ever been allocated,
/// including the entities that are currently freed.
///
/// This does not include entities that have been reserved but have never been
/// allocated yet.
///
/// [`World`]: crate::world::World
#[inline]
pub fn total_count(&self) -> usize {
self.meta.len()
}
/// The count of currently allocated entities.
#[inline]
pub fn len(&self) -> u32 {
self.len
}
/// Returns `true` if no entities are currently allocated.
#[inline]
pub fn is_empty(&self) -> bool {
self.len == 0
}
}
// This type is repr(C) to ensure that the layout and values within it are safe to fully fill
// with u8::MAX, as required by [`Entities::flush_and_reserve_invalid_assuming_no_entities`].
// SAFETY:
// This type must not contain any pointers at any level, and must be safe to fully fill with u8::MAX.
/// Metadata for an [`Entity`].
#[derive(Copy, Clone, Debug)]
#[repr(C)]
struct EntityMeta {
/// The current generation of the [`Entity`].
pub generation: u32,
/// The current location of the [`Entity`]
pub location: EntityLocation,
}
impl EntityMeta {
/// meta for **pending entity**
const EMPTY: EntityMeta = EntityMeta {
generation: 0,
location: EntityLocation::INVALID,
};
}
// This type is repr(C) to ensure that the layout and values within it are safe to fully fill
// with u8::MAX, as required by [`Entities::flush_and_reserve_invalid_assuming_no_entities`].
// SAFETY:
// This type must not contain any pointers at any level, and must be safe to fully fill with u8::MAX.
/// A location of an entity in an archetype.
#[derive(Copy, Clone, Debug, PartialEq)]
#[repr(C)]
pub struct EntityLocation {
/// The ID of the [`Archetype`] the [`Entity`] belongs to.
///
/// [`Archetype`]: crate::archetype::Archetype
pub archetype_id: ArchetypeId,
/// The index of the [`Entity`] within its [`Archetype`].
///
/// [`Archetype`]: crate::archetype::Archetype
pub archetype_row: ArchetypeRow,
/// The ID of the [`Table`] the [`Entity`] belongs to.
///
/// [`Table`]: crate::storage::Table
pub table_id: TableId,
/// The index of the [`Entity`] within its [`Table`].
///
/// [`Table`]: crate::storage::Table
pub table_row: TableRow,
}
impl EntityLocation {
/// location for **pending entity** and **invalid entity**
const INVALID: EntityLocation = EntityLocation {
archetype_id: ArchetypeId::INVALID,
archetype_row: ArchetypeRow::INVALID,
table_id: TableId::INVALID,
table_row: TableRow::INVALID,
};
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn entity_bits_roundtrip() {
let e = Entity {
generation: 0xDEADBEEF,
index: 0xBAADF00D,
};
assert_eq!(Entity::from_bits(e.to_bits()), e);
}
#[test]
fn reserve_entity_len() {
let mut e = Entities::new();
e.reserve_entity();
// SAFETY: entity_location is left invalid
unsafe { e.flush(|_, _| {}) };
assert_eq!(e.len(), 1);
}
#[test]
fn get_reserved_and_invalid() {
let mut entities = Entities::new();
let e = entities.reserve_entity();
assert!(entities.contains(e));
assert!(entities.get(e).is_none());
// SAFETY: entity_location is left invalid
unsafe {
entities.flush(|_entity, _location| {
// do nothing ... leaving entity location invalid
});
};
assert!(entities.contains(e));
assert!(entities.get(e).is_none());
}
#[test]
fn entity_const() {
const C1: Entity = Entity::from_raw(42);
assert_eq!(42, C1.index);
assert_eq!(0, C1.generation);
const C2: Entity = Entity::from_bits(0x0000_00ff_0000_00cc);
assert_eq!(0x0000_00cc, C2.index);
assert_eq!(0x0000_00ff, C2.generation);
const C3: u32 = Entity::from_raw(33).index();
assert_eq!(33, C3);
const C4: u32 = Entity::from_bits(0x00dd_00ff_0000_0000).generation();
assert_eq!(0x00dd_00ff, C4);
}
#[test]
fn reserve_generations() {
let mut entities = Entities::new();
let entity = entities.alloc();
entities.free(entity);
assert!(entities.reserve_generations(entity.index, 1));
}
#[test]
fn reserve_generations_and_alloc() {
const GENERATIONS: u32 = 10;
let mut entities = Entities::new();
let entity = entities.alloc();
entities.free(entity);
assert!(entities.reserve_generations(entity.index, GENERATIONS));
// The very next entity allocated should be a further generation on the same index
let next_entity = entities.alloc();
assert_eq!(next_entity.index(), entity.index());
assert!(next_entity.generation > entity.generation + GENERATIONS);
}
}
| {
// SAFETY: Caller guarantees that `index` is a valid entity index
self.meta.get_unchecked_mut(index as usize).location = location;
} | identifier_body |
windows.rs | //! Windows-specific types for signal handling.
//!
//! This module is only defined on Windows and contains the primary `Event` type
//! for receiving notifications of events. These events are listened for via the
//! `SetConsoleCtrlHandler` function, which receives events of the type
//! `CTRL_C_EVENT` and `CTRL_BREAK_EVENT`.
#![cfg(windows)]
extern crate mio;
extern crate winapi;
use std::cell::RefCell;
use std::io;
use std::sync::atomic::{AtomicBool, Ordering};
use std::sync::{Once, ONCE_INIT};
use futures::future;
use futures::stream::Fuse;
use futures::sync::mpsc;
use futures::sync::oneshot;
use futures::{Async, Future, IntoFuture, Poll, Stream};
use tokio_reactor::{Handle, PollEvented};
use mio::Ready;
use self::winapi::shared::minwindef::*;
use self::winapi::um::wincon::*;
use IoFuture;
extern "system" {
fn SetConsoleCtrlHandler(HandlerRoutine: usize, Add: BOOL) -> BOOL;
}
static INIT: Once = ONCE_INIT;
static mut GLOBAL_STATE: *mut GlobalState = 0 as *mut _;
/// Stream of events discovered via `SetConsoleCtrlHandler`.
///
/// This structure can be used to listen for events of the type `CTRL_C_EVENT`
/// and `CTRL_BREAK_EVENT`. The `Stream` trait is implemented for this struct
/// and will resolve for each notification received by the process. Note that
/// there are a few limitations to be aware of:
///
/// * A notification to this process notifies *all* `Event` streams for that
/// event type.
/// * Notifications to an `Event` stream **are coalesced** if they aren't
/// processed quickly enough. This means that if two notifications are
/// received back-to-back, then the stream may only receive one item about the
/// two notifications.
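///
/// A hedged usage sketch in this module's futures-0.1 style (the crate path
/// for `Event` and the `tokio` runtime are assumptions of the example):
///
/// ```ignore
/// use futures::{Future, Stream};
///
/// // `Event::ctrl_c()` resolves to an `Event`, which is then a stream of `()`.
/// let ctrl_c = Event::ctrl_c().flatten_stream().for_each(|()| {
///     println!("ctrl-c received");
///     Ok(())
/// });
/// tokio::run(ctrl_c.map_err(|e| panic!("ctrl-c error: {}", e)));
/// ```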
pub struct Event {
reg: PollEvented<MyRegistration>,
_finished: oneshot::Sender<()>,
}
struct GlobalState {
ready: mio::SetReadiness,
tx: mpsc::UnboundedSender<Message>,
ctrl_c: GlobalEventState,
ctrl_break: GlobalEventState,
}
struct GlobalEventState {
ready: AtomicBool,
}
enum Message {
NewEvent(DWORD, oneshot::Sender<io::Result<Event>>),
}
struct DriverTask {
handle: Handle,
reg: PollEvented<MyRegistration>,
rx: Fuse<mpsc::UnboundedReceiver<Message>>,
ctrl_c: EventState,
ctrl_break: EventState,
}
struct EventState {
tasks: Vec<(RefCell<oneshot::Receiver<()>>, mio::SetReadiness)>,
}
impl Event {
/// Creates a new stream listening for the `CTRL_C_EVENT` events.
///
/// This function will register a handler via `SetConsoleCtrlHandler` and
/// deliver notifications to the returned stream.
pub fn ctrl_c() -> IoFuture<Event> {
Event::ctrl_c_handle(&Handle::current())
}
/// Creates a new stream listening for the `CTRL_C_EVENT` events.
///
/// This function will register a handler via `SetConsoleCtrlHandler` and
/// deliver notifications to the returned stream.
pub fn ctrl_c_handle(handle: &Handle) -> IoFuture<Event> {
Event::new(CTRL_C_EVENT, handle)
}
/// Creates a new stream listening for the `CTRL_BREAK_EVENT` events.
///
/// This function will register a handler via `SetConsoleCtrlHandler` and
/// deliver notifications to the returned stream.
    pub fn ctrl_break() -> IoFuture<Event> {
Event::ctrl_break_handle(&Handle::current())
}
/// Creates a new stream listening for the `CTRL_BREAK_EVENT` events.
///
/// This function will register a handler via `SetConsoleCtrlHandler` and
/// deliver notifications to the returned stream.
pub fn ctrl_break_handle(handle: &Handle) -> IoFuture<Event> {
Event::new(CTRL_BREAK_EVENT, handle)
}
fn new(signum: DWORD, handle: &Handle) -> IoFuture<Event> {
let mut init = None;
INIT.call_once(|| {
init = Some(global_init(handle));
});
let new_signal = future::lazy(move || {
let (tx, rx) = oneshot::channel();
let msg = Message::NewEvent(signum, tx);
let res = unsafe { (*GLOBAL_STATE).tx.clone().unbounded_send(msg) };
res.expect(
"failed to request a new signal stream, did the \
first event loop go away?",
);
rx.then(|r| r.unwrap())
});
match init {
Some(init) => Box::new(init.into_future().and_then(|()| new_signal)),
None => Box::new(new_signal),
}
}
}
impl Stream for Event {
type Item = ();
type Error = io::Error;
fn poll(&mut self) -> Poll<Option<()>, io::Error> {
if !self.reg.poll_read_ready(Ready::readable())?.is_ready() {
return Ok(Async::NotReady);
}
self.reg.clear_read_ready(Ready::readable())?;
self.reg
.get_ref()
.inner
.borrow()
.as_ref()
.unwrap()
.1
.set_readiness(mio::Ready::empty())
.expect("failed to set readiness");
Ok(Async::Ready(Some(())))
}
}
fn global_init(handle: &Handle) -> io::Result<()> {
let (tx, rx) = mpsc::unbounded();
let reg = MyRegistration {
inner: RefCell::new(None),
};
let reg = try!(PollEvented::new_with_handle(reg, handle));
let ready = reg.get_ref().inner.borrow().as_ref().unwrap().1.clone();
unsafe {
let state = Box::new(GlobalState {
ready: ready,
ctrl_c: GlobalEventState {
ready: AtomicBool::new(false),
},
ctrl_break: GlobalEventState {
ready: AtomicBool::new(false),
},
tx: tx,
});
GLOBAL_STATE = Box::into_raw(state);
let rc = SetConsoleCtrlHandler(handler as usize, TRUE);
if rc == 0 {
Box::from_raw(GLOBAL_STATE);
GLOBAL_STATE = 0 as *mut _;
return Err(io::Error::last_os_error());
}
::tokio_executor::spawn(Box::new(DriverTask {
handle: handle.clone(),
rx: rx.fuse(),
reg: reg,
ctrl_c: EventState { tasks: Vec::new() },
ctrl_break: EventState { tasks: Vec::new() },
}));
Ok(())
}
}
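// Flow summary: the `handler` callback registered above only flips an
// `AtomicBool` and wakes the one global registration; the spawned `DriverTask`
// then fans the notification out by setting readiness on each live `Event`
// stream's own registration (see `check_events` below).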
impl Future for DriverTask {
type Item = ();
type Error = ();
fn poll(&mut self) -> Poll<(), ()> {
self.check_event_drops();
self.check_messages();
self.check_events().unwrap();
// TODO: when to finish this task?
Ok(Async::NotReady)
}
}
impl DriverTask {
fn check_event_drops(&mut self) {
self.ctrl_c
.tasks
.retain(|task| !task.0.borrow_mut().poll().is_err());
self.ctrl_break
.tasks
.retain(|task| !task.0.borrow_mut().poll().is_err());
}
fn check_messages(&mut self) {
loop {
// Acquire the next message
let message = match self.rx.poll().unwrap() {
Async::Ready(Some(e)) => e,
Async::Ready(None) | Async::NotReady => break,
};
let (sig, complete) = match message {
Message::NewEvent(sig, complete) => (sig, complete),
};
let event = if sig == CTRL_C_EVENT {
&mut self.ctrl_c
} else {
&mut self.ctrl_break
};
// Acquire the (registration, set_readiness) pair by... assuming
// we're on the event loop (true because of the spawn above).
let reg = MyRegistration {
inner: RefCell::new(None),
};
let reg = match PollEvented::new_with_handle(reg, &self.handle) {
Ok(reg) => reg,
Err(e) => {
drop(complete.send(Err(e)));
continue;
}
};
// Create the `Event` to pass back and then also keep a handle to
// the `SetReadiness` for ourselves internally.
let (tx, rx) = oneshot::channel();
let ready = reg.get_ref().inner.borrow_mut().as_mut().unwrap().1.clone();
drop(complete.send(Ok(Event {
reg: reg,
_finished: tx,
})));
event.tasks.push((RefCell::new(rx), ready));
}
}
fn check_events(&mut self) -> io::Result<()> {
if self.reg.poll_read_ready(Ready::readable())?.is_not_ready() {
return Ok(());
}
self.reg.clear_read_ready(Ready::readable())?;
self.reg
.get_ref()
.inner
.borrow()
.as_ref()
.unwrap()
.1
.set_readiness(mio::Ready::empty())
.unwrap();
if unsafe { (*GLOBAL_STATE).ctrl_c.ready.swap(false, Ordering::SeqCst) } {
for task in self.ctrl_c.tasks.iter() {
task.1.set_readiness(mio::Ready::readable()).unwrap();
}
}
if unsafe {
(*GLOBAL_STATE)
.ctrl_break
.ready
.swap(false, Ordering::SeqCst)
} {
for task in self.ctrl_break.tasks.iter() {
task.1.set_readiness(mio::Ready::readable()).unwrap();
}
}
Ok(())
}
}
unsafe extern "system" fn handler(ty: DWORD) -> BOOL {
let event = match ty {
CTRL_C_EVENT => &(*GLOBAL_STATE).ctrl_c,
CTRL_BREAK_EVENT => &(*GLOBAL_STATE).ctrl_break,
_ => return FALSE,
};
if event.ready.swap(true, Ordering::SeqCst) {
FALSE
} else {
drop((*GLOBAL_STATE).ready.set_readiness(mio::Ready::readable()));
// TODO: this will report that we handled a CTRL_BREAK_EVENT when in
// fact we may not have any streams actually created for that
// event.
TRUE
}
}
struct MyRegistration {
inner: RefCell<Option<(mio::Registration, mio::SetReadiness)>>,
}
impl mio::Evented for MyRegistration {
fn register(
&self,
poll: &mio::Poll,
token: mio::Token,
events: mio::Ready,
opts: mio::PollOpt,
) -> io::Result<()> {
let reg = mio::Registration::new2();
reg.0.register(poll, token, events, opts)?;
*self.inner.borrow_mut() = Some(reg);
Ok(())
}
fn reregister(
&self,
_poll: &mio::Poll,
_token: mio::Token,
_events: mio::Ready,
_opts: mio::PollOpt,
) -> io::Result<()> {
Ok(())
}
fn deregister(&self, _poll: &mio::Poll) -> io::Result<()> {
Ok(())
}
}
}
annexe.py
import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
import plotly.graph_objs as go
import seaborn as sns
from IPython.display import display  # used by print_samples below
FICHIERS = ["EdStatsCountry.csv","EdStatsCountry-Series.csv","EdStatsData.csv","EdStatsFootNote.csv"
,"EdStatsSeries.csv"]
LOCALISATION ='F:/cour/OC/projet2/'
INDEX = ["secondary","tertiary","school|educational","student","inhabitant|household","population","technology|computer|internet"]
VALUES_NOT_WANTED = ["WLD","ARE","LMC","LIC","LMY","UMC","MIC","HIC","NOC","OEC","EUU","EAS","EAP","SAS","OED","ECS","LCN","LAC","LDC","SSF","SSA","ECA","MEA","NAC","HPC","MNA","EMU","ARB","IDN","ZAF"]
COLOR_LIST = ["pastel","muted","colorblind","deep","dark","bright"]
NOT_IN_STUDY_YEARS = ['FIN','NZL','ISL','AUT','SMR','CAN']
def open_csv(num_fichier,index_column="",column_list=""):
if index_column == "" and column_list == "":
fichier_lu = pd.read_csv(LOCALISATION + FICHIERS[num_fichier])
fichier_lu = fichier_lu.dropna(axis = 'columns', how = 'all')
elif index_column != "" and column_list != "":
fichier_lu = pd.read_csv(LOCALISATION + FICHIERS[num_fichier],index_col=index_column,names=column_list)
fichier_lu = fichier_lu.dropna(axis = 'columns', how = 'all')
elif index_column == "" and column_list != "":
fichier_lu = pd.read_csv(LOCALISATION + FICHIERS[num_fichier],names=column_list)
fichier_lu = fichier_lu.dropna(axis = 'columns', how = 'all')
else:
fichier_lu = pd.read_csv(LOCALISATION + FICHIERS[num_fichier],index_col=index_column)
fichier_lu = fichier_lu.dropna(axis = 'columns', how = 'all')
print("\n",FICHIERS[num_fichier], " is open")
print("fichier avec ",fichier_lu.shape[0]," lignes et ",fichier_lu.shape[1]," colonnes.")
return fichier_lu
def print_empty_stats(dataframe):
data_number = dataframe.count(axis='index').cumsum().tolist()
pourcentage_values = int(data_number[-1]) / (int(dataframe.shape[0]) * int(dataframe.shape[1])) * 100
print("le dataframe est rempli à ",format(pourcentage_values,".2f"),"%\n")
def print_samples(data_frame,number_of_rows):
display(data_frame.sample(number_of_rows,random_state = 148625))
def index_str_contains(index,dataframe,regex_var):
new_index = index.str.contains(regex_var,case=False,regex=True,na=False)
return new_index
def indicator_name_list(dataframe):
index = dataframe['Indicator Name'].squeeze()
variable_list = []
for regex in INDEX:
index_temp = index_str_contains(index,dataframe,regex)
set_temp = set(dataframe[index_temp]['Indicator Name'].unique())
for variable in variable_list:
set_temp = set_temp - variable
print("Pour le regex ",regex," : ",len(set_temp)," variables de trouvé")
variable_list.append(set_temp)
return variable_list
def to_keep(dataframe,columns_to_keep):
reduct_dataframe = open_csv(dataframe)
for column in reduct_dataframe.columns:
if column not in columns_to_keep:
reduct_dataframe = reduct_dataframe.drop([column],axis = 1)
print("nouveau format du fichier : ",reduct_dataframe.shape[0]," lignes et ",reduct_dataframe.shape[1]," colonnes.")
return reduct_dataframe
def take_needed_rows(dataframe,list_values):
new_dataframe = pd.DataFrame([])
for value in list_values:
new_dataframe = pd.concat([new_dataframe,dataframe.loc[dataframe['Indicator Name'] == value]])
return new_dataframe
def replace_ESC(dataframe, value_or_number=0):
if value_or_number == 0:
new_dataframe = dataframe.replace(["High income: nonOECD","Upper middle income","Lower middle income","High income: OECD","Low income"],["High","Upper \nmiddle","Lower \nmiddle","High","Low"])
else:
new_dataframe = dataframe.replace(["High income: nonOECD","Upper middle income","Lower middle income","High income: OECD","Low income"],[5,4,2,5,1])
return new_dataframe
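# Worked example of the mapping above, read directly from the replacement
# lists: with value_or_number != 0, "High income: OECD" -> 5,
# "Upper middle income" -> 4, "Lower middle income" -> 2, "Low income" -> 1.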
def sns_graph(fichierESC3):
sns.set(font_scale=5)
sns.set_theme(style="darkgrid")
ax = sns.countplot(x="Income Group",order = ["High","Upper \nmiddle","Lower \nmiddle","Low"],\
data = fichierESC3,palette=["tab:red","tab:orange","cornflowerblue","darkblue",]).\
set_title("Numbers of countries by income group")
def choropleth_map(dataframe,titre,index=False,year='2001',column='Income Group'):
if index:
countries = dataframe.index.tolist()
z = dataframe[year].tolist()
titre = titre + year
elif not index:
countries = dataframe['Country Code'].tolist()
z = dataframe[column].tolist()
layout = dict(geo={'scope': 'world'})
scl = [[0.0, 'darkblue'],[0.2, 'cornflowerblue'],[0.4, 'cornflowerblue'],\
[0.6, 'orange'],[0.8, 'orange'],[1.0, 'red']]
data = dict(
type='choropleth',
locations=countries,
locationmode='ISO-3',
colorscale=scl,
autocolorscale = False,
marker = dict(line = dict (color = 'rgb(0,0,0)', width = 1)),z=z)
map = go.Figure(data=[data], layout=layout)
map.update_layout(
title={
'text': titre,
'y':0.9,
'x':0.5,
'xanchor': 'center',
'yanchor': 'top'
},
title_font_size=30)
map.show()
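# Hypothetical usage sketch (the variable name is illustrative, not from this
# file): plot the numeric income scores produced by replace_ESC on a world map.
# countries = replace_ESC(open_csv(0), 1)  # EdStatsCountry.csv
# choropleth_map(countries, "Income group by country", column="Income Group")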
def ratio_epuration(data_frame,ratio):
nb_lignes = data_frame.shape[0]
tab_isna = data_frame.isna().sum()
list_index = tab_isna.index.tolist()
df_epure = pd.DataFrame()
for index,value in enumerate(tab_isna):
if value <= nb_lignes * (1 - ratio):
df_epure[list_index[index]] = data_frame[list_index[index]]
return df_epure
def clean_data(dataframe,ratio):
dataframe2 = dataframe.replace(0,np.nan)
dataframe3 = dataframe2.dropna(axis = 'columns', how = 'all')
dataframe4 = ratio_epuration(dataframe3,ratio)
return dataframe4
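# Hypothetical usage sketch: treat 0 as missing, drop all-empty columns, and
# keep only the columns that are at least 60% filled.
# cleaned = clean_data(yearly_data, 0.6)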
def create_range(dataframe,quantity_print,which_one=-1):
if quantity_print == 1:
if which_one == 0:
return [dataframe.columns[0]]
elif which_one == -1:
return [dataframe.columns[-1]]
        else:
            try:
                dataframe[str(which_one)]
            except KeyError:
                print("Invalid column")
            else:
                return [str(which_one)]
else:
last_elem = int(dataframe.columns[-1])
column_nbr = int(len(dataframe.columns))
if column_nbr % (quantity_print - 1) == 0:
range_step = int(column_nbr / quantity_print)
else:
range_step = int(column_nbr / (quantity_print - 1))
begin_year = last_elem
for step in range(quantity_print-1):
begin_year -= range_step
return range(begin_year,last_elem+1,range_step)
def most_filled_column(dataframe):
mini = dataframe[dataframe.columns[-1]].isna().sum()
column_mini = dataframe.columns[-1]
for column in reversed(dataframe.columns):
isna_sum = dataframe[column].isna().sum()
if mini > isna_sum:
mini = isna_sum
column_mini = column
return column_mini
def fill_dataframe(dataframe):
return dataframe.replace(0,np.nan).transpose().fillna(method='ffill').transpose()
def sort_dataframe(dataframe,sort_year=''):
dataframe2 = fill_dataframe(dataframe)
if sort_year=='':
best_column_to_sort = most_filled_column(dataframe)
else:
best_column_to_sort = sort_year
for code in VALUES_NOT_WANTED:
try:
dataframe2 = dataframe2.drop([code],axis = 0)
except:
pass
dataframe2 = dataframe2.sort_values(by=[best_column_to_sort],ascending =False)
return dataframe2
def print_top_values(dataframe,title,value1,value2,sort_year=''):
dataframe2 = sort_dataframe(dataframe,sort_year)
if value1 == 0:
dataframe3 = dataframe2.head(value2).transpose()
title = "Top " + str(value2) + " " + title
else:
dataframe3 = dataframe2.head(value2).tail(value2 - value1 + 1).transpose()
title = "Top " + str(value1) + " to " + str(value2) + " " + title
lines = dataframe3.plot.line().set_title(title)
def last_value(dataframe,new_column):
dataframe2= dataframe.copy()
dataframe2.dropna(axis = 'columns', how = 'all',inplace=True)
dataframe2[new_column] = np.nan
dataframe2 = dataframe2.replace(0,np.nan)
    # Forward-fill along each row so `new_column` ends up holding the last
    # non-null value of that row
    dataframe2 = dataframe2.transpose().fillna(method='ffill').transpose()
dataframe2.drop(dataframe2.columns.difference([new_column]),1,inplace=True)
for code in VALUES_NOT_WANTED:
try:
dataframe2 = dataframe2.drop([code],axis = 0)
except:
pass
return dataframe2
def rank_dataframe(dataframe,new_column):
dataframe2 = last_value(dataframe,new_column)
dataframe2 = dataframe2.sort_values(by=new_column,ascending=False)
maxi = float(dataframe2.iloc[0])
part = maxi/4
part2 = part
part3 = part*2
part4 = part*3
for row in range(dataframe2.shape[0]):
if float(dataframe2.iloc[row]) < part2:
dataframe2.iloc[row] = int(1)
elif float(dataframe2.iloc[row]) < part3:
dataframe2.iloc[row] = int(2)
elif float(dataframe2.iloc[row]) < part4:
dataframe2.iloc[row] = int(3)
else:
dataframe2.iloc[row] = int(4)
return dataframe2.astype(int)
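# Worked example of the binning above: with a column maximum of 100, the
# thresholds are 25/50/75, so 10 -> 1, 30 -> 2, 60 -> 3 and 90 -> 4.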
def horizontal_bar_plot_tri(dataframe):
sns.set_theme(style="whitegrid")
f, ax = plt.subplots(figsize=(6, 15))
dataframe2 = dataframe.sort_values(by=["Income Group","Internet","Computer"],ascending=False)
sns.set_color_codes("colorblind")
sns.barplot(x="Computer2", y="Country Code", data=dataframe2,label="Computer owner rank", color="b")
sns.set_color_codes("muted")
sns.barplot(x="Internet2", y="Country Code", data=dataframe2,label="Internet user rank", color="b")
sns.set_color_codes("pastel")
sns.barplot(x="Income Group", y="Country Code", data=dataframe2,label="Income Group rank", color="b")
ax.legend(ncol=1, loc="lower right", frameon=True,fontsize='large')
plt.xlabel( xlabel="Scoring by country",fontsize=18)
ax.set(xlim=(0, 15), ylabel="", xlabel="Scoring by country")
sns.despine(left=True, bottom=True)
def horizontal_bar_plot_mono(dataframe,sort_by,title,xmin,xmax):
sns.set_theme(style="whitegrid")
f, ax = plt.subplots(figsize=(6, 15))
dataframe2 = dataframe.sort_values(by=[sort_by],ascending=False)
sns.set_color_codes("pastel")
sns.barplot(x=sort_by, y=dataframe2.index, data=dataframe2,label=sort_by, color="b")
ax.legend(ncol=1, loc="lower right", frameon=True,fontsize='large')
ax.xaxis.tick_top()
if title == "Study years in selected countries":
for i,value in enumerate(dataframe2[sort_by]):
ax.text(value+3/xmin, i + 0.2,str(value),fontsize=15)
else:
for i,value in enumerate(dataframe2[sort_by]):
ax.text(value+3/xmin, i + 0.2,str(int(value)),fontsize=15)
plt.xlabel( xlabel=title,fontsize=18)
ax.set(xlim=(xmin, xmax), ylabel="", xlabel=title)
sns.despine(left=True, bottom=True)
def top_countries_with_data(dataframe):
dataframe2 = dataframe.copy()
for country in NOT_IN_STUDY_YEARS:
dataframe2.drop(dataframe2[dataframe2["Country Code"] == country].index,inplace =True)
return dataframe2
def potential_years_study(dataframe1,dataframe2,selected_countries):
dataframe = dataframe1.join(dataframe2,how='outer')
dataframe.fillna(1,inplace=True)
multiple_row = len(dataframe2.columns)
new_col_list = []
if multiple_row>1:
for column in range(len(dataframe2.columns)):
new_col = "potential_"+dataframe2.columns[column][-4:]
new_col_list.append(new_col)
dataframe[new_col] = dataframe[dataframe.columns[0]] * dataframe[dataframe.columns[column +1]]
else:
dataframe["potential"] = dataframe[dataframe.columns[0]] * dataframe[dataframe.columns[1]]
dataframe = dataframe.loc[selected_countries,:]
if multiple_row>1:
return dataframe.sort_values(by=[new_col_list[0]],ascending=False)
else:
return dataframe.sort_values(by=['potential'],ascending=False)
def take_value(dataframe,new_column,years):
dataframe2= dataframe.copy()
dataframe2.dropna(axis = 'columns', how = 'all',inplace=True)
dataframe2 = dataframe2.replace(0,np.nan)
    # Forward-fill along each row so the selected year columns hold the last known values
    dataframe2 = dataframe2.transpose().fillna(method='ffill').transpose()
dataframe2.drop(dataframe2.columns.difference(years),1,inplace=True)
for year in years:
dataframe2= dataframe2.rename(columns={year:new_column+"_"+year})
for code in VALUES_NOT_WANTED:
try:
dataframe2 = dataframe2.drop([code],axis = 0)
except:
pass
return dataframe2
def transforme_for_scatterplot(dataframe):
df1 = dataframe.reset_index()
df11 = df1.drop(df1.columns.difference(["Country Code","prediction_new_students_2020"]),1)
df12 = df1.drop(df1.columns.difference(["Country Code","prediction_new_students_2025"]),1)
df13 = df1.drop(df1.columns.difference(["Country Code","prediction_new_students_2030"]),1)
df11["year"] = "students_number_2020"
df12["year"] = "students_number_2025"
df13["year"] = "students_number_2030"
df11.rename(columns={"prediction_new_students_2020":"students_number"},inplace=True)
df12.rename(columns={"prediction_new_students_2025":"students_number"},inplace=True)
df13.rename(columns={"prediction_new_students_2030":"students_number"},inplace=True)
return pd.concat([df11,df12,df13])
def create_list_for_scatterplot(begin,end,time):
row_list = []
for mult in range(3):
for num in range(begin-1,end):
row_list.append(int(mult*time+num))
return row_list
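# Worked example: create_list_for_scatterplot(1, 3, 10)
# -> [0, 1, 2, 10, 11, 12, 20, 21, 22]
# (the same begin..end row offsets picked once per prediction-year block).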
def scatterplot_student_number(dataframe,title,mini,maxi):
size_df = int(len(dataframe)/3)
ax = plt.axes()
plt.title("Students number prediction in thousand")
fig = sns.scatterplot(data=dataframe.iloc[create_list_for_scatterplot(mini,maxi,size_df)], x="Country Code", y="students_number",hue="year")
def create_list_for_scatterplot2(begin,end):
row_list = []
for num in range(begin-1,end):
for mult in range(3):
row_list.append(int(mult+3*num))
return row_list
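# Worked example: create_list_for_scatterplot2(1, 3) -> [0, 1, 2, 3, 4, 5, 6, 7, 8]
# (rows are picked in consecutive groups of three).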
def display_potential_years_study(dataframe_study_year,final_df,mini,maxi):
final_df2 = final_df.copy()
final_df2["potential"] = final_df2["students_number"]
final_df3 = final_df2.merge(dataframe_study_year,left_on="Country Code",right_on=dataframe_study_year.index)
final_df3["potential"] = final_df3["potential"]*final_df3["study_years_expected"]
final_df3 = final_df3.replace("students_number_2020","potential_2020")
final_df3 = final_df3.replace("students_number_2025","potential_2025")
final_df3 = final_df3.replace("students_number_2030","potential_2030")
final_df3 = final_df3.sort_values(by=["potential"],ascending=False)
size_df = int(len(final_df3)/3)
ax = plt.axes()
plt.title("Countries potential")
    fig = sns.scatterplot(data=final_df3.iloc[create_list_for_scatterplot2(mini,maxi)], x="Country Code", y="potential",hue="year")
annexe.py | import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
import plotly.graph_objs as go
import seaborn as sns
FICHIERS = ["EdStatsCountry.csv","EdStatsCountry-Series.csv","EdStatsData.csv","EdStatsFootNote.csv"
,"EdStatsSeries.csv"]
LOCALISATION ='F:/cour/OC/projet2/'
INDEX = ["secondary","tertiary","school|educationnal","student","inhabitant|household","population","technology|computer|internet"]
VALUES_NOT_WANTED = ["WLD","ARE","LMC","LIC","LMY","UMC","MIC","HIC","NOC","OEC","EUU","EAS","EAP","SAS","OED","ECS","LCN","LAC","LDC","SSF","SSA","ECA","MEA","NAC","HPC","MNA","EMU","ARB","IDN","ZAF"]
COLOR_LIST = ["pastel","muted","colorblind","deep","dark","bright"]
NOT_IN_STUDY_YEARS = ['FIN','NZL','ISL','AUT','SMR','CAN']
def open_csv(num_fichier,index_column="",column_list=""):
if index_column == "" and column_list == "":
fichier_lu = pd.read_csv(LOCALISATION + FICHIERS[num_fichier])
fichier_lu = fichier_lu.dropna(axis = 'columns', how = 'all')
elif index_column != "" and column_list != "":
fichier_lu = pd.read_csv(LOCALISATION + FICHIERS[num_fichier],index_col=index_column,names=column_list)
fichier_lu = fichier_lu.dropna(axis = 'columns', how = 'all')
elif index_column == "" and column_list != "":
fichier_lu = pd.read_csv(LOCALISATION + FICHIERS[num_fichier],names=column_list)
fichier_lu = fichier_lu.dropna(axis = 'columns', how = 'all')
else:
fichier_lu = pd.read_csv(LOCALISATION + FICHIERS[num_fichier],index_col=index_column)
fichier_lu = fichier_lu.dropna(axis = 'columns', how = 'all')
print("\n",FICHIERS[num_fichier], " is open")
print("fichier avec ",fichier_lu.shape[0]," lignes et ",fichier_lu.shape[1]," colonnes.")
return fichier_lu
def print_empty_stats(dataframe):
data_number = dataframe.count(axis='index').cumsum().tolist()
pourcentage_values = int(data_number[-1]) / (int(dataframe.shape[0]) * int(dataframe.shape[1])) * 100
print("le dataframe est rempli à ",format(pourcentage_values,".2f"),"%\n")
def print_samples(data_frame,number_of_rows):
display(data_frame.sample(number_of_rows,random_state = 148625))
def index_str_contains(index,dataframe,regex_var):
new_index = index.str.contains(regex_var,case=False,regex=True,na=False)
return new_index
def indicator_name_list(dataframe):
index = dataframe['Indicator Name'].squeeze()
variable_list = []
for regex in INDEX:
index_temp = index_str_contains(index,dataframe,regex)
set_temp = set(dataframe[index_temp]['Indicator Name'].unique())
for variable in variable_list:
set_temp = set_temp - variable
print("Pour le regex ",regex," : ",len(set_temp)," variables de trouvé")
variable_list.append(set_temp)
return variable_list
def to_keep(dataframe,columns_to_keep):
reduct_dataframe = open_csv(dataframe)
for column in reduct_dataframe.columns:
if column not in columns_to_keep:
reduct_dataframe = reduct_dataframe.drop([column],axis = 1)
print("nouveau format du fichier : ",reduct_dataframe.shape[0]," lignes et ",reduct_dataframe.shape[1]," colonnes.")
return reduct_dataframe
def take_needed_rows(dataframe,list_values):
new_dataframe = pd.DataFrame([])
for value in list_values:
new_dataframe = pd.concat([new_dataframe,dataframe.loc[dataframe['Indicator Name'] == value]])
return new_dataframe
def replace_ESC(dataframe, value_or_number=0):
if value_or_number == 0:
new_dataframe = dataframe.replace(["High income: nonOECD","Upper middle income","Lower middle income","High income: OECD","Low income"],["High","Upper \nmiddle","Lower \nmiddle","High","Low"])
else:
new_dataframe = dataframe.replace(["High income: nonOECD","Upper middle income","Lower middle income","High income: OECD","Low income"],[5,4,2,5,1])
return new_dataframe
def sns_graph(fichierESC3):
sns.set(font_scale=5)
sns.set_theme(style="darkgrid")
ax = sns.countplot(x="Income Group",order = ["High","Upper \nmiddle","Lower \nmiddle","Low"],\
data = fichierESC3,palette=["tab:red","tab:orange","cornflowerblue","darkblue",]).\
set_title("Numbers of countries by income group")
def choropleth_map(dataframe,titre,index=False,year='2001',column='Income Group'):
if index:
countries = dataframe.index.tolist()
z = dataframe[year].tolist()
titre = titre + year
elif not index:
countries = dataframe['Country Code'].tolist()
z = dataframe[column].tolist()
layout = dict(geo={'scope': 'world'})
scl = [[0.0, 'darkblue'],[0.2, 'cornflowerblue'],[0.4, 'cornflowerblue'],\
[0.6, 'orange'],[0.8, 'orange'],[1.0, 'red']]
data = dict(
type='choropleth',
locations=countries,
locationmode='ISO-3',
colorscale=scl,
autocolorscale = False,
marker = dict(line = dict (color = 'rgb(0,0,0)', width = 1)),z=z)
map = go.Figure(data=[data], layout=layout)
map.update_layout(
title={
'text': titre,
'y':0.9,
'x':0.5,
'xanchor': 'center',
'yanchor': 'top'
},
title_font_size=30)
map.show()
def ratio_epuration(data_frame,ratio):
nb_lignes = data_frame.shape[0]
tab_isna = data_frame.isna().sum()
list_index = tab_isna.index.tolist()
df_epure = pd.DataFrame()
for index,value in enumerate(tab_isna):
if value <= nb_lignes * (1 - ratio):
df_epure[list_index[index]] = data_frame[list_index[index]]
return df_epure
def clean_data(dataframe,ratio):
dataframe2 = dataframe.replace(0,np.nan)
dataframe3 = dataframe2.dropna(axis = 'columns', how = 'all')
dataframe4 = ratio_epuration(dataframe3,ratio)
return dataframe4
def create_range(dataframe,quantity_print,which_one=-1):
if quantity_print == 1:
if which_one == 0:
return [dataframe.columns[0]]
elif which_one == -1:
return [dataframe.columns[-1]]
else:
try:
dataframe[str(which_one)]
except ValueError:
print("Non valid column")
else:
last_elem = int(dataframe.columns[-1])
column_nbr = int(len(dataframe.columns))
if column_nbr % (quantity_print - 1) == 0:
range_step = int(column_nbr / quantity_print)
else:
range_step = int(column_nbr / (quantity_print - 1))
begin_year = last_elem
for step in range(quantity_print-1):
begin_year -= range_step
return range(begin_year,last_elem+1,range_step)
def most_filled_column(dataframe):
mini = dataframe[dataframe.columns[-1]].isna().sum()
column_mini = dataframe.columns[-1]
for column in reversed(dataframe.columns):
isna_sum = dataframe[column].isna().sum()
if mini > isna_sum:
mini = isna_sum
column_mini = column
return column_mini
def fill_dataframe(dataframe):
return dataframe.replace(0,np.nan).transpose().fillna(method='ffill').transpose()
def sort_dataframe(dataframe,sort_year=''):
dataframe2 = fill_dataframe(dataframe)
if sort_year=='':
best_column_to_sort = most_filled_column(dataframe)
else:
best_column_to_sort = sort_year
for code in VALUES_NOT_WANTED:
try:
dataframe2 = dataframe2.drop([code],axis = 0)
except:
pass
dataframe2 = dataframe2.sort_values(by=[best_column_to_sort],ascending =False)
return dataframe2
def print_top_values(dataframe,title,value1,value2,sort_year=''):
dataframe2 = sort_dataframe(dataframe,sort_year)
if value1 == 0:
dataframe3 = dataframe2.head(value2).transpose()
title = "Top " + str(value2) + " " + title
else:
dataframe3 = dataframe2.head(value2).tail(value2 - value1 + 1).transpose()
title = "Top " + str(value1) + " to " + str(value2) + " " + title
lines = dataframe3.plot.line().set_title(title)
def last_value(dataframe,new_column):
dataframe2= dataframe.copy()
dataframe2.dropna(axis = 'columns', how = 'all',inplace=True)
dataframe2[new_column] = np.nan
dataframe2 = dataframe2.replace(0,np.nan)
dataframe2.transpose().fillna(method='ffill',inplace=True)
dataframe2.drop(dataframe2.columns.difference([new_column]),1,inplace=True)
for code in VALUES_NOT_WANTED:
try:
dataframe2 = dataframe2.drop([code],axis = 0)
except:
pass
return dataframe2
def rank_dataframe(dataframe,new_column):
dataframe2 = last_value(dataframe,new_column)
dataframe2 = dataframe2.sort_values(by=new_column,ascending=False)
maxi = float(dataframe2.iloc[0])
part = maxi/4
part2 = part
part3 = part*2
part4 = part*3
for row in range(dataframe2.shape[0]):
if float(dataframe2.iloc[row]) < part2:
dataframe2.iloc[row] = int(1)
elif float(dataframe2.iloc[row]) < part3:
dataframe2.iloc[row] = int(2)
elif float(dataframe2.iloc[row]) < part4:
dataframe2.iloc[row] = int(3)
else:
dataframe2.iloc[row] = int(4)
return dataframe2.astype(int)
def horizontal_bar_plot_tri(dataframe):
sns.set_theme(style="whitegrid")
f, ax = plt.subplots(figsize=(6, 15))
dataframe2 = dataframe.sort_values(by=["Income Group","Internet","Computer"],ascending=False)
sns.set_color_codes("colorblind")
sns.barplot(x="Computer2", y="Country Code", data=dataframe2,label="Computer owner rank", color="b")
sns.set_color_codes("muted")
sns.barplot(x="Internet2", y="Country Code", data=dataframe2,label="Internet user rank", color="b")
sns.set_color_codes("pastel")
sns.barplot(x="Income Group", y="Country Code", data=dataframe2,label="Income Group rank", color="b")
ax.legend(ncol=1, loc="lower right", frameon=True,fontsize='large')
plt.xlabel( xlabel="Scoring by country",fontsize=18)
ax.set(xlim=(0, 15), ylabel="", xlabel="Scoring by country")
sns.despine(left=True, bottom=True)
def horizontal_bar_plot_mono(dataframe,sort_by,title,xmin,xmax):
sns.set_theme(style="whitegrid")
f, ax = plt.subplots(figsize=(6, 15))
dataframe2 = dataframe.sort_values(by=[sort_by],ascending=False)
sns.set_color_codes("pastel")
sns.barplot(x=sort_by, y=dataframe2.index, data=dataframe2,label=sort_by, color="b")
ax.legend(ncol=1, loc="lower right", frameon=True,fontsize='large')
ax.xaxis.tick_top()
if title == "Study years in selected countries":
for i,value in enumerate(dataframe2[sort_by]):
ax.text(value+3/xmin, i + 0.2,str(value),fontsize=15)
else:
for i,value in enumerate(dataframe2[sort_by]):
ax.text(value+3/xmin, i + 0.2,str(int(value)),fontsize=15)
plt.xlabel( xlabel=title,fontsize=18)
ax.set(xlim=(xmin, xmax), ylabel="", xlabel=title)
sns.despine(left=True, bottom=True)
def top_countries_with_data(dataframe):
dataframe2 = dataframe.copy()
for country in NOT_IN_STUDY_YEARS:
dataframe2.drop(dataframe2[dataframe2["Country Code"] == country].index,inplace =True)
return dataframe2
def potential_years_study(dataframe1,dataframe2,selected_countries):
dataframe = dataframe1.join(dataframe2,how='outer')
dataframe.fillna(1,inplace=True)
multiple_row = len(dataframe2.columns)
new_col_list = []
if multiple_row>1:
for column in range(len(dataframe2.columns)):
new_col = "potential_"+dataframe2.columns[column][-4:]
new_col_list.append(new_col)
dataframe[new_col] = dataframe[dataframe.columns[0]] * dataframe[dataframe.columns[column +1]]
else:
dataframe["potential"] = dataframe[dataframe.columns[0]] * dataframe[dataframe.columns[1]]
dataframe = dataframe.loc[selected_countries,:]
if multiple_row>1:
return dataframe.sort_values(by=[new_col_list[0]],ascending=False)
else:
return dataframe.sort_values(by=['potential'],ascending=False)
def take_value(dataframe,new_column,years):
dataframe2= dataframe.copy()
dataframe2.dropna(axis = 'columns', how = 'all',inplace=True)
dataframe2 = dataframe2.replace(0,np.nan)
dataframe2.transpose().fillna(method='ffill',inplace=True)
dataframe2.drop(dataframe2.columns.difference(years),1,inplace=True)
for year in years:
da | for code in VALUES_NOT_WANTED:
try:
dataframe2 = dataframe2.drop([code],axis = 0)
except:
pass
return dataframe2
def transforme_for_scatterplot(dataframe):
df1 = dataframe.reset_index()
df11 = df1.drop(df1.columns.difference(["Country Code","prediction_new_students_2020"]),1)
df12 = df1.drop(df1.columns.difference(["Country Code","prediction_new_students_2025"]),1)
df13 = df1.drop(df1.columns.difference(["Country Code","prediction_new_students_2030"]),1)
df11["year"] = "students_number_2020"
df12["year"] = "students_number_2025"
df13["year"] = "students_number_2030"
df11.rename(columns={"prediction_new_students_2020":"students_number"},inplace=True)
df12.rename(columns={"prediction_new_students_2025":"students_number"},inplace=True)
df13.rename(columns={"prediction_new_students_2030":"students_number"},inplace=True)
return pd.concat([df11,df12,df13])
def create_list_for_scatterplot(begin,end,time):
row_list = []
for mult in range(3):
for num in range(begin-1,end):
row_list.append(int(mult*time+num))
return row_list
def scatterplot_student_number(dataframe,title,mini,maxi):
size_df = int(len(dataframe)/3)
ax = plt.axes()
plt.title("Students number prediction in thousand")
fig = sns.scatterplot(data=dataframe.iloc[create_list_for_scatterplot(mini,maxi,size_df)], x="Country Code", y="students_number",hue="year")
def create_list_for_scatterplot2(begin,end):
row_list = []
for num in range(begin-1,end):
for mult in range(3):
row_list.append(int(mult+3*num))
return row_list
def display_potential_years_study(dataframe_study_year,final_df,mini,maxi):
final_df2 = final_df.copy()
final_df2["potential"] = final_df2["students_number"]
final_df3 = final_df2.merge(dataframe_study_year,left_on="Country Code",right_on=dataframe_study_year.index)
final_df3["potential"] = final_df3["potential"]*final_df3["study_years_expected"]
final_df3 = final_df3.replace("students_number_2020","potential_2020")
final_df3 = final_df3.replace("students_number_2025","potential_2025")
final_df3 = final_df3.replace("students_number_2030","potential_2030")
final_df3 = final_df3.sort_values(by=["potential"],ascending=False)
size_df = int(len(final_df3)/3)
ax = plt.axes()
plt.title("Countries potential")
fig = sns.scatterplot(data=final_df3.iloc[create_list_for_scatterplot2(mini,maxi)], x="Country Code", y="potential",hue="year") | taframe2= dataframe2.rename(columns={year:new_column+"_"+year})
| conditional_block |
annexe.py | import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
import plotly.graph_objs as go
import seaborn as sns
FICHIERS = ["EdStatsCountry.csv","EdStatsCountry-Series.csv","EdStatsData.csv","EdStatsFootNote.csv"
,"EdStatsSeries.csv"]
LOCALISATION ='F:/cour/OC/projet2/'
INDEX = ["secondary","tertiary","school|educationnal","student","inhabitant|household","population","technology|computer|internet"]
VALUES_NOT_WANTED = ["WLD","ARE","LMC","LIC","LMY","UMC","MIC","HIC","NOC","OEC","EUU","EAS","EAP","SAS","OED","ECS","LCN","LAC","LDC","SSF","SSA","ECA","MEA","NAC","HPC","MNA","EMU","ARB","IDN","ZAF"]
COLOR_LIST = ["pastel","muted","colorblind","deep","dark","bright"]
NOT_IN_STUDY_YEARS = ['FIN','NZL','ISL','AUT','SMR','CAN']
def open_csv(num_fichier,index_column="",column_list=""):
if index_column == "" and column_list == "":
fichier_lu = pd.read_csv(LOCALISATION + FICHIERS[num_fichier])
fichier_lu = fichier_lu.dropna(axis = 'columns', how = 'all')
elif index_column != "" and column_list != "":
fichier_lu = pd.read_csv(LOCALISATION + FICHIERS[num_fichier],index_col=index_column,names=column_list)
fichier_lu = fichier_lu.dropna(axis = 'columns', how = 'all')
elif index_column == "" and column_list != "":
fichier_lu = pd.read_csv(LOCALISATION + FICHIERS[num_fichier],names=column_list)
fichier_lu = fichier_lu.dropna(axis = 'columns', how = 'all')
else:
fichier_lu = pd.read_csv(LOCALISATION + FICHIERS[num_fichier],index_col=index_column)
fichier_lu = fichier_lu.dropna(axis = 'columns', how = 'all')
print("\n",FICHIERS[num_fichier], " is open")
print("fichier avec ",fichier_lu.shape[0]," lignes et ",fichier_lu.shape[1]," colonnes.")
return fichier_lu
def print_empty_stats(dataframe):
data_number = dataframe.count(axis='index').cumsum().tolist()
pourcentage_values = int(data_number[-1]) / (int(dataframe.shape[0]) * int(dataframe.shape[1])) * 100
print("le dataframe est rempli à ",format(pourcentage_values,".2f"),"%\n")
def print_samples(data_frame,number_of_rows):
display(data_frame.sample(number_of_rows,random_state = 148625))
def index_str_contains(index,dataframe,regex_var):
new_index = index.str.contains(regex_var,case=False,regex=True,na=False)
return new_index
def indicator_name_list(dataframe):
index = dataframe['Indicator Name'].squeeze()
variable_list = []
for regex in INDEX:
index_temp = index_str_contains(index,dataframe,regex)
set_temp = set(dataframe[index_temp]['Indicator Name'].unique())
for variable in variable_list:
set_temp = set_temp - variable
print("Pour le regex ",regex," : ",len(set_temp)," variables de trouvé")
variable_list.append(set_temp)
return variable_list
def to_keep(dataframe,columns_to_keep):
reduct_dataframe = open_csv(dataframe)
for column in reduct_dataframe.columns:
if column not in columns_to_keep:
reduct_dataframe = reduct_dataframe.drop([column],axis = 1)
print("nouveau format du fichier : ",reduct_dataframe.shape[0]," lignes et ",reduct_dataframe.shape[1]," colonnes.")
return reduct_dataframe
def take_needed_rows(dataframe,list_values):
new_dataframe = pd.DataFrame([])
for value in list_values:
new_dataframe = pd.concat([new_dataframe,dataframe.loc[dataframe['Indicator Name'] == value]])
return new_dataframe
def replace_ESC(dataframe, value_or_number=0):
if value_or_number == 0:
new_dataframe = dataframe.replace(["High income: nonOECD","Upper middle income","Lower middle income","High income: OECD","Low income"],["High","Upper \nmiddle","Lower \nmiddle","High","Low"])
else:
new_dataframe = dataframe.replace(["High income: nonOECD","Upper middle income","Lower middle income","High income: OECD","Low income"],[5,4,2,5,1])
return new_dataframe
def sns_graph(fichierESC3):
sns.set(font_scale=5)
sns.set_theme(style="darkgrid")
ax = sns.countplot(x="Income Group",order = ["High","Upper \nmiddle","Lower \nmiddle","Low"],\
data = fichierESC3,palette=["tab:red","tab:orange","cornflowerblue","darkblue",]).\
set_title("Numbers of countries by income group")
def choropleth_map(dataframe,titre,index=False,year='2001',column='Income Group'):
if index:
countries = dataframe.index.tolist()
z = dataframe[year].tolist()
titre = titre + year
elif not index:
countries = dataframe['Country Code'].tolist()
z = dataframe[column].tolist()
layout = dict(geo={'scope': 'world'})
scl = [[0.0, 'darkblue'],[0.2, 'cornflowerblue'],[0.4, 'cornflowerblue'],\
[0.6, 'orange'],[0.8, 'orange'],[1.0, 'red']]
data = dict(
type='choropleth',
locations=countries,
locationmode='ISO-3',
colorscale=scl,
autocolorscale = False,
marker = dict(line = dict (color = 'rgb(0,0,0)', width = 1)),z=z)
map = go.Figure(data=[data], layout=layout)
map.update_layout(
title={
'text': titre,
'y':0.9,
'x':0.5,
'xanchor': 'center',
'yanchor': 'top'
},
title_font_size=30)
map.show()
def ratio_epuration(data_frame,ratio):
nb_lignes = data_frame.shape[0]
tab_isna = data_frame.isna().sum()
list_index = tab_isna.index.tolist()
df_epure = pd.DataFrame()
for index,value in enumerate(tab_isna):
if value <= nb_lignes * (1 - ratio):
df_epure[list_index[index]] = data_frame[list_index[index]]
return df_epure
def clean_data(dataframe,ratio):
dataframe2 = dataframe.replace(0,np.nan)
dataframe3 = dataframe2.dropna(axis = 'columns', how = 'all')
dataframe4 = ratio_epuration(dataframe3,ratio)
return dataframe4
def create_range(dataframe,quantity_print,which_one=-1):
if quantity_print == 1:
if which_one == 0:
return [dataframe.columns[0]]
elif which_one == -1:
return [dataframe.columns[-1]]
else:
try:
dataframe[str(which_one)]
except ValueError:
print("Non valid column")
else:
last_elem = int(dataframe.columns[-1])
column_nbr = int(len(dataframe.columns))
if column_nbr % (quantity_print - 1) == 0:
range_step = int(column_nbr / quantity_print)
else:
range_step = int(column_nbr / (quantity_print - 1))
begin_year = last_elem
for step in range(quantity_print-1):
begin_year -= range_step
return range(begin_year,last_elem+1,range_step)
def most_filled_column(dataframe):
mini = dataframe[dataframe.columns[-1]].isna().sum()
column_mini = dataframe.columns[-1]
for column in reversed(dataframe.columns):
isna_sum = dataframe[column].isna().sum()
if mini > isna_sum:
mini = isna_sum
column_mini = column
return column_mini
def fill_dataframe(dataframe):
return dataframe.replace(0,np.nan).transpose().fillna(method='ffill').transpose()
def sort_dataframe(dataframe,sort_year=''):
dataframe2 = fill_dataframe(dataframe)
if sort_year=='':
best_column_to_sort = most_filled_column(dataframe)
else:
best_column_to_sort = sort_year
for code in VALUES_NOT_WANTED:
try:
dataframe2 = dataframe2.drop([code],axis = 0)
except:
pass
dataframe2 = dataframe2.sort_values(by=[best_column_to_sort],ascending =False)
return dataframe2
def print_top_values(dataframe,title,value1,value2,sort_year=''):
dataframe2 = sort_dataframe(dataframe,sort_year)
if value1 == 0:
dataframe3 = dataframe2.head(value2).transpose()
title = "Top " + str(value2) + " " + title
else:
dataframe3 = dataframe2.head(value2).tail(value2 - value1 + 1).transpose()
title = "Top " + str(value1) + " to " + str(value2) + " " + title
lines = dataframe3.plot.line().set_title(title)
def last_value(dataframe,new_column):
dataframe2= dataframe.copy()
dataframe2.dropna(axis = 'columns', how = 'all',inplace=True)
dataframe2[new_column] = np.nan
dataframe2 = dataframe2.replace(0,np.nan)
dataframe2.transpose().fillna(method='ffill',inplace=True)
dataframe2.drop(dataframe2.columns.difference([new_column]),1,inplace=True)
for code in VALUES_NOT_WANTED:
try: | except:
pass
return dataframe2
def rank_dataframe(dataframe,new_column):
dataframe2 = last_value(dataframe,new_column)
dataframe2 = dataframe2.sort_values(by=new_column,ascending=False)
maxi = float(dataframe2.iloc[0])
part = maxi/4
part2 = part
part3 = part*2
part4 = part*3
for row in range(dataframe2.shape[0]):
if float(dataframe2.iloc[row]) < part2:
dataframe2.iloc[row] = int(1)
elif float(dataframe2.iloc[row]) < part3:
dataframe2.iloc[row] = int(2)
elif float(dataframe2.iloc[row]) < part4:
dataframe2.iloc[row] = int(3)
else:
dataframe2.iloc[row] = int(4)
return dataframe2.astype(int)
def horizontal_bar_plot_tri(dataframe):
sns.set_theme(style="whitegrid")
f, ax = plt.subplots(figsize=(6, 15))
dataframe2 = dataframe.sort_values(by=["Income Group","Internet","Computer"],ascending=False)
sns.set_color_codes("colorblind")
sns.barplot(x="Computer2", y="Country Code", data=dataframe2,label="Computer owner rank", color="b")
sns.set_color_codes("muted")
sns.barplot(x="Internet2", y="Country Code", data=dataframe2,label="Internet user rank", color="b")
sns.set_color_codes("pastel")
sns.barplot(x="Income Group", y="Country Code", data=dataframe2,label="Income Group rank", color="b")
ax.legend(ncol=1, loc="lower right", frameon=True,fontsize='large')
plt.xlabel( xlabel="Scoring by country",fontsize=18)
ax.set(xlim=(0, 15), ylabel="", xlabel="Scoring by country")
sns.despine(left=True, bottom=True)
def horizontal_bar_plot_mono(dataframe,sort_by,title,xmin,xmax):
sns.set_theme(style="whitegrid")
f, ax = plt.subplots(figsize=(6, 15))
dataframe2 = dataframe.sort_values(by=[sort_by],ascending=False)
sns.set_color_codes("pastel")
sns.barplot(x=sort_by, y=dataframe2.index, data=dataframe2,label=sort_by, color="b")
ax.legend(ncol=1, loc="lower right", frameon=True,fontsize='large')
ax.xaxis.tick_top()
if title == "Study years in selected countries":
for i,value in enumerate(dataframe2[sort_by]):
ax.text(value+3/xmin, i + 0.2,str(value),fontsize=15)
else:
for i,value in enumerate(dataframe2[sort_by]):
ax.text(value+3/xmin, i + 0.2,str(int(value)),fontsize=15)
plt.xlabel( xlabel=title,fontsize=18)
ax.set(xlim=(xmin, xmax), ylabel="", xlabel=title)
sns.despine(left=True, bottom=True)
def top_countries_with_data(dataframe):
dataframe2 = dataframe.copy()
for country in NOT_IN_STUDY_YEARS:
dataframe2.drop(dataframe2[dataframe2["Country Code"] == country].index,inplace =True)
return dataframe2
def potential_years_study(dataframe1,dataframe2,selected_countries):
dataframe = dataframe1.join(dataframe2,how='outer')
dataframe.fillna(1,inplace=True)
multiple_row = len(dataframe2.columns)
new_col_list = []
if multiple_row>1:
for column in range(len(dataframe2.columns)):
new_col = "potential_"+dataframe2.columns[column][-4:]
new_col_list.append(new_col)
dataframe[new_col] = dataframe[dataframe.columns[0]] * dataframe[dataframe.columns[column +1]]
else:
dataframe["potential"] = dataframe[dataframe.columns[0]] * dataframe[dataframe.columns[1]]
dataframe = dataframe.loc[selected_countries,:]
if multiple_row>1:
return dataframe.sort_values(by=[new_col_list[0]],ascending=False)
else:
return dataframe.sort_values(by=['potential'],ascending=False)
def take_value(dataframe,new_column,years):
dataframe2= dataframe.copy()
dataframe2.dropna(axis = 'columns', how = 'all',inplace=True)
dataframe2 = dataframe2.replace(0,np.nan)
dataframe2.transpose().fillna(method='ffill',inplace=True)
dataframe2.drop(dataframe2.columns.difference(years),1,inplace=True)
for year in years:
dataframe2= dataframe2.rename(columns={year:new_column+"_"+year})
for code in VALUES_NOT_WANTED:
try:
dataframe2 = dataframe2.drop([code],axis = 0)
except:
pass
return dataframe2
def transforme_for_scatterplot(dataframe):
df1 = dataframe.reset_index()
df11 = df1.drop(df1.columns.difference(["Country Code","prediction_new_students_2020"]),1)
df12 = df1.drop(df1.columns.difference(["Country Code","prediction_new_students_2025"]),1)
df13 = df1.drop(df1.columns.difference(["Country Code","prediction_new_students_2030"]),1)
df11["year"] = "students_number_2020"
df12["year"] = "students_number_2025"
df13["year"] = "students_number_2030"
df11.rename(columns={"prediction_new_students_2020":"students_number"},inplace=True)
df12.rename(columns={"prediction_new_students_2025":"students_number"},inplace=True)
df13.rename(columns={"prediction_new_students_2030":"students_number"},inplace=True)
return pd.concat([df11,df12,df13])
def create_list_for_scatterplot(begin,end,time):
row_list = []
for mult in range(3):
for num in range(begin-1,end):
row_list.append(int(mult*time+num))
return row_list
def scatterplot_student_number(dataframe,title,mini,maxi):
size_df = int(len(dataframe)/3)
ax = plt.axes()
plt.title("Students number prediction in thousand")
fig = sns.scatterplot(data=dataframe.iloc[create_list_for_scatterplot(mini,maxi,size_df)], x="Country Code", y="students_number",hue="year")
def create_list_for_scatterplot2(begin,end):
row_list = []
for num in range(begin-1,end):
for mult in range(3):
row_list.append(int(mult+3*num))
return row_list
def display_potential_years_study(dataframe_study_year,final_df,mini,maxi):
final_df2 = final_df.copy()
final_df2["potential"] = final_df2["students_number"]
final_df3 = final_df2.merge(dataframe_study_year,left_on="Country Code",right_on=dataframe_study_year.index)
final_df3["potential"] = final_df3["potential"]*final_df3["study_years_expected"]
final_df3 = final_df3.replace("students_number_2020","potential_2020")
final_df3 = final_df3.replace("students_number_2025","potential_2025")
final_df3 = final_df3.replace("students_number_2030","potential_2030")
final_df3 = final_df3.sort_values(by=["potential"],ascending=False)
size_df = int(len(final_df3)/3)
ax = plt.axes()
plt.title("Countries potential")
fig = sns.scatterplot(data=final_df3.iloc[create_list_for_scatterplot2(mini,maxi)], x="Country Code", y="potential",hue="year") | dataframe2 = dataframe2.drop([code],axis = 0) | random_line_split |
annexe.py | import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
import plotly.graph_objs as go
import seaborn as sns
FICHIERS = ["EdStatsCountry.csv","EdStatsCountry-Series.csv","EdStatsData.csv","EdStatsFootNote.csv"
,"EdStatsSeries.csv"]
LOCALISATION ='F:/cour/OC/projet2/'
INDEX = ["secondary","tertiary","school|educationnal","student","inhabitant|household","population","technology|computer|internet"]
VALUES_NOT_WANTED = ["WLD","ARE","LMC","LIC","LMY","UMC","MIC","HIC","NOC","OEC","EUU","EAS","EAP","SAS","OED","ECS","LCN","LAC","LDC","SSF","SSA","ECA","MEA","NAC","HPC","MNA","EMU","ARB","IDN","ZAF"]
COLOR_LIST = ["pastel","muted","colorblind","deep","dark","bright"]
NOT_IN_STUDY_YEARS = ['FIN','NZL','ISL','AUT','SMR','CAN']
def open_csv(num_fichier,index_column="",column_list=""):
if index_column == "" and column_list == "":
fichier_lu = pd.read_csv(LOCALISATION + FICHIERS[num_fichier])
fichier_lu = fichier_lu.dropna(axis = 'columns', how = 'all')
elif index_column != "" and column_list != "":
fichier_lu = pd.read_csv(LOCALISATION + FICHIERS[num_fichier],index_col=index_column,names=column_list)
fichier_lu = fichier_lu.dropna(axis = 'columns', how = 'all')
elif index_column == "" and column_list != "":
fichier_lu = pd.read_csv(LOCALISATION + FICHIERS[num_fichier],names=column_list)
fichier_lu = fichier_lu.dropna(axis = 'columns', how = 'all')
else:
fichier_lu = pd.read_csv(LOCALISATION + FICHIERS[num_fichier],index_col=index_column)
fichier_lu = fichier_lu.dropna(axis = 'columns', how = 'all')
print("\n",FICHIERS[num_fichier], " is open")
print("fichier avec ",fichier_lu.shape[0]," lignes et ",fichier_lu.shape[1]," colonnes.")
return fichier_lu
def print_empty_stats(dataframe):
data_number = dataframe.count(axis='index').cumsum().tolist()
pourcentage_values = int(data_number[-1]) / (int(dataframe.shape[0]) * int(dataframe.shape[1])) * 100
print("le dataframe est rempli à ",format(pourcentage_values,".2f"),"%\n")
def print_samples(data_frame,number_of_rows):
display(data_frame.sample(number_of_rows,random_state = 148625))
def index_str_contains(index,dataframe,regex_var):
new_index = index.str.contains(regex_var,case=False,regex=True,na=False)
return new_index
def indicator_name_list(dataframe):
index = dataframe['Indicator Name'].squeeze()
variable_list = []
for regex in INDEX:
index_temp = index_str_contains(index,dataframe,regex)
set_temp = set(dataframe[index_temp]['Indicator Name'].unique())
for variable in variable_list:
set_temp = set_temp - variable
print("Pour le regex ",regex," : ",len(set_temp)," variables de trouvé")
variable_list.append(set_temp)
return variable_list
def to_keep(dataframe,columns_to_keep):
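    """Open the CSV numbered `dataframe` via open_csv and keep only the
    columns listed in columns_to_keep."""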
reduct_dataframe = open_csv(dataframe)
for column in reduct_dataframe.columns:
if column not in columns_to_keep:
reduct_dataframe = reduct_dataframe.drop([column],axis = 1)
print("nouveau format du fichier : ",reduct_dataframe.shape[0]," lignes et ",reduct_dataframe.shape[1]," colonnes.")
return reduct_dataframe
def take_needed_rows(dataframe,list_values):
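    """Stack the rows whose 'Indicator Name' appears in list_values."""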
new_dataframe = pd.DataFrame([])
for value in list_values:
new_dataframe = pd.concat([new_dataframe,dataframe.loc[dataframe['Indicator Name'] == value]])
return new_dataframe
def replace_ESC(dataframe, value_or_number=0):
if value_or_number == 0:
new_dataframe = dataframe.replace(["High income: nonOECD","Upper middle income","Lower middle income","High income: OECD","Low income"],["High","Upper \nmiddle","Lower \nmiddle","High","Low"])
else:
new_dataframe = dataframe.replace(["High income: nonOECD","Upper middle income","Lower middle income","High income: OECD","Low income"],[5,4,2,5,1])
return new_dataframe
def sns_graph(fichierESC3):
sns.set(font_scale=5)
sns.set_theme(style="darkgrid")
ax = sns.countplot(x="Income Group",order = ["High","Upper \nmiddle","Lower \nmiddle","Low"],\
data = fichierESC3,palette=["tab:red","tab:orange","cornflowerblue","darkblue",]).\
set_title("Numbers of countries by income group")
def choropleth_map(dataframe,titre,index=False,year='2001',column='Income Group'):
if index:
countries = dataframe.index.tolist()
z = dataframe[year].tolist()
titre = titre + year
elif not index:
countries = dataframe['Country Code'].tolist()
z = dataframe[column].tolist()
layout = dict(geo={'scope': 'world'})
scl = [[0.0, 'darkblue'],[0.2, 'cornflowerblue'],[0.4, 'cornflowerblue'],\
[0.6, 'orange'],[0.8, 'orange'],[1.0, 'red']]
data = dict(
type='choropleth',
locations=countries,
locationmode='ISO-3',
colorscale=scl,
autocolorscale = False,
marker = dict(line = dict (color = 'rgb(0,0,0)', width = 1)),z=z)
map = go.Figure(data=[data], layout=layout)
map.update_layout(
title={
'text': titre,
'y':0.9,
'x':0.5,
'xanchor': 'center',
'yanchor': 'top'
},
title_font_size=30)
map.show()
def ratio_epuration(data_frame,ratio):
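    """Keep only the columns whose share of non-missing values is at least
    `ratio`: a column is dropped when it has more than rows*(1-ratio) NaNs."""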
nb_lignes = data_frame.shape[0]
tab_isna = data_frame.isna().sum()
list_index = tab_isna.index.tolist()
df_epure = pd.DataFrame()
for index,value in enumerate(tab_isna):
if value <= nb_lignes * (1 - ratio):
df_epure[list_index[index]] = data_frame[list_index[index]]
return df_epure
def clean_data(dataframe,ratio):
dataframe2 = dataframe.replace(0,np.nan)
dataframe3 = dataframe2.dropna(axis = 'columns', how = 'all')
dataframe4 = ratio_epuration(dataframe3,ratio)
return dataframe4
def create_range(dataframe,quantity_print,which_one=-1):
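    """Return the year columns to plot: a single column when quantity_print is
    1 (the first, the last, or `which_one`), otherwise `quantity_print` evenly
    spaced years ending at the last column."""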
if quantity_print == 1:
if which_one == 0:
return [dataframe.columns[0]]
elif which_one == -1:
return [dataframe.columns[-1]]
else:
            try:
                dataframe[str(which_one)]
            except KeyError:
                # a missing column label raises KeyError (not ValueError)
                print("Non valid column")
            else:
                return [str(which_one)]
else:
last_elem = int(dataframe.columns[-1])
column_nbr = int(len(dataframe.columns))
if column_nbr % (quantity_print - 1) == 0:
range_step = int(column_nbr / quantity_print)
else:
range_step = int(column_nbr / (quantity_print - 1))
begin_year = last_elem
for step in range(quantity_print-1):
begin_year -= range_step
return range(begin_year,last_elem+1,range_step)
def most_filled_column(dataframe):
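    """Return the column with the fewest missing values, preferring the most
    recent year in case of ties (the scan runs from newest to oldest)."""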
mini = dataframe[dataframe.columns[-1]].isna().sum()
column_mini = dataframe.columns[-1]
for column in reversed(dataframe.columns):
isna_sum = dataframe[column].isna().sum()
if mini > isna_sum:
mini = isna_sum
column_mini = column
return column_mini
def fill_dataframe(dataframe):
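    """Treat zeros as missing and forward-fill each row along the years
    (transpose, ffill down, transpose back)."""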
return dataframe.replace(0,np.nan).transpose().fillna(method='ffill').transpose()
def sort_dataframe(dataframe,sort_year=''):
dataframe2 = fill_dataframe(dataframe)
if sort_year=='':
best_column_to_sort = most_filled_column(dataframe)
else:
best_column_to_sort = sort_year
for code in VALUES_NOT_WANTED:
try:
dataframe2 = dataframe2.drop([code],axis = 0)
except:
pass
dataframe2 = dataframe2.sort_values(by=[best_column_to_sort],ascending =False)
return dataframe2
def print_top_values(dataframe,title,value1,value2,sort_year=''):
dataframe2 = sort_dataframe(dataframe,sort_year)
if value1 == 0:
dataframe3 = dataframe2.head(value2).transpose()
title = "Top " + str(value2) + " " + title
else:
dataframe3 = dataframe2.head(value2).tail(value2 - value1 + 1).transpose()
title = "Top " + str(value1) + " to " + str(value2) + " " + title
lines = dataframe3.plot.line().set_title(title)
def last_value(dataframe,new_column):
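    """Return a one-column frame `new_column` holding each country's most
    recent non-missing value, with aggregate region codes removed."""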
dataframe2= dataframe.copy()
dataframe2.dropna(axis = 'columns', how = 'all',inplace=True)
dataframe2[new_column] = np.nan
dataframe2 = dataframe2.replace(0,np.nan)
    # fillna(inplace=True) on the transpose() result would modify a temporary
    # copy, so assign the forward-filled frame back explicitly
    dataframe2 = dataframe2.transpose().fillna(method='ffill').transpose()
dataframe2.drop(dataframe2.columns.difference([new_column]),1,inplace=True)
for code in VALUES_NOT_WANTED:
try:
dataframe2 = dataframe2.drop([code],axis = 0)
except:
pass
return dataframe2
def rank_dataframe(dataframe,new_column):
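    """Score countries 1..4 by quartiles of the column's maximum: below max/4
    -> 1, below max/2 -> 2, below 3*max/4 -> 3, otherwise 4."""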
dataframe2 = last_value(dataframe,new_column)
dataframe2 = dataframe2.sort_values(by=new_column,ascending=False)
maxi = float(dataframe2.iloc[0])
part = maxi/4
part2 = part
part3 = part*2
part4 = part*3
for row in range(dataframe2.shape[0]):
if float(dataframe2.iloc[row]) < part2:
dataframe2.iloc[row] = int(1)
elif float(dataframe2.iloc[row]) < part3:
dataframe2.iloc[row] = int(2)
elif float(dataframe2.iloc[row]) < part4:
dataframe2.iloc[row] = int(3)
else:
dataframe2.iloc[row] = int(4)
return dataframe2.astype(int)
def horizontal_bar_plot_tri(dataframe):
sns.set_theme(style="whitegrid")
f, ax = plt.subplots(figsize=(6, 15))
dataframe2 = dataframe.sort_values(by=["Income Group","Internet","Computer"],ascending=False)
sns.set_color_codes("colorblind")
sns.barplot(x="Computer2", y="Country Code", data=dataframe2,label="Computer owner rank", color="b")
sns.set_color_codes("muted")
sns.barplot(x="Internet2", y="Country Code", data=dataframe2,label="Internet user rank", color="b")
sns.set_color_codes("pastel")
sns.barplot(x="Income Group", y="Country Code", data=dataframe2,label="Income Group rank", color="b")
ax.legend(ncol=1, loc="lower right", frameon=True,fontsize='large')
plt.xlabel( xlabel="Scoring by country",fontsize=18)
ax.set(xlim=(0, 15), ylabel="", xlabel="Scoring by country")
sns.despine(left=True, bottom=True)
def horizontal_bar_plot_mono(dataframe,sort_by,title,xmin,xmax):
sns.set_theme(style="whitegrid")
f, ax = plt.subplots(figsize=(6, 15))
dataframe2 = dataframe.sort_values(by=[sort_by],ascending=False)
sns.set_color_codes("pastel")
sns.barplot(x=sort_by, y=dataframe2.index, data=dataframe2,label=sort_by, color="b")
ax.legend(ncol=1, loc="lower right", frameon=True,fontsize='large')
ax.xaxis.tick_top()
if title == "Study years in selected countries":
for i,value in enumerate(dataframe2[sort_by]):
ax.text(value+3/xmin, i + 0.2,str(value),fontsize=15)
else:
for i,value in enumerate(dataframe2[sort_by]):
ax.text(value+3/xmin, i + 0.2,str(int(value)),fontsize=15)
plt.xlabel( xlabel=title,fontsize=18)
ax.set(xlim=(xmin, xmax), ylabel="", xlabel=title)
sns.despine(left=True, bottom=True)
def top_countries_with_data(dataframe):
da | def potential_years_study(dataframe1,dataframe2,selected_countries):
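    """Join the two frames, multiply dataframe1's first column by each
    dataframe2 column to build potential(_<year>) columns, then restrict to
    selected_countries and sort by the first potential column, descending."""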
dataframe = dataframe1.join(dataframe2,how='outer')
dataframe.fillna(1,inplace=True)
multiple_row = len(dataframe2.columns)
new_col_list = []
if multiple_row>1:
for column in range(len(dataframe2.columns)):
new_col = "potential_"+dataframe2.columns[column][-4:]
new_col_list.append(new_col)
dataframe[new_col] = dataframe[dataframe.columns[0]] * dataframe[dataframe.columns[column +1]]
else:
dataframe["potential"] = dataframe[dataframe.columns[0]] * dataframe[dataframe.columns[1]]
dataframe = dataframe.loc[selected_countries,:]
if multiple_row>1:
return dataframe.sort_values(by=[new_col_list[0]],ascending=False)
else:
return dataframe.sort_values(by=['potential'],ascending=False)
def take_value(dataframe,new_column,years):
dataframe2= dataframe.copy()
dataframe2.dropna(axis = 'columns', how = 'all',inplace=True)
dataframe2 = dataframe2.replace(0,np.nan)
    # as in last_value: assign the ffill result back, since inplace=True on a
    # transpose() copy is lost
    dataframe2 = dataframe2.transpose().fillna(method='ffill').transpose()
dataframe2.drop(dataframe2.columns.difference(years),1,inplace=True)
for year in years:
dataframe2= dataframe2.rename(columns={year:new_column+"_"+year})
for code in VALUES_NOT_WANTED:
try:
dataframe2 = dataframe2.drop([code],axis = 0)
except:
pass
return dataframe2
def transforme_for_scatterplot(dataframe):
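    """Melt the three prediction_new_students_<year> columns into long format:
    one (Country Code, students_number, year) row per country and horizon."""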
df1 = dataframe.reset_index()
df11 = df1.drop(df1.columns.difference(["Country Code","prediction_new_students_2020"]),1)
df12 = df1.drop(df1.columns.difference(["Country Code","prediction_new_students_2025"]),1)
df13 = df1.drop(df1.columns.difference(["Country Code","prediction_new_students_2030"]),1)
df11["year"] = "students_number_2020"
df12["year"] = "students_number_2025"
df13["year"] = "students_number_2030"
df11.rename(columns={"prediction_new_students_2020":"students_number"},inplace=True)
df12.rename(columns={"prediction_new_students_2025":"students_number"},inplace=True)
df13.rename(columns={"prediction_new_students_2030":"students_number"},inplace=True)
return pd.concat([df11,df12,df13])
def create_list_for_scatterplot(begin,end,time):
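    """Build the iloc positions num + mult*time for mult in 0..2 and num in
    [begin-1, end-1]; e.g. begin=1, end=2, time=10 -> [0, 1, 10, 11, 20, 21]."""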
row_list = []
for mult in range(3):
for num in range(begin-1,end):
row_list.append(int(mult*time+num))
return row_list
def scatterplot_student_number(dataframe,title,mini,maxi):
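    """Scatter-plot predicted student numbers for the countries ranked
    mini..maxi across the three stacked year blocks (`title` is unused)."""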
size_df = int(len(dataframe)/3)
ax = plt.axes()
plt.title("Students number prediction in thousand")
fig = sns.scatterplot(data=dataframe.iloc[create_list_for_scatterplot(mini,maxi,size_df)], x="Country Code", y="students_number",hue="year")
def create_list_for_scatterplot2(begin,end):
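    """Build the iloc positions [3*num, 3*num+1, 3*num+2] for each num in
    [begin-1, end-1]; e.g. begin=1, end=2 -> [0, 1, 2, 3, 4, 5]."""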
row_list = []
for num in range(begin-1,end):
for mult in range(3):
row_list.append(int(mult+3*num))
return row_list
def display_potential_years_study(dataframe_study_year,final_df,mini,maxi):
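    """Scatter-plot country 'potential' (predicted student numbers weighted by
    expected study years) for the countries ranked mini..maxi, hued by year."""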
final_df2 = final_df.copy()
final_df2["potential"] = final_df2["students_number"]
final_df3 = final_df2.merge(dataframe_study_year,left_on="Country Code",right_on=dataframe_study_year.index)
final_df3["potential"] = final_df3["potential"]*final_df3["study_years_expected"]
final_df3 = final_df3.replace("students_number_2020","potential_2020")
final_df3 = final_df3.replace("students_number_2025","potential_2025")
final_df3 = final_df3.replace("students_number_2030","potential_2030")
final_df3 = final_df3.sort_values(by=["potential"],ascending=False)
size_df = int(len(final_df3)/3)
ax = plt.axes()
plt.title("Countries potential")
fig = sns.scatterplot(data=final_df3.iloc[create_list_for_scatterplot2(mini,maxi)], x="Country Code", y="potential",hue="year") | taframe2 = dataframe.copy()
for country in NOT_IN_STUDY_YEARS:
dataframe2.drop(dataframe2[dataframe2["Country Code"] == country].index,inplace =True)
return dataframe2
| identifier_body |
node.go | // Copyright 2020 The Swarm Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
// Package node defines the concept of a Bee node
// by bootstrapping and injecting all necessary
// dependencies.
package node
import (
"context"
"crypto/ecdsa"
"encoding/hex"
"errors"
"fmt"
"io"
"log"
"math/big"
"net"
"net/http"
"os"
"path/filepath"
"strings"
"sync"
"syscall"
"time"
"github.com/ethereum/go-ethereum/common"
"github.com/ethereum/go-ethereum/core/types"
"github.com/ethereum/go-ethereum/ethclient"
"github.com/ethersphere/bee/pkg/accounting"
"github.com/ethersphere/bee/pkg/addressbook"
"github.com/ethersphere/bee/pkg/api"
"github.com/ethersphere/bee/pkg/crypto"
"github.com/ethersphere/bee/pkg/debugapi"
"github.com/ethersphere/bee/pkg/feeds/factory"
"github.com/ethersphere/bee/pkg/hive"
"github.com/ethersphere/bee/pkg/localstore"
"github.com/ethersphere/bee/pkg/logging"
"github.com/ethersphere/bee/pkg/metrics"
"github.com/ethersphere/bee/pkg/netstore"
"github.com/ethersphere/bee/pkg/p2p"
"github.com/ethersphere/bee/pkg/p2p/libp2p"
"github.com/ethersphere/bee/pkg/pingpong"
"github.com/ethersphere/bee/pkg/pinning"
"github.com/ethersphere/bee/pkg/postage"
"github.com/ethersphere/bee/pkg/postage/batchservice"
"github.com/ethersphere/bee/pkg/postage/batchstore"
"github.com/ethersphere/bee/pkg/postage/listener"
"github.com/ethersphere/bee/pkg/postage/postagecontract"
"github.com/ethersphere/bee/pkg/pricer"
"github.com/ethersphere/bee/pkg/pricing"
"github.com/ethersphere/bee/pkg/pss"
"github.com/ethersphere/bee/pkg/puller"
"github.com/ethersphere/bee/pkg/pullsync"
"github.com/ethersphere/bee/pkg/pullsync/pullstorage"
"github.com/ethersphere/bee/pkg/pusher"
"github.com/ethersphere/bee/pkg/pushsync"
"github.com/ethersphere/bee/pkg/recovery"
"github.com/ethersphere/bee/pkg/resolver/multiresolver"
"github.com/ethersphere/bee/pkg/retrieval"
"github.com/ethersphere/bee/pkg/settlement/pseudosettle"
"github.com/ethersphere/bee/pkg/settlement/swap"
"github.com/ethersphere/bee/pkg/settlement/swap/chequebook"
"github.com/ethersphere/bee/pkg/settlement/swap/priceoracle"
"github.com/ethersphere/bee/pkg/shed"
"github.com/ethersphere/bee/pkg/steward"
"github.com/ethersphere/bee/pkg/storage"
"github.com/ethersphere/bee/pkg/swarm"
"github.com/ethersphere/bee/pkg/tags"
"github.com/ethersphere/bee/pkg/topology"
"github.com/ethersphere/bee/pkg/topology/kademlia"
"github.com/ethersphere/bee/pkg/topology/lightnode"
"github.com/ethersphere/bee/pkg/tracing"
"github.com/ethersphere/bee/pkg/transaction"
"github.com/ethersphere/bee/pkg/traversal"
"github.com/hashicorp/go-multierror"
ma "github.com/multiformats/go-multiaddr"
"github.com/sirupsen/logrus"
"golang.org/x/sync/errgroup"
)
type Bee struct {
p2pService io.Closer
p2pHalter p2p.Halter
p2pCancel context.CancelFunc
apiCloser io.Closer
apiServer *http.Server
debugAPIServer *http.Server
resolverCloser io.Closer
errorLogWriter *io.PipeWriter
tracerCloser io.Closer
tagsCloser io.Closer
stateStoreCloser io.Closer
localstoreCloser io.Closer
topologyCloser io.Closer
topologyHalter topology.Halter
pusherCloser io.Closer
pullerCloser io.Closer
accountingCloser io.Closer
pullSyncCloser io.Closer
pssCloser io.Closer
ethClientCloser func()
transactionMonitorCloser io.Closer
transactionCloser io.Closer
recoveryHandleCleanup func()
listenerCloser io.Closer
postageServiceCloser io.Closer
priceOracleCloser io.Closer
shutdownInProgress bool
shutdownMutex sync.Mutex
}
type Options struct {
DataDir string
CacheCapacity uint64
DBOpenFilesLimit uint64
DBWriteBufferSize uint64
DBBlockCacheCapacity uint64
DBDisableSeeksCompaction bool
APIAddr string
DebugAPIAddr string
Addr string
NATAddr string
EnableWS bool
EnableQUIC bool
WelcomeMessage string
Bootnodes []string
CORSAllowedOrigins []string
Logger logging.Logger
Standalone bool
TracingEnabled bool
TracingEndpoint string
TracingServiceName string
GlobalPinningEnabled bool
PaymentThreshold string
PaymentTolerance string
PaymentEarly string
ResolverConnectionCfgs []multiresolver.ConnectionConfig
GatewayMode bool
BootnodeMode bool
SwapEndpoint string
SwapFactoryAddress string
SwapLegacyFactoryAddresses []string
SwapInitialDeposit string
SwapEnable bool
FullNodeMode bool
Transaction string
PostageContractAddress string
PriceOracleAddress string
BlockTime uint64
DeployGasPrice string
WarmupTime time.Duration
}
const (
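	// refreshRate is handed below to the accounting and pseudosettle services
	// as the time-based refresh allowance; basePrice seeds the fixed chunk pricer.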
refreshRate = int64(6000000)
basePrice = 10000
)
func NewBee(addr string, swarmAddress swarm.Address, publicKey ecdsa.PublicKey, signer crypto.Signer, networkID uint64, logger logging.Logger, libp2pPrivateKey, pssPrivateKey *ecdsa.PrivateKey, o Options) (b *Bee, err error) |
func (b *Bee) Shutdown(ctx context.Context) error {
var mErr error
	// if a shutdown is already in progress, return here
b.shutdownMutex.Lock()
if b.shutdownInProgress {
b.shutdownMutex.Unlock()
return ErrShutdownInProgress
}
b.shutdownInProgress = true
b.shutdownMutex.Unlock()
// halt kademlia while shutting down other
// components.
b.topologyHalter.Halt()
// halt p2p layer from accepting new connections
// while shutting down other components
b.p2pHalter.Halt()
	// tryClose is a convenience closure that reduces
	// repetitive io.Closer shutdown handling.
tryClose := func(c io.Closer, errMsg string) {
if c == nil {
return
}
if err := c.Close(); err != nil {
mErr = multierror.Append(mErr, fmt.Errorf("%s: %w", errMsg, err))
}
}
tryClose(b.apiCloser, "api")
var eg errgroup.Group
if b.apiServer != nil {
eg.Go(func() error {
if err := b.apiServer.Shutdown(ctx); err != nil {
return fmt.Errorf("api server: %w", err)
}
return nil
})
}
if b.debugAPIServer != nil {
eg.Go(func() error {
if err := b.debugAPIServer.Shutdown(ctx); err != nil {
return fmt.Errorf("debug api server: %w", err)
}
return nil
})
}
if err := eg.Wait(); err != nil {
mErr = multierror.Append(mErr, err)
}
if b.recoveryHandleCleanup != nil {
b.recoveryHandleCleanup()
}
var wg sync.WaitGroup
wg.Add(5)
go func() {
defer wg.Done()
tryClose(b.pssCloser, "pss")
}()
go func() {
defer wg.Done()
tryClose(b.pusherCloser, "pusher")
}()
go func() {
defer wg.Done()
tryClose(b.pullerCloser, "puller")
}()
go func() {
defer wg.Done()
tryClose(b.accountingCloser, "accounting")
}()
b.p2pCancel()
go func() {
defer wg.Done()
tryClose(b.pullSyncCloser, "pull sync")
}()
wg.Wait()
tryClose(b.p2pService, "p2p server")
tryClose(b.priceOracleCloser, "price oracle service")
wg.Add(3)
go func() {
defer wg.Done()
tryClose(b.transactionMonitorCloser, "transaction monitor")
tryClose(b.transactionCloser, "transaction")
}()
go func() {
defer wg.Done()
tryClose(b.listenerCloser, "listener")
}()
go func() {
defer wg.Done()
tryClose(b.postageServiceCloser, "postage service")
}()
wg.Wait()
if c := b.ethClientCloser; c != nil {
c()
}
tryClose(b.tracerCloser, "tracer")
tryClose(b.tagsCloser, "tag persistence")
tryClose(b.topologyCloser, "topology driver")
tryClose(b.stateStoreCloser, "statestore")
tryClose(b.localstoreCloser, "localstore")
tryClose(b.errorLogWriter, "error log writer")
tryClose(b.resolverCloser, "resolver service")
return mErr
}
func getTxHash(stateStore storage.StateStorer, logger logging.Logger, o Options) ([]byte, error) {
if o.Standalone {
return nil, nil // in standalone mode tx hash is not used
}
if o.Transaction != "" {
txHashTrimmed := strings.TrimPrefix(o.Transaction, "0x")
if len(txHashTrimmed) != 64 {
return nil, errors.New("invalid length")
}
txHash, err := hex.DecodeString(txHashTrimmed)
if err != nil {
return nil, err
}
logger.Infof("using the provided transaction hash %x", txHash)
return txHash, nil
}
var txHash common.Hash
key := chequebook.ChequebookDeploymentKey
if err := stateStore.Get(key, &txHash); err != nil {
if errors.Is(err, storage.ErrNotFound) {
return nil, errors.New("chequebook deployment transaction hash not found. Please specify the transaction hash manually.")
}
return nil, err
}
logger.Infof("using the chequebook transaction hash %x", txHash)
return txHash.Bytes(), nil
}
// pidKiller is used to issue a forced shutdown of the node from sub modules. The issue with using the
// node's Shutdown method is that it only shuts down the node and does not exit the start process,
// which is waiting on the os.Signals. This is not desirable, but currently the bee node cannot handle
// rate-limiting blockchain API calls properly. We shut down the node in this case to allow the user
// to rectify the API issues (by adjusting limits or using a different one). There is no
// platform-agnostic way to trigger os.Signals in Go, unfortunately, which is why we use the
// process.Kill approach, which works on Windows as well.
type pidKiller struct {
node *Bee
}
var ErrShutdownInProgress error = errors.New("shutdown in progress")
func (p *pidKiller) Shutdown(ctx context.Context) error {
err := p.node.Shutdown(ctx)
if err != nil {
return err
}
ps, err := os.FindProcess(syscall.Getpid())
if err != nil {
return err
}
return ps.Kill()
}
| {
tracer, tracerCloser, err := tracing.NewTracer(&tracing.Options{
Enabled: o.TracingEnabled,
Endpoint: o.TracingEndpoint,
ServiceName: o.TracingServiceName,
})
if err != nil {
return nil, fmt.Errorf("tracer: %w", err)
}
p2pCtx, p2pCancel := context.WithCancel(context.Background())
defer func() {
		// if there's been an error in this function
// we'd like to cancel the p2p context so that
// incoming connections will not be possible
if err != nil {
p2pCancel()
}
}()
// light nodes have zero warmup time for pull/pushsync protocols
warmupTime := o.WarmupTime
if !o.FullNodeMode {
warmupTime = 0
}
b = &Bee{
p2pCancel: p2pCancel,
errorLogWriter: logger.WriterLevel(logrus.ErrorLevel),
tracerCloser: tracerCloser,
}
var debugAPIService *debugapi.Service
if o.DebugAPIAddr != "" {
overlayEthAddress, err := signer.EthereumAddress()
if err != nil {
return nil, fmt.Errorf("eth address: %w", err)
}
		// set up the basic debug api endpoints, including the /health endpoint
debugAPIService = debugapi.New(swarmAddress, publicKey, pssPrivateKey.PublicKey, overlayEthAddress, logger, tracer, o.CORSAllowedOrigins)
debugAPIListener, err := net.Listen("tcp", o.DebugAPIAddr)
if err != nil {
return nil, fmt.Errorf("debug api listener: %w", err)
}
debugAPIServer := &http.Server{
IdleTimeout: 30 * time.Second,
ReadHeaderTimeout: 3 * time.Second,
Handler: debugAPIService,
ErrorLog: log.New(b.errorLogWriter, "", 0),
}
go func() {
logger.Infof("debug api address: %s", debugAPIListener.Addr())
if err := debugAPIServer.Serve(debugAPIListener); err != nil && err != http.ErrServerClosed {
logger.Debugf("debug api server: %v", err)
logger.Error("unable to serve debug api")
}
}()
b.debugAPIServer = debugAPIServer
}
stateStore, err := InitStateStore(logger, o.DataDir)
if err != nil {
return nil, err
}
b.stateStoreCloser = stateStore
err = CheckOverlayWithStore(swarmAddress, stateStore)
if err != nil {
return nil, err
}
addressbook := addressbook.New(stateStore)
var (
swapBackend *ethclient.Client
overlayEthAddress common.Address
chainID int64
transactionService transaction.Service
transactionMonitor transaction.Monitor
chequebookFactory chequebook.Factory
chequebookService chequebook.Service
chequeStore chequebook.ChequeStore
cashoutService chequebook.CashoutService
)
if !o.Standalone {
swapBackend, overlayEthAddress, chainID, transactionMonitor, transactionService, err = InitChain(
p2pCtx,
logger,
stateStore,
o.SwapEndpoint,
signer,
o.BlockTime,
)
if err != nil {
return nil, fmt.Errorf("init chain: %w", err)
}
b.ethClientCloser = swapBackend.Close
b.transactionCloser = tracerCloser
b.transactionMonitorCloser = transactionMonitor
}
if o.SwapEnable {
chequebookFactory, err = InitChequebookFactory(
logger,
swapBackend,
chainID,
transactionService,
o.SwapFactoryAddress,
o.SwapLegacyFactoryAddresses,
)
if err != nil {
return nil, err
}
if err = chequebookFactory.VerifyBytecode(p2pCtx); err != nil {
return nil, fmt.Errorf("factory fail: %w", err)
}
chequebookService, err = InitChequebookService(
p2pCtx,
logger,
stateStore,
signer,
chainID,
swapBackend,
overlayEthAddress,
transactionService,
chequebookFactory,
o.SwapInitialDeposit,
o.DeployGasPrice,
)
if err != nil {
return nil, err
}
chequeStore, cashoutService = initChequeStoreCashout(
stateStore,
swapBackend,
chequebookFactory,
chainID,
overlayEthAddress,
transactionService,
)
}
lightNodes := lightnode.NewContainer(swarmAddress)
txHash, err := getTxHash(stateStore, logger, o)
if err != nil {
return nil, fmt.Errorf("invalid transaction hash: %w", err)
}
senderMatcher := transaction.NewMatcher(swapBackend, types.NewEIP155Signer(big.NewInt(chainID)))
p2ps, err := libp2p.New(p2pCtx, signer, networkID, swarmAddress, addr, addressbook, stateStore, lightNodes, senderMatcher, logger, tracer, libp2p.Options{
PrivateKey: libp2pPrivateKey,
NATAddr: o.NATAddr,
EnableWS: o.EnableWS,
EnableQUIC: o.EnableQUIC,
Standalone: o.Standalone,
WelcomeMessage: o.WelcomeMessage,
FullNode: o.FullNodeMode,
Transaction: txHash,
})
if err != nil {
return nil, fmt.Errorf("p2p service: %w", err)
}
b.p2pService = p2ps
b.p2pHalter = p2ps
// localstore depends on batchstore
var path string
if o.DataDir != "" {
logger.Infof("using datadir in: '%s'", o.DataDir)
path = filepath.Join(o.DataDir, "localstore")
}
lo := &localstore.Options{
Capacity: o.CacheCapacity,
OpenFilesLimit: o.DBOpenFilesLimit,
BlockCacheCapacity: o.DBBlockCacheCapacity,
WriteBufferSize: o.DBWriteBufferSize,
DisableSeeksCompaction: o.DBDisableSeeksCompaction,
}
storer, err := localstore.New(path, swarmAddress.Bytes(), stateStore, lo, logger)
if err != nil {
return nil, fmt.Errorf("localstore: %w", err)
}
b.localstoreCloser = storer
batchStore, err := batchstore.New(stateStore, storer.UnreserveBatch)
if err != nil {
return nil, fmt.Errorf("batchstore: %w", err)
}
validStamp := postage.ValidStamp(batchStore)
post, err := postage.NewService(stateStore, batchStore, chainID)
if err != nil {
return nil, fmt.Errorf("postage service load: %w", err)
}
b.postageServiceCloser = post
var (
postageContractService postagecontract.Interface
batchSvc postage.EventUpdater
eventListener postage.Listener
)
var postageSyncStart uint64 = 0
if !o.Standalone {
postageContractAddress, startBlock, found := listener.DiscoverAddresses(chainID)
if o.PostageContractAddress != "" {
if !common.IsHexAddress(o.PostageContractAddress) {
return nil, errors.New("malformed postage stamp address")
}
postageContractAddress = common.HexToAddress(o.PostageContractAddress)
} else if !found {
return nil, errors.New("no known postage stamp addresses for this network")
}
if found {
postageSyncStart = startBlock
}
eventListener = listener.New(logger, swapBackend, postageContractAddress, o.BlockTime, &pidKiller{node: b})
b.listenerCloser = eventListener
batchSvc = batchservice.New(stateStore, batchStore, logger, eventListener, overlayEthAddress.Bytes(), post)
erc20Address, err := postagecontract.LookupERC20Address(p2pCtx, transactionService, postageContractAddress)
if err != nil {
return nil, err
}
postageContractService = postagecontract.New(
overlayEthAddress,
postageContractAddress,
erc20Address,
transactionService,
post,
)
}
if !o.Standalone {
if natManager := p2ps.NATManager(); natManager != nil {
// wait for nat manager to init
logger.Debug("initializing NAT manager")
select {
case <-natManager.Ready():
				// this is a magic sleep to give the NAT time to sync its
				// mappings; it is a hack and should be improved
time.Sleep(3 * time.Second)
logger.Debug("NAT manager initialized")
case <-time.After(10 * time.Second):
logger.Warning("NAT manager init timeout")
}
}
}
// Construct protocols.
pingPong := pingpong.New(p2ps, logger, tracer)
if err = p2ps.AddProtocol(pingPong.Protocol()); err != nil {
return nil, fmt.Errorf("pingpong service: %w", err)
}
hive := hive.New(p2ps, addressbook, networkID, logger)
if err = p2ps.AddProtocol(hive.Protocol()); err != nil {
return nil, fmt.Errorf("hive service: %w", err)
}
var bootnodes []ma.Multiaddr
if o.Standalone {
logger.Info("Starting node in standalone mode, no p2p connections will be made or accepted")
} else {
for _, a := range o.Bootnodes {
addr, err := ma.NewMultiaddr(a)
if err != nil {
logger.Debugf("multiaddress fail %s: %v", a, err)
logger.Warningf("invalid bootnode address %s", a)
continue
}
bootnodes = append(bootnodes, addr)
}
}
var swapService *swap.Service
metricsDB, err := shed.NewDBWrap(stateStore.DB())
if err != nil {
return nil, fmt.Errorf("unable to create metrics storage for kademlia: %w", err)
}
kad := kademlia.New(swarmAddress, addressbook, hive, p2ps, metricsDB, logger, kademlia.Options{Bootnodes: bootnodes, StandaloneMode: o.Standalone, BootnodeMode: o.BootnodeMode})
b.topologyCloser = kad
b.topologyHalter = kad
hive.SetAddPeersHandler(kad.AddPeers)
p2ps.SetPickyNotifier(kad)
batchStore.SetRadiusSetter(kad)
if batchSvc != nil {
syncedChan, err := batchSvc.Start(postageSyncStart)
if err != nil {
return nil, fmt.Errorf("unable to start batch service: %w", err)
}
// wait for the postage contract listener to sync
logger.Info("waiting to sync postage contract data, this may take a while... more info available in Debug loglevel")
		// arguably this is not a very nice solution, since we don't support
		// interrupts at this stage of the application lifecycle. Some changes
		// would be needed at the cmd level to support context cancellation at
		// this stage.
<-syncedChan
}
minThreshold := big.NewInt(2 * refreshRate)
paymentThreshold, ok := new(big.Int).SetString(o.PaymentThreshold, 10)
	if !ok {
		// on failure SetString returns nil, so report the raw option value
		return nil, fmt.Errorf("invalid payment threshold: %s", o.PaymentThreshold)
}
pricer := pricer.NewFixedPricer(swarmAddress, basePrice)
if paymentThreshold.Cmp(minThreshold) < 0 {
return nil, fmt.Errorf("payment threshold below minimum generally accepted value, need at least %s", minThreshold)
}
pricing := pricing.New(p2ps, logger, paymentThreshold, minThreshold)
if err = p2ps.AddProtocol(pricing.Protocol()); err != nil {
return nil, fmt.Errorf("pricing service: %w", err)
}
addrs, err := p2ps.Addresses()
if err != nil {
return nil, fmt.Errorf("get server addresses: %w", err)
}
for _, addr := range addrs {
logger.Debugf("p2p address: %s", addr)
}
paymentTolerance, ok := new(big.Int).SetString(o.PaymentTolerance, 10)
	if !ok {
		return nil, fmt.Errorf("invalid payment tolerance: %s", o.PaymentTolerance)
}
paymentEarly, ok := new(big.Int).SetString(o.PaymentEarly, 10)
	if !ok {
		return nil, fmt.Errorf("invalid payment early: %s", o.PaymentEarly)
}
acc, err := accounting.NewAccounting(
paymentThreshold,
paymentTolerance,
paymentEarly,
logger,
stateStore,
pricing,
big.NewInt(refreshRate),
p2ps,
)
if err != nil {
return nil, fmt.Errorf("accounting: %w", err)
}
b.accountingCloser = acc
pseudosettleService := pseudosettle.New(p2ps, logger, stateStore, acc, big.NewInt(refreshRate), p2ps)
if err = p2ps.AddProtocol(pseudosettleService.Protocol()); err != nil {
return nil, fmt.Errorf("pseudosettle service: %w", err)
}
acc.SetRefreshFunc(pseudosettleService.Pay)
if o.SwapEnable {
var priceOracle priceoracle.Service
swapService, priceOracle, err = InitSwap(
p2ps,
logger,
stateStore,
networkID,
overlayEthAddress,
chequebookService,
chequeStore,
cashoutService,
acc,
o.PriceOracleAddress,
chainID,
transactionService,
)
if err != nil {
return nil, err
}
b.priceOracleCloser = priceOracle
acc.SetPayFunc(swapService.Pay)
}
pricing.SetPaymentThresholdObserver(acc)
retrieve := retrieval.New(swarmAddress, storer, p2ps, kad, logger, acc, pricer, tracer)
tagService := tags.NewTags(stateStore, logger)
b.tagsCloser = tagService
pssService := pss.New(pssPrivateKey, logger)
b.pssCloser = pssService
var ns storage.Storer
if o.GlobalPinningEnabled {
// create recovery callback for content repair
recoverFunc := recovery.NewCallback(pssService)
ns = netstore.New(storer, validStamp, recoverFunc, retrieve, logger)
} else {
ns = netstore.New(storer, validStamp, nil, retrieve, logger)
}
traversalService := traversal.New(ns)
pinningService := pinning.NewService(storer, stateStore, traversalService)
pushSyncProtocol := pushsync.New(swarmAddress, p2ps, storer, kad, tagService, o.FullNodeMode, pssService.TryUnwrap, validStamp, logger, acc, pricer, signer, tracer, warmupTime)
// set the pushSyncer in the PSS
pssService.SetPushSyncer(pushSyncProtocol)
if o.GlobalPinningEnabled {
// register function for chunk repair upon receiving a trojan message
chunkRepairHandler := recovery.NewRepairHandler(ns, logger, pushSyncProtocol)
b.recoveryHandleCleanup = pssService.Register(recovery.Topic, chunkRepairHandler)
}
pusherService := pusher.New(networkID, storer, kad, pushSyncProtocol, tagService, logger, tracer, warmupTime)
b.pusherCloser = pusherService
pullStorage := pullstorage.New(storer)
pullSyncProtocol := pullsync.New(p2ps, pullStorage, pssService.TryUnwrap, validStamp, logger)
b.pullSyncCloser = pullSyncProtocol
var pullerService *puller.Puller
if o.FullNodeMode {
		// plain assignment: `:=` would shadow the outer pullerService and leave
		// it nil, skipping the metrics registration below
		pullerService = puller.New(stateStore, kad, pullSyncProtocol, logger, puller.Options{}, warmupTime)
b.pullerCloser = pullerService
}
retrieveProtocolSpec := retrieve.Protocol()
pushSyncProtocolSpec := pushSyncProtocol.Protocol()
pullSyncProtocolSpec := pullSyncProtocol.Protocol()
if o.FullNodeMode {
logger.Info("starting in full mode")
} else {
logger.Info("starting in light mode")
p2p.WithBlocklistStreams(p2p.DefaultBlocklistTime, retrieveProtocolSpec)
p2p.WithBlocklistStreams(p2p.DefaultBlocklistTime, pushSyncProtocolSpec)
p2p.WithBlocklistStreams(p2p.DefaultBlocklistTime, pullSyncProtocolSpec)
}
if err = p2ps.AddProtocol(retrieveProtocolSpec); err != nil {
return nil, fmt.Errorf("retrieval service: %w", err)
}
if err = p2ps.AddProtocol(pushSyncProtocolSpec); err != nil {
return nil, fmt.Errorf("pushsync service: %w", err)
}
if err = p2ps.AddProtocol(pullSyncProtocolSpec); err != nil {
return nil, fmt.Errorf("pullsync protocol: %w", err)
}
multiResolver := multiresolver.NewMultiResolver(
multiresolver.WithConnectionConfigs(o.ResolverConnectionCfgs),
multiresolver.WithLogger(o.Logger),
)
b.resolverCloser = multiResolver
var apiService api.Service
if o.APIAddr != "" {
// API server
feedFactory := factory.New(ns)
steward := steward.New(storer, traversalService, pushSyncProtocol)
apiService = api.New(tagService, ns, multiResolver, pssService, traversalService, pinningService, feedFactory, post, postageContractService, steward, signer, logger, tracer, api.Options{
CORSAllowedOrigins: o.CORSAllowedOrigins,
GatewayMode: o.GatewayMode,
WsPingPeriod: 60 * time.Second,
})
apiListener, err := net.Listen("tcp", o.APIAddr)
if err != nil {
return nil, fmt.Errorf("api listener: %w", err)
}
apiServer := &http.Server{
IdleTimeout: 30 * time.Second,
ReadHeaderTimeout: 3 * time.Second,
Handler: apiService,
ErrorLog: log.New(b.errorLogWriter, "", 0),
}
go func() {
logger.Infof("api address: %s", apiListener.Addr())
if err := apiServer.Serve(apiListener); err != nil && err != http.ErrServerClosed {
logger.Debugf("api server: %v", err)
logger.Error("unable to serve api")
}
}()
b.apiServer = apiServer
b.apiCloser = apiService
}
if debugAPIService != nil {
// register metrics from components
debugAPIService.MustRegisterMetrics(p2ps.Metrics()...)
debugAPIService.MustRegisterMetrics(pingPong.Metrics()...)
debugAPIService.MustRegisterMetrics(acc.Metrics()...)
debugAPIService.MustRegisterMetrics(storer.Metrics()...)
debugAPIService.MustRegisterMetrics(kad.Metrics()...)
if pullerService != nil {
debugAPIService.MustRegisterMetrics(pullerService.Metrics()...)
}
debugAPIService.MustRegisterMetrics(pushSyncProtocol.Metrics()...)
debugAPIService.MustRegisterMetrics(pusherService.Metrics()...)
debugAPIService.MustRegisterMetrics(pullSyncProtocol.Metrics()...)
debugAPIService.MustRegisterMetrics(pullStorage.Metrics()...)
debugAPIService.MustRegisterMetrics(retrieve.Metrics()...)
debugAPIService.MustRegisterMetrics(lightNodes.Metrics()...)
if bs, ok := batchStore.(metrics.Collector); ok {
debugAPIService.MustRegisterMetrics(bs.Metrics()...)
}
if eventListener != nil {
if ls, ok := eventListener.(metrics.Collector); ok {
debugAPIService.MustRegisterMetrics(ls.Metrics()...)
}
}
if pssServiceMetrics, ok := pssService.(metrics.Collector); ok {
debugAPIService.MustRegisterMetrics(pssServiceMetrics.Metrics()...)
}
if apiService != nil {
debugAPIService.MustRegisterMetrics(apiService.Metrics()...)
}
if l, ok := logger.(metrics.Collector); ok {
debugAPIService.MustRegisterMetrics(l.Metrics()...)
}
debugAPIService.MustRegisterMetrics(pseudosettleService.Metrics()...)
if swapService != nil {
debugAPIService.MustRegisterMetrics(swapService.Metrics()...)
}
// inject dependencies and configure full debug api http path routes
debugAPIService.Configure(p2ps, pingPong, kad, lightNodes, storer, tagService, acc, pseudosettleService, o.SwapEnable, swapService, chequebookService, batchStore, transactionService)
}
if err := kad.Start(p2pCtx); err != nil {
return nil, err
}
p2ps.Ready()
return b, nil
} | identifier_body |
node.go | // Copyright 2020 The Swarm Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
// Package node defines the concept of a Bee node
// by bootstrapping and injecting all necessary
// dependencies.
package node
import (
"context"
"crypto/ecdsa"
"encoding/hex"
"errors"
"fmt"
"io"
"log"
"math/big"
"net"
"net/http"
"os"
"path/filepath"
"strings"
"sync"
"syscall"
"time"
"github.com/ethereum/go-ethereum/common"
"github.com/ethereum/go-ethereum/core/types"
"github.com/ethereum/go-ethereum/ethclient"
"github.com/ethersphere/bee/pkg/accounting"
"github.com/ethersphere/bee/pkg/addressbook"
"github.com/ethersphere/bee/pkg/api"
"github.com/ethersphere/bee/pkg/crypto"
"github.com/ethersphere/bee/pkg/debugapi"
"github.com/ethersphere/bee/pkg/feeds/factory"
"github.com/ethersphere/bee/pkg/hive"
"github.com/ethersphere/bee/pkg/localstore"
"github.com/ethersphere/bee/pkg/logging"
"github.com/ethersphere/bee/pkg/metrics"
"github.com/ethersphere/bee/pkg/netstore"
"github.com/ethersphere/bee/pkg/p2p"
"github.com/ethersphere/bee/pkg/p2p/libp2p"
"github.com/ethersphere/bee/pkg/pingpong"
"github.com/ethersphere/bee/pkg/pinning"
"github.com/ethersphere/bee/pkg/postage"
"github.com/ethersphere/bee/pkg/postage/batchservice"
"github.com/ethersphere/bee/pkg/postage/batchstore"
"github.com/ethersphere/bee/pkg/postage/listener"
"github.com/ethersphere/bee/pkg/postage/postagecontract"
"github.com/ethersphere/bee/pkg/pricer"
"github.com/ethersphere/bee/pkg/pricing"
"github.com/ethersphere/bee/pkg/pss"
"github.com/ethersphere/bee/pkg/puller"
"github.com/ethersphere/bee/pkg/pullsync"
"github.com/ethersphere/bee/pkg/pullsync/pullstorage"
"github.com/ethersphere/bee/pkg/pusher"
"github.com/ethersphere/bee/pkg/pushsync"
"github.com/ethersphere/bee/pkg/recovery"
"github.com/ethersphere/bee/pkg/resolver/multiresolver"
"github.com/ethersphere/bee/pkg/retrieval"
"github.com/ethersphere/bee/pkg/settlement/pseudosettle"
"github.com/ethersphere/bee/pkg/settlement/swap"
"github.com/ethersphere/bee/pkg/settlement/swap/chequebook"
"github.com/ethersphere/bee/pkg/settlement/swap/priceoracle"
"github.com/ethersphere/bee/pkg/shed"
"github.com/ethersphere/bee/pkg/steward"
"github.com/ethersphere/bee/pkg/storage"
"github.com/ethersphere/bee/pkg/swarm"
"github.com/ethersphere/bee/pkg/tags"
"github.com/ethersphere/bee/pkg/topology"
"github.com/ethersphere/bee/pkg/topology/kademlia"
"github.com/ethersphere/bee/pkg/topology/lightnode"
"github.com/ethersphere/bee/pkg/tracing"
"github.com/ethersphere/bee/pkg/transaction"
"github.com/ethersphere/bee/pkg/traversal"
"github.com/hashicorp/go-multierror"
ma "github.com/multiformats/go-multiaddr"
"github.com/sirupsen/logrus"
"golang.org/x/sync/errgroup"
)
type Bee struct {
p2pService io.Closer
p2pHalter p2p.Halter
p2pCancel context.CancelFunc
apiCloser io.Closer
apiServer *http.Server
debugAPIServer *http.Server
resolverCloser io.Closer
errorLogWriter *io.PipeWriter
tracerCloser io.Closer
tagsCloser io.Closer
stateStoreCloser io.Closer
localstoreCloser io.Closer
topologyCloser io.Closer
topologyHalter topology.Halter
pusherCloser io.Closer
pullerCloser io.Closer
accountingCloser io.Closer
pullSyncCloser io.Closer
pssCloser io.Closer
ethClientCloser func()
transactionMonitorCloser io.Closer
transactionCloser io.Closer
recoveryHandleCleanup func()
listenerCloser io.Closer
postageServiceCloser io.Closer
priceOracleCloser io.Closer
shutdownInProgress bool
shutdownMutex sync.Mutex
}
type Options struct {
DataDir string
CacheCapacity uint64
DBOpenFilesLimit uint64
DBWriteBufferSize uint64
DBBlockCacheCapacity uint64
DBDisableSeeksCompaction bool
APIAddr string
DebugAPIAddr string
Addr string
NATAddr string
EnableWS bool
EnableQUIC bool
WelcomeMessage string
Bootnodes []string
CORSAllowedOrigins []string
Logger logging.Logger
Standalone bool
TracingEnabled bool
TracingEndpoint string
TracingServiceName string
GlobalPinningEnabled bool
PaymentThreshold string
PaymentTolerance string
PaymentEarly string
ResolverConnectionCfgs []multiresolver.ConnectionConfig
GatewayMode bool
BootnodeMode bool
SwapEndpoint string
SwapFactoryAddress string
SwapLegacyFactoryAddresses []string
SwapInitialDeposit string
SwapEnable bool
FullNodeMode bool
Transaction string
PostageContractAddress string
PriceOracleAddress string
BlockTime uint64
DeployGasPrice string
WarmupTime time.Duration
}
const (
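	// refreshRate is handed below to the accounting and pseudosettle services
	// as the time-based refresh allowance; basePrice seeds the fixed chunk pricer.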
refreshRate = int64(6000000)
basePrice = 10000
)
func NewBee(addr string, swarmAddress swarm.Address, publicKey ecdsa.PublicKey, signer crypto.Signer, networkID uint64, logger logging.Logger, libp2pPrivateKey, pssPrivateKey *ecdsa.PrivateKey, o Options) (b *Bee, err error) {
tracer, tracerCloser, err := tracing.NewTracer(&tracing.Options{
Enabled: o.TracingEnabled,
Endpoint: o.TracingEndpoint,
ServiceName: o.TracingServiceName,
})
if err != nil {
return nil, fmt.Errorf("tracer: %w", err)
}
p2pCtx, p2pCancel := context.WithCancel(context.Background())
defer func() {
		// if there's been an error in this function
// we'd like to cancel the p2p context so that
// incoming connections will not be possible
if err != nil {
p2pCancel()
}
}()
// light nodes have zero warmup time for pull/pushsync protocols
warmupTime := o.WarmupTime
if !o.FullNodeMode {
warmupTime = 0
}
b = &Bee{
p2pCancel: p2pCancel,
errorLogWriter: logger.WriterLevel(logrus.ErrorLevel),
tracerCloser: tracerCloser,
}
var debugAPIService *debugapi.Service
if o.DebugAPIAddr != "" {
overlayEthAddress, err := signer.EthereumAddress()
if err != nil {
return nil, fmt.Errorf("eth address: %w", err)
}
		// set up the basic debug api endpoints, including the /health endpoint
debugAPIService = debugapi.New(swarmAddress, publicKey, pssPrivateKey.PublicKey, overlayEthAddress, logger, tracer, o.CORSAllowedOrigins)
debugAPIListener, err := net.Listen("tcp", o.DebugAPIAddr)
if err != nil {
return nil, fmt.Errorf("debug api listener: %w", err)
}
debugAPIServer := &http.Server{
IdleTimeout: 30 * time.Second,
ReadHeaderTimeout: 3 * time.Second,
Handler: debugAPIService,
ErrorLog: log.New(b.errorLogWriter, "", 0),
}
go func() {
logger.Infof("debug api address: %s", debugAPIListener.Addr())
if err := debugAPIServer.Serve(debugAPIListener); err != nil && err != http.ErrServerClosed {
logger.Debugf("debug api server: %v", err)
logger.Error("unable to serve debug api")
}
}()
b.debugAPIServer = debugAPIServer
}
stateStore, err := InitStateStore(logger, o.DataDir)
if err != nil {
return nil, err
}
b.stateStoreCloser = stateStore
err = CheckOverlayWithStore(swarmAddress, stateStore)
if err != nil {
return nil, err
}
addressbook := addressbook.New(stateStore)
var (
swapBackend *ethclient.Client
overlayEthAddress common.Address
chainID int64
transactionService transaction.Service
transactionMonitor transaction.Monitor
chequebookFactory chequebook.Factory
chequebookService chequebook.Service
chequeStore chequebook.ChequeStore
cashoutService chequebook.CashoutService
)
if !o.Standalone {
swapBackend, overlayEthAddress, chainID, transactionMonitor, transactionService, err = InitChain(
p2pCtx,
logger,
stateStore,
o.SwapEndpoint,
signer,
o.BlockTime,
)
if err != nil {
return nil, fmt.Errorf("init chain: %w", err)
}
b.ethClientCloser = swapBackend.Close
b.transactionCloser = tracerCloser
b.transactionMonitorCloser = transactionMonitor
}
if o.SwapEnable {
chequebookFactory, err = InitChequebookFactory(
logger,
swapBackend,
chainID,
transactionService,
o.SwapFactoryAddress,
o.SwapLegacyFactoryAddresses,
)
if err != nil {
return nil, err
}
if err = chequebookFactory.VerifyBytecode(p2pCtx); err != nil {
return nil, fmt.Errorf("factory fail: %w", err)
}
chequebookService, err = InitChequebookService(
p2pCtx,
logger,
stateStore,
signer,
chainID,
swapBackend,
overlayEthAddress,
transactionService,
chequebookFactory,
o.SwapInitialDeposit,
o.DeployGasPrice,
)
if err != nil {
return nil, err
}
chequeStore, cashoutService = initChequeStoreCashout(
stateStore,
swapBackend,
chequebookFactory,
chainID,
overlayEthAddress,
transactionService,
)
}
lightNodes := lightnode.NewContainer(swarmAddress)
txHash, err := getTxHash(stateStore, logger, o)
if err != nil {
return nil, fmt.Errorf("invalid transaction hash: %w", err)
}
senderMatcher := transaction.NewMatcher(swapBackend, types.NewEIP155Signer(big.NewInt(chainID)))
p2ps, err := libp2p.New(p2pCtx, signer, networkID, swarmAddress, addr, addressbook, stateStore, lightNodes, senderMatcher, logger, tracer, libp2p.Options{
PrivateKey: libp2pPrivateKey,
NATAddr: o.NATAddr,
EnableWS: o.EnableWS,
EnableQUIC: o.EnableQUIC,
Standalone: o.Standalone,
WelcomeMessage: o.WelcomeMessage,
FullNode: o.FullNodeMode,
Transaction: txHash,
})
if err != nil {
return nil, fmt.Errorf("p2p service: %w", err)
}
b.p2pService = p2ps
b.p2pHalter = p2ps
// localstore depends on batchstore
var path string
if o.DataDir != "" {
logger.Infof("using datadir in: '%s'", o.DataDir)
path = filepath.Join(o.DataDir, "localstore")
}
lo := &localstore.Options{
Capacity: o.CacheCapacity,
OpenFilesLimit: o.DBOpenFilesLimit,
BlockCacheCapacity: o.DBBlockCacheCapacity,
WriteBufferSize: o.DBWriteBufferSize,
DisableSeeksCompaction: o.DBDisableSeeksCompaction,
}
storer, err := localstore.New(path, swarmAddress.Bytes(), stateStore, lo, logger)
if err != nil {
return nil, fmt.Errorf("localstore: %w", err)
}
b.localstoreCloser = storer
batchStore, err := batchstore.New(stateStore, storer.UnreserveBatch)
if err != nil {
return nil, fmt.Errorf("batchstore: %w", err)
}
validStamp := postage.ValidStamp(batchStore)
post, err := postage.NewService(stateStore, batchStore, chainID)
if err != nil {
return nil, fmt.Errorf("postage service load: %w", err)
}
b.postageServiceCloser = post
var (
postageContractService postagecontract.Interface
batchSvc postage.EventUpdater
eventListener postage.Listener
)
var postageSyncStart uint64 = 0
if !o.Standalone {
postageContractAddress, startBlock, found := listener.DiscoverAddresses(chainID)
if o.PostageContractAddress != "" {
if !common.IsHexAddress(o.PostageContractAddress) {
return nil, errors.New("malformed postage stamp address")
}
postageContractAddress = common.HexToAddress(o.PostageContractAddress)
} else if !found {
return nil, errors.New("no known postage stamp addresses for this network")
}
if found {
postageSyncStart = startBlock
}
eventListener = listener.New(logger, swapBackend, postageContractAddress, o.BlockTime, &pidKiller{node: b})
b.listenerCloser = eventListener
batchSvc = batchservice.New(stateStore, batchStore, logger, eventListener, overlayEthAddress.Bytes(), post)
erc20Address, err := postagecontract.LookupERC20Address(p2pCtx, transactionService, postageContractAddress)
if err != nil {
return nil, err
}
postageContractService = postagecontract.New(
overlayEthAddress,
postageContractAddress,
erc20Address,
transactionService,
post,
)
}
if !o.Standalone {
if natManager := p2ps.NATManager(); natManager != nil {
// wait for nat manager to init
logger.Debug("initializing NAT manager")
select {
case <-natManager.Ready():
				// this is a magic sleep to give the NAT time to sync its
				// mappings; it is a hack and should be improved
time.Sleep(3 * time.Second)
logger.Debug("NAT manager initialized")
case <-time.After(10 * time.Second):
logger.Warning("NAT manager init timeout")
}
}
}
// Construct protocols.
pingPong := pingpong.New(p2ps, logger, tracer)
if err = p2ps.AddProtocol(pingPong.Protocol()); err != nil {
return nil, fmt.Errorf("pingpong service: %w", err)
}
hive := hive.New(p2ps, addressbook, networkID, logger)
if err = p2ps.AddProtocol(hive.Protocol()); err != nil {
return nil, fmt.Errorf("hive service: %w", err)
}
var bootnodes []ma.Multiaddr
if o.Standalone {
logger.Info("Starting node in standalone mode, no p2p connections will be made or accepted")
} else {
for _, a := range o.Bootnodes {
addr, err := ma.NewMultiaddr(a)
if err != nil {
logger.Debugf("multiaddress fail %s: %v", a, err)
logger.Warningf("invalid bootnode address %s", a)
continue
}
bootnodes = append(bootnodes, addr)
}
}
var swapService *swap.Service
metricsDB, err := shed.NewDBWrap(stateStore.DB())
if err != nil {
return nil, fmt.Errorf("unable to create metrics storage for kademlia: %w", err)
}
kad := kademlia.New(swarmAddress, addressbook, hive, p2ps, metricsDB, logger, kademlia.Options{Bootnodes: bootnodes, StandaloneMode: o.Standalone, BootnodeMode: o.BootnodeMode})
b.topologyCloser = kad
b.topologyHalter = kad
hive.SetAddPeersHandler(kad.AddPeers)
p2ps.SetPickyNotifier(kad)
batchStore.SetRadiusSetter(kad)
if batchSvc != nil {
syncedChan, err := batchSvc.Start(postageSyncStart)
if err != nil {
return nil, fmt.Errorf("unable to start batch service: %w", err)
}
// wait for the postage contract listener to sync
logger.Info("waiting to sync postage contract data, this may take a while... more info available in Debug loglevel")
		// arguably this is not a very nice solution, since we don't support
		// interrupts at this stage of the application lifecycle. Some changes
		// would be needed at the cmd level to support context cancellation at
		// this stage.
<-syncedChan
}
minThreshold := big.NewInt(2 * refreshRate)
paymentThreshold, ok := new(big.Int).SetString(o.PaymentThreshold, 10)
	if !ok {
		// on failure SetString returns nil, so report the raw option value
		return nil, fmt.Errorf("invalid payment threshold: %s", o.PaymentThreshold)
}
pricer := pricer.NewFixedPricer(swarmAddress, basePrice)
if paymentThreshold.Cmp(minThreshold) < 0 {
return nil, fmt.Errorf("payment threshold below minimum generally accepted value, need at least %s", minThreshold)
}
pricing := pricing.New(p2ps, logger, paymentThreshold, minThreshold)
if err = p2ps.AddProtocol(pricing.Protocol()); err != nil {
return nil, fmt.Errorf("pricing service: %w", err)
}
addrs, err := p2ps.Addresses()
if err != nil {
return nil, fmt.Errorf("get server addresses: %w", err)
}
for _, addr := range addrs {
logger.Debugf("p2p address: %s", addr)
}
paymentTolerance, ok := new(big.Int).SetString(o.PaymentTolerance, 10)
if !ok |
paymentEarly, ok := new(big.Int).SetString(o.PaymentEarly, 10)
	if !ok {
		return nil, fmt.Errorf("invalid payment early: %s", o.PaymentEarly)
}
acc, err := accounting.NewAccounting(
paymentThreshold,
paymentTolerance,
paymentEarly,
logger,
stateStore,
pricing,
big.NewInt(refreshRate),
p2ps,
)
if err != nil {
return nil, fmt.Errorf("accounting: %w", err)
}
b.accountingCloser = acc
pseudosettleService := pseudosettle.New(p2ps, logger, stateStore, acc, big.NewInt(refreshRate), p2ps)
if err = p2ps.AddProtocol(pseudosettleService.Protocol()); err != nil {
return nil, fmt.Errorf("pseudosettle service: %w", err)
}
acc.SetRefreshFunc(pseudosettleService.Pay)
if o.SwapEnable {
var priceOracle priceoracle.Service
swapService, priceOracle, err = InitSwap(
p2ps,
logger,
stateStore,
networkID,
overlayEthAddress,
chequebookService,
chequeStore,
cashoutService,
acc,
o.PriceOracleAddress,
chainID,
transactionService,
)
if err != nil {
return nil, err
}
b.priceOracleCloser = priceOracle
acc.SetPayFunc(swapService.Pay)
}
pricing.SetPaymentThresholdObserver(acc)
retrieve := retrieval.New(swarmAddress, storer, p2ps, kad, logger, acc, pricer, tracer)
tagService := tags.NewTags(stateStore, logger)
b.tagsCloser = tagService
pssService := pss.New(pssPrivateKey, logger)
b.pssCloser = pssService
var ns storage.Storer
if o.GlobalPinningEnabled {
// create recovery callback for content repair
recoverFunc := recovery.NewCallback(pssService)
ns = netstore.New(storer, validStamp, recoverFunc, retrieve, logger)
} else {
ns = netstore.New(storer, validStamp, nil, retrieve, logger)
}
traversalService := traversal.New(ns)
pinningService := pinning.NewService(storer, stateStore, traversalService)
pushSyncProtocol := pushsync.New(swarmAddress, p2ps, storer, kad, tagService, o.FullNodeMode, pssService.TryUnwrap, validStamp, logger, acc, pricer, signer, tracer, warmupTime)
// set the pushSyncer in the PSS
pssService.SetPushSyncer(pushSyncProtocol)
if o.GlobalPinningEnabled {
// register function for chunk repair upon receiving a trojan message
chunkRepairHandler := recovery.NewRepairHandler(ns, logger, pushSyncProtocol)
b.recoveryHandleCleanup = pssService.Register(recovery.Topic, chunkRepairHandler)
}
pusherService := pusher.New(networkID, storer, kad, pushSyncProtocol, tagService, logger, tracer, warmupTime)
b.pusherCloser = pusherService
pullStorage := pullstorage.New(storer)
pullSyncProtocol := pullsync.New(p2ps, pullStorage, pssService.TryUnwrap, validStamp, logger)
b.pullSyncCloser = pullSyncProtocol
var pullerService *puller.Puller
if o.FullNodeMode {
		// plain assignment: `:=` would shadow the outer pullerService and leave
		// it nil, skipping the metrics registration below
		pullerService = puller.New(stateStore, kad, pullSyncProtocol, logger, puller.Options{}, warmupTime)
b.pullerCloser = pullerService
}
retrieveProtocolSpec := retrieve.Protocol()
pushSyncProtocolSpec := pushSyncProtocol.Protocol()
pullSyncProtocolSpec := pullSyncProtocol.Protocol()
if o.FullNodeMode {
logger.Info("starting in full mode")
} else {
logger.Info("starting in light mode")
p2p.WithBlocklistStreams(p2p.DefaultBlocklistTime, retrieveProtocolSpec)
p2p.WithBlocklistStreams(p2p.DefaultBlocklistTime, pushSyncProtocolSpec)
p2p.WithBlocklistStreams(p2p.DefaultBlocklistTime, pullSyncProtocolSpec)
}
if err = p2ps.AddProtocol(retrieveProtocolSpec); err != nil {
return nil, fmt.Errorf("retrieval service: %w", err)
}
if err = p2ps.AddProtocol(pushSyncProtocolSpec); err != nil {
return nil, fmt.Errorf("pushsync service: %w", err)
}
if err = p2ps.AddProtocol(pullSyncProtocolSpec); err != nil {
return nil, fmt.Errorf("pullsync protocol: %w", err)
}
multiResolver := multiresolver.NewMultiResolver(
multiresolver.WithConnectionConfigs(o.ResolverConnectionCfgs),
multiresolver.WithLogger(o.Logger),
)
b.resolverCloser = multiResolver
var apiService api.Service
if o.APIAddr != "" {
// API server
feedFactory := factory.New(ns)
steward := steward.New(storer, traversalService, pushSyncProtocol)
apiService = api.New(tagService, ns, multiResolver, pssService, traversalService, pinningService, feedFactory, post, postageContractService, steward, signer, logger, tracer, api.Options{
CORSAllowedOrigins: o.CORSAllowedOrigins,
GatewayMode: o.GatewayMode,
WsPingPeriod: 60 * time.Second,
})
apiListener, err := net.Listen("tcp", o.APIAddr)
if err != nil {
return nil, fmt.Errorf("api listener: %w", err)
}
apiServer := &http.Server{
IdleTimeout: 30 * time.Second,
ReadHeaderTimeout: 3 * time.Second,
Handler: apiService,
ErrorLog: log.New(b.errorLogWriter, "", 0),
}
go func() {
logger.Infof("api address: %s", apiListener.Addr())
if err := apiServer.Serve(apiListener); err != nil && err != http.ErrServerClosed {
logger.Debugf("api server: %v", err)
logger.Error("unable to serve api")
}
}()
b.apiServer = apiServer
b.apiCloser = apiService
}
if debugAPIService != nil {
// register metrics from components
debugAPIService.MustRegisterMetrics(p2ps.Metrics()...)
debugAPIService.MustRegisterMetrics(pingPong.Metrics()...)
debugAPIService.MustRegisterMetrics(acc.Metrics()...)
debugAPIService.MustRegisterMetrics(storer.Metrics()...)
debugAPIService.MustRegisterMetrics(kad.Metrics()...)
if pullerService != nil {
debugAPIService.MustRegisterMetrics(pullerService.Metrics()...)
}
debugAPIService.MustRegisterMetrics(pushSyncProtocol.Metrics()...)
debugAPIService.MustRegisterMetrics(pusherService.Metrics()...)
debugAPIService.MustRegisterMetrics(pullSyncProtocol.Metrics()...)
debugAPIService.MustRegisterMetrics(pullStorage.Metrics()...)
debugAPIService.MustRegisterMetrics(retrieve.Metrics()...)
debugAPIService.MustRegisterMetrics(lightNodes.Metrics()...)
if bs, ok := batchStore.(metrics.Collector); ok {
debugAPIService.MustRegisterMetrics(bs.Metrics()...)
}
if eventListener != nil {
if ls, ok := eventListener.(metrics.Collector); ok {
debugAPIService.MustRegisterMetrics(ls.Metrics()...)
}
}
if pssServiceMetrics, ok := pssService.(metrics.Collector); ok {
debugAPIService.MustRegisterMetrics(pssServiceMetrics.Metrics()...)
}
if apiService != nil {
debugAPIService.MustRegisterMetrics(apiService.Metrics()...)
}
if l, ok := logger.(metrics.Collector); ok {
debugAPIService.MustRegisterMetrics(l.Metrics()...)
}
debugAPIService.MustRegisterMetrics(pseudosettleService.Metrics()...)
if swapService != nil {
debugAPIService.MustRegisterMetrics(swapService.Metrics()...)
}
// inject dependencies and configure full debug api http path routes
debugAPIService.Configure(p2ps, pingPong, kad, lightNodes, storer, tagService, acc, pseudosettleService, o.SwapEnable, swapService, chequebookService, batchStore, transactionService)
}
if err := kad.Start(p2pCtx); err != nil {
return nil, err
}
p2ps.Ready()
return b, nil
}
func (b *Bee) Shutdown(ctx context.Context) error {
var mErr error
	// if a shutdown is already in progress, return here
b.shutdownMutex.Lock()
if b.shutdownInProgress {
b.shutdownMutex.Unlock()
return ErrShutdownInProgress
}
b.shutdownInProgress = true
b.shutdownMutex.Unlock()
// halt kademlia while shutting down other
// components.
b.topologyHalter.Halt()
// halt p2p layer from accepting new connections
// while shutting down other components
b.p2pHalter.Halt()
	// tryClose is a convenience closure that reduces
	// repetitive io.Closer shutdown handling.
tryClose := func(c io.Closer, errMsg string) {
if c == nil {
return
}
if err := c.Close(); err != nil {
mErr = multierror.Append(mErr, fmt.Errorf("%s: %w", errMsg, err))
}
}
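// Without the helper, every call site below would repeat the nil check and
// error wrapping by hand, e.g. (sketch):
//
//	if b.apiCloser != nil {
//		if err := b.apiCloser.Close(); err != nil {
//			mErr = multierror.Append(mErr, fmt.Errorf("api: %w", err))
//		}
//	}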
tryClose(b.apiCloser, "api")
var eg errgroup.Group
if b.apiServer != nil {
eg.Go(func() error {
if err := b.apiServer.Shutdown(ctx); err != nil {
return fmt.Errorf("api server: %w", err)
}
return nil
})
}
if b.debugAPIServer != nil {
eg.Go(func() error {
if err := b.debugAPIServer.Shutdown(ctx); err != nil {
return fmt.Errorf("debug api server: %w", err)
}
return nil
})
}
if err := eg.Wait(); err != nil {
mErr = multierror.Append(mErr, err)
}
if b.recoveryHandleCleanup != nil {
b.recoveryHandleCleanup()
}
var wg sync.WaitGroup
wg.Add(5)
go func() {
defer wg.Done()
tryClose(b.pssCloser, "pss")
}()
go func() {
defer wg.Done()
tryClose(b.pusherCloser, "pusher")
}()
go func() {
defer wg.Done()
tryClose(b.pullerCloser, "puller")
}()
go func() {
defer wg.Done()
tryClose(b.accountingCloser, "accounting")
}()
b.p2pCancel()
go func() {
defer wg.Done()
tryClose(b.pullSyncCloser, "pull sync")
}()
wg.Wait()
tryClose(b.p2pService, "p2p server")
tryClose(b.priceOracleCloser, "price oracle service")
wg.Add(3)
go func() {
defer wg.Done()
tryClose(b.transactionMonitorCloser, "transaction monitor")
tryClose(b.transactionCloser, "transaction")
}()
go func() {
defer wg.Done()
tryClose(b.listenerCloser, "listener")
}()
go func() {
defer wg.Done()
tryClose(b.postageServiceCloser, "postage service")
}()
wg.Wait()
if c := b.ethClientCloser; c != nil {
c()
}
tryClose(b.tracerCloser, "tracer")
tryClose(b.tagsCloser, "tag persistence")
tryClose(b.topologyCloser, "topology driver")
tryClose(b.stateStoreCloser, "statestore")
tryClose(b.localstoreCloser, "localstore")
tryClose(b.errorLogWriter, "error log writer")
tryClose(b.resolverCloser, "resolver service")
return mErr
}
func getTxHash(stateStore storage.StateStorer, logger logging.Logger, o Options) ([]byte, error) {
if o.Standalone {
return nil, nil // in standalone mode tx hash is not used
}
if o.Transaction != "" {
txHashTrimmed := strings.TrimPrefix(o.Transaction, "0x")
if len(txHashTrimmed) != 64 {
return nil, errors.New("invalid length")
}
txHash, err := hex.DecodeString(txHashTrimmed)
if err != nil {
return nil, err
}
logger.Infof("using the provided transaction hash %x", txHash)
return txHash, nil
}
var txHash common.Hash
key := chequebook.ChequebookDeploymentKey
if err := stateStore.Get(key, &txHash); err != nil {
if errors.Is(err, storage.ErrNotFound) {
return nil, errors.New("chequebook deployment transaction hash not found. Please specify the transaction hash manually.")
}
return nil, err
}
logger.Infof("using the chequebook transaction hash %x", txHash)
return txHash.Bytes(), nil
}
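// Example (sketch, call-site names assumed): a user-supplied hash is accepted
// with or without the 0x prefix, as long as the hex payload is exactly 64
// characters:
//
//	getTxHash(store, logger, Options{Transaction: "0x" + strings.Repeat("ab", 32)})
//	getTxHash(store, logger, Options{Transaction: strings.Repeat("ab", 32)})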
// pidKiller is used to issue a forced shutdown of the node from sub-modules. The issue with using the
// node's Shutdown method is that it only shuts down the node and does not exit the start process,
// which is waiting on os.Signals. This is not desirable, but currently the bee node cannot handle
// rate-limited blockchain API calls properly. We shut the node down in this case to allow the
// user to rectify the API issues (by adjusting limits or using a different endpoint). Unfortunately, there is
// no platform-agnostic way to trigger os.Signals in Go, which is why we use the process.Kill
// approach; it works on Windows as well.
type pidKiller struct {
node *Bee
}
var ErrShutdownInProgress error = errors.New("shutdown in progress")
func (p *pidKiller) Shutdown(ctx context.Context) error {
err := p.node.Shutdown(ctx)
if err != nil {
return err
}
ps, err := os.FindProcess(syscall.Getpid())
if err != nil {
return err
}
return ps.Kill()
}
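// Note (sketch, not part of the original source): on Unix one could instead
// raise an interrupt so the start process unwinds through its usual signal
// handler, e.g.
//
//	_ = syscall.Kill(syscall.Getpid(), syscall.SIGINT)
//
// but syscall.Kill does not exist on Windows, hence the process.Kill approach above.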
| {
return nil, fmt.Errorf("invalid payment tolerance: %s", paymentTolerance)
} | conditional_block |
node.go | // Copyright 2020 The Swarm Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
// Package node defines the concept of a Bee node
// by bootstrapping and injecting all necessary
// dependencies.
package node
import (
"context"
"crypto/ecdsa"
"encoding/hex"
"errors"
"fmt"
"io"
"log"
"math/big"
"net"
"net/http"
"os"
"path/filepath"
"strings"
"sync"
"syscall"
"time"
"github.com/ethereum/go-ethereum/common"
"github.com/ethereum/go-ethereum/core/types"
"github.com/ethereum/go-ethereum/ethclient"
"github.com/ethersphere/bee/pkg/accounting"
"github.com/ethersphere/bee/pkg/addressbook"
"github.com/ethersphere/bee/pkg/api"
"github.com/ethersphere/bee/pkg/crypto"
"github.com/ethersphere/bee/pkg/debugapi"
"github.com/ethersphere/bee/pkg/feeds/factory"
"github.com/ethersphere/bee/pkg/hive"
"github.com/ethersphere/bee/pkg/localstore"
"github.com/ethersphere/bee/pkg/logging"
"github.com/ethersphere/bee/pkg/metrics"
"github.com/ethersphere/bee/pkg/netstore"
"github.com/ethersphere/bee/pkg/p2p"
"github.com/ethersphere/bee/pkg/p2p/libp2p"
"github.com/ethersphere/bee/pkg/pingpong"
"github.com/ethersphere/bee/pkg/pinning"
"github.com/ethersphere/bee/pkg/postage"
"github.com/ethersphere/bee/pkg/postage/batchservice"
"github.com/ethersphere/bee/pkg/postage/batchstore"
"github.com/ethersphere/bee/pkg/postage/listener"
"github.com/ethersphere/bee/pkg/postage/postagecontract"
"github.com/ethersphere/bee/pkg/pricer"
"github.com/ethersphere/bee/pkg/pricing"
"github.com/ethersphere/bee/pkg/pss"
"github.com/ethersphere/bee/pkg/puller"
"github.com/ethersphere/bee/pkg/pullsync"
"github.com/ethersphere/bee/pkg/pullsync/pullstorage"
"github.com/ethersphere/bee/pkg/pusher"
"github.com/ethersphere/bee/pkg/pushsync"
"github.com/ethersphere/bee/pkg/recovery"
"github.com/ethersphere/bee/pkg/resolver/multiresolver"
"github.com/ethersphere/bee/pkg/retrieval"
"github.com/ethersphere/bee/pkg/settlement/pseudosettle"
"github.com/ethersphere/bee/pkg/settlement/swap"
"github.com/ethersphere/bee/pkg/settlement/swap/chequebook"
"github.com/ethersphere/bee/pkg/settlement/swap/priceoracle"
"github.com/ethersphere/bee/pkg/shed"
"github.com/ethersphere/bee/pkg/steward"
"github.com/ethersphere/bee/pkg/storage"
"github.com/ethersphere/bee/pkg/swarm"
"github.com/ethersphere/bee/pkg/tags" | "github.com/ethersphere/bee/pkg/traversal"
"github.com/hashicorp/go-multierror"
ma "github.com/multiformats/go-multiaddr"
"github.com/sirupsen/logrus"
"golang.org/x/sync/errgroup"
)
type Bee struct {
p2pService io.Closer
p2pHalter p2p.Halter
p2pCancel context.CancelFunc
apiCloser io.Closer
apiServer *http.Server
debugAPIServer *http.Server
resolverCloser io.Closer
errorLogWriter *io.PipeWriter
tracerCloser io.Closer
tagsCloser io.Closer
stateStoreCloser io.Closer
localstoreCloser io.Closer
topologyCloser io.Closer
topologyHalter topology.Halter
pusherCloser io.Closer
pullerCloser io.Closer
accountingCloser io.Closer
pullSyncCloser io.Closer
pssCloser io.Closer
ethClientCloser func()
transactionMonitorCloser io.Closer
transactionCloser io.Closer
recoveryHandleCleanup func()
listenerCloser io.Closer
postageServiceCloser io.Closer
priceOracleCloser io.Closer
shutdownInProgress bool
shutdownMutex sync.Mutex
}
type Options struct {
DataDir string
CacheCapacity uint64
DBOpenFilesLimit uint64
DBWriteBufferSize uint64
DBBlockCacheCapacity uint64
DBDisableSeeksCompaction bool
APIAddr string
DebugAPIAddr string
Addr string
NATAddr string
EnableWS bool
EnableQUIC bool
WelcomeMessage string
Bootnodes []string
CORSAllowedOrigins []string
Logger logging.Logger
Standalone bool
TracingEnabled bool
TracingEndpoint string
TracingServiceName string
GlobalPinningEnabled bool
PaymentThreshold string
PaymentTolerance string
PaymentEarly string
ResolverConnectionCfgs []multiresolver.ConnectionConfig
GatewayMode bool
BootnodeMode bool
SwapEndpoint string
SwapFactoryAddress string
SwapLegacyFactoryAddresses []string
SwapInitialDeposit string
SwapEnable bool
FullNodeMode bool
Transaction string
PostageContractAddress string
PriceOracleAddress string
BlockTime uint64
DeployGasPrice string
WarmupTime time.Duration
}
const (
refreshRate = int64(6000000)
basePrice = 10000
)
func NewBee(addr string, swarmAddress swarm.Address, publicKey ecdsa.PublicKey, signer crypto.Signer, networkID uint64, logger logging.Logger, libp2pPrivateKey, pssPrivateKey *ecdsa.PrivateKey, o Options) (b *Bee, err error) {
tracer, tracerCloser, err := tracing.NewTracer(&tracing.Options{
Enabled: o.TracingEnabled,
Endpoint: o.TracingEndpoint,
ServiceName: o.TracingServiceName,
})
if err != nil {
return nil, fmt.Errorf("tracer: %w", err)
}
p2pCtx, p2pCancel := context.WithCancel(context.Background())
defer func() {
// if there's been an error on this function
// we'd like to cancel the p2p context so that
// incoming connections will not be possible
if err != nil {
p2pCancel()
}
}()
// light nodes have zero warmup time for pull/pushsync protocols
warmupTime := o.WarmupTime
if !o.FullNodeMode {
warmupTime = 0
}
b = &Bee{
p2pCancel: p2pCancel,
errorLogWriter: logger.WriterLevel(logrus.ErrorLevel),
tracerCloser: tracerCloser,
}
var debugAPIService *debugapi.Service
if o.DebugAPIAddr != "" {
overlayEthAddress, err := signer.EthereumAddress()
if err != nil {
return nil, fmt.Errorf("eth address: %w", err)
}
// set up basic debug api endpoints for debugging and /health endpoint
debugAPIService = debugapi.New(swarmAddress, publicKey, pssPrivateKey.PublicKey, overlayEthAddress, logger, tracer, o.CORSAllowedOrigins)
debugAPIListener, err := net.Listen("tcp", o.DebugAPIAddr)
if err != nil {
return nil, fmt.Errorf("debug api listener: %w", err)
}
debugAPIServer := &http.Server{
IdleTimeout: 30 * time.Second,
ReadHeaderTimeout: 3 * time.Second,
Handler: debugAPIService,
ErrorLog: log.New(b.errorLogWriter, "", 0),
}
go func() {
logger.Infof("debug api address: %s", debugAPIListener.Addr())
if err := debugAPIServer.Serve(debugAPIListener); err != nil && err != http.ErrServerClosed {
logger.Debugf("debug api server: %v", err)
logger.Error("unable to serve debug api")
}
}()
b.debugAPIServer = debugAPIServer
}
stateStore, err := InitStateStore(logger, o.DataDir)
if err != nil {
return nil, err
}
b.stateStoreCloser = stateStore
err = CheckOverlayWithStore(swarmAddress, stateStore)
if err != nil {
return nil, err
}
addressbook := addressbook.New(stateStore)
var (
swapBackend *ethclient.Client
overlayEthAddress common.Address
chainID int64
transactionService transaction.Service
transactionMonitor transaction.Monitor
chequebookFactory chequebook.Factory
chequebookService chequebook.Service
chequeStore chequebook.ChequeStore
cashoutService chequebook.CashoutService
)
if !o.Standalone {
swapBackend, overlayEthAddress, chainID, transactionMonitor, transactionService, err = InitChain(
p2pCtx,
logger,
stateStore,
o.SwapEndpoint,
signer,
o.BlockTime,
)
if err != nil {
return nil, fmt.Errorf("init chain: %w", err)
}
b.ethClientCloser = swapBackend.Close
b.transactionCloser = tracerCloser
b.transactionMonitorCloser = transactionMonitor
}
if o.SwapEnable {
chequebookFactory, err = InitChequebookFactory(
logger,
swapBackend,
chainID,
transactionService,
o.SwapFactoryAddress,
o.SwapLegacyFactoryAddresses,
)
if err != nil {
return nil, err
}
if err = chequebookFactory.VerifyBytecode(p2pCtx); err != nil {
return nil, fmt.Errorf("factory fail: %w", err)
}
chequebookService, err = InitChequebookService(
p2pCtx,
logger,
stateStore,
signer,
chainID,
swapBackend,
overlayEthAddress,
transactionService,
chequebookFactory,
o.SwapInitialDeposit,
o.DeployGasPrice,
)
if err != nil {
return nil, err
}
chequeStore, cashoutService = initChequeStoreCashout(
stateStore,
swapBackend,
chequebookFactory,
chainID,
overlayEthAddress,
transactionService,
)
}
lightNodes := lightnode.NewContainer(swarmAddress)
txHash, err := getTxHash(stateStore, logger, o)
if err != nil {
return nil, fmt.Errorf("invalid transaction hash: %w", err)
}
senderMatcher := transaction.NewMatcher(swapBackend, types.NewEIP155Signer(big.NewInt(chainID)))
p2ps, err := libp2p.New(p2pCtx, signer, networkID, swarmAddress, addr, addressbook, stateStore, lightNodes, senderMatcher, logger, tracer, libp2p.Options{
PrivateKey: libp2pPrivateKey,
NATAddr: o.NATAddr,
EnableWS: o.EnableWS,
EnableQUIC: o.EnableQUIC,
Standalone: o.Standalone,
WelcomeMessage: o.WelcomeMessage,
FullNode: o.FullNodeMode,
Transaction: txHash,
})
if err != nil {
return nil, fmt.Errorf("p2p service: %w", err)
}
b.p2pService = p2ps
b.p2pHalter = p2ps
// localstore depends on batchstore
var path string
if o.DataDir != "" {
logger.Infof("using datadir in: '%s'", o.DataDir)
path = filepath.Join(o.DataDir, "localstore")
}
lo := &localstore.Options{
Capacity: o.CacheCapacity,
OpenFilesLimit: o.DBOpenFilesLimit,
BlockCacheCapacity: o.DBBlockCacheCapacity,
WriteBufferSize: o.DBWriteBufferSize,
DisableSeeksCompaction: o.DBDisableSeeksCompaction,
}
storer, err := localstore.New(path, swarmAddress.Bytes(), stateStore, lo, logger)
if err != nil {
return nil, fmt.Errorf("localstore: %w", err)
}
b.localstoreCloser = storer
batchStore, err := batchstore.New(stateStore, storer.UnreserveBatch)
if err != nil {
return nil, fmt.Errorf("batchstore: %w", err)
}
validStamp := postage.ValidStamp(batchStore)
post, err := postage.NewService(stateStore, batchStore, chainID)
if err != nil {
return nil, fmt.Errorf("postage service load: %w", err)
}
b.postageServiceCloser = post
var (
postageContractService postagecontract.Interface
batchSvc postage.EventUpdater
eventListener postage.Listener
)
var postageSyncStart uint64 = 0
if !o.Standalone {
postageContractAddress, startBlock, found := listener.DiscoverAddresses(chainID)
if o.PostageContractAddress != "" {
if !common.IsHexAddress(o.PostageContractAddress) {
return nil, errors.New("malformed postage stamp address")
}
postageContractAddress = common.HexToAddress(o.PostageContractAddress)
} else if !found {
return nil, errors.New("no known postage stamp addresses for this network")
}
if found {
postageSyncStart = startBlock
}
eventListener = listener.New(logger, swapBackend, postageContractAddress, o.BlockTime, &pidKiller{node: b})
b.listenerCloser = eventListener
batchSvc = batchservice.New(stateStore, batchStore, logger, eventListener, overlayEthAddress.Bytes(), post)
erc20Address, err := postagecontract.LookupERC20Address(p2pCtx, transactionService, postageContractAddress)
if err != nil {
return nil, err
}
postageContractService = postagecontract.New(
overlayEthAddress,
postageContractAddress,
erc20Address,
transactionService,
post,
)
}
if !o.Standalone {
if natManager := p2ps.NATManager(); natManager != nil {
// wait for nat manager to init
logger.Debug("initializing NAT manager")
select {
case <-natManager.Ready():
// this is a magic sleep to give the NAT time to sync the mappings;
// it is a hack and should be improved
time.Sleep(3 * time.Second)
logger.Debug("NAT manager initialized")
case <-time.After(10 * time.Second):
logger.Warning("NAT manager init timeout")
}
}
}
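// Sketch (assumption, not a real libp2p API): a bounded poll on a readiness
// check would make this warm-up deterministic instead of a fixed 3s sleep:
//
//	for deadline := time.Now().Add(10 * time.Second); time.Now().Before(deadline); {
//		if mappingsSynced() { // hypothetical check
//			break
//		}
//		time.Sleep(250 * time.Millisecond)
//	}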
// Construct protocols.
pingPong := pingpong.New(p2ps, logger, tracer)
if err = p2ps.AddProtocol(pingPong.Protocol()); err != nil {
return nil, fmt.Errorf("pingpong service: %w", err)
}
hive := hive.New(p2ps, addressbook, networkID, logger)
if err = p2ps.AddProtocol(hive.Protocol()); err != nil {
return nil, fmt.Errorf("hive service: %w", err)
}
var bootnodes []ma.Multiaddr
if o.Standalone {
logger.Info("Starting node in standalone mode, no p2p connections will be made or accepted")
} else {
for _, a := range o.Bootnodes {
addr, err := ma.NewMultiaddr(a)
if err != nil {
logger.Debugf("multiaddress fail %s: %v", a, err)
logger.Warningf("invalid bootnode address %s", a)
continue
}
bootnodes = append(bootnodes, addr)
}
}
var swapService *swap.Service
metricsDB, err := shed.NewDBWrap(stateStore.DB())
if err != nil {
return nil, fmt.Errorf("unable to create metrics storage for kademlia: %w", err)
}
kad := kademlia.New(swarmAddress, addressbook, hive, p2ps, metricsDB, logger, kademlia.Options{Bootnodes: bootnodes, StandaloneMode: o.Standalone, BootnodeMode: o.BootnodeMode})
b.topologyCloser = kad
b.topologyHalter = kad
hive.SetAddPeersHandler(kad.AddPeers)
p2ps.SetPickyNotifier(kad)
batchStore.SetRadiusSetter(kad)
if batchSvc != nil {
syncedChan, err := batchSvc.Start(postageSyncStart)
if err != nil {
return nil, fmt.Errorf("unable to start batch service: %w", err)
}
// wait for the postage contract listener to sync
logger.Info("waiting to sync postage contract data, this may take a while... more info available in Debug loglevel")
// arguably this is not a very nice solution since we don't support
// interrupts at this stage of the application lifecycle. some changes
// would be needed on the cmd level to support context cancellation at
// this stage
<-syncedChan
}
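// Sketch (assumption): supporting interrupts here would mean selecting on the
// p2p context instead of blocking unconditionally:
//
//	select {
//	case <-syncedChan:
//	case <-p2pCtx.Done():
//		return nil, p2pCtx.Err()
//	}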
minThreshold := big.NewInt(2 * refreshRate)
paymentThreshold, ok := new(big.Int).SetString(o.PaymentThreshold, 10)
if !ok {
return nil, fmt.Errorf("invalid payment threshold: %s", paymentThreshold)
}
pricer := pricer.NewFixedPricer(swarmAddress, basePrice)
if paymentThreshold.Cmp(minThreshold) < 0 {
return nil, fmt.Errorf("payment threshold below minimum generally accepted value, need at least %s", minThreshold)
}
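// With refreshRate = 6000000, minThreshold is 2*6000000 = 12000000, so a
// configured PaymentThreshold below that value is rejected here.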
pricing := pricing.New(p2ps, logger, paymentThreshold, minThreshold)
if err = p2ps.AddProtocol(pricing.Protocol()); err != nil {
return nil, fmt.Errorf("pricing service: %w", err)
}
addrs, err := p2ps.Addresses()
if err != nil {
return nil, fmt.Errorf("get server addresses: %w", err)
}
for _, addr := range addrs {
logger.Debugf("p2p address: %s", addr)
}
paymentTolerance, ok := new(big.Int).SetString(o.PaymentTolerance, 10)
if !ok {
return nil, fmt.Errorf("invalid payment tolerance: %s", paymentTolerance)
}
paymentEarly, ok := new(big.Int).SetString(o.PaymentEarly, 10)
if !ok {
return nil, fmt.Errorf("invalid payment early: %s", paymentEarly)
}
acc, err := accounting.NewAccounting(
paymentThreshold,
paymentTolerance,
paymentEarly,
logger,
stateStore,
pricing,
big.NewInt(refreshRate),
p2ps,
)
if err != nil {
return nil, fmt.Errorf("accounting: %w", err)
}
b.accountingCloser = acc
pseudosettleService := pseudosettle.New(p2ps, logger, stateStore, acc, big.NewInt(refreshRate), p2ps)
if err = p2ps.AddProtocol(pseudosettleService.Protocol()); err != nil {
return nil, fmt.Errorf("pseudosettle service: %w", err)
}
acc.SetRefreshFunc(pseudosettleService.Pay)
if o.SwapEnable {
var priceOracle priceoracle.Service
swapService, priceOracle, err = InitSwap(
p2ps,
logger,
stateStore,
networkID,
overlayEthAddress,
chequebookService,
chequeStore,
cashoutService,
acc,
o.PriceOracleAddress,
chainID,
transactionService,
)
if err != nil {
return nil, err
}
b.priceOracleCloser = priceOracle
acc.SetPayFunc(swapService.Pay)
}
pricing.SetPaymentThresholdObserver(acc)
retrieve := retrieval.New(swarmAddress, storer, p2ps, kad, logger, acc, pricer, tracer)
tagService := tags.NewTags(stateStore, logger)
b.tagsCloser = tagService
pssService := pss.New(pssPrivateKey, logger)
b.pssCloser = pssService
var ns storage.Storer
if o.GlobalPinningEnabled {
// create recovery callback for content repair
recoverFunc := recovery.NewCallback(pssService)
ns = netstore.New(storer, validStamp, recoverFunc, retrieve, logger)
} else {
ns = netstore.New(storer, validStamp, nil, retrieve, logger)
}
traversalService := traversal.New(ns)
pinningService := pinning.NewService(storer, stateStore, traversalService)
pushSyncProtocol := pushsync.New(swarmAddress, p2ps, storer, kad, tagService, o.FullNodeMode, pssService.TryUnwrap, validStamp, logger, acc, pricer, signer, tracer, warmupTime)
// set the pushSyncer in the PSS
pssService.SetPushSyncer(pushSyncProtocol)
if o.GlobalPinningEnabled {
// register function for chunk repair upon receiving a trojan message
chunkRepairHandler := recovery.NewRepairHandler(ns, logger, pushSyncProtocol)
b.recoveryHandleCleanup = pssService.Register(recovery.Topic, chunkRepairHandler)
}
pusherService := pusher.New(networkID, storer, kad, pushSyncProtocol, tagService, logger, tracer, warmupTime)
b.pusherCloser = pusherService
pullStorage := pullstorage.New(storer)
pullSyncProtocol := pullsync.New(p2ps, pullStorage, pssService.TryUnwrap, validStamp, logger)
b.pullSyncCloser = pullSyncProtocol
var pullerService *puller.Puller
if o.FullNodeMode {
pullerService = puller.New(stateStore, kad, pullSyncProtocol, logger, puller.Options{}, warmupTime) // plain assignment: `:=` would shadow the outer pullerService and skip its metrics registration below
b.pullerCloser = pullerService
}
retrieveProtocolSpec := retrieve.Protocol()
pushSyncProtocolSpec := pushSyncProtocol.Protocol()
pullSyncProtocolSpec := pullSyncProtocol.Protocol()
if o.FullNodeMode {
logger.Info("starting in full mode")
} else {
logger.Info("starting in light mode")
p2p.WithBlocklistStreams(p2p.DefaultBlocklistTime, retrieveProtocolSpec)
p2p.WithBlocklistStreams(p2p.DefaultBlocklistTime, pushSyncProtocolSpec)
p2p.WithBlocklistStreams(p2p.DefaultBlocklistTime, pullSyncProtocolSpec)
}
if err = p2ps.AddProtocol(retrieveProtocolSpec); err != nil {
return nil, fmt.Errorf("retrieval service: %w", err)
}
if err = p2ps.AddProtocol(pushSyncProtocolSpec); err != nil {
return nil, fmt.Errorf("pushsync service: %w", err)
}
if err = p2ps.AddProtocol(pullSyncProtocolSpec); err != nil {
return nil, fmt.Errorf("pullsync protocol: %w", err)
}
multiResolver := multiresolver.NewMultiResolver(
multiresolver.WithConnectionConfigs(o.ResolverConnectionCfgs),
multiresolver.WithLogger(o.Logger),
)
b.resolverCloser = multiResolver
var apiService api.Service
if o.APIAddr != "" {
// API server
feedFactory := factory.New(ns)
steward := steward.New(storer, traversalService, pushSyncProtocol)
apiService = api.New(tagService, ns, multiResolver, pssService, traversalService, pinningService, feedFactory, post, postageContractService, steward, signer, logger, tracer, api.Options{
CORSAllowedOrigins: o.CORSAllowedOrigins,
GatewayMode: o.GatewayMode,
WsPingPeriod: 60 * time.Second,
})
apiListener, err := net.Listen("tcp", o.APIAddr)
if err != nil {
return nil, fmt.Errorf("api listener: %w", err)
}
apiServer := &http.Server{
IdleTimeout: 30 * time.Second,
ReadHeaderTimeout: 3 * time.Second,
Handler: apiService,
ErrorLog: log.New(b.errorLogWriter, "", 0),
}
go func() {
logger.Infof("api address: %s", apiListener.Addr())
if err := apiServer.Serve(apiListener); err != nil && err != http.ErrServerClosed {
logger.Debugf("api server: %v", err)
logger.Error("unable to serve api")
}
}()
b.apiServer = apiServer
b.apiCloser = apiService
}
if debugAPIService != nil {
// register metrics from components
debugAPIService.MustRegisterMetrics(p2ps.Metrics()...)
debugAPIService.MustRegisterMetrics(pingPong.Metrics()...)
debugAPIService.MustRegisterMetrics(acc.Metrics()...)
debugAPIService.MustRegisterMetrics(storer.Metrics()...)
debugAPIService.MustRegisterMetrics(kad.Metrics()...)
if pullerService != nil {
debugAPIService.MustRegisterMetrics(pullerService.Metrics()...)
}
debugAPIService.MustRegisterMetrics(pushSyncProtocol.Metrics()...)
debugAPIService.MustRegisterMetrics(pusherService.Metrics()...)
debugAPIService.MustRegisterMetrics(pullSyncProtocol.Metrics()...)
debugAPIService.MustRegisterMetrics(pullStorage.Metrics()...)
debugAPIService.MustRegisterMetrics(retrieve.Metrics()...)
debugAPIService.MustRegisterMetrics(lightNodes.Metrics()...)
if bs, ok := batchStore.(metrics.Collector); ok {
debugAPIService.MustRegisterMetrics(bs.Metrics()...)
}
if eventListener != nil {
if ls, ok := eventListener.(metrics.Collector); ok {
debugAPIService.MustRegisterMetrics(ls.Metrics()...)
}
}
if pssServiceMetrics, ok := pssService.(metrics.Collector); ok {
debugAPIService.MustRegisterMetrics(pssServiceMetrics.Metrics()...)
}
if apiService != nil {
debugAPIService.MustRegisterMetrics(apiService.Metrics()...)
}
if l, ok := logger.(metrics.Collector); ok {
debugAPIService.MustRegisterMetrics(l.Metrics()...)
}
debugAPIService.MustRegisterMetrics(pseudosettleService.Metrics()...)
if swapService != nil {
debugAPIService.MustRegisterMetrics(swapService.Metrics()...)
}
// inject dependencies and configure full debug api http path routes
debugAPIService.Configure(p2ps, pingPong, kad, lightNodes, storer, tagService, acc, pseudosettleService, o.SwapEnable, swapService, chequebookService, batchStore, transactionService)
}
if err := kad.Start(p2pCtx); err != nil {
return nil, err
}
p2ps.Ready()
return b, nil
}
func (b *Bee) Shutdown(ctx context.Context) error {
var mErr error
// if a shutdown is already in progress, return here
b.shutdownMutex.Lock()
if b.shutdownInProgress {
b.shutdownMutex.Unlock()
return ErrShutdownInProgress
}
b.shutdownInProgress = true
b.shutdownMutex.Unlock()
// halt kademlia while shutting down other
// components.
b.topologyHalter.Halt()
// stop the p2p layer from accepting new connections
// while shutting down other components
b.p2pHalter.Halt()
// tryClose is a convenience closure that reduces
// repetitive io.Closer error handling.
tryClose := func(c io.Closer, errMsg string) {
if c == nil {
return
}
if err := c.Close(); err != nil {
mErr = multierror.Append(mErr, fmt.Errorf("%s: %w", errMsg, err))
}
}
tryClose(b.apiCloser, "api")
var eg errgroup.Group
if b.apiServer != nil {
eg.Go(func() error {
if err := b.apiServer.Shutdown(ctx); err != nil {
return fmt.Errorf("api server: %w", err)
}
return nil
})
}
if b.debugAPIServer != nil {
eg.Go(func() error {
if err := b.debugAPIServer.Shutdown(ctx); err != nil {
return fmt.Errorf("debug api server: %w", err)
}
return nil
})
}
if err := eg.Wait(); err != nil {
mErr = multierror.Append(mErr, err)
}
if b.recoveryHandleCleanup != nil {
b.recoveryHandleCleanup()
}
var wg sync.WaitGroup
wg.Add(5)
go func() {
defer wg.Done()
tryClose(b.pssCloser, "pss")
}()
go func() {
defer wg.Done()
tryClose(b.pusherCloser, "pusher")
}()
go func() {
defer wg.Done()
tryClose(b.pullerCloser, "puller")
}()
go func() {
defer wg.Done()
tryClose(b.accountingCloser, "accounting")
}()
b.p2pCancel()
go func() {
defer wg.Done()
tryClose(b.pullSyncCloser, "pull sync")
}()
wg.Wait()
tryClose(b.p2pService, "p2p server")
tryClose(b.priceOracleCloser, "price oracle service")
wg.Add(3)
go func() {
defer wg.Done()
tryClose(b.transactionMonitorCloser, "transaction monitor")
tryClose(b.transactionCloser, "transaction")
}()
go func() {
defer wg.Done()
tryClose(b.listenerCloser, "listener")
}()
go func() {
defer wg.Done()
tryClose(b.postageServiceCloser, "postage service")
}()
wg.Wait()
if c := b.ethClientCloser; c != nil {
c()
}
tryClose(b.tracerCloser, "tracer")
tryClose(b.tagsCloser, "tag persistence")
tryClose(b.topologyCloser, "topology driver")
tryClose(b.stateStoreCloser, "statestore")
tryClose(b.localstoreCloser, "localstore")
tryClose(b.errorLogWriter, "error log writer")
tryClose(b.resolverCloser, "resolver service")
return mErr
}
func getTxHash(stateStore storage.StateStorer, logger logging.Logger, o Options) ([]byte, error) {
if o.Standalone {
return nil, nil // in standalone mode tx hash is not used
}
if o.Transaction != "" {
txHashTrimmed := strings.TrimPrefix(o.Transaction, "0x")
if len(txHashTrimmed) != 64 {
return nil, errors.New("invalid length")
}
txHash, err := hex.DecodeString(txHashTrimmed)
if err != nil {
return nil, err
}
logger.Infof("using the provided transaction hash %x", txHash)
return txHash, nil
}
var txHash common.Hash
key := chequebook.ChequebookDeploymentKey
if err := stateStore.Get(key, &txHash); err != nil {
if errors.Is(err, storage.ErrNotFound) {
return nil, errors.New("chequebook deployment transaction hash not found. Please specify the transaction hash manually.")
}
return nil, err
}
logger.Infof("using the chequebook transaction hash %x", txHash)
return txHash.Bytes(), nil
}
// pidKiller is used to issue a forced shutdown of the node from sub-modules. The issue with using the
// node's Shutdown method is that it only shuts down the node and does not exit the start process,
// which is waiting on os.Signals. This is not desirable, but currently the bee node cannot handle
// rate-limited blockchain API calls properly. We shut the node down in this case to allow the
// user to rectify the API issues (by adjusting limits or using a different endpoint). Unfortunately, there is
// no platform-agnostic way to trigger os.Signals in Go, which is why we use the process.Kill
// approach; it works on Windows as well.
type pidKiller struct {
node *Bee
}
var ErrShutdownInProgress error = errors.New("shutdown in progress")
func (p *pidKiller) Shutdown(ctx context.Context) error {
err := p.node.Shutdown(ctx)
if err != nil {
return err
}
ps, err := os.FindProcess(syscall.Getpid())
if err != nil {
return err
}
return ps.Kill()
} | "github.com/ethersphere/bee/pkg/topology"
"github.com/ethersphere/bee/pkg/topology/kademlia"
"github.com/ethersphere/bee/pkg/topology/lightnode"
"github.com/ethersphere/bee/pkg/tracing"
"github.com/ethersphere/bee/pkg/transaction" | random_line_split |
node.go | // Copyright 2020 The Swarm Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
// Package node defines the concept of a Bee node
// by bootstrapping and injecting all necessary
// dependencies.
package node
import (
"context"
"crypto/ecdsa"
"encoding/hex"
"errors"
"fmt"
"io"
"log"
"math/big"
"net"
"net/http"
"os"
"path/filepath"
"strings"
"sync"
"syscall"
"time"
"github.com/ethereum/go-ethereum/common"
"github.com/ethereum/go-ethereum/core/types"
"github.com/ethereum/go-ethereum/ethclient"
"github.com/ethersphere/bee/pkg/accounting"
"github.com/ethersphere/bee/pkg/addressbook"
"github.com/ethersphere/bee/pkg/api"
"github.com/ethersphere/bee/pkg/crypto"
"github.com/ethersphere/bee/pkg/debugapi"
"github.com/ethersphere/bee/pkg/feeds/factory"
"github.com/ethersphere/bee/pkg/hive"
"github.com/ethersphere/bee/pkg/localstore"
"github.com/ethersphere/bee/pkg/logging"
"github.com/ethersphere/bee/pkg/metrics"
"github.com/ethersphere/bee/pkg/netstore"
"github.com/ethersphere/bee/pkg/p2p"
"github.com/ethersphere/bee/pkg/p2p/libp2p"
"github.com/ethersphere/bee/pkg/pingpong"
"github.com/ethersphere/bee/pkg/pinning"
"github.com/ethersphere/bee/pkg/postage"
"github.com/ethersphere/bee/pkg/postage/batchservice"
"github.com/ethersphere/bee/pkg/postage/batchstore"
"github.com/ethersphere/bee/pkg/postage/listener"
"github.com/ethersphere/bee/pkg/postage/postagecontract"
"github.com/ethersphere/bee/pkg/pricer"
"github.com/ethersphere/bee/pkg/pricing"
"github.com/ethersphere/bee/pkg/pss"
"github.com/ethersphere/bee/pkg/puller"
"github.com/ethersphere/bee/pkg/pullsync"
"github.com/ethersphere/bee/pkg/pullsync/pullstorage"
"github.com/ethersphere/bee/pkg/pusher"
"github.com/ethersphere/bee/pkg/pushsync"
"github.com/ethersphere/bee/pkg/recovery"
"github.com/ethersphere/bee/pkg/resolver/multiresolver"
"github.com/ethersphere/bee/pkg/retrieval"
"github.com/ethersphere/bee/pkg/settlement/pseudosettle"
"github.com/ethersphere/bee/pkg/settlement/swap"
"github.com/ethersphere/bee/pkg/settlement/swap/chequebook"
"github.com/ethersphere/bee/pkg/settlement/swap/priceoracle"
"github.com/ethersphere/bee/pkg/shed"
"github.com/ethersphere/bee/pkg/steward"
"github.com/ethersphere/bee/pkg/storage"
"github.com/ethersphere/bee/pkg/swarm"
"github.com/ethersphere/bee/pkg/tags"
"github.com/ethersphere/bee/pkg/topology"
"github.com/ethersphere/bee/pkg/topology/kademlia"
"github.com/ethersphere/bee/pkg/topology/lightnode"
"github.com/ethersphere/bee/pkg/tracing"
"github.com/ethersphere/bee/pkg/transaction"
"github.com/ethersphere/bee/pkg/traversal"
"github.com/hashicorp/go-multierror"
ma "github.com/multiformats/go-multiaddr"
"github.com/sirupsen/logrus"
"golang.org/x/sync/errgroup"
)
type Bee struct {
p2pService io.Closer
p2pHalter p2p.Halter
p2pCancel context.CancelFunc
apiCloser io.Closer
apiServer *http.Server
debugAPIServer *http.Server
resolverCloser io.Closer
errorLogWriter *io.PipeWriter
tracerCloser io.Closer
tagsCloser io.Closer
stateStoreCloser io.Closer
localstoreCloser io.Closer
topologyCloser io.Closer
topologyHalter topology.Halter
pusherCloser io.Closer
pullerCloser io.Closer
accountingCloser io.Closer
pullSyncCloser io.Closer
pssCloser io.Closer
ethClientCloser func()
transactionMonitorCloser io.Closer
transactionCloser io.Closer
recoveryHandleCleanup func()
listenerCloser io.Closer
postageServiceCloser io.Closer
priceOracleCloser io.Closer
shutdownInProgress bool
shutdownMutex sync.Mutex
}
type Options struct {
DataDir string
CacheCapacity uint64
DBOpenFilesLimit uint64
DBWriteBufferSize uint64
DBBlockCacheCapacity uint64
DBDisableSeeksCompaction bool
APIAddr string
DebugAPIAddr string
Addr string
NATAddr string
EnableWS bool
EnableQUIC bool
WelcomeMessage string
Bootnodes []string
CORSAllowedOrigins []string
Logger logging.Logger
Standalone bool
TracingEnabled bool
TracingEndpoint string
TracingServiceName string
GlobalPinningEnabled bool
PaymentThreshold string
PaymentTolerance string
PaymentEarly string
ResolverConnectionCfgs []multiresolver.ConnectionConfig
GatewayMode bool
BootnodeMode bool
SwapEndpoint string
SwapFactoryAddress string
SwapLegacyFactoryAddresses []string
SwapInitialDeposit string
SwapEnable bool
FullNodeMode bool
Transaction string
PostageContractAddress string
PriceOracleAddress string
BlockTime uint64
DeployGasPrice string
WarmupTime time.Duration
}
const (
refreshRate = int64(6000000)
basePrice = 10000
)
func NewBee(addr string, swarmAddress swarm.Address, publicKey ecdsa.PublicKey, signer crypto.Signer, networkID uint64, logger logging.Logger, libp2pPrivateKey, pssPrivateKey *ecdsa.PrivateKey, o Options) (b *Bee, err error) {
tracer, tracerCloser, err := tracing.NewTracer(&tracing.Options{
Enabled: o.TracingEnabled,
Endpoint: o.TracingEndpoint,
ServiceName: o.TracingServiceName,
})
if err != nil {
return nil, fmt.Errorf("tracer: %w", err)
}
p2pCtx, p2pCancel := context.WithCancel(context.Background())
defer func() {
// if there's been an error on this function
// we'd like to cancel the p2p context so that
// incoming connections will not be possible
if err != nil {
p2pCancel()
}
}()
// light nodes have zero warmup time for pull/pushsync protocols
warmupTime := o.WarmupTime
if !o.FullNodeMode {
warmupTime = 0
}
b = &Bee{
p2pCancel: p2pCancel,
errorLogWriter: logger.WriterLevel(logrus.ErrorLevel),
tracerCloser: tracerCloser,
}
var debugAPIService *debugapi.Service
if o.DebugAPIAddr != "" {
overlayEthAddress, err := signer.EthereumAddress()
if err != nil {
return nil, fmt.Errorf("eth address: %w", err)
}
// set up basic debug api endpoints for debugging and /health endpoint
debugAPIService = debugapi.New(swarmAddress, publicKey, pssPrivateKey.PublicKey, overlayEthAddress, logger, tracer, o.CORSAllowedOrigins)
debugAPIListener, err := net.Listen("tcp", o.DebugAPIAddr)
if err != nil {
return nil, fmt.Errorf("debug api listener: %w", err)
}
debugAPIServer := &http.Server{
IdleTimeout: 30 * time.Second,
ReadHeaderTimeout: 3 * time.Second,
Handler: debugAPIService,
ErrorLog: log.New(b.errorLogWriter, "", 0),
}
go func() {
logger.Infof("debug api address: %s", debugAPIListener.Addr())
if err := debugAPIServer.Serve(debugAPIListener); err != nil && err != http.ErrServerClosed {
logger.Debugf("debug api server: %v", err)
logger.Error("unable to serve debug api")
}
}()
b.debugAPIServer = debugAPIServer
}
stateStore, err := InitStateStore(logger, o.DataDir)
if err != nil {
return nil, err
}
b.stateStoreCloser = stateStore
err = CheckOverlayWithStore(swarmAddress, stateStore)
if err != nil {
return nil, err
}
addressbook := addressbook.New(stateStore)
var (
swapBackend *ethclient.Client
overlayEthAddress common.Address
chainID int64
transactionService transaction.Service
transactionMonitor transaction.Monitor
chequebookFactory chequebook.Factory
chequebookService chequebook.Service
chequeStore chequebook.ChequeStore
cashoutService chequebook.CashoutService
)
if !o.Standalone {
swapBackend, overlayEthAddress, chainID, transactionMonitor, transactionService, err = InitChain(
p2pCtx,
logger,
stateStore,
o.SwapEndpoint,
signer,
o.BlockTime,
)
if err != nil {
return nil, fmt.Errorf("init chain: %w", err)
}
b.ethClientCloser = swapBackend.Close
b.transactionCloser = tracerCloser
b.transactionMonitorCloser = transactionMonitor
}
if o.SwapEnable {
chequebookFactory, err = InitChequebookFactory(
logger,
swapBackend,
chainID,
transactionService,
o.SwapFactoryAddress,
o.SwapLegacyFactoryAddresses,
)
if err != nil {
return nil, err
}
if err = chequebookFactory.VerifyBytecode(p2pCtx); err != nil {
return nil, fmt.Errorf("factory fail: %w", err)
}
chequebookService, err = InitChequebookService(
p2pCtx,
logger,
stateStore,
signer,
chainID,
swapBackend,
overlayEthAddress,
transactionService,
chequebookFactory,
o.SwapInitialDeposit,
o.DeployGasPrice,
)
if err != nil {
return nil, err
}
chequeStore, cashoutService = initChequeStoreCashout(
stateStore,
swapBackend,
chequebookFactory,
chainID,
overlayEthAddress,
transactionService,
)
}
lightNodes := lightnode.NewContainer(swarmAddress)
txHash, err := getTxHash(stateStore, logger, o)
if err != nil {
return nil, fmt.Errorf("invalid transaction hash: %w", err)
}
senderMatcher := transaction.NewMatcher(swapBackend, types.NewEIP155Signer(big.NewInt(chainID)))
p2ps, err := libp2p.New(p2pCtx, signer, networkID, swarmAddress, addr, addressbook, stateStore, lightNodes, senderMatcher, logger, tracer, libp2p.Options{
PrivateKey: libp2pPrivateKey,
NATAddr: o.NATAddr,
EnableWS: o.EnableWS,
EnableQUIC: o.EnableQUIC,
Standalone: o.Standalone,
WelcomeMessage: o.WelcomeMessage,
FullNode: o.FullNodeMode,
Transaction: txHash,
})
if err != nil {
return nil, fmt.Errorf("p2p service: %w", err)
}
b.p2pService = p2ps
b.p2pHalter = p2ps
// localstore depends on batchstore
var path string
if o.DataDir != "" {
logger.Infof("using datadir in: '%s'", o.DataDir)
path = filepath.Join(o.DataDir, "localstore")
}
lo := &localstore.Options{
Capacity: o.CacheCapacity,
OpenFilesLimit: o.DBOpenFilesLimit,
BlockCacheCapacity: o.DBBlockCacheCapacity,
WriteBufferSize: o.DBWriteBufferSize,
DisableSeeksCompaction: o.DBDisableSeeksCompaction,
}
storer, err := localstore.New(path, swarmAddress.Bytes(), stateStore, lo, logger)
if err != nil {
return nil, fmt.Errorf("localstore: %w", err)
}
b.localstoreCloser = storer
batchStore, err := batchstore.New(stateStore, storer.UnreserveBatch)
if err != nil {
return nil, fmt.Errorf("batchstore: %w", err)
}
validStamp := postage.ValidStamp(batchStore)
post, err := postage.NewService(stateStore, batchStore, chainID)
if err != nil {
return nil, fmt.Errorf("postage service load: %w", err)
}
b.postageServiceCloser = post
var (
postageContractService postagecontract.Interface
batchSvc postage.EventUpdater
eventListener postage.Listener
)
var postageSyncStart uint64 = 0
if !o.Standalone {
postageContractAddress, startBlock, found := listener.DiscoverAddresses(chainID)
if o.PostageContractAddress != "" {
if !common.IsHexAddress(o.PostageContractAddress) {
return nil, errors.New("malformed postage stamp address")
}
postageContractAddress = common.HexToAddress(o.PostageContractAddress)
} else if !found {
return nil, errors.New("no known postage stamp addresses for this network")
}
if found {
postageSyncStart = startBlock
}
eventListener = listener.New(logger, swapBackend, postageContractAddress, o.BlockTime, &pidKiller{node: b})
b.listenerCloser = eventListener
batchSvc = batchservice.New(stateStore, batchStore, logger, eventListener, overlayEthAddress.Bytes(), post)
erc20Address, err := postagecontract.LookupERC20Address(p2pCtx, transactionService, postageContractAddress)
if err != nil {
return nil, err
}
postageContractService = postagecontract.New(
overlayEthAddress,
postageContractAddress,
erc20Address,
transactionService,
post,
)
}
if !o.Standalone {
if natManager := p2ps.NATManager(); natManager != nil {
// wait for nat manager to init
logger.Debug("initializing NAT manager")
select {
case <-natManager.Ready():
// this is a magic sleep to give the NAT time to sync the mappings;
// it is a hack and should be improved
time.Sleep(3 * time.Second)
logger.Debug("NAT manager initialized")
case <-time.After(10 * time.Second):
logger.Warning("NAT manager init timeout")
}
}
}
// Construct protocols.
pingPong := pingpong.New(p2ps, logger, tracer)
if err = p2ps.AddProtocol(pingPong.Protocol()); err != nil {
return nil, fmt.Errorf("pingpong service: %w", err)
}
hive := hive.New(p2ps, addressbook, networkID, logger)
if err = p2ps.AddProtocol(hive.Protocol()); err != nil {
return nil, fmt.Errorf("hive service: %w", err)
}
var bootnodes []ma.Multiaddr
if o.Standalone {
logger.Info("Starting node in standalone mode, no p2p connections will be made or accepted")
} else {
for _, a := range o.Bootnodes {
addr, err := ma.NewMultiaddr(a)
if err != nil {
logger.Debugf("multiaddress fail %s: %v", a, err)
logger.Warningf("invalid bootnode address %s", a)
continue
}
bootnodes = append(bootnodes, addr)
}
}
var swapService *swap.Service
metricsDB, err := shed.NewDBWrap(stateStore.DB())
if err != nil {
return nil, fmt.Errorf("unable to create metrics storage for kademlia: %w", err)
}
kad := kademlia.New(swarmAddress, addressbook, hive, p2ps, metricsDB, logger, kademlia.Options{Bootnodes: bootnodes, StandaloneMode: o.Standalone, BootnodeMode: o.BootnodeMode})
b.topologyCloser = kad
b.topologyHalter = kad
hive.SetAddPeersHandler(kad.AddPeers)
p2ps.SetPickyNotifier(kad)
batchStore.SetRadiusSetter(kad)
if batchSvc != nil {
syncedChan, err := batchSvc.Start(postageSyncStart)
if err != nil {
return nil, fmt.Errorf("unable to start batch service: %w", err)
}
// wait for the postage contract listener to sync
logger.Info("waiting to sync postage contract data, this may take a while... more info available in Debug loglevel")
// arguably this is not a very nice solution since we don't support
// interrupts at this stage of the application lifecycle. some changes
// would be needed on the cmd level to support context cancellation at
// this stage
<-syncedChan
}
minThreshold := big.NewInt(2 * refreshRate)
paymentThreshold, ok := new(big.Int).SetString(o.PaymentThreshold, 10)
if !ok {
return nil, fmt.Errorf("invalid payment threshold: %s", paymentThreshold)
}
pricer := pricer.NewFixedPricer(swarmAddress, basePrice)
if paymentThreshold.Cmp(minThreshold) < 0 {
return nil, fmt.Errorf("payment threshold below minimum generally accepted value, need at least %s", minThreshold)
}
pricing := pricing.New(p2ps, logger, paymentThreshold, minThreshold)
if err = p2ps.AddProtocol(pricing.Protocol()); err != nil {
return nil, fmt.Errorf("pricing service: %w", err)
}
addrs, err := p2ps.Addresses()
if err != nil {
return nil, fmt.Errorf("get server addresses: %w", err)
}
for _, addr := range addrs {
logger.Debugf("p2p address: %s", addr)
}
paymentTolerance, ok := new(big.Int).SetString(o.PaymentTolerance, 10)
if !ok {
return nil, fmt.Errorf("invalid payment tolerance: %s", paymentTolerance)
}
paymentEarly, ok := new(big.Int).SetString(o.PaymentEarly, 10)
if !ok {
return nil, fmt.Errorf("invalid payment early: %s", paymentEarly)
}
acc, err := accounting.NewAccounting(
paymentThreshold,
paymentTolerance,
paymentEarly,
logger,
stateStore,
pricing,
big.NewInt(refreshRate),
p2ps,
)
if err != nil {
return nil, fmt.Errorf("accounting: %w", err)
}
b.accountingCloser = acc
pseudosettleService := pseudosettle.New(p2ps, logger, stateStore, acc, big.NewInt(refreshRate), p2ps)
if err = p2ps.AddProtocol(pseudosettleService.Protocol()); err != nil {
return nil, fmt.Errorf("pseudosettle service: %w", err)
}
acc.SetRefreshFunc(pseudosettleService.Pay)
if o.SwapEnable {
var priceOracle priceoracle.Service
swapService, priceOracle, err = InitSwap(
p2ps,
logger,
stateStore,
networkID,
overlayEthAddress,
chequebookService,
chequeStore,
cashoutService,
acc,
o.PriceOracleAddress,
chainID,
transactionService,
)
if err != nil {
return nil, err
}
b.priceOracleCloser = priceOracle
acc.SetPayFunc(swapService.Pay)
}
pricing.SetPaymentThresholdObserver(acc)
retrieve := retrieval.New(swarmAddress, storer, p2ps, kad, logger, acc, pricer, tracer)
tagService := tags.NewTags(stateStore, logger)
b.tagsCloser = tagService
pssService := pss.New(pssPrivateKey, logger)
b.pssCloser = pssService
var ns storage.Storer
if o.GlobalPinningEnabled {
// create recovery callback for content repair
recoverFunc := recovery.NewCallback(pssService)
ns = netstore.New(storer, validStamp, recoverFunc, retrieve, logger)
} else {
ns = netstore.New(storer, validStamp, nil, retrieve, logger)
}
traversalService := traversal.New(ns)
pinningService := pinning.NewService(storer, stateStore, traversalService)
pushSyncProtocol := pushsync.New(swarmAddress, p2ps, storer, kad, tagService, o.FullNodeMode, pssService.TryUnwrap, validStamp, logger, acc, pricer, signer, tracer, warmupTime)
// set the pushSyncer in the PSS
pssService.SetPushSyncer(pushSyncProtocol)
if o.GlobalPinningEnabled {
// register function for chunk repair upon receiving a trojan message
chunkRepairHandler := recovery.NewRepairHandler(ns, logger, pushSyncProtocol)
b.recoveryHandleCleanup = pssService.Register(recovery.Topic, chunkRepairHandler)
}
pusherService := pusher.New(networkID, storer, kad, pushSyncProtocol, tagService, logger, tracer, warmupTime)
b.pusherCloser = pusherService
pullStorage := pullstorage.New(storer)
pullSyncProtocol := pullsync.New(p2ps, pullStorage, pssService.TryUnwrap, validStamp, logger)
b.pullSyncCloser = pullSyncProtocol
var pullerService *puller.Puller
if o.FullNodeMode {
pullerService = puller.New(stateStore, kad, pullSyncProtocol, logger, puller.Options{}, warmupTime) // plain assignment: `:=` would shadow the outer pullerService and skip its metrics registration below
b.pullerCloser = pullerService
}
retrieveProtocolSpec := retrieve.Protocol()
pushSyncProtocolSpec := pushSyncProtocol.Protocol()
pullSyncProtocolSpec := pullSyncProtocol.Protocol()
if o.FullNodeMode {
logger.Info("starting in full mode")
} else {
logger.Info("starting in light mode")
p2p.WithBlocklistStreams(p2p.DefaultBlocklistTime, retrieveProtocolSpec)
p2p.WithBlocklistStreams(p2p.DefaultBlocklistTime, pushSyncProtocolSpec)
p2p.WithBlocklistStreams(p2p.DefaultBlocklistTime, pullSyncProtocolSpec)
}
if err = p2ps.AddProtocol(retrieveProtocolSpec); err != nil {
return nil, fmt.Errorf("retrieval service: %w", err)
}
if err = p2ps.AddProtocol(pushSyncProtocolSpec); err != nil {
return nil, fmt.Errorf("pushsync service: %w", err)
}
if err = p2ps.AddProtocol(pullSyncProtocolSpec); err != nil {
return nil, fmt.Errorf("pullsync protocol: %w", err)
}
multiResolver := multiresolver.NewMultiResolver(
multiresolver.WithConnectionConfigs(o.ResolverConnectionCfgs),
multiresolver.WithLogger(o.Logger),
)
b.resolverCloser = multiResolver
var apiService api.Service
if o.APIAddr != "" {
// API server
feedFactory := factory.New(ns)
steward := steward.New(storer, traversalService, pushSyncProtocol)
apiService = api.New(tagService, ns, multiResolver, pssService, traversalService, pinningService, feedFactory, post, postageContractService, steward, signer, logger, tracer, api.Options{
CORSAllowedOrigins: o.CORSAllowedOrigins,
GatewayMode: o.GatewayMode,
WsPingPeriod: 60 * time.Second,
})
apiListener, err := net.Listen("tcp", o.APIAddr)
if err != nil {
return nil, fmt.Errorf("api listener: %w", err)
}
apiServer := &http.Server{
IdleTimeout: 30 * time.Second,
ReadHeaderTimeout: 3 * time.Second,
Handler: apiService,
ErrorLog: log.New(b.errorLogWriter, "", 0),
}
go func() {
logger.Infof("api address: %s", apiListener.Addr())
if err := apiServer.Serve(apiListener); err != nil && err != http.ErrServerClosed {
logger.Debugf("api server: %v", err)
logger.Error("unable to serve api")
}
}()
b.apiServer = apiServer
b.apiCloser = apiService
}
if debugAPIService != nil {
// register metrics from components
debugAPIService.MustRegisterMetrics(p2ps.Metrics()...)
debugAPIService.MustRegisterMetrics(pingPong.Metrics()...)
debugAPIService.MustRegisterMetrics(acc.Metrics()...)
debugAPIService.MustRegisterMetrics(storer.Metrics()...)
debugAPIService.MustRegisterMetrics(kad.Metrics()...)
if pullerService != nil {
debugAPIService.MustRegisterMetrics(pullerService.Metrics()...)
}
debugAPIService.MustRegisterMetrics(pushSyncProtocol.Metrics()...)
debugAPIService.MustRegisterMetrics(pusherService.Metrics()...)
debugAPIService.MustRegisterMetrics(pullSyncProtocol.Metrics()...)
debugAPIService.MustRegisterMetrics(pullStorage.Metrics()...)
debugAPIService.MustRegisterMetrics(retrieve.Metrics()...)
debugAPIService.MustRegisterMetrics(lightNodes.Metrics()...)
if bs, ok := batchStore.(metrics.Collector); ok {
debugAPIService.MustRegisterMetrics(bs.Metrics()...)
}
if eventListener != nil {
if ls, ok := eventListener.(metrics.Collector); ok {
debugAPIService.MustRegisterMetrics(ls.Metrics()...)
}
}
if pssServiceMetrics, ok := pssService.(metrics.Collector); ok {
debugAPIService.MustRegisterMetrics(pssServiceMetrics.Metrics()...)
}
if apiService != nil {
debugAPIService.MustRegisterMetrics(apiService.Metrics()...)
}
if l, ok := logger.(metrics.Collector); ok {
debugAPIService.MustRegisterMetrics(l.Metrics()...)
}
debugAPIService.MustRegisterMetrics(pseudosettleService.Metrics()...)
if swapService != nil {
debugAPIService.MustRegisterMetrics(swapService.Metrics()...)
}
// inject dependencies and configure full debug api http path routes
debugAPIService.Configure(p2ps, pingPong, kad, lightNodes, storer, tagService, acc, pseudosettleService, o.SwapEnable, swapService, chequebookService, batchStore, transactionService)
}
if err := kad.Start(p2pCtx); err != nil {
return nil, err
}
p2ps.Ready()
return b, nil
}
func (b *Bee) Shutdown(ctx context.Context) error {
var mErr error
// if a shutdown is already in progress, return here
b.shutdownMutex.Lock()
if b.shutdownInProgress {
b.shutdownMutex.Unlock()
return ErrShutdownInProgress
}
b.shutdownInProgress = true
b.shutdownMutex.Unlock()
// halt kademlia while shutting down other
// components.
b.topologyHalter.Halt()
// stop the p2p layer from accepting new connections
// while shutting down other components
b.p2pHalter.Halt()
// tryClose is a convenience closure that reduces
// repetitive io.Closer error handling.
tryClose := func(c io.Closer, errMsg string) {
if c == nil {
return
}
if err := c.Close(); err != nil {
mErr = multierror.Append(mErr, fmt.Errorf("%s: %w", errMsg, err))
}
}
tryClose(b.apiCloser, "api")
var eg errgroup.Group
if b.apiServer != nil {
eg.Go(func() error {
if err := b.apiServer.Shutdown(ctx); err != nil {
return fmt.Errorf("api server: %w", err)
}
return nil
})
}
if b.debugAPIServer != nil {
eg.Go(func() error {
if err := b.debugAPIServer.Shutdown(ctx); err != nil {
return fmt.Errorf("debug api server: %w", err)
}
return nil
})
}
if err := eg.Wait(); err != nil {
mErr = multierror.Append(mErr, err)
}
if b.recoveryHandleCleanup != nil {
b.recoveryHandleCleanup()
}
var wg sync.WaitGroup
wg.Add(5)
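// Note: the count passed to wg.Add must match the five tryClose goroutines
// spawned below (pss, pusher, puller, accounting, pull sync).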
go func() {
defer wg.Done()
tryClose(b.pssCloser, "pss")
}()
go func() {
defer wg.Done()
tryClose(b.pusherCloser, "pusher")
}()
go func() {
defer wg.Done()
tryClose(b.pullerCloser, "puller")
}()
go func() {
defer wg.Done()
tryClose(b.accountingCloser, "accounting")
}()
b.p2pCancel()
go func() {
defer wg.Done()
tryClose(b.pullSyncCloser, "pull sync")
}()
wg.Wait()
tryClose(b.p2pService, "p2p server")
tryClose(b.priceOracleCloser, "price oracle service")
wg.Add(3)
go func() {
defer wg.Done()
tryClose(b.transactionMonitorCloser, "transaction monitor")
tryClose(b.transactionCloser, "transaction")
}()
go func() {
defer wg.Done()
tryClose(b.listenerCloser, "listener")
}()
go func() {
defer wg.Done()
tryClose(b.postageServiceCloser, "postage service")
}()
wg.Wait()
if c := b.ethClientCloser; c != nil {
c()
}
tryClose(b.tracerCloser, "tracer")
tryClose(b.tagsCloser, "tag persistence")
tryClose(b.topologyCloser, "topology driver")
tryClose(b.stateStoreCloser, "statestore")
tryClose(b.localstoreCloser, "localstore")
tryClose(b.errorLogWriter, "error log writer")
tryClose(b.resolverCloser, "resolver service")
return mErr
}
func getTxHash(stateStore storage.StateStorer, logger logging.Logger, o Options) ([]byte, error) {
if o.Standalone {
return nil, nil // in standalone mode tx hash is not used
}
if o.Transaction != "" {
txHashTrimmed := strings.TrimPrefix(o.Transaction, "0x")
if len(txHashTrimmed) != 64 {
return nil, errors.New("invalid length")
}
txHash, err := hex.DecodeString(txHashTrimmed)
if err != nil {
return nil, err
}
logger.Infof("using the provided transaction hash %x", txHash)
return txHash, nil
}
var txHash common.Hash
key := chequebook.ChequebookDeploymentKey
if err := stateStore.Get(key, &txHash); err != nil {
if errors.Is(err, storage.ErrNotFound) {
return nil, errors.New("chequebook deployment transaction hash not found. Please specify the transaction hash manually.")
}
return nil, err
}
logger.Infof("using the chequebook transaction hash %x", txHash)
return txHash.Bytes(), nil
}
// pidKiller is used to issue a forced shutdown of the node from sub-modules. The issue with using the
// node's Shutdown method is that it only shuts down the node and does not exit the start process,
// which is waiting on os.Signals. This is not desirable, but currently the bee node cannot handle
// rate-limited blockchain API calls properly. We shut the node down in this case to allow the
// user to rectify the API issues (by adjusting limits or using a different endpoint). Unfortunately, there is
// no platform-agnostic way to trigger os.Signals in Go, which is why we use the process.Kill
// approach; it works on Windows as well.
type pidKiller struct {
node *Bee
}
var ErrShutdownInProgress error = errors.New("shutdown in progress")
func (p *pidKiller) | (ctx context.Context) error {
err := p.node.Shutdown(ctx)
if err != nil {
return err
}
ps, err := os.FindProcess(syscall.Getpid())
if err != nil {
return err
}
return ps.Kill()
}
| Shutdown | identifier_name |
graphics2.js | // GLOBAL VARIABLES
// GETS THE OBJECT SELECTED WITH A CLICK
//var ONFOCUS_OBJECT;
// GETS THE LIGHT SELECTED FROM THE MENU
var ONFOCUS_LIGHT;
var KEYDOWN;
var FUNCTION;
var selected_object = null;
var aux;
var picking_method = 'transform';
// instantiate raycaster
var raycaster = new THREE.Raycaster();
var mouse = new THREE.Vector2();
// to orbit with mouse
var mouseOrbit;
// VARIABLES FOR THE SCENE, CAMERA, AND WINDOW RENDERING
var renderer
var camera
var scene
var backgroundScene = "#002233"
// VARIABLES FOR MOUSE EFFECTS
var presionado=false;
var x_anterior;
var y_anterior;
var escala = 1;
// var rotacion;
// COMPLEX OBJECT
var cuboRubik = new THREE.Group();
var pivotPoint = new THREE.Object3D();
var cubePosX;
var cubePosY;
var cubePosZ;
var cubeMesh;
// CREATES THREE VERTICES RELATIVE TO A SMALL CUBE ON EVERY FIGURE.
// TRANSLATIONS ALLOWED BY DRAGGING:
// VERTICES -> ALONG THE THREE AXES (X, Y, Z)
// INNER CUBE -> IN ANY DIRECTION
// CUBE FACES -> ON THE PLANES (X-Y, X-Z, Y-Z)
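// Sketch (assumption): constraining a drag to a plane amounts to zeroing the
// disallowed component of the mouse delta before applying it, e.g. for a face
// restricted to the X-Z plane:
//   obj.position.x += delta.x;
//   obj.position.z += delta.z; // delta.y is ignored, so y stays fixed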
var splineHelperObjects = [], splineOutline;
var splinePointsLength = 4;
var transformControl;
var ARC_SEGMENTS = 200;
var splineMesh;
var splines = {};
var positions = [];
// CONTAINER OF FIGURES
var figuras= new Object();
var objects = [];
var temp = [];
var normalizadores={};
figuras.cantidad=0;
// BOARD
var tablero = new THREE.Group();
//SIZE OF THE TILES
var tile_width=4;
var tile_color="#ffffff";
// CUBE
var cube_width = tile_width*0.75;
// LIGHTS
var distanceFromCenter = 1.75*tile_width;
var addLights = function( distanceFromCenter ){
ambient_light = new THREE.AmbientLight("#ffffff",0.2 ); // soft white light
//luces.push(ambient_light);
scene.add(ambient_light);
white_up = new THREE.SpotLight("#ffffff",1);
//white_up_light.position.set(0 - tile_width, 20, 0 - tile_width);
white_up.position.set(0, 200, 0);
//luces.push(white_up_light);
scene.add(white_up);
white_down = new THREE.SpotLight("#ffffff",0.2);
//white_down_light.position.set(0 - tile_width, -20, 0 - tile_width);
white_down.position.set(0 - tile_width, -100, 0 - tile_width);
scene.add(white_down);
red = new THREE.SpotLight("#ff0000",6);
red.position.set(distanceFromCenter - tile_width, 20, distanceFromCenter - tile_width);
scene.add(red);
green = new THREE.SpotLight("#00ff00",6);
green.position.set( -distanceFromCenter - tile_width, 20, distanceFromCenter - tile_width);
scene.add(green);
blue = new THREE.SpotLight("#0000ff",6);
blue.position.set( 0 - tile_width, 20, -distanceFromCenter - tile_width);
scene.add(blue);
}
var addTablero = function(){
var tile_geometry = new THREE.BoxGeometry(tile_width,tile_width/10,tile_width);
var texture = new THREE.TextureLoader().load( "resources/madera.jpg");
//new THREE.MeshLambertMaterial({ map: THREE.ImageUtils.loadTexture("crate.gif") });
//var black_material = new THREE.MeshPhongMaterial({ color:"#000000", side: THREE.DoubleSide, flatShading: true }); //fixed color
var black_material = new THREE.MeshPhongMaterial({ map: texture }); // wood texture; reuses the TextureLoader result instead of the deprecated THREE.ImageUtils.loadTexture
var color_material = new THREE.MeshPhongMaterial({ color:tile_color, side: THREE.DoubleSide, flatShading: true }); //to change with gui
var black_color = -1;
//Chess Table has 64 tiles, 8 rows, 8 columns
for(var i = 0; i < 8; i++){
if (i % 2 == 0) { black_color = -1 }
else { black_color = 1 };
for(var j = 0; j < 8; j++){
var material;
if(black_color == 1){ material = black_material; } // iterate black and white
else{ material = color_material; }
var tile = new THREE.Mesh( tile_geometry, material );
tile.position.x = (-4 + j) * tile_width;
tile.position.z = (-4 + i) * tile_width;
tablero.add(tile);
black_color *= -1; } }
scene.add(tablero);
console.log(tablero.position);
}
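// Layout check (derived arithmetic): tile centers sit at x = (-4 + j) * tile_width
// for j = 0..7, i.e. -16, -12, ..., 12 with tile_width = 4 (likewise along z),
// so the 8x8 board reaches one tile further toward -x/-z than toward +x/+z.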
var createRubikCube = function(){
for(var i= 0;i<3;i++){
for(var j=0;j<3;j++){
for(var k=0;k<3;k++){
let geometryC = new THREE.BoxGeometry(cube_width, cube_width, cube_width );
if(i==0){
//paint the bottom
geometryC.faces[ 6 ].color.setHex(0x4b3621);
geometryC.faces[ 7 ].color.setHex(0x4b3621);
}else if(i==2){
//paint the top
geometryC.faces[ 4 ].color.setHex(0xffffff);
geometryC.faces[ 5 ].color.setHex(0xffffff);
}
if(j==0){
//paint the back
geometryC.faces[ 10 ].color.setHex(0xff0000);
geometryC.faces[ 11 ].color.setHex(0xff0000);
}else if(j==2){
//paint the front
geometryC.faces[ 8 ].color.setHex(0xff5500);
geometryC.faces[ 9 ].color.setHex(0xff5500);
}
if(k==0){
//paint the left side
geometryC.faces[ 2 ].color.setHex(0x00ff55);
geometryC.faces[ 3 ].color.setHex(0x00ff55);
}else if(k==2){
//paint the right side
geometryC.faces[ 0 ].color.setHex(0xffff00);
geometryC.faces[ 1 ].color.setHex(0xffff00);
}
let material = new THREE.MeshLambertMaterial( { color: "#ffffff",side: THREE.DoubleSide, vertexColors: THREE.FaceColors, flatShading: true} );
//let material = new THREE.MeshLambertMaterial( { color: 0xffffff, vertexColors: THREE.FaceColors} );
//material.light = true; // not a three.js material property — disabled as a harmless leftover
let cube = new THREE.Mesh(geometryC, material);
cube.position.x = ((-1.65 + k) * cube_width)+(((k+1)%2)*(-1+k)*(cube_width/50));
cube.position.z = ((-1.65 + j) * cube_width)+(((j+1)%2)*(-1+j)*(cube_width/50));
cube.position.y = ((2 + i) * cube_width)+(((i+1)%2)*(-1+i)*(cube_width/50));
cuboRubik.add(cube);
}
}
}
}
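// Assumed face-index convention (legacy THREE.Geometry BoxGeometry, two triangle
// faces per side, ordered +x, -x, +y, -y, +z, -z): faces[0..1] right, [2..3] left,
// [4..5] top, [6..7] bottom, [8..9] front, [10..11] back — consistent with the
// i/j/k branches above.
// The offset term (((k+1)%2)*(-1+k)*(cube_width/50)) evaluates to -cube_width/50,
// 0, +cube_width/50 for k = 0, 1, 2, nudging the outer layers apart so thin
// seams appear between the cubelets.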
var addRubikCube = function(){
var geometry = new THREE.BoxGeometry( 4, 4, 4 );
var material = new THREE.MeshBasicMaterial( {color: 0x000000} );
cubeMesh = new THREE.Mesh( geometry, material );
cubeMesh.position.y = 3;
cuboRubik.position.y =-7.5;
cuboRubik.position.x =1;
cuboRubik.position.z = 1;
scene.remove(pivotPoint);
cuboRubik.add(pivotPoint);
cubeMesh.add(cuboRubik);
cubeMesh.add(pivotPoint);
scene.add(cubeMesh);
//scene.add(cuboRubik);
}
var rmRubikCube = function(){
//cubeMesh.remove(pivotPoint);
scene.add(pivotPoint);
cuboRubik.remove(pivotPoint);
scene.remove(cubeMesh);
}
function getRandomInt(min, max) {
min = Math.ceil(min);
max = Math.floor(max);
return Math.floor(Math.random() * (max - min)) + min; //The maximum is exclusive and the minimum is inclusive
}
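// Examples with the ranges used below: getRandomInt(0, 2) returns 0 or 1, so
// Math.pow(-1, getRandomInt(0, 2)) is a random sign; getRandomInt(1, 361)
// returns an integer angle in [1, 360].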
//FUNCTION TO CREATE A SHAPE
figuras.agregar=function(name,geometria,COLOR){
this.cantidad+=1;
let nombre = this.cantidad+"."+name
//CREATE THE SHAPE'S MATERIAL
/*if (name == "ESFERA" || name == "CILINDRO" || name== "TOROIDE") {
let maxAnisotropy = renderer.capabilities.getMaxAnisotropy();
let cubeTexture = new THREE.CubeTextureLoader()
.load([
'https://dl.dropboxusercontent.com/s/vbnwu8hrqhfqy6o/sprite.png?dl=0',
'https://dl.dropboxusercontent.com/s/vbnwu8hrqhfqy6o/sprite.png?dl=0',
'https://dl.dropboxusercontent.com/s/vbnwu8hrqhfqy6o/sprite.png?dl=0',
'https://dl.dropboxusercontent.com/s/vbnwu8hrqhfqy6o/sprite.png?dl=0',
'https://dl.dropboxusercontent.com/s/vbnwu8hrqhfqy6o/sprite.png?dl=0',
'https://dl.dropboxusercontent.com/s/vbnwu8hrqhfqy6o/sprite.png?dl=0']);
cubeTexture.anisotropy = maxAnisotropy;
cubeTexture.wrapS = cubeTexture.wrapT = THREE.RepeatWrapping;
//cubeTexture.repeat.set(2,2);
var material = new THREE.MeshPhongMaterial( { color: COLOR, envMap: cubeTexture, side: THREE.DoubleSide, overdraw: 0.5 } );
}
*/
var material = new THREE.MeshPhongMaterial( { color: COLOR } ); //the legacy 'overdraw' option only applied to CanvasRenderer, so it is dropped here
this[nombre]=new Object();
this[nombre].id=Math.trunc((this.cantidad-1)/3);
//BUILD THE MESH
this[nombre].esqueleto=new THREE.Mesh( geometria, material);
normalizadores[this[nombre].esqueleto.uuid]= [];
//this[nombre].esqueleto.position.x = Math.pow(-1,getRandomInt(0,2))*(getRandomInt(3*cube_width,4*cube_width));
let posIniX = Math.pow(-1,getRandomInt(0,2))*(getRandomInt(3*cube_width,4*cube_width));
let posIniZ = Math.pow(-1,getRandomInt(0,2))*(getRandomInt(3*cube_width,4*cube_width));
let normIni = Math.sqrt((Math.pow(posIniX,2)+(Math.pow(posIniZ,2))));
let angleIni = getRandomInt(1,361);
//height
this[nombre].esqueleto.position.y = getRandomInt(2*cube_width,4*cube_width);
//X-Z position
this[nombre].esqueleto.position.x = (normIni)*(Math.sin(angleIni*Math.PI/180));
this[nombre].esqueleto.position.z = (normIni)*(Math.cos(angleIni*Math.PI/180));
//rotation
this[nombre].esqueleto.rotation.x = Math.random() * 2 * Math.PI;
this[nombre].esqueleto.rotation.y = Math.random() * 2 * Math.PI;
this[nombre].esqueleto.rotation.z = Math.random() * 2 * Math.PI;
//CASTS A SHADOW
this[nombre].esqueleto.castShadow = true;
//STORES THE INVERSE OF THE DEFORMATION MATRIX
this[nombre].normalizador=0;
objects.push(this[nombre].esqueleto);
temp.push(this[nombre].esqueleto);
//group.add(this[nombre].esqueleto);
pivotPoint.add(this[nombre].esqueleto);
splineHelperObjects.push(this[nombre].esqueleto); //'this' is figuras here, so avoid the detour through the global 'self'
return nombre;
};
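// Usage sketch (mirrors the call agregar() makes below):
// var nombre = figuras.agregar("ESFERA", new THREE.SphereGeometry(1.5, 32, 32), "#ff00ff");
// The mesh is parented to pivotPoint at a random height, on a circle of radius
// normIni around the pivot at a random angle.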
//VARIABLES FOR THE UI CONTROLS
var speed=0.025;
var speed_around=0.025;
var menu = {
background: backgroundScene,
baldosas: tile_color,
Rubik: true,
rotar_alrededor: false, //the scene starts with the shapes not orbiting
speed_around: speed_around, //reference the top-level var directly; 'this' here is only window by accident
rotar: false,
speed: speed,
white_up: true, white_down: false,
red: false, green: false , blue: false,
figuras: "ESFERA", //default entry of the shape list (the old comment said "cube", but the default is the sphere)
addGraphic: function(){ agregar();}, //calls agregar() to add a shape
color: '#ff00ff'
}
var shape_params = {
color: "#00ffff",
picking: "translate",
removeObj: function(){ remove(); }
};
//CONTROL MENU
var gui = new dat.GUI({ height : 3 * 8 - 1 });
var gui_controllers = [];
function init() {
//CREATE THE THREE.JS SCENE
scene = new THREE.Scene();
//var backgroundScene = "#002233"
scene.background = new THREE.Color(backgroundScene);
//window.addEventListener( 'resize', onWindowResize, false );
//CHOOSE THE THREE.JS CAMERA TYPE
camera = new THREE.PerspectiveCamera(75,window.innerWidth/window.innerHeight,0.1,500);
camera.position.set(30,10,0); // set the camera position in XYZ
camera.lookAt( scene.position );
scene.add(pivotPoint);
//WebGL RENDERER
renderer = new THREE.WebGLRenderer({ alpha: true, antialias: true});
renderer.setPixelRatio( window.devicePixelRatio );
renderer.setSize( window.innerWidth, window.innerHeight*escala );
//ALLOW SHADOWS TO BE RENDERED
renderer.shadowMap.enabled = true;
document.body.appendChild(renderer.domElement);
//mouseOrbit = new THREE.OrbitControls(camera, renderer.domElement);
mouseOrbit = new THREE.OrbitControls( camera, renderer.domElement );
gui.addColor(menu, 'background').onChange( changeBackgroundColor ).listen();
gui.addColor(menu, 'baldosas').onChange( changeColorTiles ).listen();
//GROUP TOGGLE FOR THE RUBIK CUBE (disabled)
//gui.add(menu, 'Rubik').onChange(updateFiguras);
//ROTATE THE WHOLE GROUP OF SHAPES
var animRotar = gui.addFolder("Rotacion");
gui_controllers[0] = animRotar
.add(menu, 'rotar_alrededor')
.listen()
.onFinishChange(
function(value) {
}
);
animRotar.add(menu, 'speed_around',-1,1);
gui_controllers[1] = animRotar //use a fresh slot; the original overwrote index 0
.add(menu, 'rotar')
.listen()
.onFinishChange(
function(value) {
}
);
animRotar.add(menu, 'speed',-1,1);
//LIGHTS
var scenectl = gui.addFolder("Luces");
scenectl.add(menu, 'white_up');
scenectl.add(menu, 'white_down');
scenectl.add(menu, 'red');
scenectl.add(menu, 'green');
scenectl.add(menu, 'blue');
//NEW SHAPE -> MENU SECTION
var figura = gui.addFolder('Nueva Figura');
figura.add(menu, 'figuras', ["ESFERA", "TOROIDE", "PIRAMIDE", "CILINDRO"]); //list of shapes that can be created
figura.addColor(menu,'color');
figura.add(menu, 'addGraphic'); //hooks up the add-shape action
var shapectl = gui.addFolder("Figura Seleccionada");
// adding folder to gui control
shapectl.addColor(shape_params, 'color').onChange(ChangeColor).listen();
shapectl.add(shape_params, 'picking', [ "translate", "rotate", "scale"] );
shapectl.add(shape_params, 'removeObj');
// adding events to window
window.addEventListener( 'mousemove', onMouseMove, false );
document.addEventListener( 'mousedown', onDocumentMouseDown );
// ----- TRANSLATE CONTROL ---------------------------
control = new THREE.TransformControls( camera, renderer.domElement );
control.addEventListener( 'change', render );
control.addEventListener( 'dragging-changed', function ( event ) {
mouseOrbit.enabled = ! event.value;
} );
scene.add( control );
}
//ANIMATION
//ADDS A SHAPE TO THE SCENE
function agregar() {
let color = menu.color;
let fig = menu.figuras;
let geometria; //declared here so it is no longer an implicit global
if(fig == "ESFERA"){
geometria = new THREE.SphereGeometry(1.5,32,32);
}
if(fig == "TOROIDE"){
geometria = new THREE.TorusGeometry(1.5,0.5,32,100 );
}
if(fig == "PIRAMIDE"){
geometria = new THREE.CylinderBufferGeometry(0,2,3,4); //a 4-sided cylinder with radiusTop 0 approximates a pyramid
}
if(fig == "CILINDRO"){
geometria = new THREE.CylinderGeometry(1,1,3,8);
}
let nombre = figuras.agregar(fig,geometria,color);
renderer.render( scene, camera );
}
var updateLights = function(){
// change in GUI for lights
if (menu.white_up) { white_up.intensity = 1; }
else { white_up.intensity = 0; };
if (menu.white_down) { white_down.intensity = 1; }
else { white_down.intensity = 0; };
if (menu.red) { red.intensity = 6; }
else { red.intensity = 0; };
if (menu.green) { green.intensity = 6; }
else { green.intensity = 0; };
if (menu.blue) { blue.intensity = 6; }
else { blue.intensity = 0; };
}
var animateFiguras = function(){
if(menu.rotar_alrededor){
if(menu.Rubik){
cubeMesh.add(pivotPoint);
pivotPoint.rotation.y += menu.speed_around; }else{
cubeMesh.remove(pivotPoint);
}
}
else{
//nothing to stop explicitly — assigning pivotPoint.rotation = false would clobber the Euler and break later .rotation.y updates
}
if(menu.rotar){
rotateFiguras();
}
}
var updateFiguras = function(){
if (menu.Rubik){
addRubikCube();
}else{
menu['rotar_alrededor'] = false;
rmRubikCube();
}
}
var rotateFiguras = function(){
// cubeMesh.remove(cuboRubik);
// scene.add(cuboRubik);
for(let i = 0; i< temp.length; i++){
temp[i].rotation.y += menu.speed;
}
cubeMesh.rotation.y += menu.speed;
if (menu.Rubik){
pivotPoint.rotation.y -= menu.speed_around;
}
// scene.remove(cuboRubik);
// cubeMesh.add(cuboRubik);
// let temp = pivotPoint.children;
}
// callback for event to change color
function ChangeColor(){
selected_object.material.color.set(shape_params.color); //Color.set() handles both hex numbers (from getHex) and CSS-style strings
};
// helper: validates a 6-digit hex color string
var isHex = function (posible_hex) {
let re = /^[0-9A-Fa-f]{6}$/; // anchored, and without the /g flag — RegExp.test() is stateful with /g
return re.test(posible_hex); }
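// Why the /g flag was dropped above: a global regex keeps lastIndex between calls,
// making RegExp.test() stateful, so repeated calls on valid input alternate:
//   var sticky = /[0-9A-Fa-f]{6}/g;
//   sticky.test("ff00ff"); // true
//   sticky.test("ff00ff"); // false — lastIndex moved past the match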
// callback to change the background color
function changeBackgroundColor(){
let background_hex = new THREE.Color(menu.background).getHexString();
if ( isHex(background_hex) && scene.background != "#" + background_hex ){
scene.background = new THREE.Color("#"+background_hex); }
// backgroundScene = menu.background;
// scene.background.color.setHex(backgroundScene);
};
function changeColorTiles(){
let color_hex = new THREE.Color(menu.baldosas).getHexString();
if ( isHex(color_hex) && tile_color != "#" + color_hex ){
tile_color = "#" + color_hex;
scene.remove(tablero); //getObjectByName() expects a name string, so the old getObjectByName(tablero) always returned undefined
tablero = new THREE.Group(); //fresh group, otherwise addTablero() keeps piling tiles into the old one
addTablero(); }
// for(let t = 0;t<color_tiles.length; t++ ){
// color_tiles[t].material.color.setHex(menu.baldosas);
// }
};
function remove(){
if(selected_object.id != cubeMesh.id)
{
console.log("remove");
control.detach(selected_object);
pivotPoint.remove(selected_object); //use Object3D.remove() instead of splicing .children by hand
//scene.remove(selected_object);
renderer.render( scene, camera );
}
}
// get mouse coordinates all the time
function onMouseMove( event ) {
mouse.x = ( event.clientX / window.innerWidth ) * 2 - 1;
mouse.y = - ( event.clientY / window.innerHeight ) * 2 + 1;
}
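// Worked example of the pixel -> NDC mapping: in a 1000x800 window a pointer at
// (750, 200) gives mouse.x = (750/1000)*2 - 1 = 0.5 and mouse.y = -(200/800)*2 + 1 = 0.5,
// the [-1, 1] range that raycaster.setFromCamera() expects.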
// callback event for mouse down
function onDocumentMouseDown( event ) {
// event.preventDefault();
event.stopPropagation();
raycaster.setFromCamera( mouse, camera );
// calculate objects intersecting the picking ray
//var intersects = raycaster.intersectObjects( scene.children);
objects = pivotPoint.children.concat([cubeMesh]);
console.log(objects);
var intersects = raycaster.intersectObjects( objects );
// intersects[0].object.material.color.set( 0xff0000 );
//validate if has objects intersected
if (intersects.length>0){
// pick first intersected object
selected_object = intersects[0].object;
// change gui color
shape_params.color = selected_object.material.color.getHex();
control.setMode(shape_params.picking);
control.attach( selected_object );
}
}
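// Note (assumption about intent): intersectObjects() is non-recursive by default,
// so the ray tests cubeMesh's own black box but not the cubelets nested inside it;
// passing the recursive flag would pick descendants too:
//   var intersects = raycaster.intersectObjects( objects, true );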
function onWindowResize() {
camera.aspect = window.innerWidth / window.innerHeight;
camera.updateProjectionMatrix();
renderer.setSize( window.innerWidth, window.innerHeight );
render();
}
window.addEventListener( 'resize', onWindowResize, false );
init();
addTablero();
addLights(distanceFromCenter);
createRubikCube();
//scene.add(cuboRubik);
addRubikCube();
function render(){
renderer.render(scene,camera);
}
var showAnimationLoop = function(){
requestAnimationFrame(showAnimationLoop);
updateLights();
animateFiguras();
render(); }
showAnimationLoop();
graphics2.js | //VAIABLES GLOBALES
//OBTIENE EL OBJETO SELECCIONADO CON CLICK
//var ONFOCUS_OBJECT;
//OBTIENE LA LUZ SELECCIONADA DEL MENU
var ONFOCUS_LIGHT;
var KEYDOWN;
var FUNCTION;
var selected_object = null;
var aux;
var picking_method = 'transform';
// instantiate raycaster
var raycaster = new THREE.Raycaster();
var mouse = new THREE.Vector2();
// to orbit with mouse
var mouseOrbit;
//VARIABLES PARA LA ESCENA, CAMARA Y RENDERIZADO DE LA VENTANA
var renderer
var camera
var scene
var backgroundScene = "#002233"
//VARIABLES PARA EEFECTO DEL MOUSE
var presionado=false;
var x_anterior;
var y_anterior;
var escala = 1;
// var rotacion;
//OBJETO COMPLEJO
var cuboRubik = new THREE.Group();
var pivotPoint = new THREE.Object3D();
var cubePosX;
var cubePosY;
var cubePosZ;
var cubeMesh;
//CREA TRES VERTICES CON RESPECTO A UN CUBO PEQUEÑO EN TODAS LAS FIGURAS.
//TRASLACIONES PERMITIDAS CON DRAG
//VERTICES -> EN LOS TRES EJES (X, Y, Z)
//CUBO INTERIOR -> EN CUALQUIER DIRECCION
//CARAS DEL CUBO -> EN LOS PLANOS (X-Y, X-Z, Y-Z)
var splineHelperObjects = [], splineOutline;
var splinePointsLength = 4;
var transformControl;
var ARC_SEGMENTS = 200;
var splineMesh;
var splines = {};
var positions = [];
//CONTENEDOR DE FIGURAS
var figuras= new Object();
objects=[];
temp=[];
var normalizadores={};
figuras.cantidad=0;
//TABLERO
var tablero = new THREE.Group();
//SIZE OF THE TILES
var tile_width=4;
var tile_color="#ffffff";
//CUBO
var cube_width = tile_width*0.75;
//LUCES
var distanceFromCenter = 1.75*tile_width;
var addLights = function( distanceFromCenter ){
ambient_light = new THREE.AmbientLight("#ffffff",0.2 ); // soft white light
//luces.push(ambient_light);
scene.add(ambient_light);
white_up = new THREE.SpotLight("#ffffff",1);
//white_up_light.position.set(0 - tile_width, 20, 0 - tile_width);
white_up.position.set(0, 200, 0);
//luces.push(white_up_light);
scene.add(white_up);
white_down = new THREE.SpotLight("#ffffff",0.2);
//white_down_light.position.set(0 - tile_width, -20, 0 - tile_width);
white_down.position.set(0 - tile_width, -100, 0 - tile_width);
scene.add(white_down);
red = new THREE.SpotLight("#ff0000",6);
red.position.set(distanceFromCenter - tile_width, 20, distanceFromCenter - tile_width);
scene.add(red);
green = new THREE.SpotLight("#00ff00",6);
green.position.set( -distanceFromCenter - tile_width, 20, distanceFromCenter - tile_width);
scene.add(green);
blue = new THREE.SpotLight("#0000ff",6);
blue.position.set( 0 - tile_width, 20, -distanceFromCenter - tile_width);
scene.add(blue);
}
var addTablero = function(){
var tile_geometry = new THREE.BoxGeometry(tile_width,tile_width/10,tile_width);
var texture = new THREE.TextureLoader().load( "resources/madera.jpg");
//new THREE.MeshLambertMaterial({ map: THREE.ImageUtils.loadTexture("crate.gif") });
//var black_material = new THREE.MeshPhongMaterial({ color:"#000000", side: THREE.DoubleSide, flatShading: true }); //fixed color
var black_material = new THREE.MeshPhongMaterial({ map: THREE.ImageUtils.loadTexture("resources/madera.jpg") }); //fixed color
var color_material = new THREE.MeshPhongMaterial({ color:tile_color, side: THREE.DoubleSide, flatShading: true }); //to change with gui
var black_color = -1;
//Chess Table has 64 tiles, 8 rows, 8 columns
for(var i = 0; i < 8; i++){
if (i % 2 == 0) { black_color = -1 }
else { black_color = 1 };
for(var j = 0; j < 8; j++){
var material;
if(black_color == 1){ material = black_material; } // iterate black and white
else{ material = color_material; }
var tile = new THREE.Mesh( tile_geometry, material );
tile.position.x = (-4 + j) * tile_width;
tile.position.z = (-4 + i) * tile_width;
tablero.add(tile);
black_color *= -1; } }
scene.add(tablero);
console.log(tablero.position);
}
var createRubikCube = function(){
for(var i= 0;i<3;i++){
for(var j=0;j<3;j++){
for(var k=0;k<3;k++){ |
}
}
}
var addRubikCube = function(){
var geometry = new THREE.BoxGeometry( 4, 4, 4 );
var material = new THREE.MeshBasicMaterial( {color: 0x000000} );
cubeMesh = new THREE.Mesh( geometry, material );
cubeMesh.position.y = 3;
cuboRubik.position.y =-7.5;
cuboRubik.position.x =1;
cuboRubik.position.z = 1;
scene.remove(pivotPoint);
cuboRubik.add(pivotPoint);
cubeMesh.add(cuboRubik);
cubeMesh.add(pivotPoint);
scene.add(cubeMesh);
//scene.add(cuboRubik);
}
var rmRubikCube = function(){
//cubeMesh.remove(pivotPoint);
scene.add(pivotPoint);
cuboRubik.remove(pivotPoint);
scene.remove(cubeMesh);
}
function getRandomInt(min, max) {
min = Math.ceil(min);
max = Math.floor(max);
return Math.floor(Math.random() * (max - min)) + min; //The maximum is exclusive and the minimum is inclusive
}
//FUNCION CREAR FIGURA
figuras.agregar=function(name,geometria,COLOR){
this.cantidad+=1;
let nombre = this.cantidad+"."+name
//SE CREA EL MATERIAL DE LA FIGURA
/*if (name == "ESFERA" || name == "CILINDRO" || name== "TOROIDE") {
let maxAnisotropy = renderer.capabilities.getMaxAnisotropy();
let cubeTexture = new THREE.CubeTextureLoader()
.load([
'https://dl.dropboxusercontent.com/s/vbnwu8hrqhfqy6o/sprite.png?dl=0',
'https://dl.dropboxusercontent.com/s/vbnwu8hrqhfqy6o/sprite.png?dl=0',
'https://dl.dropboxusercontent.com/s/vbnwu8hrqhfqy6o/sprite.png?dl=0',
'https://dl.dropboxusercontent.com/s/vbnwu8hrqhfqy6o/sprite.png?dl=0',
'https://dl.dropboxusercontent.com/s/vbnwu8hrqhfqy6o/sprite.png?dl=0',
'https://dl.dropboxusercontent.com/s/vbnwu8hrqhfqy6o/sprite.png?dl=0']);
cubeTexture.anisotropy = maxAnisotropy;
cubeTexture.wrapS = cubeTexture.wrapT = THREE.RepeatWrapping;
//cubeTexture.repeat.set(2,2);
var material = new THREE.MeshPhongMaterial( { color: COLOR, envMap: cubeTexture, side: THREE.DoubleSide, overdraw: 0.5 } );
}
*/
var material = new THREE.MeshPhongMaterial( { color: COLOR, overdraw: 0.5 } );
this[nombre]=new Object();
this[nombre].id=Math.trunc((this.cantidad-1)/3);
//GENERA LA FIGURA
this[nombre].esqueleto=new THREE.Mesh( geometria, material);
normalizadores[this[nombre].esqueleto.uuid]= [];
//this[nombre].esqueleto.position.x = Math.pow(-1,getRandomInt(0,2))*(getRandomInt(3*cube_width,4*cube_width));
let posIniX = Math.pow(-1,getRandomInt(0,2))*(getRandomInt(3*cube_width,4*cube_width));
let posIniZ = Math.pow(-1,getRandomInt(0,2))*(getRandomInt(3*cube_width,4*cube_width));
let normIni = Math.sqrt((Math.pow(posIniX,2)+(Math.pow(posIniZ,2))));
let angleIni = getRandomInt(1,361);
//altura
this[nombre].esqueleto.position.y = getRandomInt(2*cube_width,4*cube_width);
//posicion X-Z
this[nombre].esqueleto.position.x = (normIni)*(Math.sin(angleIni*Math.PI/180));
this[nombre].esqueleto.position.z = (normIni)*(Math.cos(angleIni*Math.PI/180));
//rotacion
this[nombre].esqueleto.rotation.x = Math.random() * 2 * Math.PI;
this[nombre].esqueleto.rotation.y = Math.random() * 2 * Math.PI;
this[nombre].esqueleto.rotation.z = Math.random() * 2 * Math.PI;
//EMITE SOMBRA
this[nombre].esqueleto.castShadow = true;
//GUARDA EL INVERSO DE LA MATRIZ DE DEFORMACIÓN
this[nombre].normalizador=0;
objects.push(this[nombre].esqueleto);
temp.push(this[nombre].esqueleto);
//group.add(this[nombre].esqueleto);
pivotPoint.add(this[nombre].esqueleto);
splineHelperObjects.push(self.figuras[nombre].esqueleto)
return nombre;
};
//VARIEBLE PARA CONTROLES DE LA INTERFAZ
var speed=0.025;
var speed_around=0.025;
var menu = {
background: backgroundScene,
baldosas: tile_color,
Rubik: true,
rotar_alrededor: false, //inicia con escena sin girar las figuras
speed_around: this.speed_around,
rotar: false,
speed:this.speed,
white_up: true, white_down: false,
red: false, green: false , blue: false,
figuras: "ESFERA", //por defecto opcion de agregar un cubo
addGraphic: function(){ agregar();}, //llamada a funcion agregar() una figura
color: '#ff00ff'
}
var shape_params = {
color: "#00ffff",
picking: "translate",
removeObj: function(){ remove(); }
};
//MENU DE CONTROLES
gui = new dat.GUI({ height : 3 * 8 - 1 });
gui_controllers = [];
function init() {
//CREA LA ESCENA THREE.JS
scene = new THREE.Scene();
//var backgroundScene = "#002233"
scene.background = new THREE.Color(backgroundScene);
//window.addEventListener( 'resize', onWindowResize, false );
//ELIJE TIPO DE CAMARA DE THREE.JS
camera = new THREE.PerspectiveCamera(75,window.innerWidth/window.innerHeight,0.1,500);
camera.position.set(30,10,0); // Define posición de la cámara en XYZ
camera.lookAt( scene.position );
scene.add(pivotPoint);
//REPRESENTADOR WebGL
renderer = new THREE.WebGLRenderer({ alpha: true, antialias: true});
renderer.setPixelRatio( window.devicePixelRatio );
renderer.setSize( window.innerWidth, window.innerHeight*escala );
//PERMITE QUE SE VEAN LAS SOMBRAS
renderer.shadowMap.enabled = true;
renderer.setSize(window.innerWidth,window.innerHeight);
document.body.appendChild(renderer.domElement);
//mouseOrbit = new THREE.OrbitControls(camera, renderer.domElement);
mouseOrbit = new THREE.OrbitControls( camera, renderer.domElement );
gui.addColor(menu, 'background').onChange( changeBackgroundColor ).listen();
gui.addColor(menu, 'baldosas').onChange( changeColorTiles ).listen();
//ROTA TODO EL GRUPO DE FIGURAS
//gui.add(menu, 'Rubik').onChange(updateFiguras);
//ROTA TODO EL GRUPO DE FIGURAS
var animRotar = gui.addFolder("Rotacion")
gui_controllers[0] = animRotar
.add(menu, 'rotar_alrededor')
.listen()
.onFinishChange(
function(value) {
}
);
animRotar.add(menu, 'speed_around',-1,1);
gui_controllers[0] = animRotar
.add(menu, 'rotar')
.listen()
.onFinishChange(
function(value) {
}
);
animRotar.add(menu, 'speed',-1,1);
//LUCES
var scenectl = gui.addFolder("Luces");
scenectl.add(menu, 'white_up');
scenectl.add(menu, 'white_down');
scenectl.add(menu, 'red');
scenectl.add(menu, 'green');
scenectl.add(menu, 'blue');
//NUEVA FIGURA -> SECCION DEL MENU
var figura = gui.addFolder('Nueva Figura');
figura.add(menu, 'figuras', ["ESFERA", "TOROIDE", "PIRAMIDE", "CILINDRO"]); //lista de figuras posibles para crear
figura.addColor(menu,'color');
figura.add(menu, 'addGraphic'); //llamada a funcion agregar figura
var shapectl = gui.addFolder("Figura Seleccionada");
// adding folder to gui control
shapectl.addColor(shape_params, 'color').onChange(ChangeColor).listen();
shapectl.add(shape_params, 'picking', [ "translate", "rotate", "scale"] );
shapectl.add(shape_params, 'removeObj');
// adding events to window
window.addEventListener( 'mousemove', onMouseMove, false );
document.addEventListener( 'mousedown', onDocumentMouseDown );
// ----- TRANSLATE CONTROL ---------------------------
control = new THREE.TransformControls( camera, renderer.domElement );
control.addEventListener( 'change', render );
control.addEventListener( 'dragging-changed', function ( event ) {
mouseOrbit.enabled = ! event.value;
} );
scene.add( control );
}
// Animation
// Adds a figure to the scene
function agregar() {
let color = menu.color;
let fig = menu.figuras;
if(fig == "ESFERA"){
geometria = new THREE.SphereGeometry(1.5,32,32);
}
if(fig == "TOROIDE"){
geometria =new THREE.TorusGeometry(1.5,0.5,32,100 );
}
if(fig == "PIRAMIDE"){
geometria = new THREE.CylinderBufferGeometry(0,2,3,4);
}
if(fig == "CILINDRO"){
geometria = new THREE.CylinderGeometry(1,1,3,8);
}
nombre =figuras.agregar(fig,geometria,color);
renderer.render( scene, camera );
}
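// Illustrative alternative (a sketch, not used by the code above): the if-chain
// in agregar() could be table-driven. GEOMETRY_BUILDERS is hypothetical and
// defined nowhere else; the builder arguments mirror the calls above.
const GEOMETRY_BUILDERS = {
"ESFERA": () => new THREE.SphereGeometry(1.5, 32, 32),
"TOROIDE": () => new THREE.TorusGeometry(1.5, 0.5, 32, 100),
"PIRAMIDE": () => new THREE.CylinderBufferGeometry(0, 2, 3, 4),
"CILINDRO": () => new THREE.CylinderGeometry(1, 1, 3, 8)
};
// usage sketch: geometria = GEOMETRY_BUILDERS[menu.figuras]();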
var updateLights = function(){
// change in GUI for lights
if (menu.white_up) { white_up.intensity = 1; }
else { white_up.intensity = 0; };
if (menu.white_down) { white_down.intensity = 1; }
else { white_down.intensity = 0; };
if (menu.red) { red.intensity = 6; }
else { red.intensity = 0; };
if (menu.green) { green.intensity = 6; }
else { green.intensity = 0; };
if (menu.blue) { blue.intensity = 6; }
else { blue.intensity = 0; };
}
var animateFiguras = function(){
if(menu.rotar_alrededor){
if(menu.Rubik){
cubeMesh.add(pivotPoint);
pivotPoint.rotation.y += menu.speed_around; }else{
cubeMesh.remove(pivotPoint);
}
}
else{
// no-op: overwriting pivotPoint.rotation (an Euler) with false would break
// later rotation.y updates; stopping the orbit just means not incrementing it
}
if(menu.rotar){
rotateFiguras();
}
}
var updateFiguras = function(){
if (menu.Rubik){
addRubikCube();
}else{
menu['rotar_alrededor'] = false;
rmRubikCube();
}
}
var rotateFiguras = function(){
// cubeMesh.remove(cuboRubik);
// scene.add(cuboRubik);
for(let i = 0; i< temp.length; i++){
temp[i].rotation.y += menu.speed;
}
cubeMesh.rotation.y += menu.speed;
if (menu.Rubik){
pivotPoint.rotation.y -= menu.speed_around;
}
// scene.remove(cuboRubik);
// cubeMesh.add(cuboRubik);
// let temp = pivotPoint.children;
}
// callback for event to change color
function ChangeColor(){
// color.set() accepts both hex numbers (from getHex()) and '#rrggbb' strings (from dat.GUI)
selected_object.material.color.set(shape_params.color);
};
// check whether a value is a six-digit hex color
var isHex = function (posible_hex) {
// anchored and without the /g flag: a global regex keeps lastIndex state
// between test() calls, so repeated validations would intermittently fail
let re = /^[0-9A-Fa-f]{6}$/;
return re.test(posible_hex); }
// callback for event to change color
function changeBackgroundColor(){
let background_hex = new THREE.Color(menu.background).getHexString();
if ( isHex(background_hex) && scene.background != "#" + background_hex ){
scene.background = new THREE.Color("#"+background_hex); }
// backgroundScene = menu.background;
// scene.background.color.setHex(backgroundScene);
};
function changeColorTiles(){
let color_hex = new THREE.Color(menu.baldosas).getHexString();
if ( isHex(color_hex) && tile_color != "#" + color_hex ){
tile_color = "#" + color_hex;
let table = scene.getObjectByName(tablero);
scene.remove(table);
addTablero(); }
// for(let t = 0;t<color_tiles.length; t++ ){
// color_tiles[t].material.color.setHex(menu.baldosas);
// }
};
function remove(){
if(selected_object.id != cubeMesh.id)
{
console.log("remove");
control.detach(selected_object);
let index = pivotPoint.children.indexOf(selected_object);
pivotPoint.children.splice(index,1);
//scene.remove(selected_object);
renderer.render( scene, camera );
}
}
// track mouse coordinates, converted to normalized device coordinates (NDC)
function onMouseMove( event ) {
mouse.x = ( event.clientX / window.innerWidth ) * 2 - 1;
mouse.y = - ( event.clientY / window.innerHeight ) * 2 + 1;
}
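// Worked example of the NDC mapping above, assuming a 1000x500 px canvas:
//   clientX = 750, clientY = 125
//   mouse.x = ( 750 / 1000) * 2 - 1 =  0.5   (right half of the screen)
//   mouse.y = -( 125 /  500) * 2 + 1 =  0.5  (upper half; y is negated because
//             screen y grows downward while NDC y grows upward)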
// callback event for mouse down
function onDocumentMouseDown( event ) {
// event.preventDefault();
event.stopPropagation();
raycaster.setFromCamera( mouse, camera );
// calculate objects intersecting the picking ray
//var intersects = raycaster.intersectObjects( scene.children);
objects = pivotPoint.children.concat([cubeMesh]);
console.log(objects);
var intersects = raycaster.intersectObjects( objects );
// intersects[0].object.material.color.set( 0xff0000 );
// check whether the ray hit anything
if (intersects.length>0){
// pick first intersected object
selected_object = intersects[0].object;
// change gui color
shape_params.color = selected_object.material.color.getHex();
control.setMode(shape_params.picking);
control.attach( selected_object );
}
}
function onWindowResize() {
camera.aspect = window.innerWidth / window.innerHeight;
camera.updateProjectionMatrix();
renderer.setSize( window.innerWidth, window.innerHeight );
render();
}
window.addEventListener( 'resize', onWindowResize, false );
init();
addTablero();
addLights(distanceFromCenter);
createRubikCube();
//scene.add(cuboRubik);
addRubikCube();
function render(){
renderer.render(scene,camera);
}
var showAnimationLoop = function(){
requestAnimationFrame(showAnimationLoop);
updateLights();
animateFiguras();
render(); }
showAnimationLoop(); |
let geometryC = new THREE.BoxGeometry(cube_width, cube_width, cube_width );
if(i==0){
// paint bottom face
geometryC.faces[ 6 ].color.setHex(0x4b3621);
geometryC.faces[ 7 ].color.setHex(0x4b3621);
}else if(i==2){
// paint top face
geometryC.faces[ 4 ].color.setHex(0xffffff);
geometryC.faces[ 5 ].color.setHex(0xffffff);
}
if(j==0){
// paint back face
geometryC.faces[ 10 ].color.setHex(0xff0000);
geometryC.faces[ 11 ].color.setHex(0xff0000);
}else if(j==2){
// paint front face
geometryC.faces[ 8 ].color.setHex(0xff5500);
geometryC.faces[ 9 ].color.setHex(0xff5500);
}
if(k==0){
// paint left face
geometryC.faces[ 2 ].color.setHex(0x00ff55);
geometryC.faces[ 3 ].color.setHex(0x00ff55);
}else if(k==2){
// paint right face
geometryC.faces[ 0 ].color.setHex(0xffff00);
geometryC.faces[ 1 ].color.setHex(0xffff00);
}
let material = new THREE.MeshLambertMaterial( { color: "#ffffff",side: THREE.DoubleSide, vertexColors: THREE.FaceColors, flatShading: true} );
//let material = new THREE.MeshLambertMaterial( { color: 0xffffff, vertexColors: THREE.FaceColors} );
material.light = true; // note: 'light' is not a standard material property; Lambert materials react to lights by default
let cube = new THREE.Mesh(geometryC, material);
cube.position.x = ((-1.65 + k) * cube_width)+(((k+1)%2)*(-1+k)*(cube_width/50));
cube.position.z = ((-1.65 + j) * cube_width)+(((j+1)%2)*(-1+j)*(cube_width/50));
cube.position.y = ((2 + i) * cube_width)+(((i+1)%2)*(-1+i)*(cube_width/50));
cuboRubik.add(cube);
} | conditional_block |
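// Note on the face indices used in the coloring above: with the classic
// THREE.Geometry box, every cube side is two triangles, so faces[] has 12
// entries, ordered +x(0,1), -x(2,3), +y(4,5), -y(6,7), +z(8,9), -z(10,11).
// A sketch of painting one whole side with a hypothetical helper:
//   function paintSide(geometry, firstFace, hex) {
//       geometry.faces[firstFace].color.setHex(hex);
//       geometry.faces[firstFace + 1].color.setHex(hex);
//   }
//   paintSide(geometryC, 4, 0xffffff); // top, same effect as the i==2 branch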
read-genome-coverage.py | #!/usr/bin/env python
from __future__ import print_function, division, unicode_literals
import os
import sys
import json
import logging
import tempfile
import itertools
import traceback
import subprocess as sp
from os.path import basename
from datetime import datetime
from argparse import ArgumentParser, FileType
PREPROC_CMDS = {
'exon': "awk '$3 == \"exon\"' {input[0]} | sort -k1,1 -k4,4n | mergeBed -i stdin | awk 'BEGIN{{OFS=\"\\t\"}}{{$(NF+1)=\"exon\";print}}' > {output}",
'gene': "awk '$3 == \"gene\"' {input[0]} | sort -k1,1 -k4,4n | mergeBed -i stdin | awk 'BEGIN{{OFS=\"\\t\"}}{{$(NF+1)=\"gene\";print}}' > {output}",
'intron': "subtractBed -a {input[0]} -b {input[1]} | awk 'BEGIN{{OFS=\"\\t\"}}{{$(NF)=\"intron\";print}}' > {output}",
'intergenic': "complementBed -i {input[0]} -g <(cut -f 1-2 {input[1]} | sort -k1,1) | awk 'BEGIN{{OFS=\"\\t\"}}{{$(NF+1)=\"intergenic\";print}}' > {output}"
}
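# Illustrative expansion of one template above (hypothetical file names):
#   PREPROC_CMDS['exon'].format(input=['anno.gtf'], output='exon.bed')
# yields the shell pipeline:
#   awk '$3 == "exon"' anno.gtf | sort -k1,1 -k4,4n | mergeBed -i stdin \
#       | awk 'BEGIN{OFS="\t"}{$(NF+1)="exon";print}' > exon.bed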
def strfdelta(tdelta, fmt):
d = {"days": tdelta.days}
d["hours"], rem = divmod(tdelta.seconds, 3600)
d["minutes"], d["seconds"] = divmod(rem, 60)
return fmt.format(**d)
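# Example (doctest-style, values picked for illustration):
#   >>> from datetime import timedelta
#   >>> strfdelta(timedelta(hours=1, minutes=2, seconds=3), "{hours}h{minutes}m{seconds}s")
#   '1h2m3s'
# Fields are not zero-padded; a format like "{minutes:02d}" would pad them.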
def preprocess(element, inputs=None):
'''element can be one of <gene> <exon> <intron> <intergenic>'''
log = logging.getLogger('gencov')
element_bed = tempfile.mkstemp(suffix='.bed')[1]
if not inputs:
inputs = [ args.annotation ]
else:
inputs = inputs[element]
command = PREPROC_CMDS[element].format(input=inputs, output=element_bed)
log.debug(command)
proc = sp.Popen(command, shell=True, executable='/bin/bash', stderr=sp.PIPE)
err_msg = proc.communicate()[1]
if err_msg:
raise IOError(err_msg)
log.info("%s preprocessed" % element.title())
return element_bed
def gtf_processing(genome=None, prefix='gencov'):
"""Annotation preprocessing. Provide a bed file with the
following elements:
- projected exons
- projected genes
- introns
- intergenic regions
"""
all_bed = prefix + ".all.bed"
if not os.path.exists(all_bed) or os.stat(all_bed).st_size == 0:
log.info("Preprocessing annotation...")
features = ('exon', 'gene', 'intron', 'intergenic')
merged_exons, merged_genes = map(preprocess, features[:2])
ins = {
'intron': [merged_genes, merged_exons],
'intergenic': [merged_genes, genome]
}
intron_bed, intergenic_bed = map(preprocess, features[2:], [ins, ins])
log.info("Concatenate bed files for all elements...")
with open(all_bed, 'w') as out_bed:
cat_all(merged_exons, merged_genes, intron_bed, intergenic_bed, out_bed=out_bed)
for f in (merged_exons, merged_genes, intron_bed, intergenic_bed):
os.remove(f)
return all_bed
def cat_all(*args, **kwargs):
out_bed = kwargs.get('out_bed', sys.stdout)
for bed in args:
print(open(bed,'r').read(), end='', file=out_bed)
def get_chromosomes(genome_file):
with open(genome_file) as genome:
chrs = [l.split()[0] for l in genome]
return chrs
def process_bam(bam, all_elements, chrs=None, all_reads=False):
if not os.path.exists(bam):
raise IOError("Fail to open {0!r} for reading".format(bam))
bai = "{0}.bai".format(bam)
if chrs and not os.path.exists(bai):
log.info("Indexing {0}...".format(bam))
sp.call('samtools index {0}'.format(bam), shell=True)
log.info('Processing {0}...'.format(bam))
command = "samtools view -u"
sam_filter = 4
if not all_reads:
sam_filter += 256
command += " -F {0} {1}".format(str(sam_filter), bam)
if chrs:
command += " {0}".format(" ".join(chrs))
command = "{0} | bamToBed -i stdin -tag NH -bed12 | intersectBed -a stdin -b {1} -split -wao".format(command, all_elements)
log.debug(command)
return sp.Popen(command, shell=True, stdout=sp.PIPE, stderr=sp.PIPE, bufsize=1)
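# The SAM filter above is plain flag arithmetic: 4 (0x4, read unmapped) is always
# excluded, and 256 (0x100, secondary alignment) is added unless all_reads is set,
# which gives the 'samtools view -F 260' mentioned in the --all-reads help text:
#   4 + 256 == 260  ->  skip unmapped and non-primary records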
def update_counts(element, tot_counts, cont_counts, split_counts, is_split):
elem='total'
tot_counts[elem] = tot_counts.get(elem,0) + 1
if is_split:
split_counts['total'] = split_counts.get('total',0) + 1
if len(element) > 1:
if len(set(element)) == 1:
elem = element[0]
else:
if 'intergenic' in element:
elem = 'others'
else:
elem = 'exonic_intronic'
else:
elem = element[0]
split_counts[elem] = split_counts.get(elem, 0) + 1
else:
cont_counts['total'] = cont_counts.get('total', 0) + 1
if len(element) > 1:
if 'intergenic' in element:
elem = 'others'
else:
elem = 'exonic_intronic'
else:
elem = element[0]
cont_counts[elem] = cont_counts.get(elem, 0) + 1
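# Worked examples of the bucketing above (element lists as built in count_features):
#   split read, ['exon', 'exon']       -> 'exon'            (all blocks agree)
#   split read, ['exon', 'intron']     -> 'exonic_intronic'
#   any read,   ['exon', 'intergenic'] -> 'others'
#   continuous, ['intron']             -> 'intron'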
def count_features(bed, uniq=False):
# Initialize
n_skipped = {}
newRead = False # keep track of different reads
prev_rid = None # read id of the previous read
is_split = False # check if current read is a split
element = [] # list with all elements intersecting the read
cont_counts = {} # Continuous read counts
split_counts = {} # Split read counts
tot_counts = {} # Total number of reads
o = bed.stdout
log.info("Compute genomic coverage...")
# Iterate
while True:
try:
line = o.next()
if not line:
n_skipped['empty'] = n_skipped.get('empty', 0) + 1
continue
if 'gene' in line:
n_skipped['gene'] = n_skipped.get('gene', 0) + 1
continue
rchr, rstart, rend, rid, rflag, rstrand, rtstart, rtend, rrgb, rbcount, rbsizes, rbstarts, achr, astart, aend, ael, covg = line.strip().split("\t")
if uniq and int(rflag) != 1:
n_skipped['non-uniq'] = n_skipped.get('non-uniq', 0) + 1
continue
newRead = (rid != prev_rid)
if (newRead) and prev_rid!=None:
update_counts(element, tot_counts, cont_counts, split_counts, is_split)
# Re-Initialize the counters
element = []
element.append(ael) | update_counts(element, tot_counts, cont_counts, split_counts, is_split)
break
for k,v in n_skipped.iteritems():
log.info("Skipped {1} {0} lines".format(k, v))
return (tot_counts, cont_counts, split_counts)
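# Layout of each line consumed above: 12 BED12 fields for the read (chrom, start,
# end, name, score, strand, thickStart, thickEnd, rgb, blockCount, blockSizes,
# blockStarts), then the intersected annotation (chrom, start, end, element) and
# the overlap in bases added by 'intersectBed -wao', 17 fields in total. Because
# bamToBed is run with '-tag NH', the score column carries the NH tag, which is
# what the rflag == 1 uniqueness check relies on. A read counts as split when
# blockCount > 1.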
def write_output(stats, out, output_format='tsv', json_indent=4):
if not args.ID:
args.ID = basename(args.bam)
if output_format == 'tsv':
for k, v in stats.iteritems():
for k1, v1 in v.iteritems():
line_array = [args.ID, k, str(k1), str(v1)]
out.write("\t".join(line_array)+"\n")
elif output_format == 'json':
out.write('Total reads: {0}\n'.format(json.dumps(stats['total'], indent=json_indent)))
out.write('Continuous reads: {0}\n'.format(json.dumps(stats['continuous'], indent=json_indent)))
out.write('Split reads: {0}\n'.format(json.dumps(stats['split'], indent=json_indent)))
def main(args):
bn_bam = os.path.basename(args.bam).rsplit(".", 1)[0]
bn_gtf = os.path.basename(args.annotation).rsplit(".", 1)[0]
start = datetime.now()
all_elements = gtf_processing(genome=args.genome, prefix=bn_bam + "." + bn_gtf)
chrs = None if args.all_chrs else get_chromosomes(args.genome)
if args.uniq:
args.all_reads = False
bed = process_bam(args.bam, all_elements, chrs=chrs, all_reads=args.all_reads)
read_type = "UNIQ" if args.uniq else "ALL" if args.all_reads else "PRIMARY"
chroms = ", ".join(chrs) if chrs else "ALL"
log.info("Chromosomes: {0}".format(str(chroms)))
log.info("Mapped reads: {0}".format(str(read_type)))
tot, cont, split = count_features(bed, uniq=args.uniq)
stats_summary = {"total" : tot, "continuous" : cont, "split" : split}
write_output(stats_summary, args.output, output_format=args.output_format)
end = datetime.now() - start
log.info('DONE ({0})'.format(strfdelta(end, "{hours}h{minutes}m{seconds}s")))
if not args.keep:
os.remove(all_elements)
def parse_arguments(argv):
""" Parsing arguments """
parser = ArgumentParser(description = "Count the number of reads in genomic regions. NOTE: SAMtools and BEDtools must be installed")
parser.add_argument("-a", "--annotation", type=str, help="gtf with all elements (genes, transcripts and exons)", required=True)
parser.add_argument("-g", "--genome", type=str, help="genome chromosome sizes", required=True)
parser.add_argument("-b", "--bam", type=str, help="bam file", required=True)
parser.add_argument("-o", "--output", type=FileType('w'), default=sys.stdout, help="output file name")
parser.add_argument("-I", "--ID", type=str, help="the ID of the experiment, from which the bam comes from")
parser.add_argument("--keep", dest='keep', help="Do not delete the temporary files generated during the run", action='store_true', default=False)
parser.add_argument("--uniq", dest='uniq', action='store_true', help="Only use uniquely mapped reads", default=False)
parser.add_argument("--loglevel", dest='loglevel', help="Set the loglevel", default="info")
parser.add_argument("--all-reads", dest='all_reads', action='store_true', help="Use all reads from the BAM file. Default: use primary alignments only ('samtools view -F 260')", default=False)
parser.add_argument("--output-format", dest='output_format', help="Set the output format", default="tsv")
parser.add_argument("--all-chromosomes", dest='all_chrs', action='store_true', help="Use all chromosomes from the BAM file header. Default: use only chromosomes in the genome index file.", default=False)
return parser.parse_args()
def setup_logger():
""" Logging setup """
log = logging.getLogger("gencov")
log.setLevel(logging.getLevelName(args.loglevel.upper()))
ch = logging.StreamHandler()
ch.setLevel(log.level)
fmt = logging.Formatter('%(asctime)s - %(message)s', '%Y-%m-%d %H:%M:%S')
ch.setFormatter(fmt)
log.addHandler(ch)
return log
if __name__ == "__main__":
"""
Given a bam file, compute the read coverage for different genomic regions:
- exons
- introns
- exon-intron junctions
- intergenic
*** ONLY PRIMARY alignments are used ***
"""
try:
args = parse_arguments(sys.argv)
log = setup_logger()
main(args)
exit(0)
except Exception,err:
log.error("Error:")
errinfo = traceback.format_exception(sys.exc_type, sys.exc_value, sys.exc_traceback)
log.error("".join(errinfo))
exit(1) | prev_rid = rid
is_split = int(rbcount) > 1
except StopIteration: | random_line_split |
read-genome-coverage.py | #!/usr/bin/env python
from __future__ import print_function, division, unicode_literals
import os
import sys
import json
import logging
import tempfile
import itertools
import traceback
import subprocess as sp
from os.path import basename
from datetime import datetime
from argparse import ArgumentParser, FileType
PREPROC_CMDS = {
'exon': "awk '$3 == \"exon\"' {input[0]} | sort -k1,1 -k4,4n | mergeBed -i stdin | awk 'BEGIN{{OFS=\"\\t\"}}{{$(NF+1)=\"exon\";print}}' > {output}",
'gene': "awk '$3 == \"gene\"' {input[0]} | sort -k1,1 -k4,4n | mergeBed -i stdin | awk 'BEGIN{{OFS=\"\\t\"}}{{$(NF+1)=\"gene\";print}}' > {output}",
'intron': "subtractBed -a {input[0]} -b {input[1]} | awk 'BEGIN{{OFS=\"\\t\"}}{{$(NF)=\"intron\";print}}' > {output}",
'intergenic': "complementBed -i {input[0]} -g <(cut -f 1-2 {input[1]} | sort -k1,1) | awk 'BEGIN{{OFS=\"\\t\"}}{{$(NF+1)=\"intergenic\";print}}' > {output}"
}
def strfdelta(tdelta, fmt):
d = {"days": tdelta.days}
d["hours"], rem = divmod(tdelta.seconds, 3600)
d["minutes"], d["seconds"] = divmod(rem, 60)
return fmt.format(**d)
def preprocess(element, inputs=None):
'''element can be one of <gene> <exon> <intron> <intergenic>'''
log = logging.getLogger('gencov')
element_bed = tempfile.mkstemp(suffix='.bed')[1]
if not inputs:
inputs = [ args.annotation ]
else:
inputs = inputs[element]
command = PREPROC_CMDS[element].format(input=inputs, output=element_bed)
log.debug(command)
proc = sp.Popen(command, shell=True, executable='/bin/bash', stderr=sp.PIPE)
err_msg = proc.communicate()[1]
if err_msg:
raise IOError(err_msg)
log.info("%s preprocessed" % element.title())
return element_bed
def gtf_processing(genome=None, prefix='gencov'):
"""Annotation preprocessing. Provide a bed file with the
following elements:
- projected exons
- projected genes
- introns
- intergenic regions
"""
all_bed = prefix + ".all.bed"
if not os.path.exists(all_bed) or os.stat(all_bed).st_size == 0:
log.info("Preprocessing annotation...")
features = ('exon', 'gene', 'intron', 'intergenic')
merged_exons, merged_genes = map(preprocess, features[:2])
ins = {
'intron': [merged_genes, merged_exons],
'intergenic': [merged_genes, genome]
}
intron_bed, intergenic_bed = map(preprocess, features[2:], [ins, ins])
log.info("Concatenate bed files for all elements...")
with open(all_bed, 'w') as out_bed:
cat_all(merged_exons, merged_genes, intron_bed, intergenic_bed, out_bed=out_bed)
for f in (merged_exons, merged_genes, intron_bed, intergenic_bed):
os.remove(f)
return all_bed
def cat_all(*args, **kwargs):
out_bed = kwargs.get('out_bed', sys.stdout)
for bed in args:
print(open(bed,'r').read(), end='', file=out_bed)
def get_chromosomes(genome_file):
with open(genome_file) as genome:
chrs = [l.split()[0] for l in genome]
return chrs
def process_bam(bam, all_elements, chrs=None, all_reads=False):
if not os.path.exists(bam):
raise IOError("Fail to open {0!r} for reading".format(bam))
bai = "{0}.bai".format(bam)
if chrs and not os.path.exists(bai):
log.info("Indexing {0}...".format(bam))
sp.call('samtools index {0}'.format(bam), shell=True)
log.info('Processing {0}...'.format(bam))
command = "samtools view -u"
sam_filter = 4
if not all_reads:
sam_filter += 256
command += " -F {0} {1}".format(str(sam_filter), bam)
if chrs:
command += " {0}".format(" ".join(chrs))
command = "{0} | bamToBed -i stdin -tag NH -bed12 | intersectBed -a stdin -b {1} -split -wao".format(command, all_elements)
log.debug(command)
return sp.Popen(command, shell=True, stdout=sp.PIPE, stderr=sp.PIPE, bufsize=1)
def update_counts(element, tot_counts, cont_counts, split_counts, is_split):
elem='total'
tot_counts[elem] = tot_counts.get(elem,0) + 1
if is_split:
split_counts['total'] = split_counts.get('total',0) + 1
if len(element) > 1:
|
else:
elem = element[0]
split_counts[elem] = split_counts.get(elem, 0) + 1
else:
cont_counts['total'] = cont_counts.get('total', 0) + 1
if len(element) > 1:
if 'intergenic' in element:
elem = 'others'
else:
elem = 'exonic_intronic'
else:
elem = element[0]
cont_counts[elem] = cont_counts.get(elem, 0) + 1
def count_features(bed, uniq=False):
# Initialize
n_skipped = {}
newRead = False # keep track of different reads
prev_rid = None # read id of the previous read
is_split = False # check if current read is a split
element = [] # list with all elements intersecting the read
cont_counts = {} # Continuous read counts
split_counts = {} # Split read counts
tot_counts = {} # Total number of reads
o = bed.stdout
log.info("Compute genomic coverage...")
# Iterate
while True:
try:
line = o.next()
if not line:
n_skipped['empty'] = n_skipped.get('empty', 0) + 1
continue
if 'gene' in line:
n_skipped['gene'] = n_skipped.get('gene', 0) + 1
continue
rchr, rstart, rend, rid, rflag, rstrand, rtstart, rtend, rrgb, rbcount, rbsizes, rbstarts, achr, astart, aend, ael, covg = line.strip().split("\t")
if uniq and int(rflag) != 1:
n_skipped['non-uniq'] = n_skipped.get('non-uniq', 0) + 1
continue
newRead = (rid != prev_rid)
if (newRead) and prev_rid!=None:
update_counts(element, tot_counts, cont_counts, split_counts, is_split)
# Re-Initialize the counters
element = []
element.append(ael)
prev_rid = rid
is_split = int(rbcount) > 1
except StopIteration:
update_counts(element, tot_counts, cont_counts, split_counts, is_split)
break
for k,v in n_skipped.iteritems():
log.info("Skipped {1} {0} lines".format(k, v))
return (tot_counts, cont_counts, split_counts)
def write_output(stats, out, output_format='tsv', json_indent=4):
if not args.ID:
args.ID = basename(args.bam)
if output_format == 'tsv':
for k, v in stats.iteritems():
for k1, v1 in v.iteritems():
line_array = [args.ID, k, str(k1), str(v1)]
out.write("\t".join(line_array)+"\n")
elif output_format == 'json':
out.write('Total reads: {0}\n'.format(json.dumps(stats['total'], indent=json_indent)))
out.write('Continuous reads: {0}\n'.format(json.dumps(stats['continuous'], indent=json_indent)))
out.write('Split reads: {0}\n'.format(json.dumps(stats['split'], indent=json_indent)))
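# Example of the TSV layout written above (hypothetical ID and counts), one
# tab-separated "<ID> <group> <element> <count>" row per counter:
#   sample1  total       total  1000
#   sample1  continuous  exon   600
#   sample1  split       exon   150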
def main(args):
bn_bam = os.path.basename(args.bam).rsplit(".", 1)[0]
bn_gtf = os.path.basename(args.annotation).rsplit(".", 1)[0]
start = datetime.now()
all_elements = gtf_processing(genome=args.genome, prefix=bn_bam + "." + bn_gtf)
chrs = None if args.all_chrs else get_chromosomes(args.genome)
if args.uniq:
args.all_reads = False
bed = process_bam(args.bam, all_elements, chrs=chrs, all_reads=args.all_reads)
read_type = "UNIQ" if args.uniq else "ALL" if args.all_reads else "PRIMARY"
chroms = ", ".join(chrs) if chrs else "ALL"
log.info("Chromosomes: {0}".format(str(chroms)))
log.info("Mapped reads: {0}".format(str(read_type)))
tot, cont, split = count_features(bed, uniq=args.uniq)
stats_summary = {"total" : tot, "continuous" : cont, "split" : split}
write_output(stats_summary, args.output, output_format=args.output_format)
end = datetime.now() - start
log.info('DONE ({0})'.format(strfdelta(end, "{hours}h{minutes}m{seconds}s")))
if not args.keep:
os.remove(all_elements)
def parse_arguments(argv):
""" Parsing arguments """
parser = ArgumentParser(description = "Count the number of reads in genomic regions. NOTE: SAMtools and BEDtools must be installed")
parser.add_argument("-a", "--annotation", type=str, help="gtf with all elements (genes, transcripts and exons)", required=True)
parser.add_argument("-g", "--genome", type=str, help="genome chromosome sizes", required=True)
parser.add_argument("-b", "--bam", type=str, help="bam file", required=True)
parser.add_argument("-o", "--output", type=FileType('w'), default=sys.stdout, help="output file name")
parser.add_argument("-I", "--ID", type=str, help="the ID of the experiment, from which the bam comes from")
parser.add_argument("--keep", dest='keep', help="Do not delete the temporary files generated during the run", action='store_true', default=False)
parser.add_argument("--uniq", dest='uniq', action='store_true', help="Only use uniquely mapped reads", default=False)
parser.add_argument("--loglevel", dest='loglevel', help="Set the loglevel", default="info")
parser.add_argument("--all-reads", dest='all_reads', action='store_true', help="Use all reads from the BAM file. Default: use primary alignments only ('samtools view -F 260')", default=False)
parser.add_argument("--output-format", dest='output_format', help="Set the output format", default="tsv")
parser.add_argument("--all-chromosomes", dest='all_chrs', action='store_true', help="Use all chromosomes from the BAM file header. Default: use only chromosomes in the genome index file.", default=False)
return parser.parse_args()
def setup_logger():
""" Logging setup """
log = logging.getLogger("gencov")
log.setLevel(logging.getLevelName(args.loglevel.upper()))
ch = logging.StreamHandler()
ch.setLevel(log.level)
fmt = logging.Formatter('%(asctime)s - %(message)s', '%Y-%m-%d %H:%M:%S')
ch.setFormatter(fmt)
log.addHandler(ch)
return log
if __name__ == "__main__":
"""
Given a bam file, compute the read coverage for different genomic regions:
- exons
- introns
- exon-intron junctions
- intergenic
*** ONLY PRIMARY alignments are used ***
"""
try:
args = parse_arguments(sys.argv)
log = setup_logger()
main(args)
exit(0)
except Exception,err:
log.error("Error:")
errinfo = traceback.format_exception(sys.exc_type, sys.exc_value, sys.exc_traceback)
log.error("".join(errinfo))
exit(1)
| if len(set(element)) == 1:
elem = element[0]
else:
if 'intergenic' in element:
elem = 'others'
else:
elem = 'exonic_intronic' | conditional_block |
read-genome-coverage.py | #!/usr/bin/env python
from __future__ import print_function, division, unicode_literals
import os
import sys
import json
import logging
import tempfile
import itertools
import traceback
import subprocess as sp
from os.path import basename
from datetime import datetime
from argparse import ArgumentParser, FileType
PREPROC_CMDS = {
'exon': "awk '$3 == \"exon\"' {input[0]} | sort -k1,1 -k4,4n | mergeBed -i stdin | awk 'BEGIN{{OFS=\"\\t\"}}{{$(NF+1)=\"exon\";print}}' > {output}",
'gene': "awk '$3 == \"gene\"' {input[0]} | sort -k1,1 -k4,4n | mergeBed -i stdin | awk 'BEGIN{{OFS=\"\\t\"}}{{$(NF+1)=\"gene\";print}}' > {output}",
'intron': "subtractBed -a {input[0]} -b {input[1]} | awk 'BEGIN{{OFS=\"\\t\"}}{{$(NF)=\"intron\";print}}' > {output}",
'intergenic': "complementBed -i {input[0]} -g <(cut -f 1-2 {input[1]} | sort -k1,1) | awk 'BEGIN{{OFS=\"\\t\"}}{{$(NF+1)=\"intergenic\";print}}' > {output}"
}
def strfdelta(tdelta, fmt):
d = {"days": tdelta.days}
d["hours"], rem = divmod(tdelta.seconds, 3600)
d["minutes"], d["seconds"] = divmod(rem, 60)
return fmt.format(**d)
def preprocess(element, inputs=None):
'''element can be one of <gene> <exon> <intron> <intergenic>'''
log = logging.getLogger('gencov')
element_bed = tempfile.mkstemp(suffix='.bed')[1]
if not inputs:
inputs = [ args.annotation ]
else:
inputs = inputs[element]
command = PREPROC_CMDS[element].format(input=inputs, output=element_bed)
log.debug(command)
proc = sp.Popen(command, shell=True, executable='/bin/bash', stderr=sp.PIPE)
err_msg = proc.communicate()[1]
if err_msg:
raise IOError(err_msg)
log.info("%s preprocessed" % element.title())
return element_bed
def gtf_processing(genome=None, prefix='gencov'):
"""Annotation preprocessing. Provide a bed file with the
following elements:
- projected exons
- projected genes
- introns
- intergenic regions
"""
all_bed = prefix + ".all.bed"
if not os.path.exists(all_bed) or os.stat(all_bed).st_size == 0:
log.info("Preprocessing annotation...")
features = ('exon', 'gene', 'intron', 'intergenic')
merged_exons, merged_genes = map(preprocess, features[:2])
ins = {
'intron': [merged_genes, merged_exons],
'intergenic': [merged_genes, genome]
}
intron_bed, intergenic_bed = map(preprocess, features[2:], [ins, ins])
log.info("Concatenate bed files for all elements...")
with open(all_bed, 'w') as out_bed:
cat_all(merged_exons, merged_genes, intron_bed, intergenic_bed, out_bed=out_bed)
for f in (merged_exons, merged_genes, intron_bed, intergenic_bed):
os.remove(f)
return all_bed
def cat_all(*args, **kwargs):
out_bed = kwargs.get('out_bed', sys.stdout)
for bed in args:
print(open(bed,'r').read(), end='', file=out_bed)
def get_chromosomes(genome_file):
with open(genome_file) as genome:
chrs = [l.split()[0] for l in genome]
return chrs
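# The genome file is the usual two-column chromosome-sizes table, e.g.:
#   chr1    248956422
#   chr2    242193529
# get_chromosomes() keeps only the first column.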
def process_bam(bam, all_elements, chrs=None, all_reads=False):
if not os.path.exists(bam):
raise IOError("Fail to open {0!r} for reading".format(bam))
bai = "{0}.bai".format(bam)
if chrs and not os.path.exists(bai):
log.info("Indexing {0}...".format(bam))
sp.call('samtools index {0}'.format(bam), shell=True)
log.info('Processing {0}...'.format(bam))
command = "samtools view -u"
sam_filter = 4
if not all_reads:
sam_filter += 256
command += " -F {0} {1}".format(str(sam_filter), bam)
if chrs:
command += " {0}".format(" ".join(chrs))
command = "{0} | bamToBed -i stdin -tag NH -bed12 | intersectBed -a stdin -b {1} -split -wao".format(command, all_elements)
log.debug(command)
return sp.Popen(command, shell=True, stdout=sp.PIPE, stderr=sp.PIPE, bufsize=1)
def update_counts(element, tot_counts, cont_counts, split_counts, is_split):
elem='total'
tot_counts[elem] = tot_counts.get(elem,0) + 1
if is_split:
split_counts['total'] = split_counts.get('total',0) + 1
if len(element) > 1:
if len(set(element)) == 1:
elem = element[0]
else:
if 'intergenic' in element:
elem = 'others'
else:
elem = 'exonic_intronic'
else:
elem = element[0]
split_counts[elem] = split_counts.get(elem, 0) + 1
else:
cont_counts['total'] = cont_counts.get('total', 0) + 1
if len(element) > 1:
if 'intergenic' in element:
elem = 'others'
else:
elem = 'exonic_intronic'
else:
elem = element[0]
cont_counts[elem] = cont_counts.get(elem, 0) + 1
def count_features(bed, uniq=False):
# Initialize
|
def write_output(stats, out, output_format='tsv', json_indent=4):
if not args.ID:
args.ID = basename(args.bam)
if output_format == 'tsv':
for k, v in stats.iteritems():
for k1, v1 in v.iteritems():
line_array = [args.ID, k, str(k1), str(v1)]
out.write("\t".join(line_array)+"\n")
elif output_format == 'json':
out.write('Total reads: {0}\n'.format(json.dumps(stats['total'], indent=json_indent)))
out.write('Continuous reads: {0}\n'.format(json.dumps(stats['continuous'], indent=json_indent)))
out.write('Split reads: {0}\n'.format(json.dumps(stats['split'], indent=json_indent)))
def main(args):
bn_bam = os.path.basename(args.bam).rsplit(".", 1)[0]
bn_gtf = os.path.basename(args.annotation).rsplit(".", 1)[0]
start = datetime.now()
all_elements = gtf_processing(genome=args.genome, prefix=bn_bam + "." + bn_gtf)
chrs = None if args.all_chrs else get_chromosomes(args.genome)
if args.uniq:
args.all_reads = False
bed = process_bam(args.bam, all_elements, chrs=chrs, all_reads=args.all_reads)
read_type = "UNIQ" if args.uniq else "ALL" if args.all_reads else "PRIMARY"
chroms = ", ".join(chrs) if chrs else "ALL"
log.info("Chromosomes: {0}".format(str(chroms)))
log.info("Mapped reads: {0}".format(str(read_type)))
tot, cont, split = count_features(bed, uniq=args.uniq)
stats_summary = {"total" : tot, "continuous" : cont, "split" : split}
write_output(stats_summary, args.output, output_format=args.output_format)
end = datetime.now() - start
log.info('DONE ({0})'.format(strfdelta(end, "{hours}h{minutes}m{seconds}s")))
if not args.keep:
os.remove(all_elements)
def parse_arguments(argv):
""" Parsing arguments """
parser = ArgumentParser(description = "Count the number of reads in genomic regions. NOTE: SAMtools and BEDtools must be installed")
parser.add_argument("-a", "--annotation", type=str, help="gtf with all elements (genes, transcripts and exons)", required=True)
parser.add_argument("-g", "--genome", type=str, help="genome chromosome sizes", required=True)
parser.add_argument("-b", "--bam", type=str, help="bam file", required=True)
parser.add_argument("-o", "--output", type=FileType('w'), default=sys.stdout, help="output file name")
parser.add_argument("-I", "--ID", type=str, help="the ID of the experiment, from which the bam comes from")
parser.add_argument("--keep", dest='keep', help="Do not delete the temporary files generated during the run", action='store_true', default=False)
parser.add_argument("--uniq", dest='uniq', action='store_true', help="Only use uniquely mapped reads", default=False)
parser.add_argument("--loglevel", dest='loglevel', help="Set the loglevel", default="info")
parser.add_argument("--all-reads", dest='all_reads', action='store_true', help="Use all reads from the BAM file. Default: use primary alignments only ('samtools view -F 260')", default=False)
parser.add_argument("--output-format", dest='output_format', help="Set the output format", default="tsv")
parser.add_argument("--all-chromosomes", dest='all_chrs', action='store_true', help="Use all chromosomes from the BAM file header. Default: use only chromosomes in the genome index file.", default=False)
return parser.parse_args()
def setup_logger():
""" Logging setup """
log = logging.getLogger("gencov")
log.setLevel(logging.getLevelName(args.loglevel.upper()))
ch = logging.StreamHandler()
ch.setLevel(log.level)
fmt = logging.Formatter('%(asctime)s - %(message)s', '%Y-%m-%d %H:%M:%S')
ch.setFormatter(fmt)
log.addHandler(ch)
return log
if __name__ == "__main__":
"""
Given a bam file, compute the read coverage for different genomic regions:
- exons
- introns
- exon-intron junctions
- intergenic
*** ONLY PRIMARY alignments are used ***
"""
try:
args = parse_arguments(sys.argv)
log = setup_logger()
main(args)
exit(0)
except Exception,err:
log.error("Error:")
errinfo = traceback.format_exception(sys.exc_type, sys.exc_value, sys.exc_traceback)
log.error("".join(errinfo))
exit(1)
| n_skipped = {}
newRead = False # keep track of different reads
prev_rid = None # read id of the previous read
is_split = False # check if current read is a split
element = [] # list with all elements intersecting the read
cont_counts = {} # Continuous read counts
split_counts = {} # Split read counts
tot_counts = {} # Total number of reads
o = bed.stdout
log.info("Compute genomic coverage...")
# Iterate
while True:
try:
line = o.next()
if not line:
n_skipped['empty'] = n_skipped.get('empty', 0) + 1
continue
if 'gene' in line:
n_skipped['gene'] = n_skipped.get('gene', 0) + 1
continue
rchr, rstart, rend, rid, rflag, rstrand, rtstart, rtend, rrgb, rbcount, rbsizes, rbstarts, achr, astart, aend, ael, covg = line.strip().split("\t")
if uniq and int(rflag) != 1:
n_skipped['non-uniq'] = n_skipped.get('non-uniq', 0) + 1
continue
newRead = (rid != prev_rid)
if (newRead) and prev_rid!=None:
update_counts(element, tot_counts, cont_counts, split_counts, is_split)
# Re-Initialize the counters
element = []
element.append(ael)
prev_rid = rid
is_split = int(rbcount) > 1
except StopIteration:
update_counts(element, tot_counts, cont_counts, split_counts, is_split)
break
for k,v in n_skipped.iteritems():
log.info("Skipped {1} {0} lines".format(k, v))
return (tot_counts, cont_counts, split_counts) | identifier_body |
read-genome-coverage.py | #!/usr/bin/env python
from __future__ import print_function, division, unicode_literals
import os
import sys
import json
import logging
import tempfile
import itertools
import traceback
import subprocess as sp
from os.path import basename
from datetime import datetime
from argparse import ArgumentParser, FileType
PREPROC_CMDS = {
'exon': "awk '$3 == \"exon\"' {input[0]} | sort -k1,1 -k4,4n | mergeBed -i stdin | awk 'BEGIN{{OFS=\"\\t\"}}{{$(NF+1)=\"exon\";print}}' > {output}",
'gene': "awk '$3 == \"gene\"' {input[0]} | sort -k1,1 -k4,4n | mergeBed -i stdin | awk 'BEGIN{{OFS=\"\\t\"}}{{$(NF+1)=\"gene\";print}}' > {output}",
'intron': "subtractBed -a {input[0]} -b {input[1]} | awk 'BEGIN{{OFS=\"\\t\"}}{{$(NF)=\"intron\";print}}' > {output}",
'intergenic': "complementBed -i {input[0]} -g <(cut -f 1-2 {input[1]} | sort -k1,1) | awk 'BEGIN{{OFS=\"\\t\"}}{{$(NF+1)=\"intergenic\";print}}' > {output}"
}
def strfdelta(tdelta, fmt):
d = {"days": tdelta.days}
d["hours"], rem = divmod(tdelta.seconds, 3600)
d["minutes"], d["seconds"] = divmod(rem, 60)
return fmt.format(**d)
def preprocess(element, inputs=None):
'''element can be one of <gene> <exon> <intron> <intergenic>'''
log = logging.getLogger('gencov')
element_bed = tempfile.mkstemp(suffix='.bed')[1]
if not inputs:
inputs = [ args.annotation ]
else:
inputs = inputs[element]
command = PREPROC_CMDS[element].format(input=inputs, output=element_bed)
log.debug(command)
proc = sp.Popen(command, shell=True, executable='/bin/bash', stderr=sp.PIPE)
err_msg = proc.communicate()[1]
if err_msg:
raise IOError(err_msg)
log.info("%s preprocessed" % element.title())
return element_bed
def | (genome=None, prefix='gencov'):
"""Annotation preprocessing. Provide a bed file with the
following elements:
- projected exons
- projected genes
- introns
- intergenic regions
"""
all_bed = prefix + ".all.bed"
if not os.path.exists(all_bed) or os.stat(all_bed).st_size == 0:
log.info("Preprocessing annotation...")
features = ('exon', 'gene', 'intron', 'intergenic')
merged_exons, merged_genes = map(preprocess, features[:2])
ins = {
'intron': [merged_genes, merged_exons],
'intergenic': [merged_genes, genome]
}
intron_bed, intergenic_bed = map(preprocess, features[2:], [ins, ins])
log.info("Concatenate bed files for all elements...")
with open(all_bed, 'w') as out_bed:
cat_all(merged_exons, merged_genes, intron_bed, intergenic_bed, out_bed=out_bed)
for f in (merged_exons, merged_genes, intron_bed, intergenic_bed):
os.remove(f)
return all_bed
def cat_all(*args, **kwargs):
out_bed = kwargs.get('out_bed', sys.stdout)
for bed in args:
print(open(bed,'r').read(), end='', file=out_bed)
def get_chromosomes(genome_file):
with open(genome_file) as genome:
chrs = [l.split()[0] for l in genome]
return chrs
def process_bam(bam, all_elements, chrs=None, all_reads=False):
if not os.path.exists(bam):
raise IOError("Fail to open {0!r} for reading".format(bam))
bai = "{0}.bai".format(bam)
if chrs and not os.path.exists(bai):
log.info("Indexing {0}...".format(bam))
sp.call('samtools index {0}'.format(bam), shell=True)
log.info('Processing {0}...'.format(bam))
command = "samtools view -u"
sam_filter = 4
if not all_reads:
sam_filter += 256
command += " -F {0} {1}".format(str(sam_filter), bam)
if chrs:
command += " {0}".format(" ".join(chrs))
command = "{0} | bamToBed -i stdin -tag NH -bed12 | intersectBed -a stdin -b {1} -split -wao".format(command, all_elements)
log.debug(command)
return sp.Popen(command, shell=True, stdout=sp.PIPE, stderr=sp.PIPE, bufsize=1)
def update_counts(element, tot_counts, cont_counts, split_counts, is_split):
elem='total'
tot_counts[elem] = tot_counts.get(elem,0) + 1
if is_split:
split_counts['total'] = split_counts.get('total',0) + 1
if len(element) > 1:
if len(set(element)) == 1:
elem = element[0]
else:
if 'intergenic' in element:
elem = 'others'
else:
elem = 'exonic_intronic'
else:
elem = element[0]
split_counts[elem] = split_counts.get(elem, 0) + 1
else:
cont_counts['total'] = cont_counts.get('total', 0) + 1
if len(element) > 1:
if 'intergenic' in element:
elem = 'others'
else:
elem = 'exonic_intronic'
else:
elem = element[0]
cont_counts[elem] = cont_counts.get(elem, 0) + 1
def count_features(bed, uniq=False):
# Initialize
n_skipped = {}
newRead = False # keep track of different reads
prev_rid = None # read id of the previous read
is_split = False # check if current read is a split
element = [] # list with all elements intersecting the read
cont_counts = {} # Continuous read counts
split_counts = {} # Split read counts
tot_counts = {} # Total number of reads
o = bed.stdout
log.info("Compute genomic coverage...")
# Iterate
while True:
try:
line = o.next()
if not line:
n_skipped['empty'] = n_skipped.get('empty', 0) + 1
continue
if 'gene' in line:
n_skipped['gene'] = n_skipped.get('gene', 0) + 1
continue
rchr, rstart, rend, rid, rflag, rstrand, rtstart, rtend, rrgb, rbcount, rbsizes, rbstarts, achr, astart, aend, ael, covg = line.strip().split("\t")
if uniq and int(rflag) != 1:
n_skipped['non-uniq'] = n_skipped.get('non-uniq', 0) + 1
continue
newRead = (rid != prev_rid)
if (newRead) and prev_rid!=None:
update_counts(element, tot_counts, cont_counts, split_counts, is_split)
# Re-Initialize the counters
element = []
element.append(ael)
prev_rid = rid
is_split = int(rbcount) > 1
except StopIteration:
update_counts(element, tot_counts, cont_counts, split_counts, is_split)
break
for k,v in n_skipped.iteritems():
log.info("Skipped {1} {0} lines".format(k, v))
return (tot_counts, cont_counts, split_counts)
def write_output(stats, out, output_format='tsv', json_indent=4):
if not args.ID:
args.ID = basename(args.bam)
if output_format == 'tsv':
for k, v in stats.iteritems():
for k1, v1 in v.iteritems():
line_array = [args.ID, k, str(k1), str(v1)]
out.write("\t".join(line_array)+"\n")
elif output_format == 'json':
out.write('Total reads: {0}\n'.format(json.dumps(stats['total'], indent=json_indent)))
out.write('Continuous reads: {0}\n'.format(json.dumps(stats['continuous'], indent=json_indent)))
out.write('Split reads: {0}\n'.format(json.dumps(stats['split'], indent=json_indent)))
def main(args):
bn_bam = os.path.basename(args.bam).rsplit(".", 1)[0]
bn_gtf = os.path.basename(args.annotation).rsplit(".", 1)[0]
start = datetime.now()
all_elements = gtf_processing(genome=args.genome, prefix=bn_bam + "." + bn_gtf)
chrs = None if args.all_chrs else get_chromosomes(args.genome)
if args.uniq:
args.all_reads = False
bed = process_bam(args.bam, all_elements, chrs=chrs, all_reads=args.all_reads)
read_type = "UNIQ" if args.uniq else "ALL" if args.all_reads else "PRIMARY"
chroms = ", ".join(chrs) if chrs else "ALL"
log.info("Chromosomes: {0}".format(str(chroms)))
log.info("Mapped reads: {0}".format(str(read_type)))
tot, cont, split = count_features(bed, uniq=args.uniq)
stats_summary = {"total" : tot, "continuous" : cont, "split" : split}
write_output(stats_summary, args.output, output_format=args.output_format)
end = datetime.now() - start
log.info('DONE ({0})'.format(strfdelta(end, "{hours}h{minutes}m{seconds}s")))
if not args.keep:
os.remove(all_elements)
def parse_arguments(argv):
""" Parsing arguments """
parser = ArgumentParser(description = "Count the number of reads in genomic regions. NOTE: SAMtools and BEDtools must be installed")
parser.add_argument("-a", "--annotation", type=str, help="gtf with all elements (genes, transcripts and exons)", required=True)
parser.add_argument("-g", "--genome", type=str, help="genome chromosome sizes", required=True)
parser.add_argument("-b", "--bam", type=str, help="bam file", required=True)
parser.add_argument("-o", "--output", type=FileType('w'), default=sys.stdout, help="output file name")
parser.add_argument("-I", "--ID", type=str, help="the ID of the experiment, from which the bam comes from")
parser.add_argument("--keep", dest='keep', help="Do not delete the temporary files generated during the run", action='store_true', default=False)
parser.add_argument("--uniq", dest='uniq', action='store_true', help="Only use uniquely mapped reads", default=False)
parser.add_argument("--loglevel", dest='loglevel', help="Set the loglevel", default="info")
parser.add_argument("--all-reads", dest='all_reads', action='store_true', help="Use all reads from the BAM file. Default: use primary alignments only ('samtools view -F 260')", default=False)
parser.add_argument("--output-format", dest='output_format', help="Set the output format", default="tsv")
parser.add_argument("--all-chromosomes", dest='all_chrs', action='store_true', help="Use all chromosomes from the BAM file header. Default: use only chromosomes in the genome index file.", default=False)
return parser.parse_args()
def setup_logger():
""" Logging setup """
log = logging.getLogger("gencov")
log.setLevel(logging.getLevelName(args.loglevel.upper()))
ch = logging.StreamHandler()
ch.setLevel(log.level)
fmt = logging.Formatter('%(asctime)s - %(message)s', '%Y-%m-%d %H:%M:%S')
ch.setFormatter(fmt)
log.addHandler(ch)
return log
if __name__ == "__main__":
"""
Given a bam file, compute the read coverage for different genomic regions:
- exons
- introns
- exon-intron junctions
- intergenic
*** ONLY PRIMARY alignments are used ***
"""
try:
args = parse_arguments(sys.argv)
log = setup_logger()
main(args)
exit(0)
except Exception,err:
log.error("Error:")
errinfo = traceback.format_exception(sys.exc_type, sys.exc_value, sys.exc_traceback)
log.error("".join(errinfo))
exit(1)
| gtf_processing | identifier_name |
main.js | /***
* Defines a rectangular object that can be rendered on the screen
*/
class Sprite {
/**
*
* @param {number} x the x position of the top left corner
* @param {number} y the y position of the top left corner
*/
constructor(x, y) {
this.x = x;
this.y = y;
}
/***
* Render the sprite at its position on the screen
*/
draw() {}
}
/***
* Defines a rectangular clickable sprite that does something when clicked
*/
class Clickable extends Sprite {
//we might want to make this a subclass of sprite and just display sprites
/**
*
* @param {number} x the x position of the top left corner
* @param {number} y the y position of the top left corner
* @param {number} width the width of the zone
* @param {number} height the height of the zone
*/
constructor(x, y, width, height) {
super(x, y);
this.width = width;
this.height = height;
}
left_click() {}
right_click() {}
test_click(x, y, button) {
if (x >= this.x && x <= this.x + this.width && y >= this.y && y <= this.y + this.height) {
if (button === 0) {
this.left_click();
} else if (button === 2) {
this.right_click()
}
return true;
}
return false;
}
}
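// Usage sketch for the hit test above (hypothetical zone, not created elsewhere):
//   const zone = new Clickable(10, 10, 100, 50);
//   zone.test_click(40, 30, 0);   // true, inside the rectangle, fires left_click()
//   zone.test_click(200, 30, 0);  // false, outside the rectangle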
class SnakeTile extends Sprite {
constructor(x, y, width, height, trueX, trueY) {
super(x, y);
this.width = width;
this.height = height;
this.trueX = trueX;
this.trueY = trueY;
}
draw() {
ctx.fillStyle = "#888888";
ctx.fillRect(this.x, this.y, this.width, this.height);
ctx.strokeStyle = "#999999";
ctx.lineWidth = 1;
ctx.strokeRect(this.x, this.y, this.width, this.height);
if (snake_board.grid[this.trueX][this.trueY] === -1) {
ctx.fillStyle = "#0000ff";
ctx.fillRect(this.x, this.y, this.width, this.height);
} else if (snake_board.grid[this.trueX][this.trueY] > 0){
ctx.fillStyle = "#888888";
ctx.fillRect(this.x, this.y, this.width, this.height);
ctx.fillStyle = "#ff0000";
let scale_factor = (snake_board.grid[this.trueX][this.trueY]/snake_board.length)/4;
ctx.fillRect(this.x + this.width*scale_factor, this.y + this.height*scale_factor, this.width - 2*this.width*scale_factor, this.height - 2*this.height*scale_factor);
}
}
}
class Button extends Clickable{
/**
* Creates a button which runs a function when it is clicked
* @param x
* @param y
* @param width
* @param height
* @param {string} text The text on the button
* @param {function} action The function the button runs on press
*/
constructor(x, y, width, height, text, action) {
super(x, y, width, height);
this.text = text;
this.action = action;
this.font = height + "px Verdana"
}
left_click() {
this.action();
}
draw() {
ctx.fillStyle = "#000099";
ctx.strokeStyle = "#000000";
ctx.lineWidth = 2;
ctx.fillRect(this.x, this.y, this.width, this.height);
ctx.strokeRect(this.x, this.y, this.width, this.height);
ctx.fillStyle = "#ffffff";
ctx.font = this.font;
ctx.textAlign = "center";
ctx.fillText(this.text, this.x + this.width/2, this.y + this.height*.9, this.width);
}
}
class Timer extends Sprite {
constructor(x, y, width, height) {
super(x, y);
this.width = width;
this.height = height;
this.font = height + "px Consolas";
}
draw() {
ctx.fillStyle = "#333333";
ctx.fillRect(this.x, this.y, this.width, this.height);
ctx.strokeStyle = "#999999";
ctx.lineWidth = 2;
ctx.strokeRect(this.x, this.y, this.width, this.height);
ctx.fillStyle = "#999999";
ctx.textAlign = "center";
ctx.font = this.font;
ctx.fillText(Math.floor(play_time/60) + ":" + (play_time%60 < 10 ? "0": "") + play_time%60 , this.x + this.width/2, this.y + this.height*.9);
}
}
class Popup extends Clickable {
constructor(title_text) {
//the popup covers the entire screen
super(0, 0, canvas.width, canvas.height);
this.title_text = title_text;
}
draw() {
ctx.fillStyle = "#dfafaf";
ctx.fillRect(this.width*.33, this.height*.15, this.width*.33, this.height*.30);
ctx.strokeStyle = "#333333";
ctx.lineWidth = 3;
ctx.strokeRect(this.width*.33, this.height*.15, this.width*.33, this.height*.30);
ctx.fillStyle = "#ffffff";
ctx.font = "40px Verdana";
ctx.textAlign = "center";
ctx.fillText(this.title_text, this.width/2, this.height*.23, this.width*.33);
}
}
//don't ask me why the signs are reversed
const DIRECTIONS = {
NORTH: {x: 0, y: 1},
EAST: {x: 1, y: 0},
SOUTH: {x: 0, y: -1},
WEST: {x: -1, y: 0}
};
Object.freeze(DIRECTIONS);
const START_SIZE = 5;
const GAIN_FROM_FOOD = 3;
/**
* Represents a snake board. 0 means empty, >0 means snake, -1 means food
* The number on the snake will represent the number of turns that that tile has been alive
* if that number exceeds the length, then the tile is set to 0
*/
class SnakeBoard {
constructor(width, height) {
this.width = width;
this.height = height;
this.direction = DIRECTIONS.EAST;
this.length = START_SIZE;
this.grid = new Array(this.width);
for (let i = 0; i < this.width; i++) {
this.grid[i] = new Array(this.height).fill(0)
}
this.grid[Math.floor(this.width/2)][Math.floor(this.height/2)] = 1;
this.add_food();
}
add_food() {
// pick a random empty cell for the food
let randX = randRange(0, this.width);
let randY = randRange(0, this.height);
//get random values while the tiles are not empty
while (this.grid[randX][randY] !== 0) {
randX = randRange(0, this.width);
randY = randRange(0, this.height);
}
this.grid[randX][randY] = -1;
}
step_turn() {
//updates all the tiles by 1 turn passed
for (let i = 0; i < this.width; i++) {
for (let j = 0; j < this.height; j++) {
if (this.grid[i][j] > 0) {
this.grid[i][j] = (this.grid[i][j] + 1) % this.length;
}
}
}
let nextX = 0;
let nextY = 0;
//next find the most recent tile and go in that direction
for (let i = 0; i < this.width; i++) {
for (let j = 0; j < this.height; j++) {
if (this.grid[i][j] === 2) {
nextX = i + this.direction.x;
nextY = j + this.direction.y;
}
}
}
//next check if the direction is valid and the tile is empty(no snake body)
if (nextX >= this.width || nextX < 0 || nextY >= this.height || nextY < 0 || this.grid[nextX][nextY] > 0) {
end_game(false);
return;
}
//account for possible food
if (this.grid[nextX][nextY] === -1) {
this.length += GAIN_FROM_FOOD;
this.add_food();
}
//finally add the next tile
this.grid[nextX][nextY] = 1;
}
check_win() {
if (this.length > this.width*this.height) end_game(true);
}
}
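// Worked example of step_turn()'s aging with length = 3: a cell holding 1 is the
// current head and 2 is older body. The bump pass maps 1 -> 2 and 2 -> (2+1)%3 = 0,
// so the oldest cell falls off as the tail; the cell that just became 2 is the old
// head, and the new head (value 1) is written one step along this.direction from it.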
/**
* Return a random int between min(inclusive) and max(exclusive)
* @param min
* @param max
*/
function randRange(min, max) {
return min + Math.floor(Math.random() * (max - min));
}
function get_mouse_pos(event) {
let rect = canvas.getBoundingClientRect();
return {
x: Math.round((event.clientX - rect.left)/(rect.right - rect.left)*canvas.width),
y: Math.round((event.clientY - rect.top)/(rect.bottom - rect.top)*canvas.height)
};
}
function reverse(arr) {
let new_arr = new Array(arr.length);
for (let i = 0; i < arr.length; i++) {
new_arr[i] = arr[arr.length - 1 - i];
}
return new_arr;
}
function on_mouse_down(event) {
let pos = get_mouse_pos(event);
for(const sprite of reverse(sprites)) {
//ensures that only the top sprites get clicked
if (sprite instanceof Clickable) |
}
}
function draw() {
//sprites on the top are drawn last
//sprites added most recently are at the end of the list
for(const sprite of sprites) {
sprite.draw();
}
}
function clear() {
ctx.clearRect(0, 0, canvas.width, canvas.height);
}
function render() {
clear();
draw();
}
function end_game(win) {
clearInterval(game_timer);
if (win) {
let popup = new Popup("You win!");
sprites.push(popup);
} else {
let popup = new Popup("You lose!");
sprites.push(popup);
}
sprites.push(new Button(canvas.width*.45, canvas.height*.25, canvas.width*.1, canvas.height*.1, "Play Again", () => init_game(50, 30)));
clearInterval(snake_mover)
}
function update() {
snake_board.check_win();
render();
}
function get_random_color() {
let letters = '0123456789ABCDEF';
let color = '#';
for (let i = 0; i < 6; i++) {
color += letters[Math.floor(Math.random() * 16)];
}
return color;
}
function move_snake() {
snake_board.step_turn();
}
function init_game(width, height) {
snake_board = new SnakeBoard(width, height);
sprites = [];
// a -50 height offset once reserved space for the title bar (see the commented-out Timer below); tiles now use the full canvas
let tile_size = Math.min(canvas.width/width, (canvas.height)/height);
for (let i = 0; i < width; i++) {
for (let j = 0; j < height; j++) {
sprites.push(new SnakeTile(tile_size*i, tile_size*j, tile_size, tile_size, i, j));
}
}
snake_mover = setInterval(move_snake, 200);
//play_time = 0;
//game_timer = setInterval(() => play_time++, 1000);
//sprites.push(new Timer(0, 0, 150, 50));
//sprites.push(new Button(150, 0, 150, 50, "New Game", () => {end_game(false);}))
}
//disables the context menu when you left click on the canvas
$('body').on('contextmenu', '#canvas', function(e){ return false; });
const canvas = document.getElementById("canvas");
const ctx = canvas.getContext("2d");
let frame_count = 0;
let sprites = [];
/**
*
* @type {SnakeBoard}
*/
let snake_board = [];
let minefield = [];
let play_time = 0;
let game_timer = 0;
let snake_mover = 0;
// for (let i = 0; i < 10; i++) {
// for (let j = 0; j < 10; j++) {
// sprites.push(new DisplayTimesClicked(30*i, 30*j, 30, 30, get_random_color()))
// }
// }
// sprites.push(new DisplayTimesClicked(50, 50, 50, 50, "#363cff"));
//canvas.addEventListener("mousemove", this.detect_mouse, false);
canvas.addEventListener("mousedown", on_mouse_down, false);
document.onkeydown = function(e) {
switch (e.key) {
case "ArrowDown":
if (snake_board.direction !== DIRECTIONS.SOUTH) {
snake_board.direction = DIRECTIONS.NORTH;
}
break;
case "ArrowLeft":
if (snake_board.direction !== DIRECTIONS.EAST) {
snake_board.direction = DIRECTIONS.WEST;
}
break;
// why down and up have to be reversed is beyond me
case "ArrowUp":
if (snake_board.direction !== DIRECTIONS.NORTH) {
snake_board.direction = DIRECTIONS.SOUTH;
}
break;
case "ArrowRight":
if (snake_board.direction !== DIRECTIONS.WEST) {
snake_board.direction = DIRECTIONS.EAST;
}
break;
}
};
setInterval(update, 100);
init_game(50, 30);
| {
if (sprite.test_click(pos.x, pos.y, event.button)) {
// stop once the topmost sprite has consumed the click
return
}
} | conditional_block |
main.js | /***
* Defines a rectangular object that can be rendered on the screen
*/
class Sprite {
/**
*
* @param {number} x the x position of the top left corner
* @param {number} y the y position of the top left corner
*/
constructor(x, y) {
this.x = x;
this.y = y;
}
/***
* Render the sprite at its position on the screen
*/
draw() {}
}
/***
* Defines a rectangular clickable sprite that does something when clicked
*/
class Clickable extends Sprite {
//we might want to make this a subclass of sprite and just display sprites
/**
*
* @param {number} x the x position of the top left corner
* @param {number} y the y position of the top left corner
* @param {number} width the width of the zone
* @param {number} height the height of the zone
*/
constructor(x, y, width, height) {
super(x, y);
this.width = width;
this.height = height;
}
left_click() {}
right_click() {}
test_click(x, y, button) {
if (x >= this.x && x <= this.x + this.width && y >= this.y && y <= this.y + this.height) {
if (button === 0) {
this.left_click();
} else if (button === 2) {
this.right_click()
}
return true;
}
return false;
}
}
class SnakeTile extends Sprite {
constructor(x, y, width, height, trueX, trueY) {
super(x, y);
this.width = width;
this.height = height;
this.trueX = trueX;
this.trueY = trueY;
}
draw() {
ctx.fillStyle = "#888888";
ctx.fillRect(this.x, this.y, this.width, this.height);
ctx.strokeStyle = "#999999";
ctx.lineWidth = 1;
ctx.strokeRect(this.x, this.y, this.width, this.height);
if (snake_board.grid[this.trueX][this.trueY] === -1) {
ctx.fillStyle = "#0000ff";
ctx.fillRect(this.x, this.y, this.width, this.height);
} else if (snake_board.grid[this.trueX][this.trueY] > 0){
ctx.fillStyle = "#888888";
ctx.fillRect(this.x, this.y, this.width, this.height);
ctx.fillStyle = "#ff0000";
let scale_factor = (snake_board.grid[this.trueX][this.trueY]/snake_board.length)/4;
ctx.fillRect(this.x + this.width*scale_factor, this.y + this.height*scale_factor, this.width - 2*this.width*scale_factor, this.height - 2*this.height*scale_factor);
}
}
}
class Button extends Clickable{
/**
* Creates a button which runs a function when it is clicked
* @param x
* @param y
* @param width
* @param height
* @param {string} text The text on the button
* @param {function} action The function the button runs on press
*/
constructor(x, y, width, height, text, action) {
super(x, y, width, height);
this.text = text;
this.action = action;
this.font = height + "px Verdana"
}
left_click() {
this.action();
}
draw() {
ctx.fillStyle = "#000099";
ctx.strokeStyle = "#000000";
ctx.lineWidth = 2;
ctx.fillRect(this.x, this.y, this.width, this.height);
ctx.strokeRect(this.x, this.y, this.width, this.height);
ctx.fillStyle = "#ffffff";
ctx.font = this.font;
ctx.textAlign = "center";
ctx.fillText(this.text, this.x + this.width/2, this.y + this.height*.9, this.width);
}
}
class Timer extends Sprite {
constructor(x, y, width, height) {
super(x, y);
this.width = width;
this.height = height;
this.font = height + "px Consolas";
}
draw() {
ctx.fillStyle = "#333333";
ctx.fillRect(this.x, this.y, this.width, this.height);
ctx.strokeStyle = "#999999";
ctx.lineWidth = 2;
ctx.strokeRect(this.x, this.y, this.width, this.height);
ctx.fillStyle = "#999999";
ctx.textAlign = "center";
ctx.font = this.font;
ctx.fillText(Math.floor(play_time/60) + ":" + (play_time%60 < 10 ? "0": "") + play_time%60 , this.x + this.width/2, this.y + this.height*.9);
}
}
class Popup extends Clickable {
constructor(title_text) {
//the popup covers the entire screen
super(0, 0, canvas.width, canvas.height);
this.title_text = title_text;
}
draw() {
ctx.fillStyle = "#dfafaf";
ctx.fillRect(this.width*.33, this.height*.15, this.width*.33, this.height*.30);
ctx.strokeStyle = "#333333";
ctx.lineWidth = 3;
ctx.strokeRect(this.width*.33, this.height*.15, this.width*.33, this.height*.30);
ctx.fillStyle = "#ffffff";
ctx.font = "40px Verdana";
ctx.textAlign = "center";
ctx.fillText(this.title_text, this.width/2, this.height*.23, this.width*.33);
}
}
//the signs look "reversed" because the canvas y-axis grows downward, so screen-up means decreasing y
const DIRECTIONS = {
NORTH: {x: 0, y: 1},
EAST: {x: 1, y: 0},
SOUTH: {x: 0, y: -1},
WEST: {x: -1, y: 0}
};
Object.freeze(DIRECTIONS);
const START_SIZE = 5;
const GAIN_FROM_FOOD = 3;
/**
* Represents a snake board. 0 means empty, >0 means snake, -1 means food
* The number on the snake will represent the number of turns that that tile has been alive
* if that number exceeds the length, then the tile is set to 0
*/
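// Example with length = 3: after the per-turn increment the ages cycle
// 1 -> 2 -> 0, so a tile falls off the tail once its age wraps to 0;
// the tile that just became 2 is the previous head, which is why
// step_turn searches for the value 2 to decide where the snake grows.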
class SnakeBoard {
constructor(width, height) {
this.width = width;
this.height = height;
this.direction = DIRECTIONS.EAST;
this.length = START_SIZE;
this.grid = new Array(this.width);
for (let i = 0; i < this.width; i++) {
this.grid[i] = new Array(this.height).fill(0)
}
this.grid[Math.floor(this.width/2)][Math.floor(this.height/2)] = 1;
this.add_food();
}
add_food() {
//get coords of snake and the tiles
let randX = randRange(0, this.width);
let randY = randRange(0, this.height);
//get random values while the tiles are not empty
while (this.grid[randX][randY] !== 0) {
randX = randRange(0, this.width);
randY = randRange(0, this.height);
}
this.grid[randX][randY] = -1;
}
    step_turn() {
//updates all the tiles by 1 turn passed
for (let i = 0; i < this.width; i++) {
for (let j = 0; j < this.height; j++) {
if (this.grid[i][j] > 0) {
this.grid[i][j] = (this.grid[i][j] + 1) % this.length;
}
}
}
let nextX = 0;
let nextY = 0;
        //find the previous head (the tile that just became 2) and step in the current direction
for (let i = 0; i < this.width; i++) {
for (let j = 0; j < this.height; j++) {
if (this.grid[i][j] === 2) {
nextX = i + this.direction.x;
nextY = j + this.direction.y;
}
}
}
        //next check that the move stays on the board and the next tile is empty (no snake body)
if (nextX >= this.width || nextX < 0 || nextY >= this.height || nextY < 0 || this.grid[nextX][nextY] > 0) {
end_game(false);
return;
}
//account for possible food
if (this.grid[nextX][nextY] === -1) {
this.length += GAIN_FROM_FOOD;
this.add_food();
}
//finally add the next tile
this.grid[nextX][nextY] = 1;
}
check_win() {
if (this.length > this.width*this.height) end_game(true);
}
}
/**
* Return a random int between min(inclusive) and max(exclusive)
* @param min
* @param max
*/
function randRange(min, max) {
return min + Math.floor(Math.random() * (max - min));
}
function get_mouse_pos(event) {
let rect = canvas.getBoundingClientRect();
return {
x: Math.round((event.clientX - rect.left)/(rect.right - rect.left)*canvas.width),
y: Math.round((event.clientY - rect.top)/(rect.bottom - rect.top)*canvas.height)
};
}
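// Example: with a 600px-wide canvas displayed at 300px CSS width, a client
// x offset of 150 maps to canvas x 300, so hit-testing stays correct even
// when the canvas is scaled by the page layout.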
function reverse(arr) {
let new_arr = new Array(arr.length);
for (let i = 0; i < arr.length; i++) {
new_arr[i] = arr[arr.length - 1 - i];
}
return new_arr;
}
function on_mouse_down(event) {
let pos = get_mouse_pos(event);
for(const sprite of reverse(sprites)) {
//ensures that only the top sprites get clicked
if (sprite instanceof Clickable) {
if (sprite.test_click(pos.x, pos.y, event.button)) {
                //stop after the topmost sprite that handles the click
                return;
}
}
}
}
function draw() {
//sprites on the top are drawn last
//sprites added most recently are at the end of the list
for(const sprite of sprites) {
sprite.draw();
}
}
function clear() {
ctx.clearRect(0, 0, canvas.width, canvas.height);
}
function render() {
clear();
draw();
}
function end_game(win) {
clearInterval(game_timer);
if (win) {
let popup = new Popup("You win!");
sprites.push(popup);
} else {
let popup = new Popup("You lose!");
sprites.push(popup);
}
sprites.push(new Button(canvas.width*.45, canvas.height*.25, canvas.width*.1, canvas.height*.1, "Play Again", () => init_game(50, 30)));
clearInterval(snake_mover)
}
function update() {
snake_board.check_win();
render();
}
function get_random_color() {
let letters = '0123456789ABCDEF';
let color = '#';
for (let i = 0; i < 6; i++) {
color += letters[Math.floor(Math.random() * 16)];
}
return color;
}
function move_snake() {
snake_board.step_turn();
}
function init_game(width, height) {
snake_board = new SnakeBoard(width, height);
sprites = [];
    //a -50 height offset was used here when the title bar (the commented-out Timer/Button below) was enabled
let tile_size = Math.min(canvas.width/width, (canvas.height)/height);
for (let i = 0; i < width; i++) {
for (let j = 0; j < height; j++) {
sprites.push(new SnakeTile(tile_size*i, tile_size*j, tile_size, tile_size, i, j));
}
}
snake_mover = setInterval(move_snake, 200);
//play_time = 0;
//game_timer = setInterval(() => play_time++, 1000);
//sprites.push(new Timer(0, 0, 150, 50));
//sprites.push(new Button(150, 0, 150, 50, "New Game", () => {end_game(false);}))
}
//disables the context menu when you left click on the canvas
$('body').on('contextmenu', '#canvas', function(e){ return false; });
const canvas = document.getElementById("canvas");
const ctx = canvas.getContext("2d");
let frame_count = 0;
let sprites = [];
/**
*
* @type {SnakeBoard}
*/
let snake_board = [];
let minefield = [];
let play_time = 0;
let game_timer = 0;
let snake_mover = 0;
// for (let i = 0; i < 10; i++) {
// for (let j = 0; j < 10; j++) {
// sprites.push(new DisplayTimesClicked(30*i, 30*j, 30, 30, get_random_color()))
// }
// }
// sprites.push(new DisplayTimesClicked(50, 50, 50, 50, "#363cff"));
//canvas.addEventListener("mousemove", this.detect_mouse, false);
canvas.addEventListener("mousedown", on_mouse_down, false);
document.onkeydown = function(e) {
switch (e.key) {
case "ArrowDown":
if (snake_board.direction !== DIRECTIONS.SOUTH) {
snake_board.direction = DIRECTIONS.NORTH;
}
break;
case "ArrowLeft":
if (snake_board.direction !== DIRECTIONS.EAST) {
snake_board.direction = DIRECTIONS.WEST;
}
break;
        // ArrowUp maps to SOUTH because y values decrease toward the top of the canvas
case "ArrowUp":
if (snake_board.direction !== DIRECTIONS.NORTH) {
snake_board.direction = DIRECTIONS.SOUTH;
}
break;
case "ArrowRight":
if (snake_board.direction !== DIRECTIONS.WEST) {
snake_board.direction = DIRECTIONS.EAST;
}
break;
}
};
setInterval(update, 100);
init_game(50, 30);
// extern_crate.rs
//! Provides handling of `extern_crate` attributes.
//!
//! # What the generated code looks like
//!
//! ```rust,ignore
//! #[pre::extern_crate(std)]
//! mod pre_std {
//! mod ptr {
//! #[pre(valid_ptr(src, r))]
//! unsafe fn read<T>(src: *const T) -> T;
//!
//! impl<T> NonNull<T> {
//! #[pre(!ptr.is_null())]
//! const unsafe fn new_unchecked(ptr: *mut T) -> NonNull<T>;
//! }
//! }
//! }
//! ```
//!
//! turns into
//!
//! ```rust,ignore
//! #[doc = "..."]
//! mod pre_std {
//! #[allow(unused_imports)]
//! use pre::pre;
//! #[allow(unused_imports)]
//! #[doc(no_inline)]
//! pub(crate) use std::*;
//!
//! #[doc = "..."]
//! pub(crate) mod ptr {
//! #[allow(unused_imports)]
//! use pre::pre;
//! #[allow(unused_imports)]
//! #[doc(no_inline)]
//! pub(crate) use std::ptr::*;
//!
//! #[doc = "..."]
//! #[pre(!ptr.is_null())]
//! #[pre(no_doc)]
//! #[pre(no_debug_assert)]
//! #[inline(always)]
//! #[allow(non_snake_case)]
//! pub(crate) fn NonNull__impl__new_unchecked__() {}
//!
//! #[pre(valid_ptr(src, r))]
//! #[inline(always)]
//! pub(crate) unsafe fn read<T>(src: *const T) -> T {
//! std::ptr::read(src)
//! }
//! }
//! }
//! ```
use proc_macro2::{Span, TokenStream};
use quote::{quote, quote_spanned, TokenStreamExt};
use std::fmt;
use syn::{
braced,
parse::{Parse, ParseStream},
spanned::Spanned,
token::Brace,
Attribute, FnArg, ForeignItemFn, Ident, ItemUse, Path, PathArguments, PathSegment, Token,
Visibility,
};
use crate::{
documentation::{generate_extern_crate_fn_docs, generate_module_docs},
helpers::{visit_matching_attrs_parsed_mut, AttributeAction, CRATE_NAME},
pre_attr::PreAttr,
};
pub(crate) use impl_block::{impl_block_stub_name, ImplBlock};
mod impl_block;
/// The parsed version of the `extern_crate` attribute content.
pub(crate) struct ExternCrateAttr {
/// The path of the crate/module to which function calls will be forwarded.
path: Path,
}
impl fmt::Display for ExternCrateAttr {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "#[extern_crate(")?;
if self.path.leading_colon.is_some() {
write!(f, "::")?;
}
        for (i, segment) in self.path.segments.iter().enumerate() {
            if i > 0 {
                write!(f, "::")?;
            }
            write!(f, "{}", segment.ident)?;
        }
write!(f, ")]")
}
}
impl Parse for ExternCrateAttr {
fn parse(input: ParseStream) -> syn::Result<Self> {
Ok(ExternCrateAttr {
path: input.call(Path::parse_mod_style)?,
})
}
}
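// A minimal, hypothetical test sketch (not part of the original crate): it
// assumes `syn`'s default parsing features and only exercises the
// `Parse`/`Display` round trip of `ExternCrateAttr`.
#[cfg(test)]
mod extern_crate_attr_tests {
    use super::*;

    #[test]
    fn parses_a_mod_style_path() {
        // `Path::parse_mod_style` accepts paths such as `std::ptr`.
        let attr: ExternCrateAttr = syn::parse_str("std::ptr").unwrap();
        assert_eq!(attr.to_string(), "#[extern_crate(std::ptr)]");
    }
}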
/// A parsed `extern_crate` annotated module.
pub(crate) struct Module {
/// The attributes on the module.
attrs: Vec<Attribute>,
/// The visibility on the module.
visibility: Visibility,
/// The `mod` token.
mod_token: Token![mod],
/// The name of the module.
ident: Ident,
/// The braces surrounding the content.
braces: Brace,
/// The impl blocks contained in the module.
impl_blocks: Vec<ImplBlock>,
/// The imports contained in the module.
imports: Vec<ItemUse>,
/// The functions contained in the module.
functions: Vec<ForeignItemFn>,
/// The submodules contained in the module.
modules: Vec<Module>,
}
impl fmt::Display for Module {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "{}", self.original_token_stream())
}
}
impl Spanned for Module {
fn span(&self) -> Span {
self.visibility
.span()
.join(self.braces.span)
.unwrap_or(self.braces.span)
}
}
impl Parse for Module {
fn parse(input: ParseStream) -> syn::Result<Self> {
let attrs = input.call(Attribute::parse_outer)?;
let visibility = input.parse()?;
let mod_token = input.parse()?;
let ident = input.parse()?;
let content;
let braces = braced!(content in input);
let mut impl_blocks = Vec::new();
let mut imports = Vec::new();
let mut functions = Vec::new();
let mut modules = Vec::new();
while !content.is_empty() {
if content.peek(Token![impl]) {
impl_blocks.push(content.parse()?);
} else if <ItemUse as Parse>::parse(&content.fork()).is_ok() {
imports.push(content.parse()?);
} else if <ForeignItemFn as Parse>::parse(&content.fork()).is_ok() {
functions.push(content.parse()?);
} else {
modules.push(content.parse().map_err(|err| {
syn::Error::new(
err.span(),
"expected a module, a function signature, an impl block or a use statement",
)
})?);
}
}
Ok(Module {
attrs,
visibility,
mod_token,
ident,
braces,
impl_blocks,
imports,
functions,
modules,
})
}
}
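// Parsing sketch (hypothetical input, mirroring the module doc above): a
// module body may mix submodules, bare `fn` signatures, impl blocks and
// use statements, e.g.
// let m: Module = syn::parse_str("mod ptr { unsafe fn read<T>(src: *const T) -> T; }").unwrap();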
impl Module {
/// Renders this `extern_crate` annotated module to its final result.
pub(crate) fn render(&self, attr: ExternCrateAttr) -> TokenStream {
let mut tokens = TokenStream::new();
self.render_inner(attr.path, &mut tokens, None, &self.ident);
tokens
}
/// A helper function to generate the final token stream.
///
/// This allows passing the top level visibility and the updated path into recursive calls.
fn render_inner(
&self,
mut path: Path,
tokens: &mut TokenStream,
visibility: Option<&TokenStream>,
top_level_module: &Ident,
) {
if visibility.is_some() {
// Update the path only in recursive calls.
path.segments.push(PathSegment {
ident: self.ident.clone(),
arguments: PathArguments::None,
});
}
let mut attrs = self.attrs.clone();
let mut render_docs = true;
visit_matching_attrs_parsed_mut(&mut attrs, "pre", |attr| match attr.content() {
PreAttr::NoDoc(_) => {
render_docs = false;
AttributeAction::Remove
}
_ => AttributeAction::Keep,
});
if render_docs {
let docs = generate_module_docs(self, &path);
tokens.append_all(quote! { #docs });
}
tokens.append_all(attrs);
let visibility = if let Some(visibility) = visibility {
// We're in a recursive call.
// Use the visibility passed to us.
tokens.append_all(quote! { #visibility });
visibility.clone()
} else {
// We're in the outermost call.
// Use the original visibility and decide which visibility to use in recursive calls.
let local_vis = &self.visibility;
tokens.append_all(quote! { #local_vis });
if let Visibility::Public(pub_keyword) = local_vis {
quote! { #pub_keyword }
} else {
let span = match local_vis {
Visibility::Inherited => self.mod_token.span(),
_ => local_vis.span(),
};
quote_spanned! { span=> pub(crate) }
}
};
let mod_token = self.mod_token;
tokens.append_all(quote! { #mod_token });
tokens.append(self.ident.clone());
let mut brace_content = TokenStream::new();
let crate_name = Ident::new(&CRATE_NAME, Span::call_site());
brace_content.append_all(quote! {
#[allow(unused_imports)]
#[doc(no_inline)]
#visibility use #path::*;
#[allow(unused_imports)]
use #crate_name::pre;
});
for impl_block in &self.impl_blocks {
impl_block.render(&mut brace_content, &path, &visibility, top_level_module);
}
for import in &self.imports {
brace_content.append_all(quote! { #import });
}
for function in &self.functions {
render_function(function, &mut brace_content, &path, &visibility);
}
for module in &self.modules {
module.render_inner(
path.clone(),
&mut brace_content,
Some(&visibility),
top_level_module,
);
}
tokens.append_all(quote_spanned! { self.braces.span=> { #brace_content } });
}
/// Generates a token stream that is semantically equivalent to the original token stream.
///
/// This should only be used for debug purposes.
fn original_token_stream(&self) -> TokenStream {
let mut stream = TokenStream::new();
stream.append_all(&self.attrs);
let vis = &self.visibility;
stream.append_all(quote! { #vis });
stream.append_all(quote! { mod });
stream.append(self.ident.clone());
let mut content = TokenStream::new();
content.append_all(
self.impl_blocks
.iter()
.map(|impl_block| impl_block.original_token_stream()),
);
content.append_all(&self.imports);
content.append_all(&self.functions);
content.append_all(self.modules.iter().map(|m| m.original_token_stream()));
stream.append_all(quote! { { #content } });
stream
}
}
/// Generates the code for a function inside a `extern_crate` module.
fn render_function(
function: &ForeignItemFn,
tokens: &mut TokenStream,
path: &Path,
visibility: &TokenStream,
) {
tokens.append_all(&function.attrs);
let doc_header = generate_extern_crate_fn_docs(path, &function.sig, function.span());
tokens.append_all(quote! { #doc_header });
tokens.append_all(quote_spanned! { function.span()=> #[inline(always)] });
tokens.append_all(visibility.clone().into_iter().map(|mut token| {
token.set_span(function.span());
token
}));
let signature = &function.sig;
tokens.append_all(quote! { #signature });
let mut path = path.clone();
path.segments.push(PathSegment {
        ident: function.sig.ident.clone(),
        arguments: PathArguments::None,
    });
    // Update the spans of the `::` tokens to lie in the function
for punct in path
.segments
.pairs_mut()
.map(|p| p.into_tuple().1)
.flatten()
{
punct.spans = [function.span(); 2];
}
let mut args_list = TokenStream::new();
args_list.append_separated(
function.sig.inputs.iter().map(|arg| match arg {
FnArg::Receiver(_) => unreachable!("receiver is not valid in a function argument list"),
FnArg::Typed(pat) => &pat.pat,
}),
quote_spanned! { function.span()=> , },
);
tokens.append_all(quote_spanned! { function.span()=> { #path(#args_list) } });
}
# TreeDecomposition.py
# class to implement the nice tree decomposition conversion
# from paper
# 'Solving connectivity problems parameterized by treewidth in
# single exponential time' [Cygan,Nederlof,Pilipczuk,Rooij,Wojtaszczyk]
# TODO assert isinstance checks, class name, class functions, bag as set
# TODO getTreewidth
from BagType import BagType
import copy
import string
index = 0
class TreeDecomposition:
def __init__(self, left=None, right=None, bag=None, bag_type=None):
self.left = left
self.right = right
self.bag = bag
#convertToNiceTree()
self.bag_type = bag_type
self.label = {}
def set_bag_type(self, bag_type):
self.bag_type = bag_type
def get_bag_type(self):
return self.bag_type
def __str__(self):
return str(self.bag)
def get_right(self):
return self.right
def get_left(self):
return self.left
def get_bag(self):
return self.bag
def set_right(self, right):
self.right = right
def set_left(self, left):
self.left = left
def set_label(self, label):
self.label = label
def get_label(self):
return self.label
    def print_nice_tree_indented(self, level=0):
        print(str(level) + ' ' + str(self.bag) + ' ' + str(self.get_bag_type()) + ' ' + str(self.get_label()))
        for child in (self.left, self.right):
            if child is None:
                print(str(level + 1) + ' none')
            else:
                child.print_nice_tree_indented(level + 1)
# This function traverses the tree in-order
# and searches for the leaves of the tree
# (both children non-existent)
# when it finds one, it sets its left child
# as the result of create_leaf with its bag
def leaf(tree):
if tree.get_left() is not None:
leaf(tree.get_left())
if tree.get_right() is not None:
leaf(tree.get_right())
if (len(tree.get_bag()) > 0) and (tree.get_left() is None) and (tree.get_right() is None):
tree.set_bag_type(BagType.IV)
tree.set_label(str(tree.get_bag()[0]))
tree.left = create_leaf(tree.get_bag())
# creates as many new nodes until the bag is empty
# (forgetting one vertex per step)
# so that the last node has an empty bag and
# gets the type leaf bag
def create_leaf(bag):
if len(bag) > 1:
new_bag = TreeDecomposition(create_leaf(bag[1:]), None, bag[1:], BagType.IV)
new_bag.set_label(str(bag[1]))
return new_bag
return TreeDecomposition(None, None, [], BagType.L)
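# Worked example (hypothetical vertex ids): expanding a leaf whose bag is
# [1, 2] produces an introduce-vertex chain ending in an empty LEAF bag:
#   tree = TreeDecomposition(None, None, [1, 2])
#   leaf(tree)   # levels now read: [1, 2] IV -> [2] IV -> [] L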
def is_child_smaller(old_root):
    return len(get_bag_difference(old_root.get_bag(), get_child(old_root).get_bag())) == 1
# used to define the BagType of the old root
def root(old_root):
bag_diff = get_bag_difference(old_root.get_bag(), get_child(old_root).get_bag())
    if len(bag_diff) == 1:
old_root.set_bag_type(BagType.IV)
old_root.set_label(str(bag_diff[0]))
else:
old_root.set_bag_type(BagType.F)
old_root.set_label(str(get_bag_difference(get_child(old_root).get_bag(), old_root.get_bag())[0]))
return init_root(old_root)
# This function gets the old root of the tree
# and introduces new root nodes as long as
# the bag of the old root is not empty by
# forgetting one vertex in every step.
# The newly introduced nodes are marked as
# forget bags and the last one to be introduced
# as root with an empty bag (definition 2.3)
def init_root(old_root):
bag = old_root.get_bag()
    if len(bag) > 1:
new_root = TreeDecomposition(old_root, None, bag[1:], BagType.F)
new_root.set_label(str(bag[0]))
return init_root(new_root)
return TreeDecomposition(old_root, None, [], BagType.R)
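# Example (hypothetical vertices): an old root with bag ['a', 'b'] gets a
# forget bag ['b'] stacked above it and finally the empty bag typed as ROOT.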
# The join function traverses the given tree in-order
# and checks for every node if there are two children
# if yes (and their bags are not equal -> already joined)
# then we introduce a join bag and two equal children
# (according to definition 2.3)
# otherwise we continue traversing
def join(tree):
left_node = tree.get_left()
right_node = tree.get_right()
if has_two_children(tree):
right_bag = tree.get_right().get_bag()
left_bag = tree.get_left().get_bag()
if not are_equals_bags(right_bag, left_bag):
tree.set_bag_type(BagType.J)
tree_bag = tree.get_bag()
new_left_node = TreeDecomposition(left_node, None, tree_bag)
new_right_node = TreeDecomposition(right_node, None, tree_bag)
tree.set_left(new_left_node)
tree.set_right(new_right_node)
if left_node is not None:
join(left_node)
if right_node is not None:
join(right_node)
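# Example: a join candidate with bag [a, b] and differing children becomes a
# JOIN bag whose two new children both carry [a, b], each keeping one of the
# original subtrees below it.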
def are_equals_bags(first_bag, scnd_bag):
    return set(first_bag) == set(scnd_bag)
# A nice tree decomposition (definition 2.3) uses
# structures of the standard nice tree decomposition.
# This function takes care that the third property
# of definition 2.2 of a nice standard nice tree
# decomposition is guaranteed. We achieve this by
# introducing forget bags and introduce vertex bags
# between two connected nodes whose intersection is
# bigger than one and/or if they contain different
# vertices.
# We first 'forget' all vertices of the parent node
# and from there on we introduce all the vertices
# which existed in the previous child node
# Additionally, we use the assumption that if a node
# has two children, we don't have to examine it
# as we executed join beforehand
def add_internal_nodes(ntree):
if has_two_children(ntree):
add_internal_nodes(ntree.get_left())
add_internal_nodes(ntree.get_right())
else:
child = get_child(ntree)
ntree_bag = copy.copy(ntree.get_bag())
if child is not None and ntree_bag is not None:
child_bag = child.get_bag()
intersection = get_intersection(ntree_bag, child_bag)
forget_list = get_bag_difference(ntree_bag, intersection)
introduce_list = get_bag_difference(child_bag, intersection)
# forget_list = list of vertices which have to be removed from the bag
# while traversing downwards the tree (we remove one each step)
# introduce_list = list of vertices which have to be added to the bag
# while traversing downwards the tree (we add one each step)
# if the combined length of those two lists is 1 or less, then
# we know we don't have to introduce/forget vertices
# otherwise we have two cases:
# 1: we still have to forget bags
# 2: we have to introduce bags
if (len(forget_list) + len(introduce_list)) > 1:
if len(forget_list) > 0:
#case 1
ntree_bag.remove(forget_list[0])
new_child = TreeDecomposition(child, None, ntree_bag, BagType.F)
# introduce_list can be empty on a pure forget chain
if introduce_list:
new_child.set_label(str(introduce_list[0]))
ntree.set_bag_type(BagType.IV)
ntree.set_label(str(forget_list[0]))
ntree.set_left(new_child)
add_internal_nodes(new_child)
elif len(introduce_list) > 0:
#case 2
ntree_bag.append(introduce_list[0])
new_child = TreeDecomposition(child, None, ntree_bag, BagType.IV)
new_child.set_label(str(introduce_list[0]))
ntree.set_bag_type(BagType.F)
ntree.set_label(str(introduce_list[0]))
ntree.set_left(new_child)
add_internal_nodes(new_child)
if child.get_bag() is not None:
add_internal_nodes(child)
# calculates the intersection of two bags
# example: [a,b,c] and [b,f,g] -> [b]
def get_intersection(first_bag, scnd_bag):
return list(set(first_bag).intersection(set(scnd_bag)))
# calculates the difference of two bags
# ! result depends on order of parameters !
# example: [a,b,c] and [a] -> [b,c]
def get_bag_difference(first_bag, scnd_bag):
return list(set(first_bag).difference(set(scnd_bag)))
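# A worked example of how add_internal_nodes uses the two helpers above
# (bag contents are illustrative only):
#   parent bag ['a', 'b'], child bag ['b', 'c']
#   get_intersection(parent, child)          -> ['b']
#   forget_list = difference(parent, ['b'])  -> ['a']  (in parent, not in child)
#   introduce_list = difference(child, ['b'])-> ['c']  (in child, not in parent)
# Two differences in total, so intermediate nodes get inserted.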
def get_child(ntree):
left = ntree.get_left()
if left is not None:
return left
return ntree.get_right()
def has_two_children(ntree):
if ntree.get_left() is not None and ntree.get_right() is not None:
return True | # execute inorder_edge_bag for each edge
def edge_bags(ntree, edges):
for edge in edges:
inorder_edge_bag(ntree, edge, False)
# this function traverses the tree in-order
# for each edge of the initial graph and
# should place an extra 'introduce edge bag'
# above the first node which contains the edge
def inorder_edge_bag(ntree, edge, found):
if not found:
left_child = ntree.get_left()
right_child = ntree.get_right()
if left_child is not None:
if contains_edge(edge, left_child.get_bag()):
new_node = TreeDecomposition(left_child, None, left_child.get_bag(), BagType.IE)
new_node.set_label(edge)
ntree.set_left(new_node)
return inorder_edge_bag(ntree, edge, True)
else:
inorder_edge_bag(left_child, edge, False)
if right_child is not None:
if contains_edge(edge, right_child.get_bag()):
new_node = TreeDecomposition(right_child, None, right_child.get_bag(), BagType.IE)
new_node.set_label(edge)
ntree.set_right(new_node)
return inorder_edge_bag(ntree, edge, True)
else:
inorder_edge_bag(right_child, edge, False)
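# Illustrative effect for edge ('a', 'b'): the first child bag found that
# contains both endpoints gets an introduce-edge (IE) node spliced in
# directly above it, carrying the same bag and the edge as its label:
#   before: parent -- child['a', 'b']
#   after:  parent -- IE['a', 'b'] -- child['a', 'b']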
def has_at_least_one_child(ntree):
if ntree.get_left() is None:
if ntree.get_right() is None:
return False
return True
def contains_edge(edge, bag):
return len(set(edge).intersection(set(bag))) == 2
def increment_index():
global index
index += 1
def save_header(file):
file.write("graph NiceTreeDecomposition {\n")
file.write("size=\"1,1\";\n")
file.write("node [shape=box];\n")
def save_nodes(file, ntree):
#write the node
global index
node_symbol = get_next_symbol()
increment_index()
file.write(get_edge_line(node_symbol, ntree))
left_child = ntree.get_left()
right_child = ntree.get_right()
left = False
right = False
if left_child is not None:
left_symbol = get_next_symbol()
increment_index()
file.write(get_edge_line(left_symbol, left_child))
file.write(node_symbol + " -- " + left_symbol + " [type=s];\n")
left = True
if right_child is not None:
right_symbol = get_next_symbol()
increment_index()
file.write(get_edge_line(right_symbol, right_child))
file.write(node_symbol + " -- " + right_symbol + " [type=s];\n")
right = True
if left:
save_nodes(file, left_child)
if right:
save_nodes(file, right_child)
def get_edge_line(symbol, ntree):
return symbol + " [label=\"{{" + str(ntree.get_bag_type().value) + "|" + str(ntree.get_bag()) + "}}\"];\n"
def save_finish(file):
file.write("}")
def save_tree_decomposition(ntree, edges):
file = open("treeDecomposition.txt", "w")
save_header(file)
save_nodes(file, ntree)
#saveEdges(file, edges)
save_finish(file)
file.close()
def get_next_symbol():
alph = list(string.ascii_uppercase)
return alph[index % 24] + str(index // 24) | return False
| random_line_split |
TreeDecomposition.py | # class to implement the nice tree decomposition conversion
# from paper
# 'Solving connectivity problems parameterized by treewidth in
# single exponential time' [Cygan,Nederlof,Pilipczuk,Rooij,Wojtaszczyk]
# TODO assert isinstance etc, class name, class functions, bag as set
# TODO getTreewidth
from BagType import BagType
import copy
import string
index = 0
class TreeDecomposition:
def __init__(self, left=None, right=None, bag=None, bag_type=None):
self.left = left
self.right = right
self.bag = bag
#convertToNiceTree()
self.bag_type = bag_type
self.label = {}
def set_bag_type(self, bag_type):
self.bag_type = bag_type
def get_bag_type(self):
return self.bag_type
def __str__(self):
return str(self.bag)
def get_right(self):
return self.right
def get_left(self):
return self.left
def get_bag(self):
return self.bag
def set_right(self, right):
self.right = right
def set_left(self, left):
self.left = left
def set_label(self, label):
self.label = label
def get_label(self):
return self.label
def print_nice_tree_indented(self, level=0):
if self is None:
print(str(level) + ' none')
return
print(str(level) + ' ' + str(self.bag) + ' ' + str(self.get_bag_type()) + ' ' + str(self.get_label()))
TreeDecomposition.print_nice_tree_indented(self.left, level+1)
TreeDecomposition.print_nice_tree_indented(self.right, level+1)
# This function traverses the tree in-order
# and searches for the leaves of the tree
# (both children non-existent)
# when it finds one, it sets its left child
# as the result of create_leaf with its bag
def leaf(tree):
if tree.get_left() is not None:
leaf(tree.get_left())
if tree.get_right() is not None:
leaf(tree.get_right())
if (len(tree.get_bag()) > 0) and (tree.get_left() is None) and (tree.get_right() is None):
tree.set_bag_type(BagType.IV)
tree.set_label(str(tree.get_bag()[0]))
tree.left = create_leaf(tree.get_bag())
# creates new nodes one at a time until the bag is empty
# (forgetting one vertex per step)
# so that the last node has an empty bag and
# gets the type leaf bag
def create_leaf(bag):
if len(bag) > 1:
new_bag = TreeDecomposition(create_leaf(bag[1:]), None, bag[1:], BagType.IV)
new_bag.set_label(str(bag[1]))
return new_bag
return TreeDecomposition(None, None, [], BagType.L)
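# A minimal usage sketch of the full conversion pipeline (the call order is
# an assumption based on the functions in this module; the two-node input
# tree is hypothetical):
#   raw = TreeDecomposition(TreeDecomposition(None, None, ['a']), None, ['a', 'b'])
#   leaf(raw)                       # grow leaf chains down to empty bags
#   nice = root(raw)                # wrap the old root, ending in an R bag
#   join(nice)                      # normalize nodes with two children
#   add_internal_nodes(nice)        # reduce bag differences to one vertex
#   edge_bags(nice, [('a', 'b')])   # splice in introduce-edge bags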
def is_child_smaller(old_root):
return len(get_bag_difference(old_root.get_bag(), get_child(old_root).get_bag())) == 1
# used to define the BagType of the old root
def root(old_root):
bag_diff = get_bag_difference(old_root.get_bag(), get_child(old_root).get_bag())
if len(bag_diff) == 1:
old_root.set_bag_type(BagType.IV)
old_root.set_label(str(bag_diff[0]))
else:
old_root.set_bag_type(BagType.F)
old_root.set_label(str(get_bag_difference(get_child(old_root).get_bag(), old_root.get_bag())[0]))
return init_root(old_root)
# This function gets the old root of the tree
# and introduces new root nodes as long as
# the bag of the old root is not empty by
# forgetting one vertex in every step.
# The newly introduced nodes are marked as
# forget bags and the last one to be introduced
# as root with an empty bag (definition 2.3)
def init_root(old_root):
bag = old_root.get_bag()
if len(bag) > 1:
new_root = TreeDecomposition(old_root, None, bag[1:], BagType.F)
new_root.set_label(str(bag[0]))
return init_root(new_root)
return TreeDecomposition(old_root, None, [], BagType.R)
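# Illustrative trace of init_root on a root whose bag is ['a', 'b', 'c']:
#   step 1: F node with bag ['b', 'c'], label 'a'
#   step 2: F node with bag ['c'], label 'b'
#   step 3: R node with the empty bag on top (definition 2.3)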
# The join function traverses the given tree in-order
# and checks for every node if there are two children
# if yes (and their bags are not equal -> already joined)
# then we introduce a join bag and two equal children
# (according to definition 2.3)
# otherwise we continue traversing
def join(tree):
left_node = tree.get_left()
right_node = tree.get_right()
if has_two_children(tree):
right_bag = tree.get_right().get_bag()
left_bag = tree.get_left().get_bag()
if not are_equals_bags(right_bag, left_bag):
tree.set_bag_type(BagType.J)
tree_bag = tree.get_bag()
new_left_node = TreeDecomposition(left_node, None, tree_bag)
new_right_node = TreeDecomposition(right_node, None, tree_bag)
tree.set_left(new_left_node)
tree.set_right(new_right_node)
if left_node is not None:
join(left_node)
if right_node is not None:
join(right_node)
def are_equals_bags(first_bag, scnd_bag):
return set(first_bag) == set(scnd_bag)
# A nice tree decomposition (definition 2.3) uses
# structures of the standard nice tree decomposition.
# This function takes care that the third property
# of definition 2.2 of a standard nice tree
# decomposition is guaranteed. We achieve this by
# introducing forget bags and introduce vertex bags
# between two connected nodes whose intersection is
# bigger than one and/or if they contain different
# vertices.
# We first 'forget' all vertices of the parent node
# and from there on we introduce all the vertices
# which existed in the previous child node
# Additionally, we use the assumption that if a node
# has two children, we don't have to examine it
# as we executed join beforehand
def add_internal_nodes(ntree):
if has_two_children(ntree):
add_internal_nodes(ntree.get_left())
add_internal_nodes(ntree.get_right())
else:
child = get_child(ntree)
ntree_bag = copy.copy(ntree.get_bag())
if child is not None and ntree_bag is not None:
child_bag = child.get_bag()
intersection = get_intersection(ntree_bag, child_bag)
forget_list = get_bag_difference(ntree_bag, intersection)
introduce_list = get_bag_difference(child_bag, intersection)
# forget_list = list of vertices which have to be removed from the bag
# while traversing downwards the tree (we remove one each step)
# introduce_list = list of vertices which have to be added to the bag
# while traversing downwards the tree (we add one each step)
# if the combined length of those two lists is 1 or less, then
# we know we don't have to introduce/forget vertices
# otherwise we have two cases:
# 1: we still have to forget bags
# 2: we have to introduce bags
if (len(forget_list) + len(introduce_list)) > 1:
if len(forget_list) > 0:
#case 1
ntree_bag.remove(forget_list[0])
new_child = TreeDecomposition(child, None, ntree_bag, BagType.F)
# introduce_list can be empty on a pure forget chain
if introduce_list:
new_child.set_label(str(introduce_list[0]))
ntree.set_bag_type(BagType.IV)
ntree.set_label(str(forget_list[0]))
ntree.set_left(new_child)
add_internal_nodes(new_child)
elif len(introduce_list) > 0:
#case 2
ntree_bag.append(introduce_list[0])
new_child = TreeDecomposition(child, None, ntree_bag, BagType.IV)
new_child.set_label(str(introduce_list[0]))
ntree.set_bag_type(BagType.F)
ntree.set_label(str(introduce_list[0]))
ntree.set_left(new_child)
add_internal_nodes(new_child)
if child.get_bag() is not None:
add_internal_nodes(child)
# calculates the intersection of two bags
# example: [a,b,c] and [b,f,g] -> [b]
def get_intersection(first_bag, scnd_bag):
return list(set(first_bag).intersection(set(scnd_bag)))
# calculates the difference of two bags
# ! result depends on order of parameters !
# example: [a,b,c] and [a] -> [b,c]
def get_bag_difference(first_bag, scnd_bag):
return list(set(first_bag).difference(set(scnd_bag)))
def get_child(ntree):
left = ntree.get_left()
if left is not None:
return left
return ntree.get_right()
def has_two_children(ntree):
if ntree.get_left() is not None and ntree.get_right() is not None:
return True
return False
# execute inorder_edge_bag for each edge
def edge_bags(ntree, edges):
for edge in edges:
inorder_edge_bag(ntree, edge, False)
# this function traverses the tree in-order
# for each edge of the initial graph and
# should place an extra 'introduce edge bag'
# above the first node which contains the edge
def inorder_edge_bag(ntree, edge, found):
if not found:
left_child = ntree.get_left()
right_child = ntree.get_right()
if left_child is not None:
if contains_edge(edge, left_child.get_bag()):
new_node = TreeDecomposition(left_child, None, left_child.get_bag(), BagType.IE)
new_node.set_label(edge)
ntree.set_left(new_node)
return inorder_edge_bag(ntree, edge, True)
else:
inorder_edge_bag(left_child, edge, False)
if right_child is not None:
if contains_edge(edge, right_child.get_bag()):
new_node = TreeDecomposition(right_child, None, right_child.get_bag(), BagType.IE)
new_node.set_label(edge)
ntree.set_right(new_node)
return inorder_edge_bag(ntree, edge, True)
else:
inorder_edge_bag(right_child, edge, False)
def has_at_least_one_child(ntree):
if ntree.get_left() is None:
if ntree.get_right() is None:
|
return True
def contains_edge(edge, bag):
return len(set(edge).intersection(set(bag))) == 2
def increment_index():
global index
index += 1
def save_header(file):
file.write("graph NiceTreeDecomposition {\n")
file.write("size=\"1,1\";\n")
file.write("node [shape=box];\n")
def save_nodes(file, ntree):
#write the node
global index
node_symbol = get_next_symbol()
increment_index()
file.write(get_edge_line(node_symbol, ntree))
left_child = ntree.get_left()
right_child = ntree.get_right()
left = False
right = False
if left_child is not None:
left_symbol = get_next_symbol()
increment_index()
file.write(get_edge_line(left_symbol, left_child))
file.write(node_symbol + " -- " + left_symbol + " [type=s];\n")
left = True
if right_child is not None:
right_symbol = get_next_symbol()
increment_index()
file.write(get_edge_line(right_symbol, right_child))
file.write(node_symbol + " -- " + right_symbol + " [type=s];\n")
right = True
if left:
save_nodes(file, left_child)
if right:
save_nodes(file, right_child)
def get_edge_line(symbol, ntree):
return symbol + " [label=\"{{" + str(ntree.get_bag_type().value) + "|" + str(ntree.get_bag()) + "}}\"];\n"
def save_finish(file):
file.write("}")
def save_tree_decomposition(ntree, edges):
file = open("treeDecomposition.txt", "w")
save_header(file)
save_nodes(file, ntree)
#saveEdges(file, edges)
save_finish(file)
file.close()
def get_next_symbol():
alph = list(string.ascii_uppercase)
return alph[index % 24] + str(index // 24)
| return False | conditional_block |
TreeDecomposition.py | # class to implement the nice tree decomposition conversion
# from paper
# 'Solving connectivity problems parameterized by treewidth in
# single exponential time' [Cygan,Nederlof,Pilipczuk,Rooij,Wojtaszczyk]
# TODO assert isinstance etc, class name, class functions, bag as set
# TODO getTreewidth
from BagType import BagType
import copy
import string
index = 0
class TreeDecomposition:
def __init__(self, left=None, right=None, bag=None, bag_type=None):
self.left = left
self.right = right
self.bag = bag
#convertToNiceTree()
self.bag_type = bag_type
self.label = {}
def set_bag_type(self, bag_type):
self.bag_type = bag_type
def get_bag_type(self):
return self.bag_type
def __str__(self):
return str(self.bag)
def get_right(self):
return self.right
def get_left(self):
return self.left
def get_bag(self):
return self.bag
def set_right(self, right):
self.right = right
def set_left(self, left):
self.left = left
def set_label(self, label):
self.label = label
def get_label(self):
return self.label
def print_nice_tree_indented(self, level=0):
if self is None:
print(str(level) + ' none')
return
print(str(level) + ' ' + str(self.bag) + ' ' + str(self.get_bag_type()) + ' ' + str(self.get_label()))
TreeDecomposition.print_nice_tree_indented(self.left, level+1)
TreeDecomposition.print_nice_tree_indented(self.right, level+1)
# This function traverses the tree in-order
# and searches for the leaves of the tree
# (both children non-existent)
# when it finds one, it sets its left child
# as the result of create_leaf with its bag
def leaf(tree):
if tree.get_left() is not None:
leaf(tree.get_left())
if tree.get_right() is not None:
leaf(tree.get_right())
if (len(tree.get_bag()) > 0) and (tree.get_left() is None) and (tree.get_right() is None):
tree.set_bag_type(BagType.IV)
tree.set_label(str(tree.get_bag()[0]))
tree.left = create_leaf(tree.get_bag())
# creates new nodes one at a time until the bag is empty
# (forgetting one vertex per step)
# so that the last node has an empty bag and
# gets the type leaf bag
def create_leaf(bag):
if len(bag) > 1:
new_bag = TreeDecomposition(create_leaf(bag[1:]), None, bag[1:], BagType.IV)
new_bag.set_label(str(bag[1]))
return new_bag
return TreeDecomposition(None, None, [], BagType.L)
def is_child_smaller(old_root):
return len(get_bag_difference(old_root.get_bag(), get_child(old_root).get_bag())) == 1
# used to define the BagType of the old root
def root(old_root):
bag_diff = get_bag_difference(old_root.get_bag(), get_child(old_root).get_bag())
if len(bag_diff) == 1:
old_root.set_bag_type(BagType.IV)
old_root.set_label(str(bag_diff[0]))
else:
old_root.set_bag_type(BagType.F)
old_root.set_label(str(get_bag_difference(get_child(old_root).get_bag(), old_root.get_bag())[0]))
return init_root(old_root)
# This function gets the old root of the tree
# and introduces new root nodes as long as
# the bag of the old root is not empty by
# forgetting one vertex in every step.
# The newly introduced nodes are marked as
# forget bags and the last one to be introduced
# as root with an empty bag (definition 2.3)
def init_root(old_root):
bag = old_root.get_bag()
if len(bag) > 1:
new_root = TreeDecomposition(old_root, None, bag[1:], BagType.F)
new_root.set_label(str(bag[0]))
return init_root(new_root)
return TreeDecomposition(old_root, None, [], BagType.R)
# The join function traverses the given tree in-order
# and checks for every node if there are two children
# if yes (and their bags are not equal -> already joined)
# then we introduce a join bag and two equal children
# (according to definition 2.3)
# otherwise we continue traversing
def join(tree):
left_node = tree.get_left()
right_node = tree.get_right()
if has_two_children(tree):
right_bag = tree.get_right().get_bag()
left_bag = tree.get_left().get_bag()
if not are_equals_bags(right_bag, left_bag):
tree.set_bag_type(BagType.J)
tree_bag = tree.get_bag()
new_left_node = TreeDecomposition(left_node, None, tree_bag)
new_right_node = TreeDecomposition(right_node, None, tree_bag)
tree.set_left(new_left_node)
tree.set_right(new_right_node)
if left_node is not None:
join(left_node)
if right_node is not None:
join(right_node)
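# Illustrative before/after for join on a node T with two children whose
# bags differ (bags are hypothetical):
#   before: T['a','b'] -- L['a'], R['b']
#   after:  T['a','b'] (J) -- T1['a','b'] -- L['a']
#                          -- T2['a','b'] -- R['b']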
def are_equals_bags(first_bag, scnd_bag):
return set(first_bag) == set(scnd_bag)
# A nice tree decomposition (definition 2.3) uses
# structures of the standard nice tree decomposition.
# This function takes care that the third property
# of definition 2.2 of a standard nice tree
# decomposition is guaranteed. We achieve this by
# introducing forget bags and introduce vertex bags
# between two connected nodes whose intersection is
# bigger than one and/or if they contain different
# vertices.
# We first 'forget' all vertices of the parent node
# and from there on we introduce all the vertices
# which existed in the previous child node
# Additionally, we use the assumption that if a node
# has two children, we don't have to examine it
# as we executed join beforehand
def add_internal_nodes(ntree):
if has_two_children(ntree):
add_internal_nodes(ntree.get_left())
add_internal_nodes(ntree.get_right())
else:
child = get_child(ntree)
ntree_bag = copy.copy(ntree.get_bag())
if child is not None and ntree_bag is not None:
child_bag = child.get_bag()
intersection = get_intersection(ntree_bag, child_bag)
forget_list = get_bag_difference(ntree_bag, intersection)
introduce_list = get_bag_difference(child_bag, intersection)
# forget_list = list of vertices which have to be removed from the bag
# while traversing downwards the tree (we remove one each step)
# introduce_list = list of vertices which have to be added to the bag
# while traversing downwards the tree (we add one each step)
# if the combined length of those two lists is 1 or less, then
# we know we don't have to introduce/forget vertices
# otherwise we have two cases:
# 1: we still have to forget bags
# 2: we have to introduce bags
if (len(forget_list) + len(introduce_list)) > 1:
if len(forget_list) > 0:
#case 1
ntree_bag.remove(forget_list[0])
new_child = TreeDecomposition(child, None, ntree_bag, BagType.F)
# introduce_list can be empty on a pure forget chain
if introduce_list:
new_child.set_label(str(introduce_list[0]))
ntree.set_bag_type(BagType.IV)
ntree.set_label(str(forget_list[0]))
ntree.set_left(new_child)
add_internal_nodes(new_child)
elif len(introduce_list) > 0:
#case 2
ntree_bag.append(introduce_list[0])
new_child = TreeDecomposition(child, None, ntree_bag, BagType.IV)
new_child.set_label(str(introduce_list[0]))
ntree.set_bag_type(BagType.F)
ntree.set_label(str(introduce_list[0]))
ntree.set_left(new_child)
add_internal_nodes(new_child)
if child.get_bag() is not None:
add_internal_nodes(child)
# calculates the intersection of two bags
# example: [a,b,c] and [b,f,g] -> [b]
def get_intersection(first_bag, scnd_bag):
return list(set(first_bag).intersection(set(scnd_bag)))
# calculates the difference of two bags
# ! result depends on order of parameters !
# example: [a,b,c] and [a] -> [b,c]
def get_bag_difference(first_bag, scnd_bag):
return list(set(first_bag).difference(set(scnd_bag)))
def get_child(ntree):
left = ntree.get_left()
if left is not None:
return left
return ntree.get_right()
def has_two_children(ntree):
if ntree.get_left() is not None and ntree.get_right() is not None:
return True
return False
# execute inorder_edge_bag for each edge
def | (ntree, edges):
for edge in edges:
inorder_edge_bag(ntree, edge, False)
# this function traverses the tree in-order
# for each edge of the initial graph and
# should place an extra 'introduce edge bag'
# above the first node which contains the edge
def inorder_edge_bag(ntree, edge, found):
if not found:
left_child = ntree.get_left()
right_child = ntree.get_right()
if left_child is not None:
if contains_edge(edge, left_child.get_bag()):
new_node = TreeDecomposition(left_child, None, left_child.get_bag(), BagType.IE)
new_node.set_label(edge)
ntree.set_left(new_node)
return inorder_edge_bag(ntree, edge, True)
else:
inorder_edge_bag(left_child, edge, False)
if right_child is not None:
if contains_edge(edge, right_child.get_bag()):
new_node = TreeDecomposition(right_child, None, right_child.get_bag(), BagType.IE)
new_node.set_label(edge)
ntree.set_right(new_node)
return inorder_edge_bag(ntree, edge, True)
else:
inorder_edge_bag(right_child, edge, False)
def has_at_least_one_child(ntree):
if ntree.get_left() is None:
if ntree.get_right() is None:
return False
return True
def contains_edge(edge, bag):
return len(set(edge).intersection(set(bag))) == 2
def increment_index():
global index
index += 1
def save_header(file):
file.write("graph NiceTreeDecomposition {\n")
file.write("size=\"1,1\";\n")
file.write("node [shape=box];\n")
def save_nodes(file, ntree):
#write the node
global index
node_symbol = get_next_symbol()
increment_index()
file.write(get_edge_line(node_symbol, ntree))
left_child = ntree.get_left()
right_child = ntree.get_right()
left = False
right = False
if left_child is not None:
left_symbol = get_next_symbol()
increment_index()
file.write(get_edge_line(left_symbol, left_child))
file.write(node_symbol + " -- " + left_symbol + " [type=s];\n")
left = True
if right_child is not None:
right_symbol = get_next_symbol()
increment_index()
file.write(get_edge_line(right_symbol, right_child))
file.write(node_symbol + " -- " + right_symbol + " [type=s];\n")
right = True
if left:
save_nodes(file, left_child)
if right:
save_nodes(file, right_child)
def get_edge_line(symbol, ntree):
return symbol + " [label=\"{{" + str(ntree.get_bag_type().value) + "|" + str(ntree.get_bag()) + "}}\"];\n"
def save_finish(file):
file.write("}")
def save_tree_decomposition(ntree, edges):
file = open("treeDecomposition.txt", "w")
save_header(file)
save_nodes(file, ntree)
#saveEdges(file, edges)
save_finish(file)
file.close()
def get_next_symbol():
alph = list(string.ascii_uppercase)
return alph[index % 24] + str(index // 24)
| edge_bags | identifier_name |
TreeDecomposition.py | # class to implement the nice tree decomposition conversion
# from paper
# 'Solving connectivity problems parameterized by treewidth in
# single exponential time' [Cygan,Nederlof,Pilipczuk,Rooij,Wojtaszczyk]
# TODO assert isinstance etc, class name, class functions, bag as set
# TODO getTreewidth
from BagType import BagType
import copy
import string
index = 0
class TreeDecomposition:
def __init__(self, left=None, right=None, bag=None, bag_type=None):
self.left = left
self.right = right
self.bag = bag
#convertToNiceTree()
self.bag_type = bag_type
self.label = {}
def set_bag_type(self, bag_type):
self.bag_type = bag_type
def get_bag_type(self):
|
def __str__(self):
return str(self.bag)
def get_right(self):
return self.right
def get_left(self):
return self.left
def get_bag(self):
return self.bag
def set_right(self, right):
self.right = right
def set_left(self, left):
self.left = left
def set_label(self, label):
self.label = label
def get_label(self):
return self.label
def print_nice_tree_indented(self, level=0):
if self is None:
print(str(level) + ' none')
return
print(str(level) + ' ' + str(self.bag) + ' ' + str(self.get_bag_type()) + ' ' + str(self.get_label()))
TreeDecomposition.print_nice_tree_indented(self.left, level+1)
TreeDecomposition.print_nice_tree_indented(self.right, level+1)
# This function traverses the tree in-order
# and searches for the leaves of the tree
# (both children non-existent)
# when it finds one, it sets its left child
# as the result of create_leaf with its bag
def leaf(tree):
if tree.get_left() is not None:
leaf(tree.get_left())
if tree.get_right() is not None:
leaf(tree.get_right())
if (len(tree.get_bag()) > 0) and (tree.get_left() is None) and (tree.get_right() is None):
tree.set_bag_type(BagType.IV)
tree.set_label(str(tree.get_bag()[0]))
tree.left = create_leaf(tree.get_bag())
# creates new nodes one at a time until the bag is empty
# (forgetting one vertex per step)
# so that the last node has an empty bag and
# gets the type leaf bag
def create_leaf(bag):
if len(bag) > 1:
new_bag = TreeDecomposition(create_leaf(bag[1:]), None, bag[1:], BagType.IV)
new_bag.set_label(str(bag[1]))
return new_bag
return TreeDecomposition(None, None, [], BagType.L)
def is_child_smaller(old_root):
return len(get_bag_difference(old_root.get_bag(), get_child(old_root).get_bag())) == 1
# used to define the BagType of the old root
def root(old_root):
bag_diff = get_bag_difference(old_root.get_bag(), get_child(old_root).get_bag())
if len(bag_diff) == 1:
old_root.set_bag_type(BagType.IV)
old_root.set_label(str(bag_diff[0]))
else:
old_root.set_bag_type(BagType.F)
old_root.set_label(str(get_bag_difference(get_child(old_root).get_bag(), old_root.get_bag())[0]))
return init_root(old_root)
# This function gets the old root of the tree
# and introduces new root nodes as long as
# the bag of the old root is not empty by
# forgetting one vertex in every step.
# The newly introduced nodes are marked as
# forget bags and the last one to be introduced
# as root with an empty bag (definition 2.3)
def init_root(old_root):
bag = old_root.get_bag()
if len(bag) > 1:
new_root = TreeDecomposition(old_root, None, bag[1:], BagType.F)
new_root.set_label(str(bag[0]))
return init_root(new_root)
return TreeDecomposition(old_root, None, [], BagType.R)
# The join function traverses the given tree in-order
# and checks for every node if there are two children
# if yes (and their bags are not equal -> already joined)
# then we introduce a join bag and two equal children
# (according to definition 2.3)
# otherwise we continue traversing
def join(tree):
left_node = tree.get_left()
right_node = tree.get_right()
if has_two_children(tree):
right_bag = tree.get_right().get_bag()
left_bag = tree.get_left().get_bag()
if not are_equals_bags(right_bag, left_bag):
tree.set_bag_type(BagType.J)
tree_bag = tree.get_bag()
new_left_node = TreeDecomposition(left_node, None, tree_bag)
new_right_node = TreeDecomposition(right_node, None, tree_bag)
tree.set_left(new_left_node)
tree.set_right(new_right_node)
if left_node is not None:
join(left_node)
if right_node is not None:
join(right_node)
def are_equals_bags(first_bag, scnd_bag):
return set(first_bag) == set(scnd_bag)
# A nice tree decomposition (definition 2.3) uses
# structures of the standard nice tree decomposition.
# This function takes care that the third property
# of definition 2.2 of a standard nice tree
# decomposition is guaranteed. We achieve this by
# introducing forget bags and introduce vertex bags
# between two connected nodes whose intersection is
# bigger than one and/or if they contain different
# vertices.
# We first 'forget' all vertices of the parent node
# and from there on we introduce all the vertices
# which existed in the previous child node
# Additionally, we use the assumption that if a node
# has two children, we don't have to examine it
# as we executed join beforehand
def add_internal_nodes(ntree):
if has_two_children(ntree):
add_internal_nodes(ntree.get_left())
add_internal_nodes(ntree.get_right())
else:
child = get_child(ntree)
ntree_bag = copy.copy(ntree.get_bag())
if child is not None and ntree_bag is not None:
child_bag = child.get_bag()
intersection = get_intersection(ntree_bag, child_bag)
forget_list = get_bag_difference(ntree_bag, intersection)
introduce_list = get_bag_difference(child_bag, intersection)
# forget_list = list of vertices which have to be removed from the bag
# while traversing downwards the tree (we remove one each step)
# introduce_list = list of vertices which have to be added to the bag
# while traversing downwards the tree (we add one each step)
# if the combined length of those two lists is 1 or less, then
# we know we don't have to introduce/forget vertices
# otherwise we have two cases:
# 1: we still have to forget bags
# 2: we have to introduce bags
if (len(forget_list) + len(introduce_list)) > 1:
if len(forget_list) > 0:
#case 1
ntree_bag.remove(forget_list[0])
new_child = TreeDecomposition(child, None, ntree_bag, BagType.F)
# introduce_list can be empty on a pure forget chain
if introduce_list:
new_child.set_label(str(introduce_list[0]))
ntree.set_bag_type(BagType.IV)
ntree.set_label(str(forget_list[0]))
ntree.set_left(new_child)
add_internal_nodes(new_child)
elif len(introduce_list) > 0:
#case 2
ntree_bag.append(introduce_list[0])
new_child = TreeDecomposition(child, None, ntree_bag, BagType.IV)
new_child.set_label(str(introduce_list[0]))
ntree.set_bag_type(BagType.F)
ntree.set_label(str(introduce_list[0]))
ntree.set_left(new_child)
add_internal_nodes(new_child)
if child.get_bag() is not None:
add_internal_nodes(child)
# calculates the intersection of two bags
# example: [a,b,c] and [b,f,g] -> [b]
def get_intersection(first_bag, scnd_bag):
return list(set(first_bag).intersection(set(scnd_bag)))
# calculates the difference of two bags
# ! result depends on order of parameters !
# example: [a,b,c] and [a] -> [b,c]
def get_bag_difference(first_bag, scnd_bag):
return list(set(first_bag).difference(set(scnd_bag)))
def get_child(ntree):
left = ntree.get_left()
if left is not None:
return left
return ntree.get_right()
def has_two_children(ntree):
if ntree.get_left() is not None and ntree.get_right() is not None:
return True
return False
# execute inorder_edge_bag for each edge
def edge_bags(ntree, edges):
for edge in edges:
inorder_edge_bag(ntree, edge, False)
# this function traverses the tree in-order
# for each edge of the initial graph and
# should place an extra 'introduce edge bag'
# above the first node which contains the edge
def inorder_edge_bag(ntree, edge, found):
if not found:
left_child = ntree.get_left()
right_child = ntree.get_right()
if left_child is not None:
if contains_edge(edge, left_child.get_bag()):
new_node = TreeDecomposition(left_child, None, left_child.get_bag(), BagType.IE)
new_node.set_label(edge)
ntree.set_left(new_node)
return inorder_edge_bag(ntree, edge, True)
else:
inorder_edge_bag(left_child, edge, False)
if right_child is not None:
if contains_edge(edge, right_child.get_bag()):
new_node = TreeDecomposition(right_child, None, right_child.get_bag(), BagType.IE)
new_node.set_label(edge)
ntree.set_right(new_node)
return inorder_edge_bag(ntree, edge, True)
else:
inorder_edge_bag(right_child, edge, False)
def has_at_least_one_child(ntree):
if ntree.get_left() is None:
if ntree.get_right() is None:
return False
return True
def contains_edge(edge, bag):
return len(set(edge).intersection(set(bag))) == 2
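# contains_edge expects both endpoints of the edge in the bag (illustrative):
#   contains_edge(('a', 'b'), ['a', 'b', 'c'])  -> True
#   contains_edge(('a', 'd'), ['a', 'b', 'c'])  -> False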
def increment_index():
global index
index += 1
def save_header(file):
file.write("graph NiceTreeDecomposition {\n")
file.write("size=\"1,1\";\n")
file.write("node [shape=box];\n")
def save_nodes(file, ntree):
#write the node
global index
node_symbol = get_next_symbol()
increment_index()
file.write(get_edge_line(node_symbol, ntree))
left_child = ntree.get_left()
right_child = ntree.get_right()
left = False
right = False
if left_child is not None:
left_symbol = get_next_symbol()
increment_index()
file.write(get_edge_line(left_symbol, left_child))
file.write(node_symbol + " -- " + left_symbol + " [type=s];\n")
left = True
if right_child is not None:
right_symbol = get_next_symbol()
increment_index()
file.write(get_edge_line(right_symbol, right_child))
file.write(node_symbol + " -- " + right_symbol + " [type=s];\n")
right = True
if left:
save_nodes(file, left_child)
if right:
save_nodes(file, right_child)
def get_edge_line(symbol, ntree):
return symbol + " [label=\"{{" + str(ntree.get_bag_type().value) + "|" + str(ntree.get_bag()) + "}}\"];\n"
def save_finish(file):
file.write("}")
def save_tree_decomposition(ntree, edges):
file = open("treeDecomposition.txt", "w")
save_header(file)
save_nodes(file, ntree)
#saveEdges(file, edges)
save_finish(file)
file.close()
def get_next_symbol():
alph = list(string.ascii_uppercase)
return alph[index % 24] + str(index // 24)
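# The symbol sequence produced as the global index grows (24 letters per
# block, since the code cycles through A..X; illustrative):
#   index 0 -> 'A0', index 23 -> 'X0', index 24 -> 'A1', ...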
| return self.bag_type | identifier_body |
webserver.ts | // Strautomator: WebServer
import {strava, paypal} from "strautomator-core"
import express = require("express")
import _ from "lodash"
import fs = require("fs")
import http = require("http")
import https = require("https")
import logger from "anyhow"
import path = require("path")
const settings = require("setmeup").settings
/**
* Exposes the Express server.
*/
class WebServer {
private constructor() {}
private static _instance: WebServer
static get Instance(): WebServer {
return this._instance || (this._instance = new this())
}
/**
* The Express app.
*/
app: express.Express
/**
* The underlying HTTP(S) server.
*/
server: http.Server
// INIT
// --------------------------------------------------------------------------
/**
* Init the web server. If the strautomator.key and strautomator.cert files are
* present, listen on HTTPS, otherwise regular HTTP.
*/
init = async (nuxtRender): Promise<void> => {
try {
this.app = express()
let protocol: string
// Check if certificate files are present.
if (fs.existsSync(`./strautomator.cert`) && fs.existsSync(`./strautomator.key`)) {
const cert = fs.readFileSync("./strautomator.cert", "utf8")
const key = fs.readFileSync("./strautomator.key", "utf8")
const options = {
cert: cert,
key: key
}
this.server = https.createServer(options, this.app)
protocol = "HTTPS"
// HTTPS defaults to port 8443.
if (!settings.app.port) {
settings.app.port = 8443
}
} else {
this.server = http.createServer(this.app)
protocol = "HTTP"
// HTTP defaults to port 8080.
if (!settings.app.port) {
settings.app.port = 8080
}
}
// When running behind a proxy / LB.
this.app.set("trust proxy", settings.app.trustProxy)
// Debug enabled? Log all requests.
if (settings.app.debug) {
this.app.use((req: express.Request, _res, next) => {
logger.debug("WebServer", req.method, req.url)
next()
})
}
// Add body parser.
const bodyParser = require("body-parser")
this.app.use(bodyParser.json())
this.app.use((err: Error, req: express.Request, res: express.Response, next) => {
if (err) {
return this.renderError(req, res, err.toString(), 400)
}
next()
})
// Set API rate limiting (if defined on the settings).
if (settings.api.rateLimit && settings.api.rateLimit.max) {
const rateLimitOptions = _.cloneDeep(settings.api.rateLimit)
rateLimitOptions.handler = (req: express.Request, res: express.Response, next, options) => {
try {
if (!res.headersSent) {
logger.warn("Routes", req.method, req.originalUrl, `From: ${req.ip}`, "Rate limited")
res.status(options.statusCode).send(options.message)
} else {
logger.warn("Routes", req.method, req.originalUrl, `From: ${req.ip}`, `Rate limited, but status sent as ${res.statusCode}`)
}
} catch (ex) {
logger.error("Routes", req.method, req.originalUrl, `From: ${req.ip}`, "Rate limit handler failed", ex)
next()
}
}
const rateLimit = require("express-rate-limit")(rateLimitOptions)
this.app.use("/api/*", rateLimit)
this.app.use("/api/*", (req, res, next) => {
const reqRateLimit = (req as any).rateLimit
const statusCode = res.statusCode || "not sent"
if (reqRateLimit && [50, 10, 5, 1].includes(reqRateLimit.remaining)) {
logger.warn("Routes", req.method, req.originalUrl, `From ${req.ip}`, `Status ${statusCode}`, `Rate limit remaining: ${reqRateLimit.remaining}`)
}
next()
})
}
// Only accept connections coming via Cloudflare?
if (settings.api.requireCloudflare) {
this.app.use("/api/*", (req, res, next) => {
if (!req.headers["cf-ray"]) {
logger.error("WebServer.requireCloudflare", req.method, req.originalUrl, "Missing CF-Ray header", req.ip)
if (!res.headersSent) {
return this.renderError(req, res, "Access denied", 401)
} else {
return res.end()
}
}
next()
})
logger.info("WebServer.init", "API requests via Cloudflare required")
}
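// Illustrative shape of the setmeup settings consumed above (all values
// are hypothetical, not defaults shipped with the app):
// {
//     app: {port: 8080, trustProxy: 1, debug: false, allowOriginHeader: "*"},
//     api: {requireCloudflare: true, rateLimit: {max: 100, windowMs: 60000}},
//     webhooks: {retryInterval: 300000}
// }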
// Load routes.
const routers = fs.readdirSync(`${__dirname}/routes/api`)
for (let r of routers) {
if (r.indexOf(".d.ts") < 0) {
const basename = path.basename(r).split(".")[0]
this.app.use(`/api/${basename}`, require(`./routes/api/${r}`))
}
}
// Use Nuxt render.
this.app.use(nuxtRender)
// Start the HTTP(S) server listening on the configured port.
this.server.listen(settings.app.port)
logger.info("WebServer.init", protocol, `Server ready on port ${settings.app.port}`)
// Setup webhooks.
await this.setupWebhooks()
} catch (ex) {
logger.error("WebServer.init", ex)
process.exit(1)
}
}
/**
* Prepare webhooks with Strava and PayPal.
*/
setupWebhooks = async () => {
let err = null | try {
if (!strava.webhooks.current || strava.webhooks.current.callbackUrl != strava.webhooks.callbackUrl) {
try {
await strava.webhooks.cancelWebhook()
} catch (cancelEx) {
logger.warn("WebServer.setupWebhooks", "Could not cancel the current Strava webhook, will proceed anyways")
}
await strava.webhooks.createWebhook()
}
} catch (ex) {
logger.error("WebServer.setupWebhooks", "Could not setup the Strava webhook")
err = ex
}
try {
const webhooks = await paypal.webhooks.getWebhooks()
// PayPal webhooks are disabled on Beta.
if (!settings.beta.enabled) {
const existingWebhook = _.find(webhooks, {url: paypal.webhookUrl})
// No webhooks on PayPal yet? Register one now.
if (!existingWebhook) {
logger.warn("WebServer.setupWebhooks", "No matching webhook (URL) found on PayPal, will register one now")
await paypal.webhooks.createWebhook()
}
}
} catch (ex) {
logger.error("WebServer.setupWebhooks", "Could not setup the PayPal webhook")
err = ex
}
if (err) {
logger.error("WebServer.init", `Will retry the webhook setup later`)
const callback = async () => {
await this.setupWebhooks()
}
setTimeout(callback, settings.webhooks.retryInterval)
}
}
// HELPER METHODS
// --------------------------------------------------------------------------
/**
* Render response as JSON data and send to the client.
* @param req The Express request object.
* @param res The Express response object.
* @param data The JSON data to be sent.
* @param status Optional status code, defaults to 200.
* @event renderJson
*/
renderJson = (req: express.Request, res: express.Response, data: any, status?: number) => {
logger.debug("WebServer.renderJson", req.originalUrl, data)
if (_.isString(data)) {
try {
data = JSON.parse(data)
} catch (ex) {
logger.error("WebServer.renderJson", ex)
return this.renderError(req, res, ex)
}
}
// A specific status code was passed?
if (status) {
res.status(status)
}
// Add Access-Control-Allow-Origin if set.
if (settings.app.allowOriginHeader) {
res.setHeader("Access-Control-Allow-Origin", settings.app.allowOriginHeader)
}
// Send JSON response.
res.json(data)
}
/**
* Sends error response as JSON.
* @param req The Express request object.
* @param res The Express response object.
* @param error The error object or message to be sent to the client.
* @param status The response status code, optional, default is 500.
* @event renderError
*/
renderError = (req: express.Request, res: express.Response, error: any, status?: number | string) => {
let message
// Default statuses.
if (status == null) {
status = error.statusCode || error.status || error.code
}
if (status == "ECONNRESET" || status == "ECONNABORTED" || status == "ETIMEDOUT") {
status = 408
}
if (_.isNil(error)) {
error = "Unknown error"
logger.warn("Routes", req.method, req.originalUrl, "Called with empty error")
} else if (["POST", "PUT", "PATCH"].includes(req.method) && req.body) {
logger.error("Routes", req.method, req.originalUrl, error, `Body: ${JSON.stringify(req.body, null, 0)}`)
} else {
logger.error("Routes", req.method, req.originalUrl, error)
}
// Error defaults to 500 if not a valid number.
if (!_.isNumber(status)) {
status = 500
}
try {
// Error inside another .error property?
if (error.error && !error.message && !error.error_description && !error.reason) {
error = error.error
}
if (_.isString(error)) {
message = {message: error}
} else {
message = {}
message.message = error.message || error.error_description || error.description
// No message found? Just use the default .toString() then.
if (!message.message) {
message.message = error.toString()
}
if (error.friendlyMessage) {
message.friendlyMessage = error.friendlyMessage
}
if (error.reason) {
message.reason = error.reason
}
if (error.code) {
message.code = error.code
} else if (error.status) {
message.code = error.status
}
}
} catch (ex) {
logger.error("WebServer.renderError", error, ex)
}
// Send error JSON to client.
res.status(status as number).json(message)
}
}
// Exports...
export = WebServer.Instance | random_line_split |
|
webserver.ts | // Strautomator: WebServer
import {strava, paypal} from "strautomator-core"
import express = require("express")
import _ from "lodash"
import fs = require("fs")
import http = require("http")
import https = require("https")
import logger from "anyhow"
import path = require("path")
const settings = require("setmeup").settings
/**
* Exposes the Express server.
*/
class WebServer {
private constructor() {}
private static _instance: WebServer
static get | (): WebServer {
return this._instance || (this._instance = new this())
}
/**
* The Express app.
*/
app: express.Express
/**
* The underlying HTTP(S) server.
*/
server: http.Server
// INIT
// --------------------------------------------------------------------------
/**
* Init the web server. If the strautomator.key and strautomator.cert files are
* present, listen on HTTPS, otherwise regular HTTP.
*/
init = async (nuxtRender): Promise<void> => {
try {
this.app = express()
let protocol: string
// Check if certificate files are present.
if (fs.existsSync(`./strautomator.cert`) && fs.existsSync(`./strautomator.key`)) {
const cert = fs.readFileSync("./strautomator.cert", "utf8")
const key = fs.readFileSync("./strautomator.key", "utf8")
const options = {
cert: cert,
key: key
}
this.server = https.createServer(options, this.app)
protocol = "HTTPS"
// HTTPS defaults to port 8443.
if (!settings.app.port) {
settings.app.port = 8443
}
} else {
this.server = http.createServer(this.app)
protocol = "HTTP"
// HTTP defaults to port 8080.
if (!settings.app.port) {
settings.app.port = 8080
}
}
// When running behind a proxy / LB.
this.app.set("trust proxy", settings.app.trustProxy)
// Debug enabled? Log all requests.
if (settings.app.debug) {
this.app.use((req: express.Request, _res, next) => {
logger.debug("WebServer", req.method, req.url)
next()
})
}
// Add body parser.
const bodyParser = require("body-parser")
this.app.use(bodyParser.json())
this.app.use((err: Error, req: express.Request, res: express.Response, next) => {
if (err) {
return this.renderError(req, res, err.toString(), 400)
}
next()
})
// Set API rate limiting (if defined on the settings).
if (settings.api.rateLimit && settings.api.rateLimit.max) {
const rateLimitOptions = _.cloneDeep(settings.api.rateLimit)
rateLimitOptions.handler = (req: express.Request, res: express.Response, next, options) => {
try {
if (!res.headersSent) {
logger.warn("Routes", req.method, req.originalUrl, `From: ${req.ip}`, "Rate limited")
res.status(options.statusCode).send(options.message)
} else {
logger.warn("Routes", req.method, req.originalUrl, `From: ${req.ip}`, `Rate limited, but status sent as ${res.statusCode}`)
}
} catch (ex) {
logger.error("Routes", req.method, req.originalUrl, `From: ${req.ip}`, "Rate limit handler failed", ex)
next()
}
}
const rateLimit = require("express-rate-limit")(rateLimitOptions)
this.app.use("/api/*", rateLimit)
this.app.use("/api/*", (req, res, next) => {
const reqRateLimit = (req as any).rateLimit
const statusCode = res.statusCode || "not sent"
if (reqRateLimit && [50, 10, 5, 1].includes(reqRateLimit.remaining)) {
logger.warn("Routes", req.method, req.originalUrl, `From ${req.ip}`, `Status ${statusCode}`, `Rate limit remaining: ${reqRateLimit.remaining}`)
}
next()
})
}
// Only accept connections coming via Cloudflare?
if (settings.api.requireCloudflare) {
this.app.use("/api/*", (req, res, next) => {
if (!req.headers["cf-ray"]) {
logger.error("WebServer.requireCloudflare", req.method, req.originalUrl, "Missing CF-Ray header", req.ip)
if (!res.headersSent) {
return this.renderError(req, res, "Access denied", 401)
} else {
return res.end()
}
}
next()
})
logger.info("WebServer.init", "API requests via Cloudflare required")
}
// Load routes.
const routers = fs.readdirSync(`${__dirname}/routes/api`)
for (let r of routers) {
if (r.indexOf(".d.ts") < 0) {
const basename = path.basename(r).split(".")[0]
this.app.use(`/api/${basename}`, require(`./routes/api/${r}`))
}
}
// Use Nuxt render.
this.app.use(nuxtRender)
// Start the HTTP(S) server listening on the configured port.
this.server.listen(settings.app.port)
logger.info("WebServer.init", protocol, `Server ready on port ${settings.app.port}`)
// Setup webhooks.
await this.setupWebhooks()
} catch (ex) {
logger.error("WebServer.init", ex)
process.exit(1)
}
}
/**
* Prepare webhooks with Strava and PayPal.
*/
setupWebhooks = async () => {
let err = null
try {
if (!strava.webhooks.current || strava.webhooks.current.callbackUrl != strava.webhooks.callbackUrl) {
try {
await strava.webhooks.cancelWebhook()
} catch (cancelEx) {
logger.warn("WebServer.setupWebhooks", "Could not cancel the current Strava webhook, will proceed anyways")
}
await strava.webhooks.createWebhook()
}
} catch (ex) {
logger.error("WebServer.setupWebhooks", "Could not setup the Strava webhook")
err = ex
}
try {
const webhooks = await paypal.webhooks.getWebhooks()
// PayPal webhooks are disabled on Beta.
if (!settings.beta.enabled) {
const existingWebhook = _.find(webhooks, {url: paypal.webhookUrl})
// No webhooks on PayPal yet? Register one now.
if (!existingWebhook) {
logger.warn("WebServer.setupWebhooks", "No matching webhook (URL) found on PayPal, will register one now")
await paypal.webhooks.createWebhook()
}
}
} catch (ex) {
logger.error("WebServer.setupWebhooks", "Could not setup the PayPal webhook")
err = ex
}
if (err) {
logger.error("WebServer.init", `Will retry the webhook setup later`)
const callback = async () => {
await this.setupWebhooks()
}
setTimeout(callback, settings.webhooks.retryInterval)
}
}
// HELPER METHODS
// --------------------------------------------------------------------------
/**
* Render response as JSON data and send to the client.
* @param req The Express request object.
* @param res The Express response object.
* @param data The JSON data to be sent.
* @param status Optional status code, defaults to 200.
* @event renderJson
*/
renderJson = (req: express.Request, res: express.Response, data: any, status?: number) => {
logger.debug("WebServer.renderJson", req.originalUrl, data)
if (_.isString(data)) {
try {
data = JSON.parse(data)
} catch (ex) {
logger.error("WebServer.renderJson", ex)
return this.renderError(req, res, ex)
}
}
// A specific status code was passed?
if (status) {
res.status(status)
}
// Add Access-Control-Allow-Origin if set.
if (settings.app.allowOriginHeader) {
res.setHeader("Access-Control-Allow-Origin", settings.app.allowOriginHeader)
}
// Send JSON response.
res.json(data)
}
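// Hypothetical usage from a route module (renderJson/renderError are the
// helpers on this singleton; the route itself is illustrative):
// const webserver = require("../../webserver")
// router.get("/ping", (req, res) => {
//     webserver.renderJson(req, res, {pong: true})
// })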
/**
* Sends error response as JSON.
* @param req The Express request object.
* @param res The Express response object.
* @param error The error object or message to be sent to the client.
* @param status The response status code, optional, default is 500.
* @event renderError
*/
renderError = (req: express.Request, res: express.Response, error: any, status?: number | string) => {
let message
// Default statuses.
if (status == null) {
status = error.statusCode || error.status || error.code
}
if (status == "ECONNRESET" || status == "ECONNABORTED" || status == "ETIMEDOUT") {
status = 408
}
if (_.isNil(error)) {
error = "Unknown error"
logger.warn("Routes", req.method, req.originalUrl, "Called with empty error")
} else if (["POST", "PUT", "PATCH"].includes(req.method) && req.body) {
logger.error("Routes", req.method, req.originalUrl, error, `Body: ${JSON.stringify(req.body, null, 0)}`)
} else {
logger.error("Routes", req.method, req.originalUrl, error)
}
// Error defaults to 500 if not a valid number.
if (!_.isNumber(status)) {
status = 500
}
try {
// Error inside another .error property?
if (error.error && !error.message && !error.error_description && !error.reason) {
error = error.error
}
if (_.isString(error)) {
message = {message: error}
} else {
message = {}
message.message = error.message || error.error_description || error.description
// No message found? Just use the default .toString() then.
if (!message.message) {
message.message = error.toString()
}
if (error.friendlyMessage) {
message.friendlyMessage = error.friendlyMessage
}
if (error.reason) {
message.reason = error.reason
}
if (error.code) {
message.code = error.code
} else if (error.status) {
message.code = error.status
}
}
} catch (ex) {
logger.error("WebServer.renderError", error, ex)
}
// Send error JSON to client.
res.status(status as number).json(message)
}
}
// Exports...
export = WebServer.Instance
| Instance | identifier_name |
webserver.ts | // Strautomator: WebServer
import {strava, paypal} from "strautomator-core"
import express = require("express")
import _ from "lodash"
import fs = require("fs")
import http = require("http")
import https = require("https")
import logger from "anyhow"
import path = require("path")
const settings = require("setmeup").settings
/**
* Exposes the Express server.
*/
class WebServer {
private constructor() {}
private static _instance: WebServer
static get Instance(): WebServer {
return this._instance || (this._instance = new this())
}
/**
* The Express app.
*/
app: express.Express
/**
* The underlying HTTP(S) server.
*/
server: http.Server
// INIT
// --------------------------------------------------------------------------
/**
* Init the web server. If the strautomator.key and strautomator.cert files are
* present, listen on HTTPS, otherwise regular HTTP.
*/
init = async (nuxtRender): Promise<void> => {
try {
this.app = express()
let protocol: string
// Check if certificate files are present.
if (fs.existsSync(`./strautomator.cert`) && fs.existsSync(`./strautomator.key`)) {
const cert = fs.readFileSync("./strautomator.cert", "utf8")
const key = fs.readFileSync("./strautomator.key", "utf8")
const options = {
cert: cert,
key: key
}
this.server = https.createServer(options, this.app)
protocol = "HTTPS"
// HTTPS defaults to port 8443.
if (!settings.app.port) {
settings.app.port = 8443
}
} else {
this.server = http.createServer(this.app)
protocol = "HTTP"
// HTTP defaults to port 8080.
if (!settings.app.port) {
settings.app.port = 8080
}
}
// When running behind a proxy / LB.
this.app.set("trust proxy", settings.app.trustProxy)
// Debug enabled? Log all requests.
if (settings.app.debug) {
this.app.use((req: express.Request, _res, next) => {
logger.debug("WebServer", req.method, req.url)
next()
})
}
// Add body parser.
const bodyParser = require("body-parser")
this.app.use(bodyParser.json())
this.app.use((err: Error, req: express.Request, res: express.Response, next) => {
if (err) {
return this.renderError(req, res, err.toString(), 400)
}
next()
})
// Set API rate limiting (if defined on the settings).
if (settings.api.rateLimit && settings.api.rateLimit.max) {
const rateLimitOptions = _.cloneDeep(settings.api.rateLimit)
rateLimitOptions.handler = (req: express.Request, res: express.Response, next, options) => {
try {
if (!res.headersSent) {
logger.warn("Routes", req.method, req.originalUrl, `From: ${req.ip}`, "Rate limited")
res.status(options.statusCode).send(options.message)
} else {
logger.warn("Routes", req.method, req.originalUrl, `From: ${req.ip}`, `Rate limited, but status sent as ${res.statusCode}`)
}
} catch (ex) {
logger.error("Routes", req.method, req.originalUrl, `From: ${req.ip}`, "Rate limit handler failed", ex)
next()
}
}
const rateLimit = require("express-rate-limit")(rateLimitOptions)
this.app.use("/api/*", rateLimit)
this.app.use("/api/*", (req, res, next) => {
const reqRateLimit = (req as any).rateLimit
const statusCode = res.statusCode || "not sent"
if (reqRateLimit && [50, 10, 5, 1].includes(reqRateLimit.remaining)) {
logger.warn("Routes", req.method, req.originalUrl, `From ${req.ip}`, `Status ${statusCode}`, `Rate limit remaining: ${reqRateLimit.remaining}`)
}
next()
})
}
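// When the limit is exceeded, the handler above replies with the configured
// status code and message, e.g. (both values come from settings.api.rateLimit
// and are shown here hypothetically):
//   HTTP/1.1 429 Too Many Requests
//   "Too many requests, please try again later"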
// Only accept connections coming via Cloudflare?
if (settings.api.requireCloudflare) {
this.app.use("/api/*", (req, res, next) => {
if (!req.headers["cf-ray"]) {
logger.error("WebServer.requireCloudflare", req.method, req.originalUrl, "Missing CF-Ray header", req.ip)
if (!res.headersSent) {
return this.renderError(req, res, "Access denied", 401)
} else {
return res.end()
}
}
next()
})
logger.info("WebServer.init", "API requests via Cloudflare required")
}
// Load routes.
const routers = fs.readdirSync(`${__dirname}/routes/api`)
for (let r of routers) {
if (r.indexOf(".d.ts") < 0) {
const basename = path.basename(r).split(".")[0]
this.app.use(`/api/${basename}`, require(`./routes/api/${r}`))
}
}
// Use Nuxt render.
this.app.use(nuxtRender)
// Start the HTTP(S) server listening on the configured port.
this.server.listen(settings.app.port)
logger.info("WebServer.init", protocol, `Server ready on port ${settings.app.port}`)
// Setup webhooks.
await this.setupWebhooks()
} catch (ex) {
logger.error("WebServer.init", ex)
process.exit(1)
}
}
/**
* Prepare webhooks with Strava and PayPal.
*/
setupWebhooks = async () => {
let err = null
try {
if (!strava.webhooks.current || strava.webhooks.current.callbackUrl != strava.webhooks.callbackUrl) {
try {
await strava.webhooks.cancelWebhook()
} catch (cancelEx) {
logger.warn("WebServer.setupWebhooks", "Could not cancel the current Strava webhook, will proceed anyways")
}
await strava.webhooks.createWebhook()
}
} catch (ex) {
logger.error("WebServer.setupWebhooks", "Could not setup the Strava webhook")
err = ex
}
try {
const webhooks = await paypal.webhooks.getWebhooks()
// PayPal webhooks are disabled on Beta.
if (!settings.beta.enabled) {
const existingWebhook = _.find(webhooks, {url: paypal.webhookUrl})
// No webhooks on PayPal yet? Register one now.
if (!existingWebhook) {
logger.warn("WebServer.setupWebhooks", "No matching webhook (URL) found on PayPal, will register one now")
await paypal.webhooks.createWebhook()
}
}
} catch (ex) {
logger.error("WebServer.setupWebhooks", "Could not setup the PayPal webhook")
err = ex
}
if (err) {
logger.error("WebServer.init", `Will retry the webhook setup later`)
const callback = async () => {
await this.setupWebhooks()
}
setTimeout(callback, settings.webhooks.retryInterval)
}
}
// HELPER METHODS
// --------------------------------------------------------------------------
/**
* Render response as JSON data and send to the client.
* @param req The Express request object.
* @param res The Express response object.
* @param data The JSON data to be sent.
* @param status Optional status code, defaults to 200.
* @event renderJson
*/
renderJson = (req: express.Request, res: express.Response, data: any, status?: number) => {
logger.debug("WebServer.renderJson", req.originalUrl, data)
if (_.isString(data)) {
try {
data = JSON.parse(data)
} catch (ex) {
logger.error("WebServer.renderJson", ex)
return this.renderError(req, res, ex)
}
}
// A specific status code was passed?
if (status) {
res.status(status)
}
// Add Access-Control-Allow-Origin if set.
if (settings.app.allowOriginHeader) {
res.setHeader("Access-Control-Allow-Origin", settings.app.allowOriginHeader)
}
// Send JSON response.
res.json(data)
}
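// Usage sketch from a route handler (hypothetical route and payload, not from the source):
// webServer.renderJson(req, res, {user: "jane", rides: 42}, 200)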
/**
* Sends error response as JSON.
* @param req The Express request object.
* @param res The Express response object.
* @param error The error object or message to be sent to the client.
* @param status The response status code, optional, default is 500.
* @event renderError
*/
renderError = (req: express.Request, res: express.Response, error: any, status?: number | string) => {
let message
// Default statuses.
if (status == null) {
status = error.statusCode || error.status || error.code
}
if (status == "ECONNRESET" || status == "ECONNABORTED" || status == "ETIMEDOUT") {
status = 408
}
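// Example: an upstream timeout surfacing as {code: "ETIMEDOUT"} is normalized above
// and reported to the client as HTTP 408 (Request Timeout).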
if (_.isNil(error)) {
error = "Unknown error"
logger.warn("Routes", req.method, req.originalUrl, "Called with empty error")
} else if (["POST", "PUT", "PATCH"].includes(req.method) && req.body) {
logger.error("Routes", req.method, req.originalUrl, error, `Body: ${JSON.stringify(req.body, null, 0)}`)
} else {
logger.error("Routes", req.method, req.originalUrl, error)
}
// Error defaults to 500 if not a valid number.
if (!_.isNumber(status)) {
status = 500
}
try {
// Error inside another .error property?
if (error.error && !error.message && !error.error_description && !error.reason) {
error = error.error
}
if (_.isString(error)) {
message = {message: error}
} else {
message = {}
message.message = error.message || error.error_description || error.description
// No message found? Just use the default .toString() then.
if (!message.message) {
message.message = error.toString()
}
if (error.friendlyMessage) {
message.friendlyMessage = error.friendlyMessage
}
if (error.reason) {
message.reason = error.reason
}
if (error.code) {
message.code = error.code
} else if (error.status) {
message.code = error.status
}
}
} catch (ex) {
logger.error("WebServer.renderError", error, ex)
}
// Send error JSON to client.
res.status(status as number).json(message)
}
}
// Exports...
export = WebServer.Instance
// app.js
"use strict";
function _typeof(obj) { "@babel/helpers - typeof"; if (typeof Symbol === "function" && typeof Symbol.iterator === "symbol") { _typeof = function _typeof(obj) { return typeof obj; }; } else { _typeof = function _typeof(obj) { return obj && typeof Symbol === "function" && obj.constructor === Symbol && obj !== Symbol.prototype ? "symbol" : typeof obj; }; } return _typeof(obj); }
var app;
app = angular.module('app', ['angularMoment', 'ngCookies', 'ngStorage', 'angular-loading-bar', 'ui.bootstrap', 'ngContextMenu', 'ngSidebarJS', 'ng.ckeditor', 'ng.multicombo'], ["$locationProvider", "$sceProvider", function ($locationProvider, $sceProvider) {
$locationProvider.html5Mode(true);
}]);
// Completely disable SCE. For demonstration purposes only!
// Do not use in new projects or libraries.
//$sceProvider.enabled(false);
app.config(['$localStorageProvider', function ($localStorageProvider) {
var dateObjPrefix, deserializer, isDate, parseISOString, serializer; // var serializer = angular.toJson;
// var deserializer = angular.fromJson;
dateObjPrefix = 'ngStorage-Type-[object Date]:';
serializer = function serializer(v) {
if (isDate(v)) {
return dateObjPrefix + v.getTime();
} else {
return angular.toJson(v);
}
};
deserializer = function deserializer(v) {
if (v.startsWith(dateObjPrefix)) {
return parseISOString(v);
} else {
return angular.fromJson(v);
}
};
isDate = function isDate(date) {
return date && Object.prototype.toString.call(date) === '[object Date]' && !isNaN(date);
};
parseISOString = function parseISOString(s) {
var b;
b = s.replace(dateObjPrefix, '').replace('"', '').split(/\D+/);
return new Date(parseInt(b[0]));
}; //return (console.log("parse:" + new Date(b[0], b[1] -1, b[2], b[3], b[4], b[5], b[6])), new Date(b[0], b[1] -1, b[2], b[3], b[4], b[5], b[6]));
$localStorageProvider.setSerializer(serializer);
$localStorageProvider.setDeserializer(deserializer);
}]);
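// Round-trip sketch (assumed usage): $localStorage.lastSeen = new Date() is persisted as
// 'ngStorage-Type-[object Date]:1580515200000' and revived as a Date on read. Note that
// parseISOString actually parses the stored epoch milliseconds, not a true ISO string.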
app.constant('_', window._).run(["$rootScope", function ($rootScope) {
$rootScope._ = window._;
}]);
app.constant('jQuery', window.jQuery).run(["$rootScope", function ($rootScope) {
$rootScope.jQuery = window.jQuery;
}]);
app.run(["amMoment", function (amMoment) {
amMoment.changeLocale('en-au'); // moment ships the Australian locale as 'en-au'; plain 'au' is not a registered locale key
}]);
app.directive('upload', ['$http', function ($http) {
return {
restrict: 'E',
replace: true,
scope: {},
require: '?ngModel',
template: '<div class="asset-upload">Drag files here to upload</div>',
link: function link(scope, element, attrs, ngModel) {}
};
}]); // Code goes here
app.directive('ngConfirmClick', [function () {
return {
link: function link(scope, element, attr) {
var clickAction, msg;
msg = attr.ngConfirmClick || 'Are you sure?';
clickAction = attr.confirmedClick;
element.bind('click', function (event) {
if (window.confirm(msg)) {
scope.$eval(clickAction);
}
});
}
};
}]);
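// Usage sketch (hypothetical markup):
// <button ng-confirm-click="Delete this record?" confirmed-click="remove(item)">Delete</button>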
app.directive('ngEnter', function () {
return function (scope, element, attrs) {
element.bind('keydown keypress', function (event) {
if (event.which === 13) {
scope.$apply(function () {
scope.$eval(attrs.ngEnter, {
'event': event
});
});
event.preventDefault();
}
});
};
}); // app.directive('ngContextMenu', ['$document', '$window', function ($document, $window) {
// // Runs during compile
// return {
// restrict: 'A',
// link: function ($scope, $element, $attr) {
// var contextMenuElm,
// $contextMenuElm,
// windowWidth = window.innerWidth,
// windowHeight = window.innerHeight,
// contextMenuWidth,
// contextMenuHeight,
// contextMenuLeftPos = 0,
// contextMenuTopPos = 0,
// $w = $($window),
// caretClass = {
// topRight: 'context-caret-top-right',
// topLeft: 'context-caret-top-left',
// bottomRight: 'context-caret-bottom-right',
// bottomLeft: 'context-caret-bottom-left'
// },
// menuItems = $attr.menuItems;
// function createContextMenu() {
// var fragment = document.createDocumentFragment();
// contextMenuElm = document.createElement('ul'),
// $contextMenuElm = $(contextMenuElm);
// contextMenuElm.setAttribute('id', 'context-menu');
// contextMenuElm.setAttribute('class', 'custom-context-menu');
// mountContextMenu($scope[menuItems], fragment);
// contextMenuElm.appendChild(fragment);
// document.body.appendChild(contextMenuElm);
// contextMenuWidth = $contextMenuElm.outerWidth(true);
// contextMenuHeight = $contextMenuElm.outerHeight(true);
// }
// function mountContextMenu(menuItems, fragment) {
// menuItems.forEach(function (_item) {
// var li = document.createElement('li');
// li.innerHTML = '<a>' + _item.label + ' <span class="right-caret"></span></a>';
// if (_item.action && _item.active) {
// li.addEventListener('click', function () {
// if (typeof $scope[_item.action] !== 'function') return false;
// $scope[_item.action]($attr, $scope);
// }, false);
// }
// if (_item.divider) {
// addContextMenuDivider(fragment);
// }
// if (!_item.active) li.setAttribute('class', 'disabled');
// if (_item.subItems) {
// addSubmenuItems(_item.subItems, li, _item.active)
// }
// fragment.appendChild(li);
// });
// }
// function addSubmenuItems(subItems, parentLi, parentIsActive) {
// parentLi.setAttribute('class', 'dropdown-submenu')
// if (!parentIsActive) parentLi.setAttribute('class', 'disabled')
// var ul = document.createElement('ul')
// ul.setAttribute('class', 'dropdown-menu')
// mountContextMenu(subItems, ul)
// parentLi.appendChild(ul)
// }
// function addContextMenuDivider(fragment) {
// var divider = document.createElement('li');
// divider.className = 'divider'
// fragment.appendChild(divider);
// }
// /**
// * Removing context menu DOM from page.
// * @return {[type]} [description]
// */
// function removeContextMenu() {
// $('.custom-context-menu').remove();
// }
// /**
// * Apply new css class for right positioning.
// * @param {[type]} cssClass [description]
// * @return {[type]} [description]
// */
// function updateCssClass(cssClass) {
// $contextMenuElm.attr('class', 'custom-context-menu');
// $contextMenuElm.addClass(cssClass);
// }
// /**
// * [setMenuPosition description]
// * @param {[type]} e [event arg for finding clicked position]
// * @param {[type]} leftPos [if menu has to be pointed to any pre-fixed element like caret or corner of box.]
// * @param {[type]} topPos [as above but top]
// */
// function setMenuPosition(e, leftPos, topPos) {
// contextMenuLeftPos = leftPos || e.pageX;
// contextMenuTopPos = topPos - $w.scrollTop() || e.pageY - $w.scrollTop();
// if (window.innerWidth - contextMenuLeftPos < contextMenuWidth && window.innerHeight - contextMenuTopPos > contextMenuHeight) {
// contextMenuLeftPos -= contextMenuWidth;
// updateCssClass(caretClass.topRight);
// } else if (window.innerWidth - contextMenuLeftPos > contextMenuWidth && window.innerHeight - contextMenuTopPos > contextMenuHeight) {
// updateCssClass(caretClass.topLeft);
// } else if (windowHeight - contextMenuTopPos < contextMenuHeight && windowWidth - contextMenuLeftPos > contextMenuWidth) {
// contextMenuTopPos -= contextMenuHeight;
// updateCssClass(caretClass.bottomLeft);
// } else if (windowHeight - contextMenuTopPos < contextMenuHeight && windowWidth - contextMenuLeftPos < contextMenuWidth) {
// contextMenuTopPos -= contextMenuHeight;
// contextMenuLeftPos -= contextMenuWidth;
// updateCssClass(caretClass.bottomRight);
// }
// $contextMenuElm.css({
// left: contextMenuLeftPos,
// top: contextMenuTopPos
// }).addClass('context-caret shown');
// }
// /**
// * CONTEXT MENU
// * @param {[type]} evt [description]
// * @return {[type]} [description]
// */
// $element.on('contextmenu.dirContextMenu', function (evt) {
// evt.preventDefault();
// removeContextMenu();
// createContextMenu();
// /**
// * If pointer node has specified, let the context menu
// * apprear right below to that elem no matter
// * where user clicks within that element.
// */
// if ($attr.pointerNode) {
// var $pointer = $(this).find($attr.pointerNode);
// contextMenuLeftPos = $pointer.offset().left + ($pointer.outerWidth(true) / 2);
// contextMenuTopPos = $pointer.offset().top + $pointer.outerHeight(true);
// setMenuPosition(evt, contextMenuLeftPos, contextMenuTopPos);
// } else {
// setMenuPosition(evt);
// }
// $w.on('keydown.dirContextMenu', function (e) {
// if (e.keyCode === 27) {
// removeContextMenu();
// }
// })
// }); //END (on)click.dirContextMenu
// $document.off('click.dirContextMenu').on('click.dirContextMenu', function (e) {
// if (!$(e.target).is('.custom-context-menu') && !$(e.target).parents().is('.custom-context-menu')) {
// removeContextMenu();
// }
// });
// $w.off('scroll.dirContextMenu').on('scroll.dirContextMenu', function () {
// removeContextMenu();
// });
// $w.on('resize.dirContextMenu', function () {
// windowWidth = window.innerWidth;
// windowHeight = window.innerHeight;
// removeContextMenu();
// });
// }
// }
// }]);
app.filter('toDate', ["moment", function (moment) {
return function (input) {
input = input || '';
if (moment(input) > moment('2000-01-01', 'YYYY-MM-DD', true)) {
return moment(input).toDate();
} else {
return null;
}
};
}]);
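// Usage sketch (hypothetical binding): {{ item.created | toDate | date:'short' }}
// Dates not after 2000-01-01 are treated as placeholders and yield null.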
app.filter('dispNA', function () {
return function (input) {
var prop;
input = input || '';
if (_typeof(input) === 'object') {
for (prop in input) {
if (input.hasOwnProperty(prop)) {
return input;
}
}
} else if (input !== '') {
return input;
}
return 'N/A';
};
});
app.filter('dispNil', function () {
return function (input) {
input = input || '';
if (input === '' || input === void 0 || input === null) {
return null;
}
if (typeof input === 'string' && (input.indexOf('1975-01-01') > -1 || input.indexOf('157737600000') > -1)) {
return null;
}
if (_typeof(input) === 'object') {
if (moment(input) > moment('2000-01-01', 'YYYY-MM-DD', true)) {
return input;
} else {
return null;
}
} else if (input !== '') {
return input;
}
return '';
};
});
app.filter('strReplace', function () {
return function (input, from, to) {
input = input || '';
from = from || '';
to = to || '';
return input.replace(new RegExp(from, 'g'), to);
};
});
app.filter('range', function () {
return function (input, total) {
var i;
total = parseInt(total);
i = 0;
while (i < total) {
input.push(i);
i++;
}
return input;
};
});
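// Usage sketch (hypothetical markup): <li ng-repeat="i in [] | range:5">{{ i }}</li>
// repeats for i = 0..4; note the filter mutates and returns the array it is given.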
app.hex_to_ascii = function (str1) {
var hex, n, str;
hex = str1.toString();
str = '';
n = 0;
while (n < hex.length) {
str += String.fromCharCode(parseInt(hex.substr(n, 2), 16));
n += 2;
}
return str;
};
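// Example: app.hex_to_ascii('68656c6c6f') decodes two hex digits per character and returns 'hello'.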
/*
Custom functions
*/
angular.isEmpty = function (val) {
return angular.isUndefined(val) || val === null || val === '';
};
angular.isDateValid = function (date) {
if (date === void 0 || date === null) {
return false;
}
if (!moment(date).isValid()) {
return;
}
if (moment(date) > moment('2000-01-01', 'YYYY-MM-DD', true)) {
return true;
}
return false;
};
angular.extractPath = function (path) {
var ext, withoutExt;
ext = path.split('.').length > 1 ? path.substring(path.lastIndexOf('.'), path.length) : '';
withoutExt = path.split('.').slice(0, -1).join('.');
return {
org: path,
nameWithoutExt: withoutExt,
extension: ext
};
};
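// Example: angular.extractPath('docs/report.pdf') returns
// { org: 'docs/report.pdf', nameWithoutExt: 'docs/report', extension: '.pdf' }.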
if (!String.prototype.format) {
String.prototype.format = function () {
var args;
args = arguments;
return this.replace(/{(\d+)}/g, function (match, number) {
if (typeof args[number] !== 'undefined') {
return args[number];
} else {
return match;
}
});
};
}
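// Example: '{0} has {1} new messages'.format('Ann', 3) returns 'Ann has 3 new messages';
// placeholders without a matching argument are left untouched.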
//# sourceMappingURL=../../maps/app/modules/app.js.map
// app.js
'use strict';
// IDs constants
const GAME_CONTAINER = 'game-container';
const WELCOME_SCREEN = 'welcome-screen';
const START_GAME = 'start-game';
const FINISH_GAME = 'finish-game';
const START_BTN = 'start-btn';
// Classes constants
const CONTAINER = 'container';
const DECK = 'deck';
const CARD = 'card';
const OPEN_CARD = 'open';
const SHOW_CARD = 'show';
const MATCH_CARD = 'match';
const NO_MATCH_CARD = 'no-match';
const FONT_AWESOME = 'fa';
const STARS = 'stars';
const EMPTY_STAR = 'fa-star-o';
const FULL_STAR = 'fa-star';
const CHECK_ICON = 'fa-check';
const CHECK_MARK = 'check-mark';
const GREEN_BTN = 'green-btn';
const TIMER = 'timer';
const MOVES_COUNTER = 'moves-counter';
const RESTART = 'restart';
const CARD_TYPES = [
'fa-anchor',
'fa-anchor',
'fa-bicycle',
'fa-bicycle',
'fa-bolt',
'fa-bolt',
'fa-bomb',
'fa-bomb',
'fa-cube',
'fa-cube',
'fa-diamond',
'fa-diamond',
'fa-leaf',
'fa-leaf',
'fa-paper-plane-o',
'fa-paper-plane-o'
];
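// 16 cards in total: 8 Font Awesome icon types, each listed exactly twice to form the pairs.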
// Global variables
let pairOfCards = [];
let openedCardsCounter = 0;
let timerInterval = null;
// Timeout constants
const HIDING_CARDS_DELAY = 2000;
const NO_MATCH_DELAY = 150;
const HIDING_NO_MATCH_DELAY = 600;
const FINISH_GAME_DELAY = 1000;
/*
*
* Helper functions
*
*/
function getTimer() {
performance.measure('timer', 'start-timer', 'now-timer');
const measures = performance.getEntriesByName('timer');
const duration = measures[0].duration;
performance.clearMeasures('timer');
return new Date(duration).toISOString().slice(14, -5);
}
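// getTimer relies on two User Timing marks: 'start-timer' (set once when play begins) and
// 'now-timer' (refreshed every second); the measure between them is rendered as MM:SS.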
function updateScorePanel() {
const moves = document.querySelector(`.${MOVES_COUNTER}`);
let movesNumber = Number(moves.innerText);
moves.innerText = ++movesNumber;
const stars = document.querySelectorAll(`.${STARS}>li`);
// Based on the number of moves replace the FULL_STAR with EMPTY_STAR
switch (movesNumber) {
case 10:
stars[2].firstElementChild.classList.replace(FULL_STAR, EMPTY_STAR);
break;
case 20:
stars[1].firstElementChild.classList.replace(FULL_STAR, EMPTY_STAR);
break;
case 30:
stars[0].firstElementChild.classList.replace(FULL_STAR, EMPTY_STAR);
break;
}
}
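// Rating sketch: the player keeps 3 stars below 10 moves, then loses one star each time
// the move counter reaches 10, 20 and 30.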
function getScores() {
const moves = document.querySelector(`.${MOVES_COUNTER}`).innerText;
const stars = document.querySelector(`.${STARS}`);
const duration = getTimer();
return [moves, stars, duration];
}
function resetStars() {
const stars = document.querySelectorAll(`.${STARS}>li`);
for (let star of stars) {
star.firstElementChild.classList.replace(EMPTY_STAR, FULL_STAR);
}
}
function resetMoves() {
document.querySelector(`.${MOVES_COUNTER}`).textContent = 0;
}
function hideCards() {
const cards = document.querySelectorAll(`.${CARD}`);
// Loop through the cards to remove each of open, show and match classes
for (let card of cards) {
card.classList.toggle(OPEN_CARD, false);
card.classList.toggle(SHOW_CARD, false);
card.classList.toggle(MATCH_CARD, false);
}
}
function showCards() {
const cards = document.querySelectorAll(`.${CARD}`);
// Loop through the cards to add each of open and show classes
for (let card of cards) {
card.classList.toggle(OPEN_CARD, true);
card.classList.toggle(SHOW_CARD, true);
}
}
function placeCards(shuffledTypes) {
const cards = document.querySelector(`.${DECK}`).children;
// Loop through the cards to replace the old type with the shuffled one
for (let i = 0; i < cards.length; i++) {
const oldType = cards[i].firstElementChild.classList[1];
cards[i].firstElementChild.classList.replace(oldType, shuffledTypes[i]);
}
}
function createCardsElements(cardTypes) {
const deck = document.createElement('ul');
deck.classList.add(DECK);
// Loop through card types to create cards elements
for (let cardType of cardTypes) {
const cardElement = document.createElement('li');
const iElement = document.createElement('i');
cardElement.classList.add(CARD, OPEN_CARD, SHOW_CARD);
iElement.classList.add(FONT_AWESOME);
iElement.classList.add(cardType);
cardElement.appendChild(iElement);
deck.appendChild(cardElement);
}
const startGame = document.querySelector(`#${START_GAME}`);
startGame.appendChild(deck);
startGame.style.display = 'flex';
}
function matchPair(pairOfCards) {
const [firstCard, secondCard] = pairOfCards;
const typeOfFirst = firstCard.firstElementChild.classList[1];
const typeOfSecond = secondCard.firstElementChild.classList[1];
// Check if the pair of cards match or else no match
if (typeOfFirst === typeOfSecond) {
firstCard.classList.add(MATCH_CARD);
secondCard.classList.add(MATCH_CARD);
openedCardsCounter += 2;
// If only two cards remain, show them and finish the game
if (openedCardsCounter === CARD_TYPES.length - 2) {
// Remove listeners from deck and restart
const deck = document.querySelector(`.${DECK}`);
const restartBtn = document.querySelector(`.${RESTART}`);
deck.removeEventListener('click', cardsListener);
restartBtn.removeEventListener('click', restartListener);
showCards();
setTimeout(finishGame, FINISH_GAME_DELAY);
openedCardsCounter = 0;
}
} else {
setTimeout(() => {
firstCard.classList.add(NO_MATCH_CARD);
secondCard.classList.add(NO_MATCH_CARD);
}, NO_MATCH_DELAY);
setTimeout(() => {
firstCard.classList.remove(OPEN_CARD, SHOW_CARD, NO_MATCH_CARD);
secondCard.classList.remove(OPEN_CARD, SHOW_CARD, NO_MATCH_CARD);
}, HIDING_NO_MATCH_DELAY);
}
}
// Shuffle function from http://stackoverflow.com/a/2450976
function shuffle(array) {
var currentIndex = array.length,
temporaryValue,
randomIndex;
while (currentIndex !== 0) {
randomIndex = Math.floor(Math.random() * currentIndex);
currentIndex -= 1;
temporaryValue = array[currentIndex];
array[currentIndex] = array[randomIndex];
array[randomIndex] = temporaryValue;
}
return array;
}
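// This is the Fisher-Yates shuffle: each pass swaps the last unshuffled slot with a
// uniformly random slot at or before it, yielding an unbiased in-place permutation in O(n).
// e.g. shuffle(['a', 'b', 'c']) returns the same array, reordered.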
/*
*
* Event listners functions
*
*/
function startListener(event) {
document.querySelector(`#${WELCOME_SCREEN}`).style.display = 'none';
event.currentTarget.removeEventListener('click', startListener);
startGame();
}
function cardsListener(event) {
const { target } = event;
const { tagName, classList } = target;
// If the card is not opened yet then add OPEN_CARD and SHOW_CARD classes
if (tagName.toLowerCase() === 'li' && !classList.contains(OPEN_CARD)) {
classList.add(OPEN_CARD, SHOW_CARD);
pairOfCards.push(target);
// If a pair of cards are opened then match them
if (pairOfCards.length === 2) {
matchPair(pairOfCards);
pairOfCards = [];
updateScorePanel();
}
}
}
function restartListener() {
// Reset opened cards counter
openedCardsCounter = 0;
// Reset Timer and clear marks
clearInterval(timerInterval);
document.querySelector(`.${TIMER}`).innerText = '00:00';
performance.clearMarks();
// Reset stars and moves counter
resetStars();
resetMoves();
// Hide cards and then shuffle the types and place them
hideCards();
const shuffledTypes = shuffle(CARD_TYPES);
placeCards(shuffledTypes);
// Show shuffled cards and start playing
showCards();
setTimeout(addClickListenerOnCards, HIDING_CARDS_DELAY);
}
function playAgainListener(event) {
document.querySelector(`#${FINISH_GAME}`).remove();
document.querySelector(`#${START_GAME}`).style.display = 'flex';
addClickListenerOnRestart();
restartListener();
event.currentTarget.removeEventListener('click', playAgainListener);
}
/*
*
* Add listeners
*
*/
function addClickListenerOnCards() {
performance.mark('start-timer');
timerInterval = setInterval(function() {
performance.mark('now-timer');
const timer = document.querySelector(`.${TIMER}`);
timer.innerText = getTimer();
}, 1000);
hideCards();
const deck = document.querySelector(`.${DECK}`);
deck.addEventListener('click', cardsListener);
}
function addClickListenerOnRestart() {
const restart = document.querySelector(`.${RESTART}`);
restart.addEventListener('click', restartListener);
}
/*
*
* The main functions welcomeScreen, startGame and finishGame
*
*/
function welcomeScreen() {
const startBtn = document.querySelector(`#${START_BTN}`);
startBtn.addEventListener('click', startListener);
}
function startGame() {
const shuffledTypes = shuffle(CARD_TYPES);
createCardsElements(shuffledTypes);
setTimeout(addClickListenerOnCards, HIDING_CARDS_DELAY);
addClickListenerOnRestart();
}
function finishGame() {
// Hide the start-game section
document.querySelector(`#${START_GAME}`).style.display = 'none';
// Create finish-game container
const finishContainer = document.createElement('div');
finishContainer.classList.add(CONTAINER);
finishContainer.id = FINISH_GAME;
// Create congratulations elements
const checkIcon = document.createElement('i');
checkIcon.classList.add(FONT_AWESOME, CHECK_ICON, CHECK_MARK);
const congratsHeading = document.createElement('h2');
congratsHeading.innerText = 'Congratulations, you finished the game!';
// Create elements for score details
const [moves, stars, duration] = getScores();
const scoresDetails = document.createElement('p');
scoresDetails.innerHTML = `
Moves: ${moves}<br>
${stars.outerHTML}<br>
Duration: ${duration}`;
// Create restart button
const playAgain = document.createElement('button');
playAgain.classList.add(GREEN_BTN);
playAgain.innerText = 'Play again';
// Append all elements to the finish-game section
finishContainer.appendChild(checkIcon);
finishContainer.appendChild(congratsHeading);
finishContainer.appendChild(scoresDetails);
finishContainer.appendChild(playAgain);
// Append finish-game to game-container
document.querySelector(`#${GAME_CONTAINER}`).appendChild(finishContainer);
playAgain.addEventListener('click', playAgainListener);
}
// Entry point: show the welcome screen first.
welcomeScreen();
app.js | 'use strict';
// IDs constants
const GAME_CONTAINER = 'game-container';
const WELCOME_SCREEN = 'welcome-screen';
const START_GAME = 'start-game';
const FINISH_GAME = 'finish-game';
const START_BTN = 'start-btn';
// Classes constants
const CONTAINER = 'container';
const DECK = 'deck';
const CARD = 'card';
const OPEN_CARD = 'open';
const SHOW_CARD = 'show';
const MATCH_CARD = 'match';
const NO_MATCH_CARD = 'no-match';
const FONT_AWESOME = 'fa';
const STARS = 'stars';
const EMPTY_STAR = 'fa-star-o';
const FULL_STAR = 'fa-star';
const CHECK_ICON = 'fa-check';
const CHECK_MARK = 'check-mark';
const GREEN_BTN = 'green-btn';
const TIMER = 'timer';
const MOVES_COUNTER = 'moves-counter';
const RESTART = 'restart';
const CARD_TYPES = [
'fa-anchor',
'fa-anchor',
'fa-bicycle',
'fa-bicycle',
'fa-bolt',
'fa-bolt',
'fa-bomb',
'fa-bomb',
'fa-cube',
'fa-cube',
'fa-diamond',
'fa-diamond',
'fa-leaf',
'fa-leaf',
'fa-paper-plane-o',
'fa-paper-plane-o'
];
// Global variables
let pairOfCards = [];
let openedCardsCounter = 0;
let timerInterval = null;
// Timeout constants
const HIDING_CARDS_DELAY = 2000;
const NO_MATCH_DELAY = 150;
const HIDING_NO_MATCH_DELAY = 600;
const FINISH_GAME_DELAY = 1000;
/*
*
* Helper functions
*
*/
function getTimer() {
performance.measure('timer', 'start-timer', 'now-timer');
const measures = performance.getEntriesByName('timer');
const duration = measures[0].duration;
performance.clearMeasures('timer');
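// new Date(duration) interprets the elapsed milliseconds as a Unix epoch offset, so
// toISOString() yields '1970-01-01T00:MM:SS.sssZ' and slice(14, -5) keeps the 'MM:SS' part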
return new Date(duration).toISOString().slice(14, -5);
}
function updateScorePanel() {
const moves = document.querySelector(`.${MOVES_COUNTER}`);
let movesNumber = Number(moves.innerText);
moves.innerText = ++movesNumber;
const stars = document.querySelectorAll(`.${STARS}>li`);
// Based on the number of moves, replace a FULL_STAR with an EMPTY_STAR
switch (movesNumber) {
case 10:
stars[2].firstElementChild.classList.replace(FULL_STAR, EMPTY_STAR);
break;
case 20:
stars[1].firstElementChild.classList.replace(FULL_STAR, EMPTY_STAR);
break;
case 30:
stars[0].firstElementChild.classList.replace(FULL_STAR, EMPTY_STAR);
break;
}
}
function getScores() {
const moves = document.querySelector(`.${MOVES_COUNTER}`).innerText;
const stars = document.querySelector(`.${STARS}`);
const duration = getTimer();
return [moves, stars, duration];
}
function resetStars() {
const stars = document.querySelectorAll(`.${STARS}>li`);
for (let star of stars) {
star.firstElementChild.classList.replace(EMPTY_STAR, FULL_STAR);
}
}
function resetMoves() {
document.querySelector(`.${MOVES_COUNTER}`).textContent = 0;
}
function hideCards() {
const cards = document.querySelectorAll(`.${CARD}`);
// Loop through the cards and remove the open, show, and match classes
for (let card of cards) {
card.classList.toggle(OPEN_CARD, false);
card.classList.toggle(SHOW_CARD, false);
card.classList.toggle(MATCH_CARD, false);
}
}
function showCards() {
const cards = document.querySelectorAll(`.${CARD}`);
// Loop through the cards and add the open and show classes
for (let card of cards) {
card.classList.toggle(OPEN_CARD, true);
card.classList.toggle(SHOW_CARD, true);
}
}
function placeCards(shuffledTypes) {
const cards = document.querySelector(`.${DECK}`).children;
// Loop through the cards to replace the old type with the shuffled one
for (let i = 0; i < cards.length; i++) {
const oldType = cards[i].firstElementChild.classList[1];
cards[i].firstElementChild.classList.replace(oldType, shuffledTypes[i]);
}
}
function createCardsElements(cardTypes) {
const deck = document.createElement('ul');
deck.classList.add('deck');
// Loop through card types to create cards elements
for (let cardType of cardTypes) {
const cardElement = document.createElement('li');
const iElement = document.createElement('i');
cardElement.classList.add(CARD, OPEN_CARD, SHOW_CARD);
iElement.classList.add(FONT_AWESOME);
iElement.classList.add(cardType);
cardElement.appendChild(iElement);
deck.appendChild(cardElement);
}
const startGame = document.querySelector(`#${START_GAME}`);
startGame.appendChild(deck);
startGame.style.display = 'flex';
}
function matchPair(pairOfCards) |
// Shuffle function from http://stackoverflow.com/a/2450976
function shuffle(array) {
var currentIndex = array.length,
temporaryValue,
randomIndex;
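// Fisher-Yates shuffle: walk backwards through the array, swapping each position with a randomly picked element from the unshuffled part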
while (currentIndex !== 0) {
randomIndex = Math.floor(Math.random() * currentIndex);
currentIndex -= 1;
temporaryValue = array[currentIndex];
array[currentIndex] = array[randomIndex];
array[randomIndex] = temporaryValue;
}
return array;
}
/*
*
* Event listener functions
*
*/
function startListener(event) {
document.querySelector(`#${WELCOME_SCREEN}`).style.display = 'none';
event.currentTarget.removeEventListener('click', startListener);
startGame();
}
function cardsListener(event) {
const { target } = event;
const { tagName, classList } = target;
// If the card is not opened yet, add the OPEN_CARD and SHOW_CARD classes
if (tagName.toLowerCase() === 'li' && !classList.contains(OPEN_CARD)) {
classList.add(OPEN_CARD, SHOW_CARD);
pairOfCards.push(target);
// If a pair of cards is open, check them for a match
if (pairOfCards.length === 2) {
matchPair(pairOfCards);
pairOfCards = [];
updateScorePanel();
}
}
}
function restartListener() {
// Reset opened cards counter
openedCardsCounter = 0;
// Reset Timer and clear marks
clearInterval(timerInterval);
document.querySelector(`.${TIMER}`).innerText = '00:00';
performance.clearMarks();
// Reset stars and moves counter
resetStars();
resetMoves();
// Hide cards and then shuffle the types and place them
hideCards();
const shuffledTypes = shuffle(CARD_TYPES);
placeCards(shuffledTypes);
// Show shuffled cards and start playing
showCards();
setTimeout(addClickListenerOnCards, HIDING_CARDS_DELAY);
}
function playAgainListener(event) {
document.querySelector(`#${FINISH_GAME}`).remove();
document.querySelector(`#${START_GAME}`).style.display = 'flex';
addClickListenerOnRestart();
restartListener();
event.currentTarget.removeEventListener('click', playAgainListener);
}
/*
*
* Add listeners
*
*/
function addClickListenerOnCards() {
performance.mark('start-timer');
timerInterval = setInterval(function() {
performance.mark('now-timer');
const timer = document.querySelector(`.${TIMER}`);
timer.innerText = getTimer();
}, 1000);
hideCards();
const deck = document.querySelector(`.${DECK}`);
deck.addEventListener('click', cardsListener);
}
function addClickListenerOnRestart() {
const restart = document.querySelector(`.${RESTART}`);
restart.addEventListener('click', restartListener);
}
/*
*
* The main functions welcomeScreen, startGame and finishGame
*
*/
function welcomeScreen() {
const startBtn = document.querySelector(`#${START_BTN}`);
startBtn.addEventListener('click', startListener);
}
function startGame() {
const shuffledTypes = shuffle(CARD_TYPES);
createCardsElements(shuffledTypes);
setTimeout(addClickListenerOnCards, HIDING_CARDS_DELAY);
addClickListenerOnRestart();
}
function finishGame() {
// Hide the start-game section
document.querySelector(`#${START_GAME}`).style.display = 'none';
// Create finish-game container
const finishContainer = document.createElement('div');
finishContainer.classList.add(CONTAINER);
finishContainer.id = 'finish-game';
// Create congratulations elements
const checkIcon = document.createElement('i');
checkIcon.classList.add(FONT_AWESOME, CHECK_ICON, CHECK_MARK);
const congratsHeading = document.createElement('h2');
congratsHeading.innerText = 'Congratulations, you finished the game!';
// Create elements for score details
const [moves, stars, duration] = getScores();
const scoresDetails = document.createElement('p');
scoresDetails.innerHTML = `
Moves: ${moves}<br>
${stars.outerHTML}<br>
Duration: ${duration}`;
// Create restart button
const playAgain = document.createElement('button');
playAgain.classList.add(GREEN_BTN);
playAgain.innerText = 'Play again';
// Append all elements to the finish-game section
finishContainer.appendChild(checkIcon);
finishContainer.appendChild(congratsHeading);
finishContainer.appendChild(scoresDetails);
finishContainer.appendChild(playAgain);
// Append finish-game to game-container
document.querySelector(`#${GAME_CONTAINER}`).appendChild(finishContainer);
playAgain.addEventListener('click', playAgainListener);
}
// Call the welcomeScreen function
// The first call is here
welcomeScreen();
| {
const [firstCard, secondCard] = pairOfCards;
const typeOfFirst = firstCard.firstElementChild.classList[1];
const typeOfSecond = secondCard.firstElementChild.classList[1];
// Check if the pair of cards match or else no match
if (typeOfFirst === typeOfSecond) {
firstCard.classList.add(MATCH_CARD);
secondCard.classList.add(MATCH_CARD);
openedCardsCounter += 2;
// If only two cards remain unmatched, reveal them and finish the game
if (openedCardsCounter === CARD_TYPES.length - 2) {
// Remove listeners from deck and restart
const deck = document.querySelector(`.${DECK}`);
const restartBtn = document.querySelector(`.${RESTART}`);
deck.removeEventListener('click', cardsListener);
restartBtn.removeEventListener('click', restartListener);
showCards();
setTimeout(finishGame, FINISH_GAME_DELAY);
openedCardsCounter = 0;
}
} else {
setTimeout(() => {
firstCard.classList.add(NO_MATCH_CARD);
secondCard.classList.add(NO_MATCH_CARD);
}, NO_MATCH_DELAY);
setTimeout(() => {
firstCard.classList.remove(OPEN_CARD, SHOW_CARD, NO_MATCH_CARD);
secondCard.classList.remove(OPEN_CARD, SHOW_CARD, NO_MATCH_CARD);
}, HIDING_NO_MATCH_DELAY);
}
} | identifier_body |
app.js | 'use strict';
// IDs constants
const GAME_CONTAINER = 'game-container';
const WELCOME_SCREEN = 'welcome-screen';
const START_GAME = 'start-game';
const FINISH_GAME = 'finish-game';
const START_BTN = 'start-btn';
// Classes constants
const CONTAINER = 'container';
const DECK = 'deck';
const CARD = 'card';
const OPEN_CARD = 'open';
const SHOW_CARD = 'show';
const MATCH_CARD = 'match';
const NO_MATCH_CARD = 'no-match';
const FONT_AWESOME = 'fa';
const STARS = 'stars';
const EMPTY_STAR = 'fa-star-o';
const FULL_STAR = 'fa-star';
const CHECK_ICON = 'fa-check';
const CHECK_MARK = 'check-mark';
const GREEN_BTN = 'green-btn';
const TIMER = 'timer';
const MOVES_COUNTER = 'moves-counter';
const RESTART = 'restart';
const CARD_TYPES = [
'fa-anchor',
'fa-anchor',
'fa-bicycle',
'fa-bicycle',
'fa-bolt',
'fa-bolt',
'fa-bomb',
'fa-bomb',
'fa-cube',
'fa-cube',
'fa-diamond',
'fa-diamond',
'fa-leaf',
'fa-leaf',
'fa-paper-plane-o',
'fa-paper-plane-o'
];
// Global variables
let pairOfCards = [];
let openedCardsCounter = 0;
let timerInterval = null;
// Timeout constants
const HIDING_CARDS_DELAY = 2000;
const NO_MATCH_DELAY = 150;
const HIDING_NO_MATCH_DELAY = 600;
const FINISH_GAME_DELAY = 1000;
/*
*
* Helper functions
*
*/
function getTimer() {
performance.measure('timer', 'start-timer', 'now-timer');
const measures = performance.getEntriesByName('timer');
const duration = measures[0].duration;
performance.clearMeasures('timer');
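// new Date(duration) interprets the elapsed milliseconds as a Unix epoch offset, so
// toISOString() yields '1970-01-01T00:MM:SS.sssZ' and slice(14, -5) keeps the 'MM:SS' part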
return new Date(duration).toISOString().slice(14, -5);
}
function | () {
const moves = document.querySelector(`.${MOVES_COUNTER}`);
let movesNumber = Number(moves.innerText);
moves.innerText = ++movesNumber;
const stars = document.querySelectorAll(`.${STARS}>li`);
// Based on the number of moves, replace a FULL_STAR with an EMPTY_STAR
switch (movesNumber) {
case 10:
stars[2].firstElementChild.classList.replace(FULL_STAR, EMPTY_STAR);
break;
case 20:
stars[1].firstElementChild.classList.replace(FULL_STAR, EMPTY_STAR);
break;
case 30:
stars[0].firstElementChild.classList.replace(FULL_STAR, EMPTY_STAR);
break;
}
}
function getScores() {
const moves = document.querySelector(`.${MOVES_COUNTER}`).innerText;
const stars = document.querySelector(`.${STARS}`);
const duration = getTimer();
return [moves, stars, duration];
}
function resetStars() {
const stars = document.querySelectorAll(`.${STARS}>li`);
for (let star of stars) {
star.firstElementChild.classList.replace(EMPTY_STAR, FULL_STAR);
}
}
function resetMoves() {
document.querySelector(`.${MOVES_COUNTER}`).textContent = 0;
}
function hideCards() {
const cards = document.querySelectorAll(`.${CARD}`);
// Loop through the cards and remove the open, show, and match classes
for (let card of cards) {
card.classList.toggle(OPEN_CARD, false);
card.classList.toggle(SHOW_CARD, false);
card.classList.toggle(MATCH_CARD, false);
}
}
function showCards() {
const cards = document.querySelectorAll(`.${CARD}`);
// Loop through the cards and add the open and show classes
for (let card of cards) {
card.classList.toggle(OPEN_CARD, true);
card.classList.toggle(SHOW_CARD, true);
}
}
function placeCards(shuffledTypes) {
const cards = document.querySelector(`.${DECK}`).children;
// Loop through the cards to replace the old type with the shuffled one
for (let i = 0; i < cards.length; i++) {
const oldType = cards[i].firstElementChild.classList[1];
cards[i].firstElementChild.classList.replace(oldType, shuffledTypes[i]);
}
}
function createCardsElements(cardTypes) {
const deck = document.createElement('ul');
deck.classList.add('deck');
// Loop through card types to create cards elements
for (let cardType of cardTypes) {
const cardElement = document.createElement('li');
const iElement = document.createElement('i');
cardElement.classList.add(CARD, OPEN_CARD, SHOW_CARD);
iElement.classList.add(FONT_AWESOME);
iElement.classList.add(cardType);
cardElement.appendChild(iElement);
deck.appendChild(cardElement);
}
const startGame = document.querySelector(`#${START_GAME}`);
startGame.appendChild(deck);
startGame.style.display = 'flex';
}
function matchPair(pairOfCards) {
const [firstCard, secondCard] = pairOfCards;
const typeOfFirst = firstCard.firstElementChild.classList[1];
const typeOfSecond = secondCard.firstElementChild.classList[1];
// Check if the pair of cards match or else no match
if (typeOfFirst === typeOfSecond) {
firstCard.classList.add(MATCH_CARD);
secondCard.classList.add(MATCH_CARD);
openedCardsCounter += 2;
// If only two cards remain unmatched, reveal them and finish the game
if (openedCardsCounter === CARD_TYPES.length - 2) {
// Remove listeners from deck and restart
const deck = document.querySelector(`.${DECK}`);
const restartBtn = document.querySelector(`.${RESTART}`);
deck.removeEventListener('click', cardsListener);
restartBtn.removeEventListener('click', restartListener);
showCards();
setTimeout(finishGame, FINISH_GAME_DELAY);
openedCardsCounter = 0;
}
} else {
setTimeout(() => {
firstCard.classList.add(NO_MATCH_CARD);
secondCard.classList.add(NO_MATCH_CARD);
}, NO_MATCH_DELAY);
setTimeout(() => {
firstCard.classList.remove(OPEN_CARD, SHOW_CARD, NO_MATCH_CARD);
secondCard.classList.remove(OPEN_CARD, SHOW_CARD, NO_MATCH_CARD);
}, HIDING_NO_MATCH_DELAY);
}
}
// Shuffle function from http://stackoverflow.com/a/2450976
function shuffle(array) {
var currentIndex = array.length,
temporaryValue,
randomIndex;
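// Fisher-Yates shuffle: walk backwards through the array, swapping each position with a randomly picked element from the unshuffled part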
while (currentIndex !== 0) {
randomIndex = Math.floor(Math.random() * currentIndex);
currentIndex -= 1;
temporaryValue = array[currentIndex];
array[currentIndex] = array[randomIndex];
array[randomIndex] = temporaryValue;
}
return array;
}
/*
*
* Event listener functions
*
*/
function startListener(event) {
document.querySelector(`#${WELCOME_SCREEN}`).style.display = 'none';
event.currentTarget.removeEventListener('click', startListener);
startGame();
}
function cardsListener(event) {
const { target } = event;
const { tagName, classList } = target;
// If the card is not opened yet, add the OPEN_CARD and SHOW_CARD classes
if (tagName.toLowerCase() === 'li' && !classList.contains(OPEN_CARD)) {
classList.add(OPEN_CARD, SHOW_CARD);
pairOfCards.push(target);
// If a pair of cards is open, check them for a match
if (pairOfCards.length === 2) {
matchPair(pairOfCards);
pairOfCards = [];
updateScorePanel();
}
}
}
function restartListener() {
// Reset opened cards counter
openedCardsCounter = 0;
// Reset Timer and clear marks
clearInterval(timerInterval);
document.querySelector(`.${TIMER}`).innerText = '00:00';
performance.clearMarks();
// Reset stars and moves counter
resetStars();
resetMoves();
// Hide cards and then shuffle the types and place them
hideCards();
const shuffledTypes = shuffle(CARD_TYPES);
placeCards(shuffledTypes);
// Show shuffled cards and start playing
showCards();
setTimeout(addClickListenerOnCards, HIDING_CARDS_DELAY);
}
function playAgainListener(event) {
document.querySelector(`#${FINISH_GAME}`).remove();
document.querySelector(`#${START_GAME}`).style.display = 'flex';
addClickListenerOnRestart();
restartListener();
event.currentTarget.removeEventListener('click', playAgainListener);
}
/*
*
* Add listeners
*
*/
function addClickListenerOnCards() {
performance.mark('start-timer');
timerInterval = setInterval(function() {
performance.mark('now-timer');
const timer = document.querySelector(`.${TIMER}`);
timer.innerText = getTimer();
}, 1000);
hideCards();
const deck = document.querySelector(`.${DECK}`);
deck.addEventListener('click', cardsListener);
}
function addClickListenerOnRestart() {
const restart = document.querySelector(`.${RESTART}`);
restart.addEventListener('click', restartListener);
}
/*
*
* The main functions welcomeScreen, startGame and finishGame
*
*/
function welcomeScreen() {
const startBtn = document.querySelector(`#${START_BTN}`);
startBtn.addEventListener('click', startListener);
}
function startGame() {
const shuffledTypes = shuffle(CARD_TYPES);
createCardsElements(shuffledTypes);
setTimeout(addClickListenerOnCards, HIDING_CARDS_DELAY);
addClickListenerOnRestart();
}
function finishGame() {
// Hide the start-game section
document.querySelector(`#${START_GAME}`).style.display = 'none';
// Create finish-game container
const finishContainer = document.createElement('div');
finishContainer.classList.add(CONTAINER);
finishContainer.id = 'finish-game';
// Create congratulations elements
const checkIcon = document.createElement('i');
checkIcon.classList.add(FONT_AWESOME, CHECK_ICON, CHECK_MARK);
const congratsHeading = document.createElement('h2');
congratsHeading.innerText = 'Congratulations, you finished the game!';
// Create elements for score details
const [moves, stars, duration] = getScores();
const scoresDetails = document.createElement('p');
scoresDetails.innerHTML = `
Moves: ${moves}<br>
${stars.outerHTML}<br>
Duration: ${duration}`;
// Create restart button
const playAgain = document.createElement('button');
playAgain.classList.add(GREEN_BTN);
playAgain.innerText = 'Play again';
// Append all elements to the finish-game section
finishContainer.appendChild(checkIcon);
finishContainer.appendChild(congratsHeading);
finishContainer.appendChild(scoresDetails);
finishContainer.appendChild(playAgain);
// Append finish-game to game-container
document.querySelector(`#${GAME_CONTAINER}`).appendChild(finishContainer);
playAgain.addEventListener('click', playAgainListener);
}
// Call the welcomeScreen function
// The first call is here
welcomeScreen();
| updateScorePanel | identifier_name |
app.js | 'use strict';
// IDs constants
const GAME_CONTAINER = 'game-container';
const WELCOME_SCREEN = 'welcome-screen';
const START_GAME = 'start-game';
const FINISH_GAME = 'finish-game';
const START_BTN = 'start-btn';
// Classes constants
const CONTAINER = 'container';
const DECK = 'deck';
const CARD = 'card';
const OPEN_CARD = 'open';
const SHOW_CARD = 'show';
const MATCH_CARD = 'match';
const NO_MATCH_CARD = 'no-match';
const FONT_AWESOME = 'fa';
const STARS = 'stars';
const EMPTY_STAR = 'fa-star-o';
const FULL_STAR = 'fa-star';
const CHECK_ICON = 'fa-check';
const CHECK_MARK = 'check-mark';
const GREEN_BTN = 'green-btn';
const TIMER = 'timer';
const MOVES_COUNTER = 'moves-counter';
const RESTART = 'restart';
const CARD_TYPES = [
'fa-anchor',
'fa-anchor',
'fa-bicycle',
'fa-bicycle',
'fa-bolt',
'fa-bolt',
'fa-bomb',
'fa-bomb',
'fa-cube',
'fa-cube',
'fa-diamond',
'fa-diamond',
'fa-leaf',
'fa-leaf',
'fa-paper-plane-o',
'fa-paper-plane-o'
];
// Global variables
let pairOfCards = [];
let openedCardsCounter = 0;
let timerInterval = null;
// Timeout constants
const HIDING_CARDS_DELAY = 2000;
const NO_MATCH_DELAY = 150;
const HIDING_NO_MATCH_DELAY = 600;
const FINISH_GAME_DELAY = 1000;
/*
*
* Helper functions
*
*/
function getTimer() {
performance.measure('timer', 'start-timer', 'now-timer');
const measures = performance.getEntriesByName('timer');
const duration = measures[0].duration;
performance.clearMeasures('timer');
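// new Date(duration) interprets the elapsed milliseconds as a Unix epoch offset, so
// toISOString() yields '1970-01-01T00:MM:SS.sssZ' and slice(14, -5) keeps the 'MM:SS' part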
return new Date(duration).toISOString().slice(14, -5);
}
function updateScorePanel() {
const moves = document.querySelector(`.${MOVES_COUNTER}`);
let movesNumber = Number(moves.innerText);
moves.innerText = ++movesNumber;
const stars = document.querySelectorAll(`.${STARS}>li`);
// Based on the number of moves, replace a FULL_STAR with an EMPTY_STAR
switch (movesNumber) {
case 10:
stars[2].firstElementChild.classList.replace(FULL_STAR, EMPTY_STAR);
break;
case 20:
stars[1].firstElementChild.classList.replace(FULL_STAR, EMPTY_STAR);
break;
case 30:
stars[0].firstElementChild.classList.replace(FULL_STAR, EMPTY_STAR);
break;
}
}
function getScores() {
const moves = document.querySelector(`.${MOVES_COUNTER}`).innerText;
const stars = document.querySelector(`.${STARS}`);
const duration = getTimer();
return [moves, stars, duration];
}
function resetStars() {
const stars = document.querySelectorAll(`.${STARS}>li`);
for (let star of stars) {
star.firstElementChild.classList.replace(EMPTY_STAR, FULL_STAR);
}
}
function resetMoves() {
document.querySelector(`.${MOVES_COUNTER}`).textContent = 0;
}
function hideCards() {
const cards = document.querySelectorAll(`.${CARD}`);
// Loop through the cards and remove the open, show, and match classes
for (let card of cards) {
card.classList.toggle(OPEN_CARD, false);
card.classList.toggle(SHOW_CARD, false);
card.classList.toggle(MATCH_CARD, false);
}
}
function showCards() {
const cards = document.querySelectorAll(`.${CARD}`);
// Loop through the cards and add the open and show classes
for (let card of cards) {
card.classList.toggle(OPEN_CARD, true);
card.classList.toggle(SHOW_CARD, true);
}
}
function placeCards(shuffledTypes) {
const cards = document.querySelector(`.${DECK}`).children;
// Loop through the cards to replace the old type with the shuffled one
for (let i = 0; i < cards.length; i++) {
const oldType = cards[i].firstElementChild.classList[1];
cards[i].firstElementChild.classList.replace(oldType, shuffledTypes[i]);
}
}
function createCardsElements(cardTypes) {
const deck = document.createElement('ul');
deck.classList.add('deck');
// Loop through card types to create cards elements
for (let cardType of cardTypes) {
const cardElement = document.createElement('li');
const iElement = document.createElement('i');
cardElement.classList.add(CARD, OPEN_CARD, SHOW_CARD);
iElement.classList.add(FONT_AWESOME);
iElement.classList.add(cardType);
cardElement.appendChild(iElement);
deck.appendChild(cardElement);
}
const startGame = document.querySelector(`#${START_GAME}`);
startGame.appendChild(deck);
startGame.style.display = 'flex';
}
function matchPair(pairOfCards) {
const [firstCard, secondCard] = pairOfCards;
const typeOfFirst = firstCard.firstElementChild.classList[1];
const typeOfSecond = secondCard.firstElementChild.classList[1];
// Check if the pair of cards match or else no match
if (typeOfFirst === typeOfSecond) {
firstCard.classList.add(MATCH_CARD);
secondCard.classList.add(MATCH_CARD);
openedCardsCounter += 2;
// If only two cards remain unmatched, reveal them and finish the game
if (openedCardsCounter === CARD_TYPES.length - 2) {
// Remove listeners from deck and restart
const deck = document.querySelector(`.${DECK}`);
const restartBtn = document.querySelector(`.${RESTART}`);
deck.removeEventListener('click', cardsListener);
restartBtn.removeEventListener('click', restartListener);
showCards();
setTimeout(finishGame, FINISH_GAME_DELAY);
openedCardsCounter = 0;
}
} else |
}
// Shuffle function from http://stackoverflow.com/a/2450976
function shuffle(array) {
var currentIndex = array.length,
temporaryValue,
randomIndex;
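// Fisher-Yates shuffle: walk backwards through the array, swapping each position with a randomly picked element from the unshuffled part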
while (currentIndex !== 0) {
randomIndex = Math.floor(Math.random() * currentIndex);
currentIndex -= 1;
temporaryValue = array[currentIndex];
array[currentIndex] = array[randomIndex];
array[randomIndex] = temporaryValue;
}
return array;
}
/*
*
* Event listener functions
*
*/
function startListener(event) {
document.querySelector(`#${WELCOME_SCREEN}`).style.display = 'none';
event.currentTarget.removeEventListener('click', startListener);
startGame();
}
function cardsListener(event) {
const { target } = event;
const { tagName, classList } = target;
// If the card is not opened yet, add the OPEN_CARD and SHOW_CARD classes
if (tagName.toLowerCase() === 'li' && !classList.contains(OPEN_CARD)) {
classList.add(OPEN_CARD, SHOW_CARD);
pairOfCards.push(target);
// If a pair of cards is open, check them for a match
if (pairOfCards.length === 2) {
matchPair(pairOfCards);
pairOfCards = [];
updateScorePanel();
}
}
}
function restartListener() {
// Reset opened cards counter
openedCardsCounter = 0;
// Reset Timer and clear marks
clearInterval(timerInterval);
document.querySelector(`.${TIMER}`).innerText = '00:00';
performance.clearMarks();
// Reset stars and moves counter
resetStars();
resetMoves();
// Hide cards and then shuffle the types and place them
hideCards();
const shuffledTypes = shuffle(CARD_TYPES);
placeCards(shuffledTypes);
// Show shuffled cards and start playing
showCards();
setTimeout(addClickListenerOnCards, HIDING_CARDS_DELAY);
}
function playAgainListener(event) {
document.querySelector(`#${FINISH_GAME}`).remove();
document.querySelector(`#${START_GAME}`).style.display = 'flex';
addClickListenerOnRestart();
restartListener();
event.currentTarget.removeEventListener('click', playAgainListener);
}
/*
*
* Add listeners
*
*/
function addClickListenerOnCards() {
performance.mark('start-timer');
timerInterval = setInterval(function() {
performance.mark('now-timer');
const timer = document.querySelector(`.${TIMER}`);
timer.innerText = getTimer();
}, 1000);
hideCards();
const deck = document.querySelector(`.${DECK}`);
deck.addEventListener('click', cardsListener);
}
function addClickListenerOnRestart() {
const restart = document.querySelector(`.${RESTART}`);
restart.addEventListener('click', restartListener);
}
/*
*
* The main functions welcomeScreen, startGame and finishGame
*
*/
function welcomeScreen() {
const startBtn = document.querySelector(`#${START_BTN}`);
startBtn.addEventListener('click', startListener);
}
function startGame() {
const shuffledTypes = shuffle(CARD_TYPES);
createCardsElements(shuffledTypes);
setTimeout(addClickListenerOnCards, HIDING_CARDS_DELAY);
addClickListenerOnRestart();
}
function finishGame() {
// Hide the start-game section
document.querySelector(`#${START_GAME}`).style.display = 'none';
// Create finish-game container
const finishContainer = document.createElement('div');
finishContainer.classList.add(CONTAINER);
finishContainer.id = 'finish-game';
// Create congratulations elements
const checkIcon = document.createElement('i');
checkIcon.classList.add(FONT_AWESOME, CHECK_ICON, CHECK_MARK);
const congratsHeading = document.createElement('h2');
congratsHeading.innerText = 'Congratulations, you finished the game!';
// Create elements for score details
const [moves, stars, duration] = getScores();
const scoresDetails = document.createElement('p');
scoresDetails.innerHTML = `
Moves: ${moves}<br>
${stars.outerHTML}<br>
Duration: ${duration}`;
// Create restart button
const playAgain = document.createElement('button');
playAgain.classList.add(GREEN_BTN);
playAgain.innerText = 'Play again';
// Append all elements to the finish-game section
finishContainer.appendChild(checkIcon);
finishContainer.appendChild(congratsHeading);
finishContainer.appendChild(scoresDetails);
finishContainer.appendChild(playAgain);
// Append finish-game to game-container
document.querySelector(`#${GAME_CONTAINER}`).appendChild(finishContainer);
playAgain.addEventListener('click', playAgainListener);
}
// Call the welcomeScreen function
// The first call is here
welcomeScreen();
| {
setTimeout(() => {
firstCard.classList.add(NO_MATCH_CARD);
secondCard.classList.add(NO_MATCH_CARD);
}, NO_MATCH_DELAY);
setTimeout(() => {
firstCard.classList.remove(OPEN_CARD, SHOW_CARD, NO_MATCH_CARD);
secondCard.classList.remove(OPEN_CARD, SHOW_CARD, NO_MATCH_CARD);
}, HIDING_NO_MATCH_DELAY);
} | conditional_block |
lib.rs | #[macro_use]
extern crate lazy_static;
#[macro_use]
extern crate log;
use std::any::Any;
use std::borrow::Cow;
use std::fmt::Display;
use std::sync::mpsc::channel;
use std::sync::Arc;
use std::thread;
use crossbeam::channel::{Receiver, Sender};
use tuikit::prelude::{Event as TermEvent, *};
pub use crate::ansi::AnsiString;
pub use crate::engine::fuzzy::FuzzyAlgorithm;
use crate::event::{EventReceiver, EventSender};
use crate::model::Model;
pub use crate::options::SkimOptions;
pub use crate::output::SkimOutput;
use crate::reader::Reader;
mod ansi;
mod engine;
mod event;
pub mod field;
mod global;
mod header;
mod helper;
mod input;
mod item;
mod matcher;
mod model;
mod options;
mod orderedvec;
mod output;
pub mod prelude;
mod previewer;
mod query;
mod reader;
mod selection;
mod spinlock;
mod theme;
mod util;
//------------------------------------------------------------------------------
pub trait AsAny {
fn as_any(&self) -> &dyn Any;
fn as_any_mut(&mut self) -> &mut dyn Any;
}
impl<T: Any> AsAny for T {
fn as_any(&self) -> &dyn Any {
self
}
fn as_any_mut(&mut self) -> &mut dyn Any {
self
}
}
/// A `SkimItem` defines what's been processed (fetched, matched, previewed and returned) by skim
///
/// # Downcast Example
/// Skim will return the item back, but in `Arc<dyn SkimItem>` form. We might want a reference
/// to the concrete type instead of a trait object. Skim provides a somewhat "complicated" way to
/// `downcast` it back to a reference of the original concrete type.
///
/// ```rust
/// use skim::prelude::*;
///
/// struct MyItem {}
/// impl SkimItem for MyItem {
/// fn text(&self) -> Cow<str> {
/// unimplemented!()
/// }
/// }
///
/// impl MyItem {
/// pub fn mutable(&mut self) -> i32 {
/// 1
/// }
///
/// pub fn immutable(&self) -> i32 {
/// 0
/// }
/// }
///
/// let mut ret: Arc<dyn SkimItem> = Arc::new(MyItem{});
/// let mutable: &mut MyItem = Arc::get_mut(&mut ret)
/// .expect("item is referenced by others")
/// .as_any_mut() // cast to Any
/// .downcast_mut::<MyItem>() // downcast to (mut) concrete type
/// .expect("something wrong with downcast");
/// assert_eq!(mutable.mutable(), 1);
///
/// let immutable: &MyItem = (*ret).as_any() // cast to Any
/// .downcast_ref::<MyItem>() // downcast to concrete type
/// .expect("something wrong with downcast");
/// assert_eq!(immutable.immutable(), 0)
/// ```
pub trait SkimItem: AsAny + Send + Sync + 'static {
/// The string to be used for matching (without color)
fn text(&self) -> Cow<str>;
/// The content to be displayed on the item list, could contain ANSI properties
fn display<'a>(&'a self, context: DisplayContext<'a>) -> AnsiString<'a> {
AnsiString::from(context)
}
/// Custom preview content; defaults to `ItemPreview::Global`, which uses the global preview
/// setting (i.e. the command set by the `preview` option)
fn preview(&self, _context: PreviewContext) -> ItemPreview {
ItemPreview::Global
}
/// Get the output text (after accept); defaults to `text()`.
/// Note that this function is intended to be used by the caller of skim and will not be used by
/// skim. Since skim returns the item back in `SkimOutput`, if this string is not what you
/// want, you can still use `downcast` to recover a pointer to the original struct.
fn output(&self) -> Cow<str> {
self.text()
}
/// We can limit the matching ranges of the item's `text()` by
/// providing the (start_byte, end_byte) of the range.
fn get_matching_ranges(&self) -> Option<&[(usize, usize)]> {
None
}
}
//------------------------------------------------------------------------------
// Implement SkimItem for raw strings
impl<T: AsRef<str> + Send + Sync + 'static> SkimItem for T {
fn text(&self) -> Cow<str> |
}
//------------------------------------------------------------------------------
// Display Context
pub enum Matches<'a> {
None,
CharIndices(&'a [usize]),
CharRange(usize, usize),
ByteRange(usize, usize),
}
pub struct DisplayContext<'a> {
pub text: &'a str,
pub score: i32,
pub matches: Matches<'a>,
pub container_width: usize,
pub highlight_attr: Attr,
}
impl<'a> From<DisplayContext<'a>> for AnsiString<'a> {
fn from(context: DisplayContext<'a>) -> Self {
match context.matches {
Matches::CharIndices(indices) => AnsiString::from((context.text, indices, context.highlight_attr)),
Matches::CharRange(start, end) => {
AnsiString::new_str(context.text, vec![(context.highlight_attr, (start as u32, end as u32))])
}
Matches::ByteRange(start, end) => {
let ch_start = context.text[..start].chars().count();
let ch_end = ch_start + context.text[start..end].chars().count();
AnsiString::new_str(
context.text,
vec![(context.highlight_attr, (ch_start as u32, ch_end as u32))],
)
}
Matches::None => AnsiString::new_str(context.text, vec![]),
}
}
}
//------------------------------------------------------------------------------
// Preview Context
pub struct PreviewContext<'a> {
pub query: &'a str,
pub cmd_query: &'a str,
pub width: usize,
pub height: usize,
pub current_index: usize,
pub current_selection: &'a str,
/// selected item indices (may or may not include current item)
pub selected_indices: &'a [usize],
/// selected item texts (may or may not include current item)
pub selections: &'a [&'a str],
}
//------------------------------------------------------------------------------
// Preview
#[derive(Default, Copy, Clone, Debug)]
pub struct PreviewPosition {
pub h_scroll: Size,
pub h_offset: Size,
pub v_scroll: Size,
pub v_offset: Size,
}
pub enum ItemPreview {
/// Execute the command and print the command's output
Command(String),
/// Display the prepared text (lines)
Text(String),
/// Display the colored text (lines)
AnsiText(String),
CommandWithPos(String, PreviewPosition),
TextWithPos(String, PreviewPosition),
AnsiWithPos(String, PreviewPosition),
/// Use global command settings to preview the item
Global,
}
//==============================================================================
// A match engine will execute the matching algorithm
#[derive(Eq, PartialEq, Debug, Copy, Clone)]
pub enum CaseMatching {
Respect,
Ignore,
Smart,
}
impl Default for CaseMatching {
fn default() -> Self {
CaseMatching::Smart
}
}
#[derive(PartialEq, Eq, Clone, Debug)]
#[allow(dead_code)]
pub enum MatchRange {
ByteRange(usize, usize),
// range of bytes
Chars(Vec<usize>), // individual character indices matched
}
pub type Rank = [i32; 4];
#[derive(Clone)]
pub struct MatchResult {
pub rank: Rank,
pub matched_range: MatchRange,
}
impl MatchResult {
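/// Translate the matched range into character indices of `text`, converting byte offsets into char counts when needed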
pub fn range_char_indices(&self, text: &str) -> Vec<usize> {
match &self.matched_range {
&MatchRange::ByteRange(start, end) => {
let first = text[..start].chars().count();
let last = first + text[start..end].chars().count();
(first..last).collect()
}
MatchRange::Chars(vec) => vec.clone(),
}
}
}
pub trait MatchEngine: Sync + Send + Display {
fn match_item(&self, item: Arc<dyn SkimItem>) -> Option<MatchResult>;
}
pub trait MatchEngineFactory {
fn create_engine_with_case(&self, query: &str, case: CaseMatching) -> Box<dyn MatchEngine>;
fn create_engine(&self, query: &str) -> Box<dyn MatchEngine> {
self.create_engine_with_case(query, CaseMatching::default())
}
}
//------------------------------------------------------------------------------
// Preselection
/// A selector that determines whether an item should be "pre-selected" in multi-selection mode
pub trait Selector {
fn should_select(&self, index: usize, item: &dyn SkimItem) -> bool;
}
//------------------------------------------------------------------------------
pub type SkimItemSender = Sender<Arc<dyn SkimItem>>;
pub type SkimItemReceiver = Receiver<Arc<dyn SkimItem>>;
pub struct Skim {}
impl Skim {
/// params:
/// - options: the "complex" options that control how skim behaves
/// - source: a stream of items to be passed to skim for filtering.
/// If None is given, skim will invoke the command given to fetch the items.
///
/// return:
/// - None: on internal errors.
/// - SkimOutput: the collected key, event, query, selected items, etc.
pub fn run_with(options: &SkimOptions, source: Option<SkimItemReceiver>) -> Option<SkimOutput> {
let min_height = options
.min_height
.map(Skim::parse_height_string)
.expect("min_height should have default values");
let height = options
.height
.map(Skim::parse_height_string)
.expect("height should have default values");
let (tx, rx): (EventSender, EventReceiver) = channel();
let term = Arc::new(
Term::with_options(
TermOptions::default()
.min_height(min_height)
.height(height)
.clear_on_exit(!options.no_clear)
.disable_alternate_screen(options.no_clear_start)
.clear_on_start(!options.no_clear_start)
.hold(options.select1 || options.exit0 || options.sync),
)
.unwrap(),
);
if !options.no_mouse {
let _ = term.enable_mouse_support();
}
//------------------------------------------------------------------------------
// input
let mut input = input::Input::new();
input.parse_keymaps(&options.bind);
input.parse_expect_keys(options.expect.as_deref());
let tx_clone = tx.clone();
let term_clone = term.clone();
let input_thread = thread::spawn(move || loop {
if let Ok(key) = term_clone.poll_event() {
if key == TermEvent::User(()) {
break;
}
let (key, action_chain) = input.translate_event(key);
for event in action_chain.into_iter() {
let _ = tx_clone.send((key, event));
}
}
});
//------------------------------------------------------------------------------
// reader
let reader = Reader::with_options(options).source(source);
//------------------------------------------------------------------------------
// model + previewer
let mut model = Model::new(rx, tx, reader, term.clone(), options);
let ret = model.start();
let _ = term.send_event(TermEvent::User(())); // interrupt the input thread
let _ = input_thread.join();
ret
}
// 10 -> TermHeight::Fixed(10)
// 10% -> TermHeight::Percent(10)
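// unparseable values fall back to Percent(100) and Fixed(0) respectively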
fn parse_height_string(string: &str) -> TermHeight {
if string.ends_with('%') {
TermHeight::Percent(string[0..string.len() - 1].parse().unwrap_or(100))
} else {
TermHeight::Fixed(string.parse().unwrap_or(0))
}
}
}
| {
Cow::Borrowed(self.as_ref())
} | identifier_body |
lib.rs | #[macro_use]
extern crate lazy_static;
#[macro_use]
extern crate log;
use std::any::Any;
use std::borrow::Cow;
use std::fmt::Display;
use std::sync::mpsc::channel;
use std::sync::Arc;
use std::thread;
use crossbeam::channel::{Receiver, Sender};
use tuikit::prelude::{Event as TermEvent, *};
pub use crate::ansi::AnsiString;
pub use crate::engine::fuzzy::FuzzyAlgorithm;
use crate::event::{EventReceiver, EventSender};
use crate::model::Model;
pub use crate::options::SkimOptions;
pub use crate::output::SkimOutput;
use crate::reader::Reader;
mod ansi;
mod engine;
mod event;
pub mod field;
mod global;
mod header;
mod helper;
mod input;
mod item;
mod matcher;
mod model;
mod options;
mod orderedvec;
mod output;
pub mod prelude;
mod previewer;
mod query;
mod reader;
mod selection;
mod spinlock;
mod theme;
mod util;
//------------------------------------------------------------------------------
pub trait AsAny {
fn as_any(&self) -> &dyn Any;
fn as_any_mut(&mut self) -> &mut dyn Any;
}
impl<T: Any> AsAny for T {
fn as_any(&self) -> &dyn Any {
self
}
fn as_any_mut(&mut self) -> &mut dyn Any {
self
}
}
/// A `SkimItem` defines what's been processed (fetched, matched, previewed and returned) by skim
///
/// # Downcast Example
/// Skim will return the item back, but in `Arc<dyn SkimItem>` form. We might want a reference
/// to the concrete type instead of a trait object. Skim provides a somewhat "complicated" way to
/// `downcast` it back to a reference of the original concrete type.
///
/// ```rust
/// use skim::prelude::*;
///
/// struct MyItem {}
/// impl SkimItem for MyItem {
/// fn text(&self) -> Cow<str> {
/// unimplemented!()
/// }
/// }
///
/// impl MyItem {
/// pub fn mutable(&mut self) -> i32 {
/// 1
/// }
///
/// pub fn immutable(&self) -> i32 {
/// 0
/// }
/// }
///
/// let mut ret: Arc<dyn SkimItem> = Arc::new(MyItem{});
/// let mutable: &mut MyItem = Arc::get_mut(&mut ret)
/// .expect("item is referenced by others")
/// .as_any_mut() // cast to Any
/// .downcast_mut::<MyItem>() // downcast to (mut) concrete type
/// .expect("something wrong with downcast");
/// assert_eq!(mutable.mutable(), 1);
///
/// let immutable: &MyItem = (*ret).as_any() // cast to Any
/// .downcast_ref::<MyItem>() // downcast to concrete type
/// .expect("something wrong with downcast");
/// assert_eq!(immutable.immutable(), 0)
/// ```
pub trait SkimItem: AsAny + Send + Sync + 'static {
/// The string to be used for matching (without color)
fn text(&self) -> Cow<str>;
/// The content to be displayed on the item list, could contain ANSI properties
fn display<'a>(&'a self, context: DisplayContext<'a>) -> AnsiString<'a> {
AnsiString::from(context)
}
/// Custom preview content; defaults to `ItemPreview::Global`, which uses the global preview
/// setting (i.e. the command set by the `preview` option)
fn preview(&self, _context: PreviewContext) -> ItemPreview {
ItemPreview::Global
}
| /// want, you could still use `downcast` to retain the pointer to the original struct.
fn output(&self) -> Cow<str> {
self.text()
}
/// We can limit the matching ranges of the item's `text()` by
/// providing the (start_byte, end_byte) of the range.
fn get_matching_ranges(&self) -> Option<&[(usize, usize)]> {
None
}
}
//------------------------------------------------------------------------------
// Implement SkimItem for raw strings
impl<T: AsRef<str> + Send + Sync + 'static> SkimItem for T {
fn text(&self) -> Cow<str> {
Cow::Borrowed(self.as_ref())
}
}
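// e.g. `String` and `&'static str` satisfy these bounds, so plain strings can be passed to skim as items directly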
//------------------------------------------------------------------------------
// Display Context
pub enum Matches<'a> {
None,
CharIndices(&'a [usize]),
CharRange(usize, usize),
ByteRange(usize, usize),
}
pub struct DisplayContext<'a> {
pub text: &'a str,
pub score: i32,
pub matches: Matches<'a>,
pub container_width: usize,
pub highlight_attr: Attr,
}
impl<'a> From<DisplayContext<'a>> for AnsiString<'a> {
fn from(context: DisplayContext<'a>) -> Self {
match context.matches {
Matches::CharIndices(indices) => AnsiString::from((context.text, indices, context.highlight_attr)),
Matches::CharRange(start, end) => {
AnsiString::new_str(context.text, vec![(context.highlight_attr, (start as u32, end as u32))])
}
Matches::ByteRange(start, end) => {
let ch_start = context.text[..start].chars().count();
let ch_end = ch_start + context.text[start..end].chars().count();
AnsiString::new_str(
context.text,
vec![(context.highlight_attr, (ch_start as u32, ch_end as u32))],
)
}
Matches::None => AnsiString::new_str(context.text, vec![]),
}
}
}
//------------------------------------------------------------------------------
// Preview Context
pub struct PreviewContext<'a> {
pub query: &'a str,
pub cmd_query: &'a str,
pub width: usize,
pub height: usize,
pub current_index: usize,
pub current_selection: &'a str,
/// selected item indices (may or may not include current item)
pub selected_indices: &'a [usize],
/// selected item texts (may or may not include current item)
pub selections: &'a [&'a str],
}
//------------------------------------------------------------------------------
// Preview
#[derive(Default, Copy, Clone, Debug)]
pub struct PreviewPosition {
pub h_scroll: Size,
pub h_offset: Size,
pub v_scroll: Size,
pub v_offset: Size,
}
pub enum ItemPreview {
/// Execute the command and print the command's output
Command(String),
/// Display the prepared text (lines)
Text(String),
/// Display the colored text (lines)
AnsiText(String),
CommandWithPos(String, PreviewPosition),
TextWithPos(String, PreviewPosition),
AnsiWithPos(String, PreviewPosition),
/// Use global command settings to preview the item
Global,
}
//==============================================================================
// A match engine will execute the matching algorithm
#[derive(Eq, PartialEq, Debug, Copy, Clone)]
pub enum CaseMatching {
Respect,
Ignore,
Smart,
}
impl Default for CaseMatching {
fn default() -> Self {
CaseMatching::Smart
}
}
#[derive(PartialEq, Eq, Clone, Debug)]
#[allow(dead_code)]
pub enum MatchRange {
ByteRange(usize, usize),
// range of bytes
Chars(Vec<usize>), // individual character indices matched
}
pub type Rank = [i32; 4];
#[derive(Clone)]
pub struct MatchResult {
pub rank: Rank,
pub matched_range: MatchRange,
}
impl MatchResult {
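/// Translate the matched range into character indices of `text`, converting byte offsets into char counts when needed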
pub fn range_char_indices(&self, text: &str) -> Vec<usize> {
match &self.matched_range {
&MatchRange::ByteRange(start, end) => {
let first = text[..start].chars().count();
let last = first + text[start..end].chars().count();
(first..last).collect()
}
MatchRange::Chars(vec) => vec.clone(),
}
}
}
pub trait MatchEngine: Sync + Send + Display {
fn match_item(&self, item: Arc<dyn SkimItem>) -> Option<MatchResult>;
}
pub trait MatchEngineFactory {
fn create_engine_with_case(&self, query: &str, case: CaseMatching) -> Box<dyn MatchEngine>;
fn create_engine(&self, query: &str) -> Box<dyn MatchEngine> {
self.create_engine_with_case(query, CaseMatching::default())
}
}
//------------------------------------------------------------------------------
// Preselection
/// A selector that determines whether an item should be "pre-selected" in multi-selection mode
pub trait Selector {
fn should_select(&self, index: usize, item: &dyn SkimItem) -> bool;
}
//------------------------------------------------------------------------------
pub type SkimItemSender = Sender<Arc<dyn SkimItem>>;
pub type SkimItemReceiver = Receiver<Arc<dyn SkimItem>>;
pub struct Skim {}
impl Skim {
/// params:
/// - options: the "complex" options that control how skim behaves
/// - source: a stream of items to be passed to skim for filtering.
/// If None is given, skim will invoke the command given to fetch the items.
///
/// return:
/// - None: on internal errors.
/// - SkimOutput: the collected key, event, query, selected items, etc.
pub fn run_with(options: &SkimOptions, source: Option<SkimItemReceiver>) -> Option<SkimOutput> {
let min_height = options
.min_height
.map(Skim::parse_height_string)
.expect("min_height should have default values");
let height = options
.height
.map(Skim::parse_height_string)
.expect("height should have default values");
let (tx, rx): (EventSender, EventReceiver) = channel();
let term = Arc::new(
Term::with_options(
TermOptions::default()
.min_height(min_height)
.height(height)
.clear_on_exit(!options.no_clear)
.disable_alternate_screen(options.no_clear_start)
.clear_on_start(!options.no_clear_start)
.hold(options.select1 || options.exit0 || options.sync),
)
.unwrap(),
);
if !options.no_mouse {
let _ = term.enable_mouse_support();
}
//------------------------------------------------------------------------------
// input
let mut input = input::Input::new();
input.parse_keymaps(&options.bind);
input.parse_expect_keys(options.expect.as_deref());
let tx_clone = tx.clone();
let term_clone = term.clone();
let input_thread = thread::spawn(move || loop {
if let Ok(key) = term_clone.poll_event() {
if key == TermEvent::User(()) {
break;
}
let (key, action_chain) = input.translate_event(key);
for event in action_chain.into_iter() {
let _ = tx_clone.send((key, event));
}
}
});
//------------------------------------------------------------------------------
// reader
let reader = Reader::with_options(options).source(source);
//------------------------------------------------------------------------------
// model + previewer
let mut model = Model::new(rx, tx, reader, term.clone(), options);
let ret = model.start();
let _ = term.send_event(TermEvent::User(())); // interrupt the input thread
let _ = input_thread.join();
ret
}
// 10 -> TermHeight::Fixed(10)
// 10% -> TermHeight::Percent(10)
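// unparseable values fall back to Percent(100) and Fixed(0) respectively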
fn parse_height_string(string: &str) -> TermHeight {
if string.ends_with('%') {
TermHeight::Percent(string[0..string.len() - 1].parse().unwrap_or(100))
} else {
TermHeight::Fixed(string.parse().unwrap_or(0))
}
}
} | /// Get output text(after accept), default to `text()`
/// Note that this function is intended to be used by the caller of skim and will not be used by
/// skim. And since skim will return the item back in `SkimOutput`, if string is not what you | random_line_split |
lib.rs | #[macro_use]
extern crate lazy_static;
#[macro_use]
extern crate log;
use std::any::Any;
use std::borrow::Cow;
use std::fmt::Display;
use std::sync::mpsc::channel;
use std::sync::Arc;
use std::thread;
use crossbeam::channel::{Receiver, Sender};
use tuikit::prelude::{Event as TermEvent, *};
pub use crate::ansi::AnsiString;
pub use crate::engine::fuzzy::FuzzyAlgorithm;
use crate::event::{EventReceiver, EventSender};
use crate::model::Model;
pub use crate::options::SkimOptions;
pub use crate::output::SkimOutput;
use crate::reader::Reader;
mod ansi;
mod engine;
mod event;
pub mod field;
mod global;
mod header;
mod helper;
mod input;
mod item;
mod matcher;
mod model;
mod options;
mod orderedvec;
mod output;
pub mod prelude;
mod previewer;
mod query;
mod reader;
mod selection;
mod spinlock;
mod theme;
mod util;
//------------------------------------------------------------------------------
pub trait AsAny {
fn as_any(&self) -> &dyn Any;
fn as_any_mut(&mut self) -> &mut dyn Any;
}
impl<T: Any> AsAny for T {
fn as_any(&self) -> &dyn Any {
self
}
fn as_any_mut(&mut self) -> &mut dyn Any {
self
}
}
/// A `SkimItem` defines what's been processed (fetched, matched, previewed and returned) by skim
///
/// # Downcast Example
/// Skim will return the item back, but in `Arc<dyn SkimItem>` form. We might want a reference
/// to the concrete type instead of a trait object. Skim provides a somewhat "complicated" way to
/// `downcast` it back to a reference of the original concrete type.
///
/// ```rust
/// use skim::prelude::*;
///
/// struct MyItem {}
/// impl SkimItem for MyItem {
/// fn text(&self) -> Cow<str> {
/// unimplemented!()
/// }
/// }
///
/// impl MyItem {
/// pub fn mutable(&mut self) -> i32 {
/// 1
/// }
///
/// pub fn immutable(&self) -> i32 {
/// 0
/// }
/// }
///
/// let mut ret: Arc<dyn SkimItem> = Arc::new(MyItem{});
/// let mutable: &mut MyItem = Arc::get_mut(&mut ret)
/// .expect("item is referenced by others")
/// .as_any_mut() // cast to Any
/// .downcast_mut::<MyItem>() // downcast to (mut) concrete type
/// .expect("something wrong with downcast");
/// assert_eq!(mutable.mutable(), 1);
///
/// let immutable: &MyItem = (*ret).as_any() // cast to Any
/// .downcast_ref::<MyItem>() // downcast to concrete type
/// .expect("something wrong with downcast");
/// assert_eq!(immutable.immutable(), 0)
/// ```
pub trait SkimItem: AsAny + Send + Sync + 'static {
/// The string to be used for matching (without color)
fn text(&self) -> Cow<str>;
/// The content to be displayed on the item list, could contain ANSI properties
fn display<'a>(&'a self, context: DisplayContext<'a>) -> AnsiString<'a> {
AnsiString::from(context)
}
/// Custom preview content; defaults to `ItemPreview::Global`, which uses the global preview
/// setting (i.e. the command set by the `preview` option)
fn preview(&self, _context: PreviewContext) -> ItemPreview {
ItemPreview::Global
}
/// Get the output text (after accept); defaults to `text()`.
/// Note that this function is intended to be used by the caller of skim and will not be used by
/// skim. Since skim returns the item back in `SkimOutput`, if this string is not what you
/// want, you can still use `downcast` to recover a pointer to the original struct.
fn output(&self) -> Cow<str> {
self.text()
}
/// We can limit the matching ranges of the item's `text()` by
/// providing the (start_byte, end_byte) of the range.
fn get_matching_ranges(&self) -> Option<&[(usize, usize)]> {
None
}
}
//------------------------------------------------------------------------------
// Implement SkimItem for raw strings
impl<T: AsRef<str> + Send + Sync + 'static> SkimItem for T {
fn text(&self) -> Cow<str> {
Cow::Borrowed(self.as_ref())
}
}
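// e.g. `String` and `&'static str` satisfy these bounds, so plain strings can be passed to skim as items directly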
//------------------------------------------------------------------------------
// Display Context
pub enum Matches<'a> {
None,
CharIndices(&'a [usize]),
CharRange(usize, usize),
ByteRange(usize, usize),
}
pub struct DisplayContext<'a> {
pub text: &'a str,
pub score: i32,
pub matches: Matches<'a>,
pub container_width: usize,
pub highlight_attr: Attr,
}
impl<'a> From<DisplayContext<'a>> for AnsiString<'a> {
fn from(context: DisplayContext<'a>) -> Self {
match context.matches {
Matches::CharIndices(indices) => AnsiString::from((context.text, indices, context.highlight_attr)),
Matches::CharRange(start, end) => {
AnsiString::new_str(context.text, vec![(context.highlight_attr, (start as u32, end as u32))])
}
Matches::ByteRange(start, end) => {
let ch_start = context.text[..start].chars().count();
let ch_end = ch_start + context.text[start..end].chars().count();
AnsiString::new_str(
context.text,
vec![(context.highlight_attr, (ch_start as u32, ch_end as u32))],
)
}
Matches::None => AnsiString::new_str(context.text, vec![]),
}
}
}
//------------------------------------------------------------------------------
// Preview Context
pub struct PreviewContext<'a> {
pub query: &'a str,
pub cmd_query: &'a str,
pub width: usize,
pub height: usize,
pub current_index: usize,
pub current_selection: &'a str,
/// selected item indices (may or may not include current item)
pub selected_indices: &'a [usize],
/// selected item texts (may or may not include current item)
pub selections: &'a [&'a str],
}
//------------------------------------------------------------------------------
// Preview
#[derive(Default, Copy, Clone, Debug)]
pub struct PreviewPosition {
pub h_scroll: Size,
pub h_offset: Size,
pub v_scroll: Size,
pub v_offset: Size,
}
pub enum ItemPreview {
/// Execute the command and print the command's output
Command(String),
/// Display the prepared text (lines)
Text(String),
/// Display the colored text (lines)
AnsiText(String),
CommandWithPos(String, PreviewPosition),
TextWithPos(String, PreviewPosition),
AnsiWithPos(String, PreviewPosition),
/// Use global command settings to preview the item
Global,
}
//==============================================================================
// A match engine will execute the matching algorithm
#[derive(Eq, PartialEq, Debug, Copy, Clone)]
pub enum CaseMatching {
Respect,
Ignore,
Smart,
}
impl Default for CaseMatching {
fn | () -> Self {
CaseMatching::Smart
}
}
#[derive(PartialEq, Eq, Clone, Debug)]
#[allow(dead_code)]
pub enum MatchRange {
ByteRange(usize, usize),
// range of bytes
Chars(Vec<usize>), // individual character indices matched
}
pub type Rank = [i32; 4];
#[derive(Clone)]
pub struct MatchResult {
pub rank: Rank,
pub matched_range: MatchRange,
}
impl MatchResult {
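/// Translate the matched range into character indices of `text`, converting byte offsets into char counts when needed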
pub fn range_char_indices(&self, text: &str) -> Vec<usize> {
match &self.matched_range {
&MatchRange::ByteRange(start, end) => {
let first = text[..start].chars().count();
let last = first + text[start..end].chars().count();
(first..last).collect()
}
MatchRange::Chars(vec) => vec.clone(),
}
}
}
pub trait MatchEngine: Sync + Send + Display {
fn match_item(&self, item: Arc<dyn SkimItem>) -> Option<MatchResult>;
}
pub trait MatchEngineFactory {
fn create_engine_with_case(&self, query: &str, case: CaseMatching) -> Box<dyn MatchEngine>;
fn create_engine(&self, query: &str) -> Box<dyn MatchEngine> {
self.create_engine_with_case(query, CaseMatching::default())
}
}
//------------------------------------------------------------------------------
// Preselection
/// A selector that determines whether an item should be "pre-selected" in multi-selection mode
pub trait Selector {
fn should_select(&self, index: usize, item: &dyn SkimItem) -> bool;
}
//------------------------------------------------------------------------------
pub type SkimItemSender = Sender<Arc<dyn SkimItem>>;
pub type SkimItemReceiver = Receiver<Arc<dyn SkimItem>>;
pub struct Skim {}
impl Skim {
/// params:
/// - options: the "complex" options that control how skim behaves
/// - source: a stream of items to be passed to skim for filtering.
/// If None is given, skim will invoke the command given to fetch the items.
///
/// return:
/// - None: on internal errors.
/// - SkimOutput: the collected key, event, query, selected items, etc.
pub fn run_with(options: &SkimOptions, source: Option<SkimItemReceiver>) -> Option<SkimOutput> {
let min_height = options
.min_height
.map(Skim::parse_height_string)
.expect("min_height should have default values");
let height = options
.height
.map(Skim::parse_height_string)
.expect("height should have default values");
let (tx, rx): (EventSender, EventReceiver) = channel();
let term = Arc::new(
Term::with_options(
TermOptions::default()
.min_height(min_height)
.height(height)
.clear_on_exit(!options.no_clear)
.disable_alternate_screen(options.no_clear_start)
.clear_on_start(!options.no_clear_start)
.hold(options.select1 || options.exit0 || options.sync),
)
.unwrap(),
);
if !options.no_mouse {
let _ = term.enable_mouse_support();
}
//------------------------------------------------------------------------------
// input
let mut input = input::Input::new();
input.parse_keymaps(&options.bind);
input.parse_expect_keys(options.expect.as_deref());
let tx_clone = tx.clone();
let term_clone = term.clone();
let input_thread = thread::spawn(move || loop {
if let Ok(key) = term_clone.poll_event() {
if key == TermEvent::User(()) {
break;
}
let (key, action_chain) = input.translate_event(key);
for event in action_chain.into_iter() {
let _ = tx_clone.send((key, event));
}
}
});
//------------------------------------------------------------------------------
// reader
let reader = Reader::with_options(options).source(source);
//------------------------------------------------------------------------------
// model + previewer
let mut model = Model::new(rx, tx, reader, term.clone(), options);
let ret = model.start();
let _ = term.send_event(TermEvent::User(())); // interrupt the input thread
let _ = input_thread.join();
ret
}
// 10 -> TermHeight::Fixed(10)
// 10% -> TermHeight::Percent(10)
fn parse_height_string(string: &str) -> TermHeight {
if string.ends_with('%') {
TermHeight::Percent(string[0..string.len() - 1].parse().unwrap_or(100))
} else {
TermHeight::Fixed(string.parse().unwrap_or(0))
}
}
}
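// Sketch test (added for illustration): `parse_height_string` accepts a bare
// number or a percentage and falls back to the defaults encoded above when the
// number fails to parse. `TermHeight` is assumed to be the tuikit type this
// crate already imports.
#[cfg(test)]
mod height_parsing_sketch {
    use super::*;

    #[test]
    fn parses_fixed_and_percent_heights() {
        assert!(matches!(Skim::parse_height_string("10"), TermHeight::Fixed(10)));
        assert!(matches!(Skim::parse_height_string("10%"), TermHeight::Percent(10)));
        // unparsable input falls back: "abc" -> Fixed(0), "abc%" -> Percent(100)
        assert!(matches!(Skim::parse_height_string("abc"), TermHeight::Fixed(0)));
    }
}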
eventEngine.py
"""
Copyright (C) 2015-2016, Juniper Networks, Inc.
All rights reserved.
Authors:
jpzhao, bphillips, ajaykv
Description:
Toby Network Event Engine.
"""
# pylint: disable=locally-disabled,undefined-variable,invalid-name
import re
#import copy
import os
import sys
#import types
#import pprint
import time
import importlib
import inspect
from robot.libraries.BuiltIn import BuiltIn as RobotBuiltIn
from jnpr.toby.utils.Vars import Vars
from jnpr.toby.engines.events.event_engine_utils import elog
import jnpr.toby.engines.events.event_engine_utils as ee_utils
import jnpr.toby.engines.config.config_utils as config_utils
class eventEngine(object):
"""
Class of Toby Event Engine
    - A network event is a disruption to the normal network operational environment ('steady
      states'), which usually causes topology/routing information changes that require the
      network to react in order to maintain connectivity.
- triggering an event in a testbed will test the DUTs capability to recover from the event(s).
- Generic Event engine handles all event actions in a consistent way
. standard logging for easy debugging
. arg handling
. iteration/duration/timeout/exception handling
- Extensible to add new events in a consistent style
"""
ROBOT_LIBRARY_SCOPE = 'TEST SUITE'
def __init__(self):
# get the event/check methods/functions from a register file
#self.events_registered = self.register_event()
self.events_registered = {}
self.response = ''
self.status = ''
#self.error_msg = ''
self.time_spent = None
self.src_path = None
if Vars().get_global_variable('${SUITE_SOURCE}'):
self.src_path = os.path.dirname(Vars().get_global_variable('${SUITE_SOURCE}'))
else:
self.src_path = os.getcwd()
        # the built-in event yaml file is in the same location as eventEngine.
self.ee_path = os.path.dirname(os.path.abspath(inspect.getfile(inspect.currentframe()))) #
if self.ee_path not in sys.path:
sys.path.append(self.ee_path)
def _call_keyword(self, *args, **kwargs):
'''
call Robot keyword inside event engine
'''
# TBD: if it is Toby keyword, call them directly with Python code?
my_args = []
keyword = None
if kwargs:
for key, val in kwargs.items():
if key == 'ROBOT_keyword':
keyword = val
else:
my_args.append('{}={}'.format(key, val))
if keyword is None:
elog('_call_keyword(): no keyword passed in via ROBOT_keyword')
return False
# run user picked Robot keyword
elog('debug', '====== Robot keyword {} with args: {}'.format(keyword, my_args))
res = RobotBuiltIn().run_keyword_and_return_status(keyword, *my_args)
return res
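    # Illustrative note (not in the original source): an event registered with
    # method "ROBOT:Log" reaches this shim with ROBOT_keyword='Log', and kwargs
    # such as message='hello' are converted to the positional string
    # 'message=hello' before run_keyword_and_return_status('Log', 'message=hello')
    # is invoked.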
def _update_events(self, events):
'''
        update events into this engine's 'events_registered' attribute
'''
registered_events = {}
for event in events:
registered_events[event] = {}
for action in events[event]: # trigger or check
if events[event][action].get('method'):
method_name_with_path = events[event][action]['method'].strip('\'\"')
func_name = method_name_with_path
#module_name = 'jnpr.toby.engines.events.triggers'
module_name = 'triggers'
if '.' in method_name_with_path:
module_name, func_name = method_name_with_path.rsplit('.', 1)
if module_name.endswith(')'):
# dealing with a class method here
class_pkg, class_name = module_name.rsplit('.', 1)
class_name = class_name.rstrip(r'()')
class_obj = getattr(importlib.import_module(class_pkg), class_name)()
method = getattr(class_obj, func_name)
config_utils.nested_set(registered_events[event],
[action, 'type', 'class_obj'], class_obj)
elif re.match(r'ROBOT:', func_name):
# A Robot keyword
# 1. any Robot keyword user defined : done
# 2. Todo: Toby keywords, pre-imported?( verify, execute_cli_.., )
# any benefit of doing that?
method = self._call_keyword
keyword = re.sub(r'ROBOT:', '', func_name).strip()
config_utils.nested_set(registered_events[event],
[action, 'type', 'ROBOT_keyword'], keyword)
else:
# a function
method = getattr(importlib.import_module(module_name), func_name)
config_utils.nested_set(registered_events[event],
[action, 'type', 'function'], func_name)
config_utils.nested_set(registered_events[event], [action, 'method'], method)
if events[event][action].get('args'):
# tbd: processing tv/cv in args?
# proc_args =
config_utils.nested_set(registered_events[event], [action, 'args'],
events[event][action]['args'])
#update registered events
config_utils.nested_update(self.events_registered, registered_events)
return registered_events
def register_event(self, *args, **kwargs):
'''
register events
'''
events = {}
if not self.events_registered:
# import Event Engine BuiltIn events file
print('+++++++++++++++ builtin event file path', self.ee_path)
events = config_utils.read_yaml(\
file=self.ee_path + '/Builtin_Events.yaml')
self._update_events(events)
if kwargs.get('file'):
events.update(config_utils.read_yaml(file=kwargs['file']))
self._update_events(events)
elif args:
# expecting one arg as event name
the_event = args[0].lower().strip('\'\" ')
if ' ' in the_event:
the_event = '_'.join(the_event.split())
if not events.get(the_event):
# a new event
t.log('\n=== adding a new event: {}'.format(the_event))
events[the_event] = {}
else:
t.log('debug', 'updating existing event: ' + the_event)
event_args = ('trigger_method', 'trigger_args', 'check_method', 'check_args')
for arg_key in kwargs:
if arg_key in event_args:
key_list = arg_key.split('_')
config_utils.nested_set(events[the_event], key_list, kwargs[arg_key])
self._update_events(events)
return self.events_registered
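    # Usage sketch (hypothetical event/method names, for illustration only):
    #
    #   ee = eventEngine()
    #   ee.register_event('link flap',
    #                     trigger_method='triggers.flap_interface',
    #                     trigger_args=['device', 'interface'],
    #                     check_method='triggers.check_interface_up',
    #                     check_args=['device', 'interface'])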
def _get_event_functions(self, event):
'''
        only registered events with methods and CLI/VTY commands are accepted,
        so that no user-defined config can 'sneak in' via an event, for example
'''
nevent = re.sub(r'\s+', '_', event.strip()).lower()
if self.events_registered.get(nevent):
return self.events_registered[nevent]
else:
raise Exception('cannot find this event: ' + event)
# maybe just return None
def _process_method_args(self, event, trigger_method, **kwargs):
'''
process args and find missing args of a trigger method
'''
trg_kwargs = {}
if trigger_method.get('args'):
if '**kwargs' in trigger_method['args']:
trg_kwargs.update(kwargs)
for default_targ in trigger_method.get('args'):
targ = default_targ.strip(' \'\"')
if re.match(r'\*args|\*\*kwargs', targ):
continue
tval = None
if '=' in default_targ:
matched = re.match(r'([^=]+)=([^=]+)$', targ)
targ = matched.group(1)
tval = matched.group(2)
if targ in kwargs:
trg_kwargs.update({targ: kwargs[targ]})
elif tval is not None:
trg_kwargs.update({targ: tval}) # take registered default value
else:
raise Exception('missing mandatory argument "{}" in event "{}"'.\
format(default_targ, event))
## adjust args depending on the type of method
if trigger_method['type'].get('ROBOT_keyword'):
trg_kwargs['ROBOT_keyword'] = trigger_method['type']['ROBOT_keyword']
return trg_kwargs
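    # Example of the resolution above (illustrative values): a method registered
    # with args=['device', 'wait=5'] and called as run_event(..., device='r0')
    # yields trg_kwargs={'device': 'r0', 'wait': '5'}; omitting 'device' raises
    # the missing-mandatory-argument exception.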
##### exposed keyword and high level functions
def run_event(self, event, *args, **kwargs):
"""
This is the exposed Event keyword to toby/Robot
- Note: Only take the trigger args and check args as named args
"""
if not self.events_registered:
# get the BuiltIn list of events
self.events_registered = self.register_event()
iteration = int(kwargs.get('iteration', 1))
device = kwargs.get('device', None)
#interface = kwargs.get('interface', None)
kwargs['me_object'] = ee_utils.me_object()
dev_name = ''
dev_tag = ''
if device:
dh = ee_utils.device_handle_parser(device=device)
#if dh.__dict__.get('TE') is None:
# dh.TE = {}
kwargs['dh'] = dh
dev_name = ee_utils.get_dh_name(dh)
dev_tag = ee_utils.get_dh_tag(dh)
# get all the functions related to this event
func_list = self._get_event_functions(event)
trg_kwargs = {}
if func_list['trigger'].get('args'):
trg_kwargs = self._process_method_args(event, func_list['trigger'], **kwargs)
chk_kwargs = {}
if kwargs.get('enable_check'):
if func_list.get('check') and func_list['check'].get('args'):
chk_kwargs = self._process_method_args(event, func_list['check'], **kwargs)
start_time = time.time()
elog('==== event <{}> starts:'.format(event))
# find duration/iteration.
        interval = float(kwargs.get('interval', 5))  # unit: seconds; 0.01 (msec scale) also works
# up+down considered one iteration
duration = kwargs.get('duration', None)
if duration is not None:
duration = float(duration)
iteration = 99999999 # duration takes control
# execute
# todo: running in parallel, (noise at back ground)
# todo: multiple events
        # todo: as a separate tool, or multi-thread, or async?
error = 0
for itr in range(iteration):
elog('== BEGIN: Event {} # {}: {}({})'.format(event, str(itr+1), dev_tag, dev_name), \
annotate=True, **kwargs)
#elog('== BEGIN: Event {} #{}: {}({})/{}'.format(event, str(itr+1), dh.tag, \
#dh.name, ifd), annotate=True, **kwargs)
#look for function first
kwargs['event_iteration'] = itr + 1
res = func_list['trigger']['method'](**trg_kwargs)
t.log('debug', 'run_event trigger returned {}'.format(str(res)))
if res is False:
error += 1
elif not self._confirm_event_state(event, check_kwargs=chk_kwargs, **kwargs):
error += 1
if iteration > 1 and itr < iteration - 1:
t.log('debug', 'wait for {} seconds before next iteration'.format(str(interval)))
time.sleep(interval)
if duration and time.time() - start_time > duration:
print('Event duration is up')
break
#if time.time() - start_time > timeout
# break
end_time = time.time()
self.time_spent = end_time - start_time
elog('==== END: Event <{0}>, took {1:.2f} seconds'.format(event, self.time_spent), \
             annotate=True, **kwargs)
        # return True/false or raise exception when failed??
        #ret = False if error > 0 else True
        if error > 0:
            # Todo: an eventException to standardize error msg
            #raise Exception('event failed with error: ' + str(error))
elog('error', 'event failed with error: ' + str(error))
return False
return True
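    # Usage sketch (hypothetical names): three flaps, 2 seconds apart, with the
    # registered check enabled; or run repeatedly for 60 seconds by passing
    # duration, which overrides iteration.
    #
    #   ee.run_event('link flap', device='r0', interface='ge-0/0/0',
    #                iteration=3, interval=2, enable_check=True)
    #   ee.run_event('link flap', device='r0', interface='ge-0/0/0', duration=60)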
def _confirm_event_state(self, event, **kwargs):
'''
check to confirm event status
'''
if not kwargs.get('enable_check'):
return True
self.status = True
func_list = self._get_event_functions(event)
st_check = False
if func_list.get('check'):
check_kwargs = kwargs.get('check_kwargs', {})
# time in float means it can take millisecond
timeout = float(kwargs.get('timeout', 30))
check_interval = float(kwargs.get('check_interval', 1))
start = time.time()
while time.time() - start < timeout:
res = func_list['check']['method'](**check_kwargs)
if res:
t.log('debug', 'state confirmed')
duration = time.time() - start
st_check = duration
t.log('takes {} for {} to finish'.format(duration, event))
break
time.sleep(check_interval)
else:
elog('error', '== Check event {} status failed'.format(event))
st_check = False
else:
t.log('warn', 'No check function for {}, skip'.format(event))
st_check = True
        return st_check
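    # Note (illustrative summary of the logic above): when enable_check is set,
    # the check method is polled every check_interval seconds (default 1) until
    # it returns truthy or timeout seconds (default 30) elapse; the elapsed time
    # is returned on success.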
dynamic_store.rs
// Copyright 2017 Amagicom AB.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
//! Bindings to [`SCDynamicStore`].
//!
//! See the examples directory for examples how to use this module.
//!
//! [`SCDynamicStore`]: https://developer.apple.com/documentation/systemconfiguration/scdynamicstore?language=objc
use crate::sys::{
dynamic_store::{
kSCDynamicStoreUseSessionKeys, SCDynamicStoreCallBack, SCDynamicStoreContext,
SCDynamicStoreCopyKeyList, SCDynamicStoreCopyValue, SCDynamicStoreCreateRunLoopSource,
SCDynamicStoreCreateWithOptions, SCDynamicStoreGetTypeID, SCDynamicStoreRef,
SCDynamicStoreRemoveValue, SCDynamicStoreSetNotificationKeys, SCDynamicStoreSetValue,
},
dynamic_store_copy_specific::SCDynamicStoreCopyProxies,
};
use core_foundation::{
array::{CFArray, CFArrayRef},
base::{kCFAllocatorDefault, CFType, TCFType},
boolean::CFBoolean,
dictionary::CFDictionary,
propertylist::{CFPropertyList, CFPropertyListSubClass},
runloop::CFRunLoopSource,
string::CFString,
};
use std::{ffi::c_void, ptr};
/// Struct describing the callback happening when a watched value in the dynamic store is changed.
pub struct SCDynamicStoreCallBackContext<T> {
/// The callback function that will be called when a watched value in the dynamic store is
/// changed.
pub callout: SCDynamicStoreCallBackT<T>,
/// The argument passed to each `callout` call. Can be used to keep state between
/// callbacks.
pub info: T,
}
/// Signature for callback functions getting called when a watched value in the dynamic store is
/// changed.
///
/// This is the safe callback definition, abstracting over the lower level `SCDynamicStoreCallBack`
/// from the `system-configuration-sys` crate.
pub type SCDynamicStoreCallBackT<T> =
fn(store: SCDynamicStore, changed_keys: CFArray<CFString>, info: &mut T);
/// Builder for [`SCDynamicStore`] sessions.
///
/// [`SCDynamicStore`]: struct.SCDynamicStore.html
pub struct SCDynamicStoreBuilder<T> {
name: CFString,
session_keys: bool,
callback_context: Option<SCDynamicStoreCallBackContext<T>>,
}
impl SCDynamicStoreBuilder<()> {
/// Creates a new builder. `name` is used as the name parameter when creating the
/// [`SCDynamicStore`] session.
///
/// [`SCDynamicStore`]: struct.SCDynamicStore.html
pub fn new<S: Into<CFString>>(name: S) -> Self {
SCDynamicStoreBuilder {
name: name.into(),
session_keys: false,
callback_context: None,
}
}
}
impl<T> SCDynamicStoreBuilder<T> {
    /// Set whether or not the created [`SCDynamicStore`] should have session keys.
/// See [`SCDynamicStoreCreateWithOptions`] for details.
///
/// Defaults to `false`.
///
/// [`SCDynamicStore`]: struct.SCDynamicStore.html
/// [`SCDynamicStoreCreateWithOptions`]: https://developer.apple.com/documentation/systemconfiguration/1437818-scdynamicstorecreatewithoptions?language=objc
pub fn session_keys(mut self, session_keys: bool) -> Self {
self.session_keys = session_keys;
self
}
/// Set a callback context (callback function and data to pass to each callback call).
///
/// Defaults to having callbacks disabled.
pub fn callback_context<T2>(
self,
callback_context: SCDynamicStoreCallBackContext<T2>,
) -> SCDynamicStoreBuilder<T2> {
SCDynamicStoreBuilder {
name: self.name,
session_keys: self.session_keys,
callback_context: Some(callback_context),
}
}
/// Create the dynamic store session.
pub fn build(mut self) -> SCDynamicStore {
let store_options = self.create_store_options();
if let Some(callback_context) = self.callback_context.take() {
SCDynamicStore::create(
&self.name,
&store_options,
Some(convert_callback::<T>),
&mut self.create_context(callback_context),
)
} else {
SCDynamicStore::create(&self.name, &store_options, None, ptr::null_mut())
}
}
fn create_store_options(&self) -> CFDictionary {
let key = unsafe { CFString::wrap_under_create_rule(kSCDynamicStoreUseSessionKeys) };
let value = CFBoolean::from(self.session_keys);
let typed_dict = CFDictionary::from_CFType_pairs(&[(key, value)]);
unsafe { CFDictionary::wrap_under_get_rule(typed_dict.as_concrete_TypeRef()) }
}
fn create_context(
&self,
callback_context: SCDynamicStoreCallBackContext<T>,
) -> SCDynamicStoreContext {
// move the callback context struct to the heap and "forget" it.
// It will later be brought back into the Rust typesystem and freed in
// `release_callback_context`
let info_ptr = Box::into_raw(Box::new(callback_context));
SCDynamicStoreContext {
version: 0,
info: info_ptr as *mut _ as *mut c_void,
retain: None,
release: Some(release_callback_context::<T>),
copyDescription: None,
}
}
}
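// Usage sketch (added for illustration; the session name and payload type are
// arbitrary, and the callback body is an assumption, not crate-provided code):
//
//     let callback_context = SCDynamicStoreCallBackContext {
//         callout: |_store, changed_keys, count: &mut u32| {
//             *count += 1;
//             println!("change #{}: {} key(s)", count, changed_keys.len());
//         },
//         info: 0u32,
//     };
//     let store = SCDynamicStoreBuilder::new("example-session")
//         .session_keys(false)
//         .callback_context(callback_context)
//         .build();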
declare_TCFType! {
/// Access to the key-value pairs in the dynamic store of a running system.
///
/// Use the [`SCDynamicStoreBuilder`] to create instances of this.
///
/// [`SCDynamicStoreBuilder`]: struct.SCDynamicStoreBuilder.html
SCDynamicStore, SCDynamicStoreRef
}
impl_TCFType!(SCDynamicStore, SCDynamicStoreRef, SCDynamicStoreGetTypeID);
impl SCDynamicStore {
/// Creates a new session used to interact with the dynamic store maintained by the System
/// Configuration server.
fn create(
name: &CFString,
store_options: &CFDictionary,
callout: SCDynamicStoreCallBack,
context: *mut SCDynamicStoreContext,
) -> Self {
unsafe {
let store = SCDynamicStoreCreateWithOptions(
kCFAllocatorDefault,
name.as_concrete_TypeRef(),
store_options.as_concrete_TypeRef(),
callout,
context,
);
SCDynamicStore::wrap_under_create_rule(store)
}
}
/// Returns the keys that represent the current dynamic store entries that match the specified
    /// pattern, or `None` if an error occurred.
///
/// `pattern` - A regular expression pattern used to match the dynamic store keys.
pub fn get_keys<S: Into<CFString>>(&self, pattern: S) -> Option<CFArray<CFString>> {
let cf_pattern = pattern.into();
unsafe {
let array_ref = SCDynamicStoreCopyKeyList(
self.as_concrete_TypeRef(),
cf_pattern.as_concrete_TypeRef(),
);
if !array_ref.is_null() {
Some(CFArray::wrap_under_create_rule(array_ref))
} else {
None
}
}
}
/// Returns the key-value pairs that represent the current internet proxy settings. Or `None` if
    /// no proxy settings have been defined or if an error occurred.
pub fn get_proxies(&self) -> Option<CFDictionary<CFString, CFType>> {
unsafe {
let dictionary_ref = SCDynamicStoreCopyProxies(self.as_concrete_TypeRef());
if !dictionary_ref.is_null() {
Some(CFDictionary::wrap_under_create_rule(dictionary_ref))
} else {
None
}
}
}
/// If the given key exists in the store, the associated value is returned.
///
/// Use `CFPropertyList::downcast_into` to cast the result into the correct type.
pub fn get<S: Into<CFString>>(&self, key: S) -> Option<CFPropertyList> {
let cf_key = key.into();
unsafe {
let dict_ref =
SCDynamicStoreCopyValue(self.as_concrete_TypeRef(), cf_key.as_concrete_TypeRef());
if !dict_ref.is_null() {
Some(CFPropertyList::wrap_under_create_rule(dict_ref))
            } else {
                None
            }
}
}
/// Sets the value of the given key. Overwrites existing values.
/// Returns `true` on success, false on failure.
pub fn set<S: Into<CFString>, V: CFPropertyListSubClass>(&self, key: S, value: V) -> bool {
self.set_raw(key, &value.into_CFPropertyList())
}
/// Sets the value of the given key. Overwrites existing values.
/// Returns `true` on success, false on failure.
pub fn set_raw<S: Into<CFString>>(&self, key: S, value: &CFPropertyList) -> bool {
let cf_key = key.into();
let success = unsafe {
SCDynamicStoreSetValue(
self.as_concrete_TypeRef(),
cf_key.as_concrete_TypeRef(),
value.as_concrete_TypeRef(),
)
};
success != 0
}
/// Removes the value of the specified key from the dynamic store.
pub fn remove<S: Into<CFString>>(&self, key: S) -> bool {
let cf_key = key.into();
let success = unsafe {
SCDynamicStoreRemoveValue(self.as_concrete_TypeRef(), cf_key.as_concrete_TypeRef())
};
success != 0
}
/// Specifies a set of keys and key patterns that should be monitored for changes.
pub fn set_notification_keys<T1, T2>(
&self,
keys: &CFArray<T1>,
patterns: &CFArray<T2>,
) -> bool {
let success = unsafe {
SCDynamicStoreSetNotificationKeys(
self.as_concrete_TypeRef(),
keys.as_concrete_TypeRef(),
patterns.as_concrete_TypeRef(),
)
};
success != 0
}
/// Creates a run loop source object that can be added to the application's run loop.
pub fn create_run_loop_source(&self) -> CFRunLoopSource {
unsafe {
let run_loop_source_ref = SCDynamicStoreCreateRunLoopSource(
kCFAllocatorDefault,
self.as_concrete_TypeRef(),
0,
);
CFRunLoopSource::wrap_under_create_rule(run_loop_source_ref)
}
}
}
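// Sketch (illustrative, mirroring this crate's examples directory): to actually
// receive callbacks, register key patterns and attach the run loop source to the
// current CFRunLoop; the "State:/Network/.*" pattern below is an example value.
//
//     use core_foundation::runloop::{kCFRunLoopCommonModes, CFRunLoop};
//
//     let keys = CFArray::<CFString>::from_CFTypes(&[]);
//     let patterns = CFArray::from_CFTypes(&[CFString::from("State:/Network/.*")]);
//     if store.set_notification_keys(&keys, &patterns) {
//         let source = store.create_run_loop_source();
//         CFRunLoop::get_current().add_source(&source, unsafe { kCFRunLoopCommonModes });
//         CFRunLoop::run_current();
//     }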
/// The raw callback used by the safe `SCDynamicStore` to convert from the `SCDynamicStoreCallBack`
/// to the `SCDynamicStoreCallBackT`
unsafe extern "C" fn convert_callback<T>(
store_ref: SCDynamicStoreRef,
changed_keys_ref: CFArrayRef,
context_ptr: *mut c_void,
) {
let store = SCDynamicStore::wrap_under_get_rule(store_ref);
let changed_keys = CFArray::<CFString>::wrap_under_get_rule(changed_keys_ref);
let context = &mut *(context_ptr as *mut _ as *mut SCDynamicStoreCallBackContext<T>);
(context.callout)(store, changed_keys, &mut context.info);
}
// Release function called by core foundation on release of the dynamic store context.
unsafe extern "C" fn release_callback_context<T>(context_ptr: *const c_void) {
// Bring back the context object from raw ptr so it is correctly freed.
let _context = Box::from_raw(context_ptr as *mut SCDynamicStoreCallBackContext<T>);
}
dynamic_store.rs | // Copyright 2017 Amagicom AB.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
//! Bindings to [`SCDynamicStore`].
//!
//! See the examples directory for examples how to use this module.
//!
//! [`SCDynamicStore`]: https://developer.apple.com/documentation/systemconfiguration/scdynamicstore?language=objc
use crate::sys::{
dynamic_store::{
kSCDynamicStoreUseSessionKeys, SCDynamicStoreCallBack, SCDynamicStoreContext,
SCDynamicStoreCopyKeyList, SCDynamicStoreCopyValue, SCDynamicStoreCreateRunLoopSource,
SCDynamicStoreCreateWithOptions, SCDynamicStoreGetTypeID, SCDynamicStoreRef,
SCDynamicStoreRemoveValue, SCDynamicStoreSetNotificationKeys, SCDynamicStoreSetValue,
},
dynamic_store_copy_specific::SCDynamicStoreCopyProxies,
};
use core_foundation::{
array::{CFArray, CFArrayRef},
base::{kCFAllocatorDefault, CFType, TCFType},
boolean::CFBoolean,
dictionary::CFDictionary,
propertylist::{CFPropertyList, CFPropertyListSubClass},
runloop::CFRunLoopSource,
string::CFString,
};
use std::{ffi::c_void, ptr};
/// Struct describing the callback happening when a watched value in the dynamic store is changed.
pub struct SCDynamicStoreCallBackContext<T> {
/// The callback function that will be called when a watched value in the dynamic store is
/// changed.
pub callout: SCDynamicStoreCallBackT<T>,
/// The argument passed to each `callout` call. Can be used to keep state between
/// callbacks.
pub info: T,
}
/// Signature for callback functions getting called when a watched value in the dynamic store is
/// changed.
///
/// This is the safe callback definition, abstracting over the lower level `SCDynamicStoreCallBack`
/// from the `system-configuration-sys` crate.
pub type SCDynamicStoreCallBackT<T> =
fn(store: SCDynamicStore, changed_keys: CFArray<CFString>, info: &mut T);
/// Builder for [`SCDynamicStore`] sessions.
///
/// [`SCDynamicStore`]: struct.SCDynamicStore.html
pub struct SCDynamicStoreBuilder<T> {
name: CFString,
session_keys: bool,
callback_context: Option<SCDynamicStoreCallBackContext<T>>,
}
impl SCDynamicStoreBuilder<()> {
/// Creates a new builder. `name` is used as the name parameter when creating the
/// [`SCDynamicStore`] session.
///
/// [`SCDynamicStore`]: struct.SCDynamicStore.html
pub fn new<S: Into<CFString>>(name: S) -> Self {
SCDynamicStoreBuilder {
name: name.into(),
session_keys: false,
callback_context: None,
}
}
}
impl<T> SCDynamicStoreBuilder<T> {
/// Set wether or not the created [`SCDynamicStore`] should have session keys or not.
/// See [`SCDynamicStoreCreateWithOptions`] for details.
///
/// Defaults to `false`.
///
/// [`SCDynamicStore`]: struct.SCDynamicStore.html
/// [`SCDynamicStoreCreateWithOptions`]: https://developer.apple.com/documentation/systemconfiguration/1437818-scdynamicstorecreatewithoptions?language=objc
pub fn session_keys(mut self, session_keys: bool) -> Self {
self.session_keys = session_keys;
self
}
/// Set a callback context (callback function and data to pass to each callback call).
///
/// Defaults to having callbacks disabled.
pub fn callback_context<T2>(
self,
callback_context: SCDynamicStoreCallBackContext<T2>,
) -> SCDynamicStoreBuilder<T2> {
SCDynamicStoreBuilder {
name: self.name,
session_keys: self.session_keys,
callback_context: Some(callback_context),
}
}
/// Create the dynamic store session.
pub fn build(mut self) -> SCDynamicStore {
let store_options = self.create_store_options();
if let Some(callback_context) = self.callback_context.take() {
SCDynamicStore::create(
&self.name,
&store_options,
Some(convert_callback::<T>),
&mut self.create_context(callback_context),
)
} else {
SCDynamicStore::create(&self.name, &store_options, None, ptr::null_mut())
}
}
fn create_store_options(&self) -> CFDictionary {
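// Build a typed dictionary, then re-wrap it as an untyped `CFDictionary`
// for the FFI call; the get-rule wrap retains the same underlying object.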
let key = unsafe { CFString::wrap_under_create_rule(kSCDynamicStoreUseSessionKeys) };
let value = CFBoolean::from(self.session_keys);
let typed_dict = CFDictionary::from_CFType_pairs(&[(key, value)]);
unsafe { CFDictionary::wrap_under_get_rule(typed_dict.as_concrete_TypeRef()) }
}
fn create_context(
&self,
callback_context: SCDynamicStoreCallBackContext<T>,
) -> SCDynamicStoreContext {
// move the callback context struct to the heap and "forget" it.
// It will later be brought back into the Rust typesystem and freed in
// `release_callback_context`
let info_ptr = Box::into_raw(Box::new(callback_context));
SCDynamicStoreContext {
version: 0,
info: info_ptr as *mut _ as *mut c_void,
retain: None,
release: Some(release_callback_context::<T>),
copyDescription: None,
}
}
}
declare_TCFType! {
/// Access to the key-value pairs in the dynamic store of a running system.
///
/// Use the [`SCDynamicStoreBuilder`] to create instances of this.
///
/// [`SCDynamicStoreBuilder`]: struct.SCDynamicStoreBuilder.html
SCDynamicStore, SCDynamicStoreRef
}
impl_TCFType!(SCDynamicStore, SCDynamicStoreRef, SCDynamicStoreGetTypeID);
impl SCDynamicStore {
/// Creates a new session used to interact with the dynamic store maintained by the System
/// Configuration server.
fn create(
name: &CFString,
store_options: &CFDictionary,
callout: SCDynamicStoreCallBack,
context: *mut SCDynamicStoreContext,
) -> Self {
unsafe {
let store = SCDynamicStoreCreateWithOptions(
kCFAllocatorDefault,
name.as_concrete_TypeRef(),
store_options.as_concrete_TypeRef(),
callout,
context,
);
SCDynamicStore::wrap_under_create_rule(store)
}
}
/// Returns the keys that represent the current dynamic store entries that match the specified
/// pattern, or `None` if an error occurred.
///
/// `pattern` - A regular expression pattern used to match the dynamic store keys.
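///
/// A sketch (the pattern is illustrative):
///
/// ```ignore
/// if let Some(keys) = store.get_keys("State:/Network/Interface/.*") {
///     for key in keys.iter() {
///         println!("{}", *key);
///     }
/// }
/// ```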
pub fn get_keys<S: Into<CFString>>(&self, pattern: S) -> Option<CFArray<CFString>> {
let cf_pattern = pattern.into();
unsafe {
let array_ref = SCDynamicStoreCopyKeyList(
self.as_concrete_TypeRef(),
cf_pattern.as_concrete_TypeRef(),
);
if !array_ref.is_null() {
Some(CFArray::wrap_under_create_rule(array_ref))
} else {
None
}
}
}
/// Returns the key-value pairs that represent the current internet proxy settings, or `None` if
/// no proxy settings have been defined or if an error occurred.
pub fn get_proxies(&self) -> Option<CFDictionary<CFString, CFType>> {
unsafe {
let dictionary_ref = SCDynamicStoreCopyProxies(self.as_concrete_TypeRef());
if !dictionary_ref.is_null() {
Some(CFDictionary::wrap_under_create_rule(dictionary_ref))
} else {
None
}
}
}
/// If the given key exists in the store, the associated value is returned.
///
/// Use `CFPropertyList::downcast_into` to cast the result into the correct type.
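///
/// For example (a sketch; assumes the stored value is a dictionary):
///
/// ```ignore
/// use core_foundation::dictionary::CFDictionary;
///
/// let dict = store
///     .get("State:/Network/Global/IPv4")
///     .and_then(|value| value.downcast_into::<CFDictionary>());
/// ```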
pub fn get<S: Into<CFString>>(&self, key: S) -> Option<CFPropertyList> {
let cf_key = key.into();
unsafe {
let dict_ref =
SCDynamicStoreCopyValue(self.as_concrete_TypeRef(), cf_key.as_concrete_TypeRef());
if !dict_ref.is_null() {
Some(CFPropertyList::wrap_under_create_rule(dict_ref))
} else {
None
}
}
}
/// Sets the value of the given key. Overwrites existing values.
/// Returns `true` on success, `false` on failure.
pub fn set<S: Into<CFString>, V: CFPropertyListSubClass>(&self, key: S, value: V) -> bool {
self.set_raw(key, &value.into_CFPropertyList())
}
/// Sets the value of the given key. Overwrites existing values.
/// Returns `true` on success, `false` on failure.
pub fn set_raw<S: Into<CFString>>(&self, key: S, value: &CFPropertyList) -> bool {
let cf_key = key.into();
let success = unsafe {
SCDynamicStoreSetValue(
self.as_concrete_TypeRef(),
cf_key.as_concrete_TypeRef(),
value.as_concrete_TypeRef(),
)
};
success != 0
}
/// Removes the value of the specified key from the dynamic store.
pub fn remove<S: Into<CFString>>(&self, key: S) -> bool {
let cf_key = key.into();
let success = unsafe {
SCDynamicStoreRemoveValue(self.as_concrete_TypeRef(), cf_key.as_concrete_TypeRef())
};
success != 0
}
/// Specifies a set of keys and key patterns that should be monitored for changes.
pub fn set_notification_keys<T1, T2>(
&self,
keys: &CFArray<T1>,
patterns: &CFArray<T2>,
) -> bool {
let success = unsafe {
SCDynamicStoreSetNotificationKeys(
self.as_concrete_TypeRef(),
keys.as_concrete_TypeRef(),
patterns.as_concrete_TypeRef(),
)
};
success != 0
}
/// Creates a run loop source object that can be added to the application's run loop.
pub fn create_run_loop_source(&self) -> CFRunLoopSource {
unsafe {
let run_loop_source_ref = SCDynamicStoreCreateRunLoopSource(
kCFAllocatorDefault,
self.as_concrete_TypeRef(),
0,
);
CFRunLoopSource::wrap_under_create_rule(run_loop_source_ref)
}
}
}
/// The raw callback used by the safe `SCDynamicStore` to convert from the `SCDynamicStoreCallBack`
/// to the `SCDynamicStoreCallBackT`.
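///
/// Safety: `context_ptr` must point to the heap-allocated
/// `SCDynamicStoreCallBackContext<T>` created in `create_context`, so the
/// cast below is sound.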
unsafe extern "C" fn convert_callback<T>(
store_ref: SCDynamicStoreRef,
changed_keys_ref: CFArrayRef,
context_ptr: *mut c_void,
) {
let store = SCDynamicStore::wrap_under_get_rule(store_ref);
let changed_keys = CFArray::<CFString>::wrap_under_get_rule(changed_keys_ref);
let context = &mut *(context_ptr as *mut _ as *mut SCDynamicStoreCallBackContext<T>);
(context.callout)(store, changed_keys, &mut context.info);
}
// Release function called by core foundation on release of the dynamic store context.
unsafe extern "C" fn release_callback_context<T>(context_ptr: *const c_void) {
// Bring back the context object from raw ptr so it is correctly freed.
let _context = Box::from_raw(context_ptr as *mut SCDynamicStoreCallBackContext<T>);
}
filter_list.rs

// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License in the LICENSE-APACHE file or at:
// https://www.apache.org/licenses/LICENSE-2.0
//! Filter-list view widget
use super::{driver, Driver, ListView, SelectionError, SelectionMode};
use crate::Scrollable;
use kas::event::ChildMsg;
use kas::prelude::*;
use kas::updatable::filter::Filter;
use kas::updatable::{ListData, Updatable, UpdatableHandler};
use std::cell::RefCell;
use std::fmt::Debug;
use UpdatableHandler as UpdHandler;
/// Filter accessor over another accessor
///
/// This is an abstraction over a [`ListData`], applying a filter to items when
/// iterating and accessing.
///
/// When updating, the filter applies to the old value: if the old is included,
/// it is replaced by the new, otherwise no replacement occurs.
///
/// Note: the key and item types are the same as those in the underlying list,
/// thus one can also retrieve values from the underlying list directly.
///
/// Note: [`ListData`] is implemented for `FilteredList<T, F>` directly; no
/// `Rc` wrapper is required.
///
/// Warning: this implementation is `O(n)` where `n = data.len()` and not well
/// optimised, thus is expected to be slow on large data lists.
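///
/// For example, with a filter that keeps strings containing `"a"`, updating a
/// visible entry to `"bb"` writes the new value through to the underlying
/// list but removes the entry from the filtered view.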
#[derive(Clone, Debug)]
struct FilteredList<T: ListData, F: Filter<T::Item>> {
/// Direct access to unfiltered data
///
/// If adjusting this, one should call [`FilteredList::refresh`] after.
data: T,
/// Direct access to the filter
///
/// If adjusting this, one should call [`FilteredList::refresh`] after.
filter: F,
view: RefCell<Vec<T::Key>>, // TODO: does this need to be in a RefCell?
}
impl<T: ListData, F: Filter<T::Item>> FilteredList<T, F> {
/// Construct and apply filter
#[inline]
fn new(data: T, filter: F) -> Self {
let len = data.len().cast();
let view = RefCell::new(Vec::with_capacity(len));
let s = FilteredList { data, filter, view };
let _ = s.refresh();
s
}
/// Refresh the view
///
/// Re-applies the filter (`O(n)` where `n` is the number of data elements).
/// Calling this directly may be useful in case the data is modified.
///
/// An update should be triggered using the returned handle.
fn refresh(&self) -> Option<UpdateHandle> {
let mut view = self.view.borrow_mut();
view.clear();
for (key, item) in self.data.iter_vec(usize::MAX) {
if self.filter.matches(item) {
view.push(key);
}
}
self.filter.update_handle()
}
}
impl<T: ListData, F: Filter<T::Item>> Updatable for FilteredList<T, F> {
fn update_handle(&self) -> Option<UpdateHandle> {
self.filter.update_handle()
}
fn update_self(&self) -> Option<UpdateHandle> {
self.refresh()
}
}
impl<K, M, T: ListData + UpdatableHandler<K, M> + 'static, F: Filter<T::Item>>
UpdatableHandler<K, M> for FilteredList<T, F>
{
fn handle(&self, key: &K, msg: &M) -> Option<UpdateHandle> {
self.data.handle(key, msg)
}
}
impl<T: ListData + 'static, F: Filter<T::Item>> ListData for FilteredList<T, F> {
type Key = T::Key;
type Item = T::Item;
fn len(&self) -> usize {
self.view.borrow().len()
}
fn contains_key(&self, key: &Self::Key) -> bool {
self.get_cloned(key).is_some()
}
fn get_cloned(&self, key: &Self::Key) -> Option<Self::Item> {
// Check the item against our filter (probably O(1)) instead of using
// our filtered list (O(n) where n=self.len()).
self.data
.get_cloned(key)
.filter(|item| self.filter.matches(item.clone()))
}
fn update(&self, key: &Self::Key, value: Self::Item) -> Option<UpdateHandle> {
// Filtering does not affect result, but does affect the view
if self
.data
.get_cloned(key)
.map(|item| !self.filter.matches(item))
.unwrap_or(true)
{
// Not previously visible: no update occurs
return None;
}
let new_visible = self.filter.matches(value.clone());
let result = self.data.update(key, value);
if result.is_some() && !new_visible {
// remove the updated item from our filtered list
self.view.borrow_mut().retain(|item| item != key);
}
result
}
fn iter_vec_from(&self, start: usize, limit: usize) -> Vec<(Self::Key, Self::Item)> {
let view = self.view.borrow();
let end = self.len().min(start + limit);
if start >= end {
return Vec::new();
}
let mut v = Vec::with_capacity(end - start);
for k in &view[start..end] {
v.push((k.clone(), self.data.get_cloned(k).unwrap()));
}
v
}
}
/// Filter-list view widget
///
/// This widget is a wrapper around [`ListView`] which applies a filter to the
/// data list.
///
/// Why is a data-filter a widget and not a pure-data item, you ask? The answer
/// is that a filter-list must be updated when the filter or the data changes,
/// and, since filtering a list is not especially cheap, the filtering must be
/// cached and updated when required, not every time the list view asks for more
/// data. Although it is possible to do this with a data-view, that requires
/// machinery for recursive-updates on data-structures and/or a mechanism to
/// test whether the underlying list-data changed. Implementing as a widget
/// avoids this.
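///
/// A construction sketch (`data` and `filter` stand in for concrete
/// [`ListData`] and [`Filter`] implementations; `kas::dir::Down` picks a
/// vertical layout):
///
/// ```ignore
/// let view = FilterListView::<kas::dir::Down, _, _>::new(data, filter)
///     .with_selection_mode(SelectionMode::Single)
///     .with_num_visible(10);
/// ```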
// TODO: impl Clone
#[derive(Debug, Widget)]
#[handler(handle=noauto, generics = <>)]
#[layout(single)]
#[widget(config=noauto)]
pub struct FilterListView<
D: Directional,
T: ListData + UpdHandler<T::Key, V::Msg> + 'static,
F: Filter<T::Item>,
V: Driver<T::Item> = driver::Default,
> {
#[widget_core]
core: CoreData,
#[widget]
list: ListView<D, FilteredList<T, F>, V>,
}
impl<
D: Directional + Default,
T: ListData + UpdHandler<T::Key, V::Msg>,
F: Filter<T::Item>,
V: Driver<T::Item> + Default,
> FilterListView<D, T, F, V>
{
/// Construct a new instance
///
/// This constructor is available where the direction is determined by the
/// type: for `D: Directional + Default`. In other cases, use
/// [`FilterListView::new_with_direction`].
pub fn new(data: T, filter: F) -> Self {
Self::new_with_dir_driver(D::default(), <V as Default>::default(), data, filter)
}
}
impl<
D: Directional,
T: ListData + UpdHandler<T::Key, V::Msg>,
F: Filter<T::Item>,
V: Driver<T::Item> + Default,
> FilterListView<D, T, F, V>
{
/// Construct a new instance with explicit direction
pub fn new_with_direction(direction: D, data: T, filter: F) -> Self {
Self::new_with_dir_driver(direction, <V as Default>::default(), data, filter)
}
}
impl<
D: Directional + Default,
T: ListData + UpdHandler<T::Key, V::Msg>,
F: Filter<T::Item>,
V: Driver<T::Item>,
> FilterListView<D, T, F, V>
{
/// Construct a new instance with explicit view
pub fn new_with_driver(view: V, data: T, filter: F) -> Self {
Self::new_with_dir_driver(D::default(), view, data, filter)
}
}
impl<
T: ListData + UpdHandler<T::Key, V::Msg>,
F: Filter<T::Item>,
V: Driver<T::Item> + Default,
> FilterListView<Direction, T, F, V>
{
/// Set the direction of contents
pub fn set_direction(&mut self, direction: Direction) -> TkAction {
self.list.set_direction(direction)
}
}
impl<
D: Directional,
T: ListData + UpdHandler<T::Key, V::Msg>,
F: Filter<T::Item>,
V: Driver<T::Item>,
> FilterListView<D, T, F, V>
{
/// Construct a new instance with explicit direction and view
pub fn new_with_dir_driver(direction: D, view: V, data: T, filter: F) -> Self {
let data = FilteredList::new(data, filter);
FilterListView {
core: Default::default(),
list: ListView::new_with_dir_driver(direction, view, data),
}
}
/// Access the stored data (pre-filter)
pub fn unfiltered_data(&self) -> &T {
&self.list.data().data
}
/// Mutably access the stored data (pre-filter)
///
/// It may be necessary to use [`FilterListView::update_view`] to update the view of this data.
pub fn unfiltered_data_mut(&mut self) -> &mut T {
&mut self.list.data_mut().data
}
/// Access the stored data (post-filter)
pub fn data(&self) -> &T {
&self.list.data().data
}
/// Mutably access the stored data (post-filter)
///
/// It may be necessary to use [`FilterListView::update_view`] to update the view of this data.
pub fn data_mut(&mut self) -> &mut T {
&mut self.list.data_mut().data
}
/// Check whether a key has data (post-filter)
pub fn contains_key(&self, key: &T::Key) -> bool {
self.data().contains_key(key)
}
/// Get a copy of the shared value at `key` (post-filter)
pub fn get_value(&self, key: &T::Key) -> Option<T::Item> {
self.data().get_cloned(key)
}
/// Set shared data (post-filter)
///
/// This method updates the shared data, if supported (see
/// [`ListData::update`]). Other widgets sharing this data are notified
/// of the update, if data is changed.
pub fn set_value(&self, mgr: &mut Manager, key: &T::Key, data: T::Item) {
if let Some(handle) = self.data().update(key, data) {
mgr.trigger_update(handle, 0);
}
}
/// Update shared data (post-filter)
///
/// This is purely a convenience method over [`FilterListView::set_value`].
/// It does nothing if no value is found at `key`.
/// It notifies other widgets of updates to the shared data.
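///
/// For example (a sketch, assuming `T::Item = String`):
///
/// ```ignore
/// view.update_value(mgr, &key, |s| s + "!");
/// ```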
pub fn update_value<G: Fn(T::Item) -> T::Item>(&self, mgr: &mut Manager, key: &T::Key, f: G) {
if let Some(item) = self.get_value(key) {
self.set_value(mgr, key, f(item));
}
}
/// Get the current selection mode
pub fn selection_mode(&self) -> SelectionMode {
self.list.selection_mode()
}
/// Set the current selection mode
pub fn set_selection_mode(&mut self, mode: SelectionMode) -> TkAction {
self.list.set_selection_mode(mode)
}
/// Set the selection mode (inline)
pub fn with_selection_mode(mut self, mode: SelectionMode) -> Self {
let _ = self.set_selection_mode(mode);
self
}
/// Read the list of selected entries
///
/// With mode [`SelectionMode::Single`] this may contain zero or one entry;
/// use `selected_iter().next()` to extract only the first (optional) entry.
pub fn selected_iter(&'_ self) -> impl Iterator<Item = &'_ T::Key> + '_ {
self.list.selected_iter()
}
/// Check whether an entry is selected
pub fn is_selected(&self, key: &T::Key) -> bool {
self.list.is_selected(key)
}
/// Clear all selected items
///
/// Does not send [`ChildMsg`] responses.
pub fn clear_selected(&mut self) {
self.list.clear_selected()
}
/// Directly select an item
///
/// Returns `true` if selected, `false` if already selected.
/// Fails if selection mode does not permit selection or if the key is
/// invalid or filtered out.
///
/// Does not send [`ChildMsg`] responses.
pub fn select(&mut self, key: T::Key) -> Result<bool, SelectionError> {
self.list.select(key)
}
/// Directly deselect an item
///
/// Returns `true` if deselected, `false` if not previously selected.
/// Also returns `false` on invalid and filtered-out keys.
///
/// Does not send [`ChildMsg`] responses.
pub fn deselect(&mut self, key: &T::Key) -> bool {
self.list.deselect(key)
}
/// Manually trigger an update to handle changed data or filter
pub fn update_view(&mut self, mgr: &mut Manager) {
self.list.data().refresh();
self.list.update_view(mgr)
}
/// Get the direction of contents
pub fn direction(&self) -> Direction {
self.list.direction()
}
/// Set the preferred number of items visible (inline)
///
/// This affects the (ideal) size request and whether children are sized
/// according to their ideal or minimum size but not the minimum size.
pub fn with_num_visible(mut self, number: i32) -> Self {
self.list = self.list.with_num_visible(number);
self
}
}
// TODO: support derive(Scrollable)?
impl<
D: Directional,
T: ListData + UpdHandler<T::Key, V::Msg>,
F: Filter<T::Item>,
V: Driver<T::Item>,
> Scrollable for FilterListView<D, T, F, V>
{
#[inline]
fn scroll_axes(&self, size: Size) -> (bool, bool) {
self.list.scroll_axes(size)
}
#[inline]
fn max_scroll_offset(&self) -> Offset {
self.list.max_scroll_offset()
}
#[inline]
fn scroll_offset(&self) -> Offset {
self.list.scroll_offset()
}
#[inline]
fn set_scroll_offset(&mut self, mgr: &mut Manager, offset: Offset) -> Offset {
self.list.set_scroll_offset(mgr, offset)
}
}
impl<
D: Directional,
T: ListData + UpdHandler<T::Key, V::Msg>,
F: Filter<T::Item>,
V: Driver<T::Item>,
> WidgetConfig for FilterListView<D, T, F, V>
{
fn configure(&mut self, mgr: &mut Manager) {
// We must refresh the filtered list when the underlying list changes
if let Some(handle) = self.list.data().data.update_handle() {
mgr.update_on_handle(handle, self.id());
}
// As well as when the filter changes
if let Some(handle) = self.list.data().update_handle() {
mgr.update_on_handle(handle, self.id());
}
}
}
impl<
D: Directional,
T: ListData + UpdHandler<T::Key, V::Msg>,
F: Filter<T::Item>,
V: Driver<T::Item>,
> Handler for FilterListView<D, T, F, V>
{
type Msg = ChildMsg<T::Key, <V::Widget as Handler>::Msg>;
fn handle(&mut self, mgr: &mut Manager, event: Event) -> Response<Self::Msg> {
match event {
Event::HandleUpdate { .. } => {
self.update_view(mgr);
return Response::Update;
}
_ => Response::None,
}
}
}
| {
// remove the updated item from our filtered list
self.view.borrow_mut().retain(|item| item != key);
} | conditional_block |
filter_list.rs | // Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License in the LICENSE-APACHE file or at:
// https://www.apache.org/licenses/LICENSE-2.0
//! Filter-list view widget
use super::{driver, Driver, ListView, SelectionError, SelectionMode};
use crate::Scrollable;
use kas::event::ChildMsg;
use kas::prelude::*;
use kas::updatable::filter::Filter;
use kas::updatable::{ListData, Updatable, UpdatableHandler};
use std::cell::RefCell;
use std::fmt::Debug;
use UpdatableHandler as UpdHandler;
/// Filter accessor over another accessor
///
/// This is an abstraction over a [`ListData`], applying a filter to items when
/// iterating and accessing.
///
/// When updating, the filter applies to the old value: if the old is included,
/// it is replaced by the new, otherwise no replacement occurs.
///
/// Note: the key and item types are the same as those in the underlying list,
/// thus one can also retrieve values from the underlying list directly.
///
/// Note: only `Rc<FilteredList<T, F>>` implements [`ListData`]; the [`Rc`]
/// wrapper is required!
///
/// Warning: this implementation is `O(n)` where `n = data.len()` and not well
/// optimised, thus is expected to be slow on large data lists.
#[derive(Clone, Debug)]
struct FilteredList<T: ListData, F: Filter<T::Item>> {
/// Direct access to unfiltered data
///
/// If adjusting this, one should call [`FilteredList::refresh`] after.
data: T,
/// Direct access to the filter
///
/// If adjusting this, one should call [`FilteredList::refresh`] after.
filter: F,
view: RefCell<Vec<T::Key>>, // TODO: does this need to be in a RefCell?
}
impl<T: ListData, F: Filter<T::Item>> FilteredList<T, F> {
/// Construct and apply filter
#[inline]
fn new(data: T, filter: F) -> Self {
let len = data.len().cast();
let view = RefCell::new(Vec::with_capacity(len));
let s = FilteredList { data, filter, view };
let _ = s.refresh();
s
}
/// Refresh the view
///
/// Re-applies the filter (`O(n)` where `n` is the number of data elements).
/// Calling this directly may be useful in case the data is modified.
///
/// An update should be triggered using the returned handle.
fn refresh(&self) -> Option<UpdateHandle> {
let mut view = self.view.borrow_mut();
view.clear();
for (key, item) in self.data.iter_vec(usize::MAX) {
if self.filter.matches(item) {
view.push(key);
}
}
self.filter.update_handle()
}
}
impl<T: ListData, F: Filter<T::Item>> Updatable for FilteredList<T, F> {
fn update_handle(&self) -> Option<UpdateHandle> |
fn update_self(&self) -> Option<UpdateHandle> {
self.refresh()
}
}
impl<K, M, T: ListData + UpdatableHandler<K, M> + 'static, F: Filter<T::Item>>
UpdatableHandler<K, M> for FilteredList<T, F>
{
fn handle(&self, key: &K, msg: &M) -> Option<UpdateHandle> {
self.data.handle(key, msg)
}
}
impl<T: ListData + 'static, F: Filter<T::Item>> ListData for FilteredList<T, F> {
type Key = T::Key;
type Item = T::Item;
fn len(&self) -> usize {
self.view.borrow().len()
}
fn contains_key(&self, key: &Self::Key) -> bool {
self.get_cloned(key).is_some()
}
fn get_cloned(&self, key: &Self::Key) -> Option<Self::Item> {
// Check the item against our filter (probably O(1)) instead of using
// our filtered list (O(n) where n=self.len()).
self.data
.get_cloned(key)
.filter(|item| self.filter.matches(item.clone()))
}
fn update(&self, key: &Self::Key, value: Self::Item) -> Option<UpdateHandle> {
// Filtering does not affect result, but does affect the view
if self
.data
.get_cloned(key)
.map(|item| !self.filter.matches(item))
.unwrap_or(true)
{
// Not previously visible: no update occurs
return None;
}
let new_visible = self.filter.matches(value.clone());
let result = self.data.update(key, value);
if result.is_some() && !new_visible {
// remove the updated item from our filtered list
self.view.borrow_mut().retain(|item| item != key);
}
result
}
fn iter_vec_from(&self, start: usize, limit: usize) -> Vec<(Self::Key, Self::Item)> {
let view = self.view.borrow();
let end = self.len().min(start + limit);
if start >= end {
return Vec::new();
}
let mut v = Vec::with_capacity(end - start);
for k in &view[start..end] {
v.push((k.clone(), self.data.get_cloned(k).unwrap()));
}
v
}
}
/// Filter-list view widget
///
/// This widget is a wrapper around [`ListView`] which applies a filter to the
/// data list.
///
/// Why is a data-filter a widget and not a pure-data item, you ask? The answer
/// is that a filter-list must be updated when the filter or the data changes,
/// and, since filtering a list is not especially cheap, the filtering must be
/// cached and updated when required, not every time the list view asks for more
/// data. Although it is possible to do this with a data-view, that requires
/// machinery for recursive-updates on data-structures and/or a mechanism to
/// test whether the underlying list-data changed. Implementing as a widget
/// avoids this.
// TODO: impl Clone
#[derive(Debug, Widget)]
#[handler(handle=noauto, generics = <>)]
#[layout(single)]
#[widget(config=noauto)]
pub struct FilterListView<
D: Directional,
T: ListData + UpdHandler<T::Key, V::Msg> + 'static,
F: Filter<T::Item>,
V: Driver<T::Item> = driver::Default,
> {
#[widget_core]
core: CoreData,
#[widget]
list: ListView<D, FilteredList<T, F>, V>,
}
impl<
D: Directional + Default,
T: ListData + UpdHandler<T::Key, V::Msg>,
F: Filter<T::Item>,
V: Driver<T::Item> + Default,
> FilterListView<D, T, F, V>
{
/// Construct a new instance
///
/// This constructor is available where the direction is determined by the
/// type: for `D: Directional + Default`. In other cases, use
/// [`FilterListView::new_with_direction`].
pub fn new(data: T, filter: F) -> Self {
Self::new_with_dir_driver(D::default(), <V as Default>::default(), data, filter)
}
}
impl<
D: Directional,
T: ListData + UpdHandler<T::Key, V::Msg>,
F: Filter<T::Item>,
V: Driver<T::Item> + Default,
> FilterListView<D, T, F, V>
{
/// Construct a new instance with explicit direction
pub fn new_with_direction(direction: D, data: T, filter: F) -> Self {
Self::new_with_dir_driver(direction, <V as Default>::default(), data, filter)
}
}
impl<
D: Directional + Default,
T: ListData + UpdHandler<T::Key, V::Msg>,
F: Filter<T::Item>,
V: Driver<T::Item>,
> FilterListView<D, T, F, V>
{
/// Construct a new instance with explicit view
pub fn new_with_driver(view: V, data: T, filter: F) -> Self {
Self::new_with_dir_driver(D::default(), view, data, filter)
}
}
impl<
T: ListData + UpdHandler<T::Key, V::Msg>,
F: Filter<T::Item>,
V: Driver<T::Item> + Default,
> FilterListView<Direction, T, F, V>
{
/// Set the direction of contents
pub fn set_direction(&mut self, direction: Direction) -> TkAction {
self.list.set_direction(direction)
}
}
impl<
D: Directional,
T: ListData + UpdHandler<T::Key, V::Msg>,
F: Filter<T::Item>,
V: Driver<T::Item>,
> FilterListView<D, T, F, V>
{
/// Construct a new instance with explicit direction and view
pub fn new_with_dir_driver(direction: D, view: V, data: T, filter: F) -> Self {
let data = FilteredList::new(data, filter);
FilterListView {
core: Default::default(),
list: ListView::new_with_dir_driver(direction, view, data),
}
}
/// Access the stored data (pre-filter)
pub fn unfiltered_data(&self) -> &T {
&self.list.data().data
}
/// Mutably access the stored data (pre-filter)
///
/// It may be necessary to use [`FilterListView::update_view`] to update the view of this data.
pub fn unfiltered_data_mut(&mut self) -> &mut T {
&mut self.list.data_mut().data
}
/// Access the stored data (post-filter)
pub fn data(&self) -> &T {
&self.list.data().data
}
/// Mutably access the stored data (post-filter)
///
/// It may be necessary to use [`FilterListView::update_view`] to update the view of this data.
pub fn data_mut(&mut self) -> &mut T {
&mut self.list.data_mut().data
}
/// Check whether a key has data (post-filter)
pub fn contains_key(&self, key: &T::Key) -> bool {
self.data().contains_key(key)
}
/// Get a copy of the shared value at `key` (post-filter)
pub fn get_value(&self, key: &T::Key) -> Option<T::Item> {
self.data().get_cloned(key)
}
/// Set shared data (post-filter)
///
/// This method updates the shared data, if supported (see
/// [`ListData::update`]). Other widgets sharing this data are notified
/// of the update, if data is changed.
pub fn set_value(&self, mgr: &mut Manager, key: &T::Key, data: T::Item) {
if let Some(handle) = self.data().update(key, data) {
mgr.trigger_update(handle, 0);
}
}
/// Update shared data (post-filter)
///
/// This is purely a convenience method over [`FilterListView::set_value`].
/// It does nothing if no value is found at `key`.
/// It notifies other widgets of updates to the shared data.
pub fn update_value<G: Fn(T::Item) -> T::Item>(&self, mgr: &mut Manager, key: &T::Key, f: G) {
if let Some(item) = self.get_value(key) {
self.set_value(mgr, key, f(item));
}
}
/// Get the current selection mode
pub fn selection_mode(&self) -> SelectionMode {
self.list.selection_mode()
}
/// Set the current selection mode
pub fn set_selection_mode(&mut self, mode: SelectionMode) -> TkAction {
self.list.set_selection_mode(mode)
}
/// Set the selection mode (inline)
pub fn with_selection_mode(mut self, mode: SelectionMode) -> Self {
let _ = self.set_selection_mode(mode);
self
}
/// Read the list of selected entries
///
/// With mode [`SelectionMode::Single`] this may contain zero or one entry;
/// use `selected_iter().next()` to extract only the first (optional) entry.
pub fn selected_iter(&'_ self) -> impl Iterator<Item = &'_ T::Key> + '_ {
self.list.selected_iter()
}
/// Check whether an entry is selected
pub fn is_selected(&self, key: &T::Key) -> bool {
self.list.is_selected(key)
}
/// Clear all selected items
///
/// Does not send [`ChildMsg`] responses.
pub fn clear_selected(&mut self) {
self.list.clear_selected()
}
/// Directly select an item
///
/// Returns `true` if selected, `false` if already selected.
/// Fails if selection mode does not permit selection or if the key is
/// invalid or filtered out.
///
/// Does not send [`ChildMsg`] responses.
pub fn select(&mut self, key: T::Key) -> Result<bool, SelectionError> {
self.list.select(key)
}
/// Directly deselect an item
///
/// Returns `true` if deselected, `false` if not previously selected.
/// Also returns `false` on invalid and filtered-out keys.
///
/// Does not send [`ChildMsg`] responses.
pub fn deselect(&mut self, key: &T::Key) -> bool {
self.list.deselect(key)
}
/// Manually trigger an update to handle changed data or filter
pub fn update_view(&mut self, mgr: &mut Manager) {
self.list.data().refresh();
self.list.update_view(mgr)
}
/// Get the direction of contents
pub fn direction(&self) -> Direction {
self.list.direction()
}
/// Set the preferred number of items visible (inline)
///
/// This affects the (ideal) size request and whether children are sized
/// according to their ideal or minimum size but not the minimum size.
pub fn with_num_visible(mut self, number: i32) -> Self {
self.list = self.list.with_num_visible(number);
self
}
}
// TODO: support derive(Scrollable)?
impl<
D: Directional,
T: ListData + UpdHandler<T::Key, V::Msg>,
F: Filter<T::Item>,
V: Driver<T::Item>,
> Scrollable for FilterListView<D, T, F, V>
{
#[inline]
fn scroll_axes(&self, size: Size) -> (bool, bool) {
self.list.scroll_axes(size)
}
#[inline]
fn max_scroll_offset(&self) -> Offset {
self.list.max_scroll_offset()
}
#[inline]
fn scroll_offset(&self) -> Offset {
self.list.scroll_offset()
}
#[inline]
fn set_scroll_offset(&mut self, mgr: &mut Manager, offset: Offset) -> Offset {
self.list.set_scroll_offset(mgr, offset)
}
}
impl<
D: Directional,
T: ListData + UpdHandler<T::Key, V::Msg>,
F: Filter<T::Item>,
V: Driver<T::Item>,
> WidgetConfig for FilterListView<D, T, F, V>
{
fn configure(&mut self, mgr: &mut Manager) {
// We must refresh the filtered list when the underlying list changes
if let Some(handle) = self.list.data().data.update_handle() {
mgr.update_on_handle(handle, self.id());
}
// As well as when the filter changes
if let Some(handle) = self.list.data().update_handle() {
mgr.update_on_handle(handle, self.id());
}
}
}
impl<
D: Directional,
T: ListData + UpdHandler<T::Key, V::Msg>,
F: Filter<T::Item>,
V: Driver<T::Item>,
> Handler for FilterListView<D, T, F, V>
{
type Msg = ChildMsg<T::Key, <V::Widget as Handler>::Msg>;
fn handle(&mut self, mgr: &mut Manager, event: Event) -> Response<Self::Msg> {
match event {
Event::HandleUpdate { .. } => {
self.update_view(mgr);
return Response::Update;
}
_ => Response::None,
}
}
}
| {
self.filter.update_handle()
} | identifier_body |
filter_list.rs | // Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License in the LICENSE-APACHE file or at:
// https://www.apache.org/licenses/LICENSE-2.0
//! Filter-list view widget
use super::{driver, Driver, ListView, SelectionError, SelectionMode};
use crate::Scrollable;
use kas::event::ChildMsg;
use kas::prelude::*;
use kas::updatable::filter::Filter;
use kas::updatable::{ListData, Updatable, UpdatableHandler};
use std::cell::RefCell;
use std::fmt::Debug;
use UpdatableHandler as UpdHandler;
/// Filter accessor over another accessor
///
/// This is an abstraction over a [`ListData`], applying a filter to items when
/// iterating and accessing.
///
/// When updating, the filter applies to the old value: if the old is included,
/// it is replaced by the new, otherwise no replacement occurs.
///
/// Note: the key and item types are the same as those in the underlying list,
/// thus one can also retrieve values from the underlying list directly.
///
/// Note: only `Rc<FilteredList<T, F>>` implements [`ListData`]; the [`Rc`]
/// wrapper is required!
///
/// Warning: this implementation is `O(n)` where `n = data.len()` and not well
/// optimised, thus is expected to be slow on large data lists.
#[derive(Clone, Debug)]
struct FilteredList<T: ListData, F: Filter<T::Item>> {
/// Direct access to unfiltered data
///
/// If adjusting this, one should call [`FilteredList::refresh`] after.
data: T,
/// Direct access to the filter
///
/// If adjusting this, one should call [`FilteredList::refresh`] after.
filter: F,
view: RefCell<Vec<T::Key>>, // TODO: does this need to be in a RefCell?
}
impl<T: ListData, F: Filter<T::Item>> FilteredList<T, F> {
/// Construct and apply filter
#[inline]
fn new(data: T, filter: F) -> Self {
let len = data.len().cast();
let view = RefCell::new(Vec::with_capacity(len));
let s = FilteredList { data, filter, view };
let _ = s.refresh();
s
}
/// Refresh the view
///
/// Re-applies the filter (`O(n)` where `n` is the number of data elements).
/// Calling this directly may be useful in case the data is modified.
///
/// An update should be triggered using the returned handle.
fn refresh(&self) -> Option<UpdateHandle> {
let mut view = self.view.borrow_mut();
view.clear();
for (key, item) in self.data.iter_vec(usize::MAX) {
if self.filter.matches(item) {
view.push(key);
}
}
self.filter.update_handle()
}
}
impl<T: ListData, F: Filter<T::Item>> Updatable for FilteredList<T, F> {
fn update_handle(&self) -> Option<UpdateHandle> {
self.filter.update_handle()
}
fn update_self(&self) -> Option<UpdateHandle> {
self.refresh()
}
}
impl<K, M, T: ListData + UpdatableHandler<K, M> + 'static, F: Filter<T::Item>>
UpdatableHandler<K, M> for FilteredList<T, F>
{
fn handle(&self, key: &K, msg: &M) -> Option<UpdateHandle> {
self.data.handle(key, msg)
}
}
impl<T: ListData + 'static, F: Filter<T::Item>> ListData for FilteredList<T, F> {
type Key = T::Key;
type Item = T::Item;
fn | (&self) -> usize {
self.view.borrow().len()
}
fn contains_key(&self, key: &Self::Key) -> bool {
self.get_cloned(key).is_some()
}
fn get_cloned(&self, key: &Self::Key) -> Option<Self::Item> {
// Check the item against our filter (probably O(1)) instead of using
// our filtered list (O(n) where n=self.len()).
self.data
.get_cloned(key)
.filter(|item| self.filter.matches(item.clone()))
}
fn update(&self, key: &Self::Key, value: Self::Item) -> Option<UpdateHandle> {
// Filtering does not affect result, but does affect the view
if self
.data
.get_cloned(key)
.map(|item| !self.filter.matches(item))
.unwrap_or(true)
{
// Not previously visible: no update occurs
return None;
}
let new_visible = self.filter.matches(value.clone());
let result = self.data.update(key, value);
if result.is_some() && !new_visible {
// remove the updated item from our filtered list
self.view.borrow_mut().retain(|item| item != key);
}
result
}
fn iter_vec_from(&self, start: usize, limit: usize) -> Vec<(Self::Key, Self::Item)> {
let view = self.view.borrow();
let end = self.len().min(start + limit);
if start >= end {
return Vec::new();
}
let mut v = Vec::with_capacity(end - start);
for k in &view[start..end] {
v.push((k.clone(), self.data.get_cloned(k).unwrap()));
}
v
}
}
/// Filter-list view widget
///
/// This widget is a wrapper around [`ListView`] which applies a filter to the
/// data list.
///
/// Why is a data-filter a widget and not a pure-data item, you ask? The answer
/// is that a filter-list must be updated when the filter or the data changes,
/// and, since filtering a list is not especially cheap, the filtering must be
/// cached and updated when required, not every time the list view asks for more
/// data. Although it is possible to do this with a data-view, that requires
/// machinery for recursive-updates on data-structures and/or a mechanism to
/// test whether the underlying list-data changed. Implementing as a widget
/// avoids this.
// TODO: impl Clone
#[derive(Debug, Widget)]
#[handler(handle=noauto, generics = <>)]
#[layout(single)]
#[widget(config=noauto)]
pub struct FilterListView<
D: Directional,
T: ListData + UpdHandler<T::Key, V::Msg> + 'static,
F: Filter<T::Item>,
V: Driver<T::Item> = driver::Default,
> {
#[widget_core]
core: CoreData,
#[widget]
list: ListView<D, FilteredList<T, F>, V>,
}
impl<
D: Directional + Default,
T: ListData + UpdHandler<T::Key, V::Msg>,
F: Filter<T::Item>,
V: Driver<T::Item> + Default,
> FilterListView<D, T, F, V>
{
/// Construct a new instance
///
/// This constructor is available where the direction is determined by the
/// type: for `D: Directional + Default`. In other cases, use
/// [`FilterListView::new_with_direction`].
pub fn new(data: T, filter: F) -> Self {
Self::new_with_dir_driver(D::default(), <V as Default>::default(), data, filter)
}
}
impl<
D: Directional,
T: ListData + UpdHandler<T::Key, V::Msg>,
F: Filter<T::Item>,
V: Driver<T::Item> + Default,
> FilterListView<D, T, F, V>
{
/// Construct a new instance with explicit direction
pub fn new_with_direction(direction: D, data: T, filter: F) -> Self {
Self::new_with_dir_driver(direction, <V as Default>::default(), data, filter)
}
}
impl<
D: Directional + Default,
T: ListData + UpdHandler<T::Key, V::Msg>,
F: Filter<T::Item>,
V: Driver<T::Item>,
> FilterListView<D, T, F, V>
{
/// Construct a new instance with explicit view
pub fn new_with_driver(view: V, data: T, filter: F) -> Self {
Self::new_with_dir_driver(D::default(), view, data, filter)
}
}
impl<
T: ListData + UpdHandler<T::Key, V::Msg>,
F: Filter<T::Item>,
V: Driver<T::Item> + Default,
> FilterListView<Direction, T, F, V>
{
/// Set the direction of contents
pub fn set_direction(&mut self, direction: Direction) -> TkAction {
self.list.set_direction(direction)
}
}
impl<
D: Directional,
T: ListData + UpdHandler<T::Key, V::Msg>,
F: Filter<T::Item>,
V: Driver<T::Item>,
> FilterListView<D, T, F, V>
{
/// Construct a new instance with explicit direction and view
pub fn new_with_dir_driver(direction: D, view: V, data: T, filter: F) -> Self {
let data = FilteredList::new(data, filter);
FilterListView {
core: Default::default(),
list: ListView::new_with_dir_driver(direction, view, data),
}
}
/// Access the stored data (pre-filter)
pub fn unfiltered_data(&self) -> &T {
&self.list.data().data
}
/// Mutably access the stored data (pre-filter)
///
/// It may be necessary to use [`FilterListView::update_view`] to update the view of this data.
pub fn unfiltered_data_mut(&mut self) -> &mut T {
&mut self.list.data_mut().data
}
/// Access the stored data (post-filter)
pub fn data(&self) -> &T {
&self.list.data().data
}
/// Mutably access the stored data (post-filter)
///
/// It may be necessary to use [`FilterListView::update_view`] to update the view of this data.
pub fn data_mut(&mut self) -> &mut T {
&mut self.list.data_mut().data
}
/// Check whether a key has data (post-filter)
pub fn contains_key(&self, key: &T::Key) -> bool {
self.data().contains_key(key)
}
/// Get a copy of the shared value at `key` (post-filter)
pub fn get_value(&self, key: &T::Key) -> Option<T::Item> {
self.data().get_cloned(key)
}
/// Set shared data (post-filter)
///
/// This method updates the shared data, if supported (see
/// [`ListData::update`]). Other widgets sharing this data are notified
/// of the update, if data is changed.
pub fn set_value(&self, mgr: &mut Manager, key: &T::Key, data: T::Item) {
if let Some(handle) = self.data().update(key, data) {
mgr.trigger_update(handle, 0);
}
}
/// Update shared data (post-filter)
///
/// This is purely a convenience method over [`FilterListView::set_value`].
/// It does nothing if no value is found at `key`.
/// It notifies other widgets of updates to the shared data.
pub fn update_value<G: Fn(T::Item) -> T::Item>(&self, mgr: &mut Manager, key: &T::Key, f: G) {
if let Some(item) = self.get_value(key) {
self.set_value(mgr, key, f(item));
}
}
/// Get the current selection mode
pub fn selection_mode(&self) -> SelectionMode {
self.list.selection_mode()
}
/// Set the current selection mode
pub fn set_selection_mode(&mut self, mode: SelectionMode) -> TkAction {
self.list.set_selection_mode(mode)
}
/// Set the selection mode (inline)
pub fn with_selection_mode(mut self, mode: SelectionMode) -> Self {
let _ = self.set_selection_mode(mode);
self
}
/// Read the list of selected entries
///
/// With mode [`SelectionMode::Single`] this may contain zero or one entry;
/// use `selected_iter().next()` to extract only the first (optional) entry.
pub fn selected_iter(&'_ self) -> impl Iterator<Item = &'_ T::Key> + '_ {
self.list.selected_iter()
}
/// Check whether an entry is selected
pub fn is_selected(&self, key: &T::Key) -> bool {
self.list.is_selected(key)
}
/// Clear all selected items
///
/// Does not send [`ChildMsg`] responses.
pub fn clear_selected(&mut self) {
self.list.clear_selected()
}
/// Directly select an item
///
/// Returns `true` if selected, `false` if already selected.
/// Fails if selection mode does not permit selection or if the key is
/// invalid or filtered out.
///
/// Does not send [`ChildMsg`] responses.
pub fn select(&mut self, key: T::Key) -> Result<bool, SelectionError> {
self.list.select(key)
}
/// Directly deselect an item
///
/// Returns `true` if deselected, `false` if not previously selected.
/// Also returns `false` on invalid and filtered-out keys.
///
/// Does not send [`ChildMsg`] responses.
pub fn deselect(&mut self, key: &T::Key) -> bool {
self.list.deselect(key)
}
/// Manually trigger an update to handle changed data or filter
pub fn update_view(&mut self, mgr: &mut Manager) {
self.list.data().refresh();
self.list.update_view(mgr)
}
/// Get the direction of contents
pub fn direction(&self) -> Direction {
self.list.direction()
}
/// Set the preferred number of items visible (inline)
///
/// This affects the (ideal) size request and whether children are sized
/// according to their ideal or minimum size but not the minimum size.
pub fn with_num_visible(mut self, number: i32) -> Self {
self.list = self.list.with_num_visible(number);
self
}
}
// TODO: support derive(Scrollable)?
impl<
D: Directional,
T: ListData + UpdHandler<T::Key, V::Msg>,
F: Filter<T::Item>,
V: Driver<T::Item>,
> Scrollable for FilterListView<D, T, F, V>
{
#[inline]
fn scroll_axes(&self, size: Size) -> (bool, bool) {
self.list.scroll_axes(size)
}
#[inline]
fn max_scroll_offset(&self) -> Offset {
self.list.max_scroll_offset()
}
#[inline]
fn scroll_offset(&self) -> Offset {
self.list.scroll_offset()
}
#[inline]
fn set_scroll_offset(&mut self, mgr: &mut Manager, offset: Offset) -> Offset {
self.list.set_scroll_offset(mgr, offset)
}
}
impl<
D: Directional,
T: ListData + UpdHandler<T::Key, V::Msg>,
F: Filter<T::Item>,
V: Driver<T::Item>,
> WidgetConfig for FilterListView<D, T, F, V>
{
fn configure(&mut self, mgr: &mut Manager) {
// We must refresh the filtered list when the underlying list changes
if let Some(handle) = self.list.data().data.update_handle() {
mgr.update_on_handle(handle, self.id());
}
// As well as when the filter changes
if let Some(handle) = self.list.data().update_handle() {
mgr.update_on_handle(handle, self.id());
}
}
}
impl<
D: Directional,
T: ListData + UpdHandler<T::Key, V::Msg>,
F: Filter<T::Item>,
V: Driver<T::Item>,
> Handler for FilterListView<D, T, F, V>
{
type Msg = ChildMsg<T::Key, <V::Widget as Handler>::Msg>;
fn handle(&mut self, mgr: &mut Manager, event: Event) -> Response<Self::Msg> {
match event {
Event::HandleUpdate { .. } => {
self.update_view(mgr);
return Response::Update;
}
_ => Response::None,
}
}
}
| len | identifier_name |
filter_list.rs
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License in the LICENSE-APACHE file or at:
// https://www.apache.org/licenses/LICENSE-2.0
//! Filter-list view widget
use super::{driver, Driver, ListView, SelectionError, SelectionMode};
use crate::Scrollable;
use kas::event::ChildMsg;
use kas::prelude::*;
use kas::updatable::filter::Filter;
use kas::updatable::{ListData, Updatable, UpdatableHandler};
use std::cell::RefCell;
use std::fmt::Debug;
use UpdatableHandler as UpdHandler;
/// Filter accessor over another accessor
///
/// This is an abstraction over a [`ListData`], applying a filter to items when
/// iterating and accessing.
///
/// When updating, the filter applies to the old value: if the old value
/// passes the filter, it is replaced by the new one; otherwise no
/// replacement occurs.
///
/// Note: the key and item types are the same as those in the underlying list,
/// thus one can also retrieve values from the underlying list directly.
///
/// Note: `FilteredList<T, F>` implements [`ListData`] directly (see the
/// impl below); no `Rc` wrapper is required.
///
/// Warning: this implementation is `O(n)` where `n = data.len()` and not well
/// optimised, thus is expected to be slow on large data lists.
#[derive(Clone, Debug)]
pub struct FilteredList<T: ListData, F: Filter<T::Item>> {
/// Direct access to unfiltered data
///
/// If adjusting this, one should call [`FilteredList::refresh`] after.
    pub data: T,
/// Direct access to the filter
///
/// If adjusting this, one should call [`FilteredList::refresh`] after.
    pub filter: F,
view: RefCell<Vec<T::Key>>, // TODO: does this need to be in a RefCell?
}
impl<T: ListData, F: Filter<T::Item>> FilteredList<T, F> {
/// Construct and apply filter
#[inline]
fn new(data: T, filter: F) -> Self {
let len = data.len().cast();
let view = RefCell::new(Vec::with_capacity(len));
let s = FilteredList { data, filter, view };
let _ = s.refresh();
s
}
/// Refresh the view
///
/// Re-applies the filter (`O(n)` where `n` is the number of data elements).
/// Calling this directly may be useful in case the data is modified.
///
/// An update should be triggered using the returned handle.
fn refresh(&self) -> Option<UpdateHandle> {
let mut view = self.view.borrow_mut();
view.clear();
for (key, item) in self.data.iter_vec(usize::MAX) {
if self.filter.matches(item) {
view.push(key);
}
}
self.filter.update_handle()
}
}
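// Illustrative internal-usage sketch (not part of the original source; `mgr`
// is assumed to be a `&mut Manager` available in the calling context):
//
//     let filtered = FilteredList::new(data, filter);
//     // After mutating `filtered.data` or `filtered.filter` directly,
//     // re-apply the filter and notify other views of the change:
//     if let Some(handle) = filtered.refresh() {
//         mgr.trigger_update(handle, 0);
//     }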
impl<T: ListData, F: Filter<T::Item>> Updatable for FilteredList<T, F> {
fn update_handle(&self) -> Option<UpdateHandle> {
self.filter.update_handle()
}
fn update_self(&self) -> Option<UpdateHandle> {
self.refresh()
}
}
impl<K, M, T: ListData + UpdatableHandler<K, M> + 'static, F: Filter<T::Item>>
UpdatableHandler<K, M> for FilteredList<T, F>
{
fn handle(&self, key: &K, msg: &M) -> Option<UpdateHandle> {
self.data.handle(key, msg)
}
}
impl<T: ListData + 'static, F: Filter<T::Item>> ListData for FilteredList<T, F> {
type Key = T::Key;
type Item = T::Item;
fn len(&self) -> usize {
self.view.borrow().len()
}
fn contains_key(&self, key: &Self::Key) -> bool {
self.get_cloned(key).is_some()
}
fn get_cloned(&self, key: &Self::Key) -> Option<Self::Item> {
// Check the item against our filter (probably O(1)) instead of using
// our filtered list (O(n) where n=self.len()).
self.data
.get_cloned(key)
.filter(|item| self.filter.matches(item.clone()))
}
fn update(&self, key: &Self::Key, value: Self::Item) -> Option<UpdateHandle> {
// Filtering does not affect result, but does affect the view
if self
.data
.get_cloned(key)
.map(|item| !self.filter.matches(item))
.unwrap_or(true)
{
// Not previously visible: no update occurs
return None;
}
let new_visible = self.filter.matches(value.clone());
let result = self.data.update(key, value);
if result.is_some() && !new_visible {
// remove the updated item from our filtered list
self.view.borrow_mut().retain(|item| item != key);
}
result
}
fn iter_vec_from(&self, start: usize, limit: usize) -> Vec<(Self::Key, Self::Item)> {
let view = self.view.borrow();
let end = self.len().min(start + limit);
if start >= end {
return Vec::new();
}
let mut v = Vec::with_capacity(end - start);
for k in &view[start..end] {
v.push((k.clone(), self.data.get_cloned(k).unwrap()));
}
v
}
}
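// Behaviour sketch for the `ListData` impl above (illustrative only; the
// keys and the filter — here assumed to match strings containing 'a' — are
// hypothetical):
//
//     assert!(filtered.contains_key(&key_cat)); // "cat" passes the filter
//     // Updating a visible entry to a non-matching value updates the
//     // underlying data but removes the key from the cached view:
//     filtered.update(&key_cat, "dog".to_string());
//     // Updating an entry that was already filtered out is a no-op:
//     assert!(filtered.update(&key_dog, "x".to_string()).is_none());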
/// Filter-list view widget
///
/// This widget is a wrapper around [`ListView`] which applies a filter to the
/// data list.
///
/// Why is a data-filter a widget and not a pure-data item, you ask? The answer
/// is that a filter-list must be updated when the filter or the data changes,
/// and, since filtering a list is not especially cheap, the filtering must be
/// cached and updated when required, not every time the list view asks for more
/// data. Although it is possible to do this with a data-view, that requires
/// machinery for recursive-updates on data-structures and/or a mechanism to
/// test whether the underlying list-data changed. Implementing as a widget
/// avoids this.
// TODO: impl Clone
#[derive(Debug, Widget)]
#[handler(handle=noauto, generics = <>)]
#[layout(single)]
#[widget(config=noauto)]
pub struct FilterListView<
D: Directional,
T: ListData + UpdHandler<T::Key, V::Msg> + 'static,
F: Filter<T::Item>,
V: Driver<T::Item> = driver::Default,
> {
#[widget_core]
core: CoreData,
#[widget]
list: ListView<D, FilteredList<T, F>, V>,
}
impl<
D: Directional + Default,
T: ListData + UpdHandler<T::Key, V::Msg>,
F: Filter<T::Item>,
V: Driver<T::Item> + Default,
> FilterListView<D, T, F, V>
{
/// Construct a new instance
///
/// This constructor is available where the direction is determined by the
/// type: for `D: Directional + Default`. In other cases, use
/// [`FilterListView::new_with_direction`].
pub fn new(data: T, filter: F) -> Self {
Self::new_with_dir_driver(D::default(), <V as Default>::default(), data, filter)
}
}
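// Hypothetical construction sketch (not part of the original source; assumes
// `Down` from `kas::dir`, list data implementing `ListData`, and a filter
// such as `ContainsCaseInsensitive` — exact names may differ by version):
//
//     let data = vec!["Apple".to_string(), "Orange".to_string()];
//     let filter = ContainsCaseInsensitive::new("");
//     let list = FilterListView::<Down, _, _>::new(data, filter);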
impl<
D: Directional,
T: ListData + UpdHandler<T::Key, V::Msg>,
F: Filter<T::Item>,
V: Driver<T::Item> + Default,
> FilterListView<D, T, F, V>
{
/// Construct a new instance with explicit direction
pub fn new_with_direction(direction: D, data: T, filter: F) -> Self {
Self::new_with_dir_driver(direction, <V as Default>::default(), data, filter)
}
}
impl<
D: Directional + Default,
T: ListData + UpdHandler<T::Key, V::Msg>,
F: Filter<T::Item>,
V: Driver<T::Item>,
> FilterListView<D, T, F, V>
{
/// Construct a new instance with explicit view
pub fn new_with_driver(view: V, data: T, filter: F) -> Self {
Self::new_with_dir_driver(D::default(), view, data, filter)
}
}
impl<
T: ListData + UpdHandler<T::Key, V::Msg>,
F: Filter<T::Item>,
V: Driver<T::Item> + Default,
> FilterListView<Direction, T, F, V>
{
    /// Set the direction of contents
    pub fn set_direction(&mut self, direction: Direction) -> TkAction {
        self.list.set_direction(direction)
    }
}
impl<
D: Directional,
T: ListData + UpdHandler<T::Key, V::Msg>,
F: Filter<T::Item>,
V: Driver<T::Item>,
> FilterListView<D, T, F, V>
{
/// Construct a new instance with explicit direction and view
pub fn new_with_dir_driver(direction: D, view: V, data: T, filter: F) -> Self {
let data = FilteredList::new(data, filter);
FilterListView {
core: Default::default(),
list: ListView::new_with_dir_driver(direction, view, data),
}
}
/// Access the stored data (pre-filter)
pub fn unfiltered_data(&self) -> &T {
&self.list.data().data
}
/// Mutably access the stored data (pre-filter)
///
/// It may be necessary to use [`FilterListView::update_view`] to update the view of this data.
pub fn unfiltered_data_mut(&mut self) -> &mut T {
&mut self.list.data_mut().data
}
/// Access the stored data (post-filter)
    pub fn data(&self) -> &FilteredList<T, F> {
        self.list.data()
    }
/// Mutably access the stored data (post-filter)
///
/// It may be necessary to use [`FilterListView::update_view`] to update the view of this data.
    pub fn data_mut(&mut self) -> &mut FilteredList<T, F> {
        self.list.data_mut()
    }
/// Check whether a key has data (post-filter)
pub fn contains_key(&self, key: &T::Key) -> bool {
self.data().contains_key(key)
}
/// Get a copy of the shared value at `key` (post-filter)
pub fn get_value(&self, key: &T::Key) -> Option<T::Item> {
self.data().get_cloned(key)
}
/// Set shared data (post-filter)
///
/// This method updates the shared data, if supported (see
/// [`ListData::update`]). Other widgets sharing this data are notified
/// of the update, if data is changed.
pub fn set_value(&self, mgr: &mut Manager, key: &T::Key, data: T::Item) {
if let Some(handle) = self.data().update(key, data) {
mgr.trigger_update(handle, 0);
}
}
/// Update shared data (post-filter)
///
/// This is purely a convenience method over [`FilterListView::set_value`].
/// It does nothing if no value is found at `key`.
/// It notifies other widgets of updates to the shared data.
pub fn update_value<G: Fn(T::Item) -> T::Item>(&self, mgr: &mut Manager, key: &T::Key, f: G) {
if let Some(item) = self.get_value(key) {
self.set_value(mgr, key, f(item));
}
}
/// Get the current selection mode
pub fn selection_mode(&self) -> SelectionMode {
self.list.selection_mode()
}
/// Set the current selection mode
pub fn set_selection_mode(&mut self, mode: SelectionMode) -> TkAction {
self.list.set_selection_mode(mode)
}
/// Set the selection mode (inline)
pub fn with_selection_mode(mut self, mode: SelectionMode) -> Self {
let _ = self.set_selection_mode(mode);
self
}
/// Read the list of selected entries
///
/// With mode [`SelectionMode::Single`] this may contain zero or one entry;
/// use `selected_iter().next()` to extract only the first (optional) entry.
pub fn selected_iter(&'_ self) -> impl Iterator<Item = &'_ T::Key> + '_ {
self.list.selected_iter()
}
/// Check whether an entry is selected
pub fn is_selected(&self, key: &T::Key) -> bool {
self.list.is_selected(key)
}
/// Clear all selected items
///
/// Does not send [`ChildMsg`] responses.
pub fn clear_selected(&mut self) {
self.list.clear_selected()
}
/// Directly select an item
///
    /// Returns `true` if newly selected, `false` if already selected.
/// Fails if selection mode does not permit selection or if the key is
/// invalid or filtered out.
///
/// Does not send [`ChildMsg`] responses.
pub fn select(&mut self, key: T::Key) -> Result<bool, SelectionError> {
self.list.select(key)
}
/// Directly deselect an item
///
/// Returns `true` if deselected, `false` if not previously selected.
/// Also returns `false` on invalid and filtered-out keys.
///
/// Does not send [`ChildMsg`] responses.
pub fn deselect(&mut self, key: &T::Key) -> bool {
self.list.deselect(key)
}
/// Manually trigger an update to handle changed data or filter
pub fn update_view(&mut self, mgr: &mut Manager) {
self.list.data().refresh();
self.list.update_view(mgr)
}
/// Get the direction of contents
pub fn direction(&self) -> Direction {
self.list.direction()
}
/// Set the preferred number of items visible (inline)
///
    /// This affects the (ideal) size request, and whether children are sized
    /// according to their ideal or minimum size; it does not affect the
    /// widget's minimum size.
pub fn with_num_visible(mut self, number: i32) -> Self {
self.list = self.list.with_num_visible(number);
self
}
}
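// Selection-API sketch (illustrative, not in the original source; assumes a
// `SelectionMode::Multiple` variant and a valid, non-filtered `key`):
//
//     let _ = list.set_selection_mode(SelectionMode::Multiple);
//     if let Ok(true) = list.select(key.clone()) {
//         // newly selected
//     }
//     for key in list.selected_iter() {
//         // filtered-out keys never appear here
//     }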
// TODO: support derive(Scrollable)?
impl<
D: Directional,
T: ListData + UpdHandler<T::Key, V::Msg>,
F: Filter<T::Item>,
V: Driver<T::Item>,
> Scrollable for FilterListView<D, T, F, V>
{
#[inline]
fn scroll_axes(&self, size: Size) -> (bool, bool) {
self.list.scroll_axes(size)
}
#[inline]
fn max_scroll_offset(&self) -> Offset {
self.list.max_scroll_offset()
}
#[inline]
fn scroll_offset(&self) -> Offset {
self.list.scroll_offset()
}
#[inline]
fn set_scroll_offset(&mut self, mgr: &mut Manager, offset: Offset) -> Offset {
self.list.set_scroll_offset(mgr, offset)
}
}
impl<
D: Directional,
T: ListData + UpdHandler<T::Key, V::Msg>,
F: Filter<T::Item>,
V: Driver<T::Item>,
> WidgetConfig for FilterListView<D, T, F, V>
{
fn configure(&mut self, mgr: &mut Manager) {
// We must refresh the filtered list when the underlying list changes
if let Some(handle) = self.list.data().data.update_handle() {
mgr.update_on_handle(handle, self.id());
}
// As well as when the filter changes
if let Some(handle) = self.list.data().update_handle() {
mgr.update_on_handle(handle, self.id());
}
}
}
impl<
D: Directional,
T: ListData + UpdHandler<T::Key, V::Msg>,
F: Filter<T::Item>,
V: Driver<T::Item>,
> Handler for FilterListView<D, T, F, V>
{
type Msg = ChildMsg<T::Key, <V::Widget as Handler>::Msg>;
fn handle(&mut self, mgr: &mut Manager, event: Event) -> Response<Self::Msg> {
match event {
Event::HandleUpdate { .. } => {
self.update_view(mgr);
return Response::Update;
}
_ => Response::None,
}
}
}
mod.rs
//! `types` module contains types necessary for Fluent runtime
//! value handling.
//! The core struct is [`FluentValue`], a type that can be passed to
//! [`FluentBundle::format_pattern`](crate::bundle::FluentBundle) as an argument; it can be
//! passed to any Fluent Function, and any function may return it.
//!
//! This part of functionality is not fully hashed out yet, since we're waiting
//! for the internationalization APIs to mature, at which point all number
//! formatting operations will be moved out of Fluent.
//!
//! For now, [`FluentValue`] can be a string, a number, or a custom [`FluentType`]
//! which allows users of the library to implement their own types of values,
//! such as dates, or more complex structures needed for their bindings.
mod number;
mod plural;
pub use number::*;
use plural::PluralRules;
use std::any::Any;
use std::borrow::{Borrow, Cow};
use std::fmt;
use std::str::FromStr;
use intl_pluralrules::{PluralCategory, PluralRuleType};
use crate::memoizer::MemoizerKind;
use crate::resolver::Scope;
use crate::resource::FluentResource;
/// Custom types can implement the [`FluentType`] trait in order to generate a string
/// value for use in the message generation process.
pub trait FluentType: fmt::Debug + AnyEq + 'static {
/// Create a clone of the underlying type.
fn duplicate(&self) -> Box<dyn FluentType + Send>;
/// Convert the custom type into a string value, for instance a custom DateTime
/// type could return "Oct. 27, 2022".
fn as_string(&self, intls: &intl_memoizer::IntlLangMemoizer) -> Cow<'static, str>;
/// Convert the custom type into a string value, for instance a custom DateTime
/// type could return "Oct. 27, 2022". This operation is provided the threadsafe
/// [IntlLangMemoizer](intl_memoizer::concurrent::IntlLangMemoizer).
fn as_string_threadsafe(
&self,
intls: &intl_memoizer::concurrent::IntlLangMemoizer,
) -> Cow<'static, str>;
}
impl PartialEq for dyn FluentType + Send {
fn eq(&self, other: &Self) -> bool {
self.equals(other.as_any())
}
}
pub trait AnyEq: Any + 'static {
fn equals(&self, other: &dyn Any) -> bool;
fn as_any(&self) -> &dyn Any;
}
impl<T: Any + PartialEq> AnyEq for T {
fn equals(&self, other: &dyn Any) -> bool {
other
.downcast_ref::<Self>()
.map_or(false, |that| self == that)
}
fn as_any(&self) -> &dyn Any {
self
}
}
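// Illustrative sketch (not part of the original source): a minimal custom
// type implementing `FluentType`. `Epoch` is a hypothetical example type; a
// real implementation would likely use `intls` for locale-aware formatting.
#[cfg(test)]
mod fluent_type_example {
    use super::*;

    #[derive(Debug, Clone, PartialEq)]
    struct Epoch(usize);

    impl FluentType for Epoch {
        fn duplicate(&self) -> Box<dyn FluentType + Send> {
            Box::new(self.clone())
        }
        fn as_string(&self, _intls: &intl_memoizer::IntlLangMemoizer) -> Cow<'static, str> {
            self.0.to_string().into()
        }
        fn as_string_threadsafe(
            &self,
            _intls: &intl_memoizer::concurrent::IntlLangMemoizer,
        ) -> Cow<'static, str> {
            self.0.to_string().into()
        }
    }
}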
/// The `FluentValue` enum represents values which can be formatted to a String.
///
/// Those values are either passed as arguments to [`FluentBundle::format_pattern`],
/// produced by functions, or generated in the process of pattern resolution.
///
/// [`FluentBundle::format_pattern`]: crate::bundle::FluentBundle::format_pattern
#[derive(Debug)]
pub enum FluentValue<'source> {
String(Cow<'source, str>),
Number(FluentNumber),
Custom(Box<dyn FluentType + Send>),
None,
Error,
}
impl<'s> PartialEq for FluentValue<'s> {
fn eq(&self, other: &Self) -> bool {
match (self, other) {
(FluentValue::String(s), FluentValue::String(s2)) => s == s2,
(FluentValue::Number(s), FluentValue::Number(s2)) => s == s2,
(FluentValue::Custom(s), FluentValue::Custom(s2)) => s == s2,
_ => false,
}
}
}
impl<'s> Clone for FluentValue<'s> {
fn clone(&self) -> Self {
match self {
FluentValue::String(s) => FluentValue::String(s.clone()),
FluentValue::Number(s) => FluentValue::Number(s.clone()),
FluentValue::Custom(s) => {
let new_value: Box<dyn FluentType + Send> = s.duplicate();
FluentValue::Custom(new_value)
}
FluentValue::Error => FluentValue::Error,
FluentValue::None => FluentValue::None,
}
}
}
impl<'source> FluentValue<'source> {
    /// Attempts to parse the given string into a [`FluentValue::Number`]. If
    /// parsing fails, the value is kept as a [`FluentValue::String`] instead.
///
/// ```
/// use fluent_bundle::types::{FluentNumber, FluentNumberOptions, FluentValue};
///
/// // "2" parses into a `FluentNumber`
/// assert_eq!(
/// FluentValue::try_number("2"),
/// FluentValue::Number(FluentNumber::new(2.0, FluentNumberOptions::default()))
/// );
///
/// // Floats can be parsed as well.
/// assert_eq!(
/// FluentValue::try_number("3.141569"),
/// FluentValue::Number(FluentNumber::new(
/// 3.141569,
/// FluentNumberOptions {
/// minimum_fraction_digits: Some(6),
/// ..Default::default()
/// }
/// ))
/// );
///
/// // When a value is not a valid number, it falls back to a `FluentValue::String`
/// assert_eq!(
/// FluentValue::try_number("A string"),
/// FluentValue::String("A string".into())
/// );
/// ```
pub fn try_number(value: &'source str) -> Self {
if let Ok(number) = FluentNumber::from_str(value) {
number.into()
} else {
value.into()
}
}
/// Checks to see if two [`FluentValues`](FluentValue) match each other by having the
    /// same type and contents. The special exception is a string being compared
    /// to a number: in that case, we check whether the string names the number's
    /// plural-rule category.
///
/// ```
/// use fluent_bundle::resolver::Scope;
/// use fluent_bundle::{types::FluentValue, FluentBundle, FluentResource};
/// use unic_langid::langid;
///
    /// let langid_en = langid!("en");
    /// let bundle: FluentBundle<FluentResource> = FluentBundle::new(vec![langid_en]);
/// let scope = Scope::new(&bundle, None, None);
///
/// // Matching examples:
/// assert!(FluentValue::try_number("2").matches(&FluentValue::try_number("2"), &scope));
/// assert!(FluentValue::from("fluent").matches(&FluentValue::from("fluent"), &scope));
/// assert!(
/// FluentValue::from("one").matches(&FluentValue::try_number("1"), &scope),
/// "Plural rules are matched."
/// );
///
/// // Non-matching examples:
/// assert!(!FluentValue::try_number("2").matches(&FluentValue::try_number("3"), &scope));
/// assert!(!FluentValue::from("fluent").matches(&FluentValue::from("not fluent"), &scope));
/// assert!(!FluentValue::from("two").matches(&FluentValue::try_number("100"), &scope),);
/// ```
pub fn matches<R: Borrow<FluentResource>, M>(
&self,
other: &FluentValue,
scope: &Scope<R, M>,
) -> bool
where
M: MemoizerKind,
{
match (self, other) {
(&FluentValue::String(ref a), &FluentValue::String(ref b)) => a == b,
(&FluentValue::Number(ref a), &FluentValue::Number(ref b)) => a == b,
(&FluentValue::String(ref a), &FluentValue::Number(ref b)) => {
let cat = match a.as_ref() {
"zero" => PluralCategory::ZERO,
"one" => PluralCategory::ONE,
"two" => PluralCategory::TWO,
"few" => PluralCategory::FEW,
"many" => PluralCategory::MANY,
"other" => PluralCategory::OTHER,
_ => return false,
};
// This string matches a plural rule keyword. Check if the number
// matches the plural rule category.
scope
.bundle
.intls
.with_try_get_threadsafe::<PluralRules, _, _>(
(PluralRuleType::CARDINAL,),
|pr| pr.0.select(b) == Ok(cat),
)
.unwrap()
}
_ => false,
}
}
/// Write out a string version of the [`FluentValue`] to `W`.
pub fn write<W, R, M>(&self, w: &mut W, scope: &Scope<R, M>) -> fmt::Result
where
W: fmt::Write,
R: Borrow<FluentResource>,
M: MemoizerKind,
{
if let Some(formatter) = &scope.bundle.formatter {
if let Some(val) = formatter(self, &scope.bundle.intls) {
return w.write_str(&val);
}
}
match self {
FluentValue::String(s) => w.write_str(s),
FluentValue::Number(n) => w.write_str(&n.as_string()),
FluentValue::Custom(s) => w.write_str(&scope.bundle.intls.stringify_value(&**s)),
FluentValue::Error => Ok(()),
FluentValue::None => Ok(()),
}
}
/// Converts the [`FluentValue`] to a string.
///
    /// Clones inner values when owned; borrowed data is not cloned.
/// Prefer using [`FluentValue::into_string()`] when possible.
pub fn as_string<R: Borrow<FluentResource>, M>(&self, scope: &Scope<R, M>) -> Cow<'source, str>
where
M: MemoizerKind,
{
if let Some(formatter) = &scope.bundle.formatter {
if let Some(val) = formatter(self, &scope.bundle.intls) {
return val.into();
}
}
match self {
FluentValue::String(s) => s.clone(),
FluentValue::Number(n) => n.as_string(),
FluentValue::Custom(s) => scope.bundle.intls.stringify_value(&**s),
FluentValue::Error => "".into(),
FluentValue::None => "".into(),
}
}
/// Converts the [`FluentValue`] to a string.
///
/// Takes self by-value to be able to skip expensive clones.
/// Prefer this method over [`FluentValue::as_string()`] when possible.
pub fn into_string<R: Borrow<FluentResource>, M>(self, scope: &Scope<R, M>) -> Cow<'source, str>
where
M: MemoizerKind,
{
if let Some(formatter) = &scope.bundle.formatter {
if let Some(val) = formatter(&self, &scope.bundle.intls) {
return val.into();
}
}
match self {
FluentValue::String(s) => s,
FluentValue::Number(n) => n.as_string(),
FluentValue::Custom(s) => scope.bundle.intls.stringify_value(s.as_ref()),
FluentValue::Error => "".into(),
FluentValue::None => "".into(),
}
}
pub fn into_owned<'a>(&self) -> FluentValue<'a> {
match self {
FluentValue::String(str) => FluentValue::String(Cow::from(str.to_string())),
FluentValue::Number(s) => FluentValue::Number(s.clone()),
FluentValue::Custom(s) => FluentValue::Custom(s.duplicate()),
FluentValue::Error => FluentValue::Error,
FluentValue::None => FluentValue::None,
}
}
}
impl<'source> From<String> for FluentValue<'source> {
fn from(s: String) -> Self {
FluentValue::String(s.into())
}
}
impl<'source> From<&'source String> for FluentValue<'source> {
fn from(s: &'source String) -> Self {
FluentValue::String(s.into())
}
}
impl<'source> From<&'source str> for FluentValue<'source> {
fn from(s: &'source str) -> Self {
FluentValue::String(s.into())
}
}
impl<'source> From<Cow<'source, str>> for FluentValue<'source> {
fn from(s: Cow<'source, str>) -> Self {
FluentValue::String(s)
}
}
impl<'source, T> From<Option<T>> for FluentValue<'source>
where
T: Into<FluentValue<'source>>,
{
    fn from(v: Option<T>) -> Self {
match v {
Some(v) => v.into(),
None => FluentValue::None,
}
}
}
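// Conversion sketch for the `From` impls above (illustrative, not in the
// original source):
//
//     let owned: FluentValue = String::from("owned").into();
//     let borrowed: FluentValue = "borrowed".into();
//     let opt: FluentValue = Some("present").into();
//     let none: FluentValue = Option::<&str>::None.into();
//     assert!(matches!(none, FluentValue::None));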