text
stringlengths 27
775k
|
---|
package com.sunnyweather.android.bean

/** Response wrapper for the banner endpoint: carries the list of banners to show. */
data class BannerResponse(val data: List<Banner>)

/** A single banner entry: the image to display and the URL it links to. */
// Fix: removed the stray trailing "|" artifact that broke compilation.
data class Banner(val imagePath: String, val url: String)
require "spec_helper"
# Spec for the DrawingML <a:audioCd> property element.
# PropertyTestMacros (project helper) supplies the it_should_* macros: they
# assert the serialized XML tag, the Ruby-side accessor name, and the set of
# property readers the class exposes.
describe OpenXml::DrawingML::Properties::AudioCd do
  include PropertyTestMacros

  it_should_use tag: :audioCd, name: "audio_cd"
  it_should_have_properties :start, :end, :extension_list
end
|
import { ParticipantsCollection, ConversationsCollection, MessagesCollection } from '../../common.js';
// Client-side permission rules for MessagesCollection.
MessagesCollection.allow({
  // Sending: allowed only when the sender is a participant of the target conversation.
  insert(userId, message) {
    const isParticipant =
      ParticipantsCollection.findOne({ userId, conversationId: message.conversationId });
    return Boolean(userId && isParticipant);
  },
  // If the user sent the message, let them modify it.
  update(userId, message) {
    return userId && message.checkOwnership();
  },
});
// After a message is sent we need to update the ParticipantsCollection and ConversationsCollection
MessagesCollection.after.insert(function afterInsert(userId, document) {
  /* Only update participants who aren't observing the conversation.
   * If we update users who are reading the conversation it will show the
   * conversation as unread to the user. This would be bad UX design
   *
   * Tracking observations is done through the "viewingConversation" subscription
   */
  ParticipantsCollection.update({
    userId: { $ne: userId },
    conversationId: document.conversationId,
    // $size: 0 — the participant currently has no active "viewingConversation" subscription
    observing: {
      $size: 0,
    },
    read: true,
  }, {
    $set: { read: false },
  }, {
    multi: true,
  });
  // Bump the conversation's message count.
  // NOTE(review): the old comment claimed this updated a date for newest-first
  // sorting, but the code only increments messageCount — confirm intent.
  ConversationsCollection.update(document.conversationId, { $inc: { messageCount: 1 } });
});
// Keep the conversation's message count in sync when a message is deleted.
MessagesCollection.after.remove(function afterRemove(userId, document) {
  ConversationsCollection.update(document.conversationId, { $inc: { messageCount: -1 } });
});
|
// flog.c
#include "flog.h"
#include <stdlib.h>
#include <string.h>
#include <ctype.h>
#include <time.h>
#include <sys/time.h>
#include <sys/stat.h>
#include <assert.h>
#define DATE_START 7
#define TIME_START (DATE_START + 11)
typedef struct FLog
{
    /// Log file base name (a "-YYYYMMDD-HHMMSS.log" suffix is appended on open)
    char file_name[NLOG_MAX_PATH];
    /// Maximum size of a single log file before rotation
    size_t max_size;
    /// Maximum level that will be written (higher levels are filtered out)
    LogLevel max_level;
    /// Handle of the currently open log file
    FILE * file;
    /// Midnight of the current day, used to detect day rollover
    time_t mid_night;
    /// Non-zero: include microseconds in the timestamp
    int enable_usec;
    /// Non-zero: allow hex dumps at levels above L_INFO (see FLog_log_hex)
    int enable_pack_print;
    /// Non-zero once InitFLog has completed successfully
    int binited;
}FLog;
static FLog g_sFlog;
static int FLog_open();
static void FLog_close();
static int FLog_log(LogLevel level, const char* fmt, ...);
static int FLog_strformatreplace(char * srcstr, char * desstr);
static int FLog_vlog(int level, const char * fmt, va_list ap);
static char level_str_[][64] = {
"\033[1;31m2008-11-07 09:35:00 FATAL ",
"\033[1;33m2008-11-07 09:35:00 ERROR ",
"\033[1;35m2008-11-07 09:35:00 WARN ",
"\033[1;32m2008-11-07 09:35:00 INFO ",
"\033[0;00m2008-11-07 09:35:00 DEBUG ",
"\033[0;00m2008-11-07 09:35:00 TRACE ",
};
static char level_str_usec_[][64] = {
"\033[1;31m2008-11-07 09:35:00.000000 FATAL ",
"\033[1;33m2008-11-07 09:35:00.000000 ERROR ",
"\033[1;35m2008-11-07 09:35:00.000000 WARN ",
"\033[1;32m2008-11-07 09:35:00.000000 INFO ",
"\033[0;00m2008-11-07 09:35:00.000000 DEBUG ",
"\033[0;00m2008-11-07 09:35:00.000000 TRACE ",
};
/* Public: initialize the global logger from the given configuration and open
 * the first log file.  Returns 0 on success, -1 when the file cannot be
 * opened.  Must not be called twice without ExitFlog in between. */
int InitFLog(Flogconf logconf)
{
    assert(g_sFlog.binited == 0);
    /* Fix: strncpy does not NUL-terminate when the source fills the buffer;
     * force termination so later strlen/strftime calls stay in bounds. */
    strncpy(g_sFlog.file_name, logconf.file_name, NLOG_MAX_PATH - 1);
    g_sFlog.file_name[NLOG_MAX_PATH - 1] = '\0';
    /* Clamp configured limits to the compiled-in maxima. */
    g_sFlog.max_size = (logconf.max_size > LOGFILE_DEFMAXSIZE)?LOGFILE_DEFMAXSIZE:logconf.max_size;
    g_sFlog.file = NULL;
    g_sFlog.max_level = (logconf.max_level > L_LEVEL_MAX)?L_LEVEL_MAX:logconf.max_level;
    g_sFlog.enable_usec = logconf.enable_usec;
    /* NOTE(review): enable_pack_print is never copied from logconf here, so hex
     * dumps above L_INFO stay disabled — confirm whether Flogconf carries it. */
    if (0 > FLog_open()) {
        return -1;
    }
    g_sFlog.binited = 1;
    return 0;
}
//Public
void ExitFlog()
{
FLog_close();
}
/* Open a new log file named "<file_name>-YYYYMMDD-HHMMSS.log" and refresh the
 * cached date inside the level-prefix tables.  Called from InitFLog and again
 * on rotation (size/day rollover) while binited is already 1, so it must not
 * assert binited == 0 (the original assert fired on every rotation in debug
 * builds).  Returns 0 on success, -1 when fopen fails. */
static int FLog_open()
{
    int i = 0;
    char name[NLOG_MAX_PATH];
    size_t len = 0;
    strncpy(name, g_sFlog.file_name, NLOG_MAX_PATH - 1);
    name[NLOG_MAX_PATH - 1] = '\0';
    len = strlen(name);
    time_t t;
    time(&t);
    struct tm lt = *localtime(&t);
    /* Fix: pass &lt (address of the tm) — the source had been mangled to "<". */
    strftime(name + len, NLOG_MAX_PATH - len, "-%Y%m%d-%H%M%S.log", &lt);
    g_sFlog.file = fopen(name, "a+");
    if (NULL == g_sFlog.file) {
        return -1;
    }
    /* Reuse the buffer for today's date and patch it into both prefix tables. */
    strftime(name, 12, "%Y-%m-%d", &lt);
    for (i = 0; i < L_LEVEL_MAX; i++) {
        memcpy(level_str_[i] + DATE_START, name, 10);
    }
    for (i = 0; i < L_LEVEL_MAX; i++) {
        memcpy(level_str_usec_[i] + DATE_START, name, 10);
    }
    /* Cache today's midnight so FLog_vlog can detect day rollover. */
    lt.tm_hour = lt.tm_min = lt.tm_sec = 0;
    g_sFlog.mid_night = mktime(&lt);
    return 0;
}
/* Close the current log file, if the logger was ever initialized. */
static void FLog_close()
{
    if (g_sFlog.binited != 0) {
        fclose(g_sFlog.file);
        g_sFlog.file = NULL;
    }
}
/* Public: log a printf-style message at an arbitrary level.
 * Thin variadic wrapper over FLog_vlog; returns its result. */
int FLog_log(LogLevel level, const char* fmt, ...)
{
    va_list ap;
    va_start(ap, fmt);
    int ret = FLog_vlog(level, fmt, ap); // not thread-safe: shares static prefix buffers
    va_end(ap);
    return ret;
}
/* Public: log a FATAL-level printf-style message. */
int FLog_log_fatal(const char* fmt, ...)
{
    va_list ap;
    va_start(ap, fmt);
    int ret = FLog_vlog(L_FATAL, fmt, ap);
    va_end(ap);
    return ret;
}
/* Public: log an ERROR-level printf-style message. */
int FLog_log_error(const char* fmt, ...)
{
    va_list ap;
    va_start(ap, fmt);
    int ret = FLog_vlog(L_ERROR, fmt, ap);
    va_end(ap);
    return ret;
}
/* Public: log a WARN-level printf-style message. */
int FLog_log_warn(const char* fmt, ...)
{
    va_list ap;
    va_start(ap, fmt);
    int ret = FLog_vlog(L_WARN, fmt, ap);
    va_end(ap);
    return ret;
}
/* Rewrite a printf format string, replacing every "%s"/"%S" with "%.512s" so
 * over-long string arguments cannot flood a log line.
 * Returns 0 on success, -1 on any error.
 *
 * NOTE(review): every length guard below calls strlen(desstr), i.e. it
 * measures the CURRENT string length of the destination, not its buffer
 * capacity.  The only caller (FLog_vlog) passes a zero-initialized buffer, so
 * strlen(desstr) == 0 and the first guard always fails — the replacement is
 * effectively dead code and the raw fmt gets used.  Fixing this requires a
 * capacity parameter, which would change the signature; flagged instead. */
static int FLog_strformatreplace(char * srcstr, char * desstr)
{
    if (NULL == srcstr || NULL == desstr) {
        return -1;
    }
    /* Guard: compares against strlen(desstr), not capacity — see NOTE above. */
    if (strlen(srcstr) >= strlen(desstr)) {
        return -1;
    }
    unsigned int j = 0;
    desstr[j++] = srcstr[0];
    unsigned int i = 0;
    for (i = 1; i<strlen(srcstr); i++) {
        if (srcstr[i-1] == '%' && (srcstr[i] == 's' || srcstr[i] == 'S')) {
            /* The '%' was already copied by the else branch on the previous
             * iteration; emit ".512s" to complete "%.512s". */
            if (j+5 >= strlen(desstr)) {
                return -1;
            }
            desstr[j++] = '.';
            desstr[j++] = '5';
            desstr[j++] = '1';
            desstr[j++] = '2';
            desstr[j++] = 's';
        }
        else {
            if (j >= strlen(desstr)) {
                return -1;
            }
            desstr[j++] = srcstr[i];
        }
    }
    if (j >= strlen(desstr)) {
        return -1;
    }
    desstr[j++] = '\0';
    return 0;
}
/* Public: log an INFO-level printf-style message. */
int FLog_log_info(const char* fmt, ...)
{
    va_list ap;
    va_start(ap, fmt);
    int ret = FLog_vlog(L_INFO, fmt, ap);
    va_end(ap);
    return ret;
}
/* Public: log a TRACE-level printf-style message. */
int FLog_log_trace(const char* fmt, ...)
{
    va_list ap;
    va_start(ap, fmt);
    int ret = FLog_vlog(L_TRACE, fmt, ap);
    va_end(ap);
    return ret;
}
/* Public: log a DEBUG-level printf-style message. */
int FLog_log_debug(const char* fmt, ...)
{
    va_list ap;
    va_start(ap, fmt);
    int ret = FLog_vlog(L_DEBUG, fmt, ap);
    va_end(ap);
    return ret;
}
/* Core logging routine: stamps the cached level-prefix string with the current
 * time, writes "<prefix><formatted message>\n" to the log file, and rotates
 * the file on day change or when max_size is exceeded.
 * Returns 0 on success, -1 when uninitialized or the level is filtered out.
 * NOTE(review): `level` is used to index level_str_[] without a lower-bound
 * check — callers must pass a valid LogLevel. */
static int FLog_vlog(int level, const char * fmt, va_list ap)
{
    if (g_sFlog.binited == 0 || level > g_sFlog.max_level) {
        return -1;
    }
    struct tm tm_now;
    struct timeval tv;
    struct timezone tz;
    gettimeofday(&tv, &tz);
    time_t now = tv.tv_sec;
    /* Day rollover: reopening also refreshes the cached date and mid_night. */
    int t_diff = (int)(now - g_sFlog.mid_night);
    if (t_diff > 24 * 60 * 60) {
        FLog_close();
        FLog_open();
        t_diff -= 24 * 60 * 60;
    }
    localtime_r(&now, &tm_now);
    if (g_sFlog.enable_usec) {
        /* sprintf NUL-terminates, clobbering the space before the level word;
         * the strlen trick on the following line restores that space. */
        sprintf(((char*)level_str_usec_[level]+TIME_START), "%02d:%02d:%02d.%06ld",
        tm_now.tm_hour, tm_now.tm_min, tm_now.tm_sec, tv.tv_usec);
        level_str_usec_[level][strlen(level_str_usec_[level])] = ' ';
        fputs(level_str_usec_[level], g_sFlog.file);
    }
    else {
        sprintf(((char*)level_str_[level]+TIME_START), "%02d:%02d:%02d",
        tm_now.tm_hour, tm_now.tm_min, tm_now.tm_sec);
        level_str_[level][strlen(level_str_[level])] = ' ';
        fputs(level_str_[level], g_sFlog.file);
    }
    /* Try to clamp %s arguments via FLog_strformatreplace; per its NOTE this
     * currently always fails, so the raw fmt is used. */
    char strformat[128] = "";
    if (0 == FLog_strformatreplace((char *) fmt, strformat)) {
        vfprintf(g_sFlog.file, strformat, ap);
    }
    else {
        vfprintf(g_sFlog.file, fmt, ap);
    }
    // reset color
    if (fmt[strlen(fmt) - 1] != '\n') {
        fputc('\n', g_sFlog.file);
    }
    /* Size-based rotation. */
    if ((size_t)ftell(g_sFlog.file) > g_sFlog.max_size) {
        FLog_close();
        FLog_open();
    }
    return 0;
}
/* Nibble-to-ASCII lookup table used by the hex-dump routines. */
static const char chex[] = "0123456789ABCDEF";
/* Public: log a one-line prefix message followed by a hex dump of `data`. */
int FLog_log_hex_prefix(unsigned char * prefix,unsigned char * data, size_t len, LogLevel level)
{
    FLog_log(level, "%s", prefix);
    return FLog_log_hex(data, len, level);
}
/* Public: write a classic 16-bytes-per-row hex dump of `data` to the log.
 * Row layout: "[NNNN0] <hex bytes, grouped by 4> |<printable ASCII>|" where
 * NNNN is the row index in hex.  Returns 0 on success, -1 when filtered.
 * NOTE(review): when len is an exact multiple of 16, the trailing partial-row
 * section still emits one all-blank row — confirm whether that is intended. */
int FLog_log_hex(unsigned char * data, size_t len, LogLevel level)
{
    size_t i, j, k, l;
    if (level > g_sFlog.max_level ||NULL == data|| NULL == g_sFlog.file) {
        return -1;
    }
    //DON'T disable hex_print when level is l_info, l_warn....
    if (!g_sFlog.enable_pack_print && level > L_INFO) {
        return -1;
    }
    /* Build the fixed skeleton of a row once; offsets 61..76 hold the ASCII
     * column, 7..58 the hex column. */
    char msg_str[128] = {0};
    msg_str[0] = '[';
    msg_str[5] = '0';
    msg_str[6] = ']';
    msg_str[59] = ' ';
    msg_str[60] = '|';
    msg_str[77] = '|';
    msg_str[78] = 0;
    k = 6;
    for (j = 0; j < 16; j++) {
        if ((j & 0x03) == 0) {
            msg_str[++k] = ' ';
        }
        k += 3;
        msg_str[k] = ' ';
    }
    /* Full 16-byte rows. */
    for (i = 0; i < len / 16; i++) {
        msg_str[1] = chex[i >> 12];
        msg_str[2] = chex[(i >> 8)&0x0F];
        msg_str[3] = chex[(i >>4)&0x0F];
        msg_str[4] = chex[i &0x0F];
        k = 7;
        l = i * 16;
        memcpy(msg_str + 61, data + l, 16);
        for (j = 0; j < 16; j++) {
            if ((j & 0x03) == 0) {
                k++;
            }
            msg_str[k++] = chex[data[l] >> 4];
            msg_str[k++] = chex[data[l++] & 0x0F];
            k++;
            /* Replace non-printable bytes in the ASCII column with '.'. */
            if (!isgraph(msg_str[61 + j])) {
                msg_str[61 + j]= '.';
            }
        }
        msg_str[127] = 0;
        fprintf(g_sFlog.file, "# %s\n", msg_str);
    }
    /* Final partial row: hex/ASCII for the remaining len % 16 bytes, padded
     * with blanks so the columns stay aligned. */
    msg_str[1] = chex[i >> 12];
    msg_str[2] = chex[(i >> 8)&0x0F];
    msg_str[3] = chex[(i >>4)&0x0F];
    msg_str[4] = chex[i &0x0F];
    k = 7;
    l = i * 16;
    memcpy(msg_str + 61, data + l, len % 16);
    for (j = 0; j < len % 16; j++) {
        if ((j & 0x03) == 0) {
            k++;
        }
        msg_str[k++] = chex[data[l] >> 4];
        msg_str[k++] = chex[data[l++] & 0x0F];
        k++;
        if (!isgraph(msg_str[61 + j])) {
            msg_str[61 + j]= '.';
        }
    }
    for (; j < 16; j++) {
        if ((j & 0x03) == 0) {
            k++;
        }
        msg_str[k++] = ' ';
        msg_str[k++] = ' ';
        k++;
        msg_str[61 + j]= ' ';
    }
    msg_str[127] = 0;
    fprintf(g_sFlog.file, "# %s\n", msg_str);
    return 0;
}
|
#!/usr/bin/env bash
# Train the stance model on IACv2.
#   $1 — value forwarded to train_model.py's -s flag.
# Fail fast on errors, unset variables, and broken pipes.
set -euo pipefail

train_data=../../data/stance/IACv2_stance-train.csv
dev_data=../../data/stance/IACv2_stance-dev.csv
# Defined for symmetry with train/dev; not consumed by train_model.py here.
test_data=../../data/stance/IACv2_stance-test.csv

# Quote every expansion so paths/arguments with spaces survive word splitting.
python train_model.py -s "$1" -i "${train_data}" -d "${dev_data}"
|
from core import BeamXY, Propagator, FourierDiffractionExecutorXY, BeamVisualizer, xlsx_to_df
from tests.diffraction.test_diffraction import TestDiffraction
NAME = 'diffraction_xy_gauss'


class TestDiffractionXYGauss(TestDiffraction):
    """Regression test: diffraction of a Gaussian beam in the XY geometry.

    Propagates a vortex-free (M=0, m=0) Gaussian beam with the Fourier
    diffraction executor and compares the tracked quantities against the
    analytic columns added by ``_add_analytics_to_df`` (defined on the base
    class — TODO confirm exact columns there).
    """

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        self._add_prefix(NAME)
        # p = 1.0 — presumably the Gaussian exponent parameter used by the
        # analytic comparison; verify against TestDiffraction.
        self._p = 1.0
        # Allowed relative deviation from the analytic curve (1%).
        self._eps = 0.01
        self._png_name = NAME
        # Horizontal reference line drawn at 1/2 on the resulting plot.
        self._horizontal_line = 1 / 2

    def process(self):
        """Run one propagation; return (track_filename, results_dir, z_diff)."""
        beam = BeamXY(medium=self._medium.info,
                      M=0,
                      m=0,
                      p_0_to_p_gauss=self._p_0_to_p_gauss,
                      lmbda=self._lmbda,
                      x_0=self._radius,
                      y_0=self._radius,
                      n_x=256,
                      n_y=256)
        visualizer = BeamVisualizer(beam=beam,
                                    maximum_intensity='local',
                                    normalize_intensity_to=beam.i_0,
                                    plot_type='volume')
        # Constant step chosen so the full diffraction length is covered in n_z steps.
        propagator = Propagator(args=self._args,
                                beam=beam,
                                diffraction=FourierDiffractionExecutorXY(beam=beam),
                                n_z=self._n_z,
                                dz_0=beam.z_diff / self._n_z,
                                const_dz=True,
                                print_current_state_every=0,
                                plot_beam_every=0,
                                visualizer=visualizer)
        propagator.propagate()
        return propagator.logger.track_filename, propagator.manager.results_dir, propagator.beam.z_diff

    def test_diffraction_xy_gauss(self):
        """End-to-end check of the tracked data against the analytic solution."""
        track_filename, path_to_save_plot, z_diff = self.process()
        df = xlsx_to_df(track_filename, normalize_z_to=1)
        self._add_analytics_to_df(df)
        self._check(df)
        if self._flag_plot:
            self._plot(df, path_to_save_plot, z_diff)
|
#! /usr/bin/env bash
#=========================================================================#
# run.sh #
# #
# Author: nic #
# Date: 2017-Jul-04 #
# #
# run the experiments #
# #
# Options: #
# -h Display this help message #
#=========================================================================#
set -e # error on non-zero exit
set -u # undefined variables are an error

# Paths: the cauliflower binary, the virtual-dispatch datalog source, and the
# compiled converter executable built from it.
CAULI="../../build/install/cauliflower/bin/cauliflower"
VDSCR="../optimise_vdispatch/vdispatch.dl"
VDSP="./D_exes/virtual_dispatch_csv_converter"

# Help text: print the boxed "#...#" header comment at the top of this file.
function usage(){
    grep "^#.*#$" $0
}

# Print an error message (when given) plus a help hint, then abort the script.
function errxit(){
    [ $# -gt 0 ] && echo "Error: $@" >&2
    echo "Re-run with -h for help" >&2
    exit 1
}

# Banner marking the start of a pipeline stage.
function stage(){
    echo -n "--------------"
    echo -n $@
    echo "--------------"
}

# Worker thread count: 8 when the machine reports more than 4 CPUs, else the
# reported CPU count.
function thread_count(){
    RET=$(lscpu | grep "^CPU(s):" | tr -d '[:blank:]' | cut -f 2 -d ':')
    ([ $RET -gt 4 ] && echo 8 ) || echo $RET
}
# Compile a souffle datalog program ($2) into an executable ($1), but only
# when the source is newer than the existing binary (make-style check).
# NOTE(review): souffle drops the binary in the CWD, hence the trailing mv;
# this relies on $SFL_DST's basename not colliding with an existing file here.
function souffle_compile(){
    SFL_DST="$1"
    SFL_SRC="$2"
    if [ "$SFL_SRC" -nt "$SFL_DST" ]; then
        stage "souffle compiling $SFL_SRC -> $SFL_DST"
        which souffle || (echo "please put souffle in the path" && exit 1)
        mkdir -p $(dirname "$SFL_DST")
        souffle -j8 -o "$SFL_DST" "$SFL_SRC" | tee "$SFL_DST.log"
        mv $(basename "$SFL_DST") "$SFL_DST"
    fi
}
# convert all the facts files into a sane version
#   $1 — source directory containing *.facts, $2 — sanitised case name.
# Escapes _, ", ', comma and space so downstream CSV conversion is unambiguous.
function sanitise(){
    DIR="./D_sane/$2/"
    if [ ! -d "$DIR" ]; then
        stage sanitise $2
        mkdir -p "$DIR"
        for FI in "$1"/*.facts; do
            sed -e 's/_/__/g' -e 's/"/_Q/g' \
                -e "s/'/_q/g" -e 's/,/_c/g' \
                -e 's/ /_s/g' < "$FI" > "$DIR"/$(basename "$FI")
        done
        # Fix: the original ended with `[ -f ... ] && cp ...`; when meta was
        # absent that made the function return 1 and `set -e` killed the whole
        # script.  An explicit if makes the copy optional without failing.
        if [ -f "$1/meta" ]; then
            cp "$1/meta" "$DIR/meta"
        fi
    fi
}
# Resolve a case name ($1) back to its dataset directory by pairing the name
# list (cases) with the directory list (cases -d), then sanitise it.
function sanitise_wrapper(){
    sanitise $(paste <(cases) <(cases -d) | grep "^$1" | cut -f 2) "$1"
}
# convert the input files to csv
# Runs the (lazily compiled) vdispatch converter over D_sane/$1 into
# D_converted/$1, then rewrites its TSV output into CSV: first two tabs become
# commas, remaining tabs become colons.
function convert_to_csv(){
    SRC="D_sane/$1"
    DST="D_converted/$1"
    if [ ! -d "$DST" ]; then
        souffle_compile "$VDSP" "$VDSCR"
        stage "convert" $1
        mkdir -p "$DST"
        "$VDSP" -j${OMP_NUM_THREADS} -F "$SRC" -D "$DST" 2>&1 | tee "$DST/log.txt"
        for CSV in "$DST/"*.csv; do
            sed -i -e 's/\t/,/' -e 's/\t/,/' -e 's/\t/:/g' $CSV
        done
    fi
}
# optimise $2 with $3 rounds on cases $@
# Produces (and caches) an optimised spec D_specs/<logic>_opt_<rounds>.cflr,
# then times case $1 on it via timed_execution.
# NOTE(review): cauliflower writes the optimised spec to the CWD under the
# input's basename — hence the mv; confirm this matches the installed version.
function optimised_execution(){
    CASE="$1"
    SPEC="$2"
    ROUNDS="$3"
    shift 3
    # Logic name = leading alphabetic prefix of the spec's basename.
    LOGIC=$(basename "$SPEC" | sed 's/[^a-zA-Z].*//')
    OUT="D_specs/${LOGIC}_opt_${ROUNDS}.cflr"
    if [ ! -f "$OUT" ]; then
        stage optimising $OUT
        "$CAULI" -O "$ROUNDS" "$SPEC" "$@"
        mkdir -p $(dirname "$OUT")
        mv $(basename "$SPEC") "$OUT"
        rm cauliflower.log
    fi
    timed_execution "$CASE" "$OUT"
}
# run case $1 on the spec in $2, optionally re-run this $3 times
# Compiles the spec into an executable on first use, then runs it against the
# converted input with a 10-minute timeout, recording results under
# D_results/<logic>/<case>_<i>.  Cached: existing result files are not re-run.
function timed_execution(){
    CASE="$1"
    SPEC="$2"
    RUNS="${3-3}"   # default: three repetitions
    for i in $(seq 1 $RUNS); do
        INPUT="D_converted/$CASE"
        LOGIC=$(basename "${SPEC%.cflr}")
        EXE="D_exes/${LOGIC}"
        OUT="D_results/$LOGIC/${CASE}_$i"
        if [ ! -f "$OUT" ]; then
            if [ ! -x "$EXE" ]; then
                stage "compiling $EXE"
                mkdir -p $(dirname "$EXE")
                rm -f cauliflower.log
                "$CAULI" -c -r -p -o $(dirname "$EXE") "$SPEC"
                mv cauliflower.log "$EXE.log"
            fi
            stage "run ($i) $LOGIC $CASE"
            mkdir -p $(dirname "$OUT")
            # A timeout (or an empty result) is recorded as TIMEOUT so later
            # analysis still finds a well-formed result file.
            timeout 600 "$EXE" "$INPUT" 2>&1 | tee "$OUT" || echo "solve semi-naive=TIMEOUT" > "$OUT"
            [ -s "$OUT" ] || echo "solve semi-naive=TIMEOUT" > "$OUT"
            echo "=============" >> "$OUT"
            echo "threads=$OMP_NUM_THREADS" >> "$OUT"
        fi
    done
}
# Souffle baseline: run case $1 on datalog program $2, $3 (default 3) times,
# recording wall time and CPU saturation in the same D_results layout so the
# numbers are comparable with timed_execution's.
function timed_souffle(){
    CASE="$1"
    DL="$2"
    RUNS="${3-3}"
    S_EXE=./D_exes/$(basename "${DL%.dl}")
    for i in $(seq 1 $RUNS); do
        OUT=./D_results/$(basename "$S_EXE")/${CASE}_$i
        if [ ! -f "$OUT" ]; then
            souffle_compile "$S_EXE" "$DL"
            stage "souffle run ($i) $CASE" $(basename "$S_EXE")
            mkdir -p $(dirname "$OUT")
            /usr/bin/time -f "command=%C\nsolve semi-naive=%e\nsaturation=%P" \
                "$S_EXE" -j${OMP_NUM_THREADS} -F "./D_converted/$CASE" 2>&1 | tee "$OUT"
        fi
    done
}
#====================================#
# CaseRun, the important case runner #
#====================================#
# determine the experimental cases
# No arguments: emit flattened case NAMES (directory paths with separators
# replaced by underscores).  Any argument (conventionally -d): emit the raw
# dataset DIRECTORIES, found by locating AssignHeapAllocation.facts files.
function cases() {
    if [ $# == 0 ]; then
        cases -d | sed -e 's/.*DATASETS\///' -e 's/[ \t]/_/g' -e 's/\//_/g'
    else
        find ./DATASETS -type f -name "AssignHeapAllocation.facts" | xargs dirname | sort -u
    fi
}
# Invoke $1 (a function name) once per experimental case, passing the case
# name first and forwarding any remaining arguments.
function case_run(){
    local ACTION="$1"
    shift
    local C
    for C in $(cases); do
        "$ACTION" "$C" "$@"
    done
}
#===================#
# The actual script #
#===================#
# Option parsing: only -h (help) is recognised.
while getopts "h" opt; do
    case $opt in
        h)
            usage
            exit 0
            ;;
        \?)
            errxit Unrecognised command
            ;;
    esac
done
shift $(($OPTIND -1))
# Sanity checks.  NOTE(review): each guard runs `exit 1` inside a (...)
# subshell, which only exits that subshell; the script actually stops because
# `set -e` treats the failing subshell as the final command of the || list.
[ $# == 0 ] || (echo "no arguments" && exit 1)
[ $(dirname "$0") == "." ] || (echo "run from this directory, i.e.\"./run.sh\"" && exit 1)
[ -d "./DATASETS" ] || (echo "put (or symlink) test cases in ./DATASETS" && exit 1)
# Default thread count (lscpu-derived, capped at 8) unless already exported.
OMP_NUM_THREADS=${OMP_NUM_THREADS-`thread_count`}
echo "Threads = $OMP_NUM_THREADS"
# build cauliflower
(pushd ../.. && ./gradlew installDist && popd)
# Pipeline: sanitise facts -> convert to CSV -> timed + optimised runs per
# dispatch spec -> souffle baseline.
case_run sanitise_wrapper
case_run convert_to_csv
for DISP in ../../src/test/examples/dispatch/*; do
    case_run timed_execution "$DISP"
    case_run optimised_execution "$DISP" 1 ./D_converted/2006_antlr
    case_run optimised_execution "$DISP" 999 ./D_converted/2006_antlr
done
case_run timed_souffle "./souffle_virtual.dl"
|
import javafx.scene.paint.Color;
/**
 * View abstraction for a single cell on the game board; implementations
 * render the visual state the game model pushes to them.
 */
public interface CellView {
    /**
     * Make cell appear lit up on game board
     **/
    void turnOn();

    /**
     * Set color of the cell in display
     *
     * @param color the color of the cell
     **/
    void setColor(Color color);
}
using Microsoft.VisualStudio.PlatformUI;
using PortingAssistantVSExtensionClient.Common;
using PortingAssistantVSExtensionClient.Options;
using System;
namespace PortingAssistantVSExtensionClient.Dialogs
{
/// <summary>
/// Modal dialog asking the user to choose a target framework; the choice is
/// persisted through the shared UserSettings instance.
/// </summary>
public partial class SelectTargetDialog : DialogWindow
{
    private readonly UserSettings _userSettings;

    /// <summary>True when the user confirmed a selection; false when cancelled.</summary>
    public bool ClickResult = false;

    public SelectTargetDialog()
    {
        _userSettings = UserSettings.Instance;
        InitializeComponent();
        // Offer every known framework choice in the dropdown.
        foreach (string framework in TargetFrameworkType.ALL_SElECTION)
        {
            TargetFrameWorkDropDown.Items.Add(framework);
        }
#if Dev16
        // Dev16 builds exclude the .NET 6 option.
        TargetFrameWorkDropDown.Items.Remove(TargetFrameworkType.NET60);
#endif
        TargetFrameWorkDropDown.SelectedValue = TargetFrameworkType.NO_SELECTION;
        this.Title = "Choose a Target Framework";
    }

    /// <summary>Show the dialog modally and report whether a framework was chosen.</summary>
    public static bool EnsureExecute()
    {
        var dialog = new SelectTargetDialog();
        dialog.ShowModal();
        return dialog.ClickResult;
    }

    // OK button: persist a real selection, or prompt when nothing was chosen.
    private void Button_Click(object sender, System.Windows.RoutedEventArgs e)
    {
        if (!TargetFrameWorkDropDown.SelectedValue.Equals(TargetFrameworkType.NO_SELECTION))
        {
            _userSettings.TargetFramework = (string)TargetFrameWorkDropDown.SelectedValue;
            _userSettings.UpdateTargetFramework();
            ClickResult = true;
            Close();
        }
        else
        {
            ChooseFrameworkLabel.Content = "Please make a selection of target framework!";
        }
    }

    // Cancel button: dismiss without saving.
    private void Button_Click_1(object sender, System.Windows.RoutedEventArgs e)
    {
        ClickResult = false;
        Close();
    }
}
}
|
// https://github.com/theGordHoard/hoardbot/blob/master/src/%40types/difflib.d.ts
// Copyright Katlyn Lorimer, all rights reserved.
// Minimal ambient typings for the npm `difflib` package (port of Python's
// difflib). Only getCloseMatches is declared here.
declare module 'difflib' {
  // The best (no more than n) matches among the possibilities are returned in
  // a list, sorted by similarity score, most similar first.
  export function getCloseMatches <T> (
    // A sequence for which close matches are desired
    word: T,
    // A list of sequences against which to match word
    possibilities: T[],
    // The maximum number of close matches to return; must be greater than 0
    // (defaults follow the underlying library — TODO confirm default of 3).
    n?: number,
    // A float in the range [0, 1]. Possibilities that don't score at least
    // that similar to word are ignored.
    cutoff?: number
  ): T[]
}
|
using Balta.SharedContext.Enums;
namespace Balta.SharedContext;
/// <summary>
/// A lecture entry; inherits common members from <c>Base</c>.
/// </summary>
public class Lecture : Base
{
    // Display/order position ("Ordem" is Portuguese for "order").
    // NOTE(review): renaming to Order would break existing callers; kept as-is.
    public int Ordem { get; set; }
    // Lecture title.
    public string Title { get; set; }
    // Duration of the lecture in minutes.
    public int DurationInMinutes { get; set; }
    // Difficulty/content level of the lecture.
    public EContentLevel Level { get; set; }
}
package amf.resolution
import amf.core.client.scala.config.RenderOptions
import amf.core.internal.remote.{AmfJsonHint, Raml10, Raml10YamlHint}
import scala.concurrent.ExecutionContext
// Resolution tests for RAML extensions: each test cycles an input document
// through parse + resolution and compares the result with a golden file under
// basePath.  multiGoldenTest variants render to AMF JSON for each configured
// render option set.
class ExtensionResolutionTest extends ResolutionTest {
  // Cycles run asynchronously; use the global execution context.
  override implicit val executionContext: ExecutionContext = ExecutionContext.Implicits.global

  val basePath = "amf-cli/shared/src/test/resources/resolution/extension/"

  test("Extension with annotations to Raml") {
    cycle("input.raml", "output.raml", Raml10YamlHint, target = Raml10YamlHint, directory = s"${basePath}annotations/")
  }

  test("Extension basic to Raml") {
    cycle("input.raml", "output.raml", Raml10YamlHint, target = Raml10YamlHint, directory = s"${basePath}basic/")
  }

  test("Extension with traits to Raml") {
    cycle("input.raml", "output.raml", Raml10YamlHint, target = Raml10YamlHint, directory = s"${basePath}traits/")
  }

  multiGoldenTest("Extension with traits to Amf", "output.%s") { config =>
    cycle(
      "input.raml",
      config.golden,
      Raml10YamlHint,
      target = AmfJsonHint,
      directory = s"${basePath}traits/",
      renderOptions = Some(config.renderOptions),
      transformWith = Some(Raml10)
    )
  }

  test("Extension chain to Raml") {
    cycle("input.raml", "output.raml", Raml10YamlHint, target = Raml10YamlHint, directory = s"${basePath}chain/")
  }

  test("Extension with example to Raml") {
    cycle("input.raml", "output.raml", Raml10YamlHint, target = Raml10YamlHint, directory = s"${basePath}example/")
  }

  multiGoldenTest("Trait should be applied to all operations that inherit from it", "output.%s") { config =>
    cycle(
      "extension.raml",
      config.golden,
      Raml10YamlHint,
      target = AmfJsonHint,
      directory = s"${basePath}operation/",
      renderOptions = Some(config.renderOptions),
      transformWith = Some(Raml10)
    )
  }

  // Golden files carry source maps and pretty-printed output.
  override def defaultRenderOptions: RenderOptions = RenderOptions().withSourceMaps.withPrettyPrint
}
|
import Vue from 'vue';
import routify from './routify';
/**
 * Build Vue components and routes from a module definition object.
 *
 * @param {Object} definitions - expects `components` (map of name -> {template,
 *   controller}) and `routes` (passed through to routify) — TODO confirm shape
 *   against callers.
 * @returns {{components: Object, routes: *}} registered Vue components keyed by
 *   name, plus the routes produced by routify.
 */
export default function modularize(definitions) {
  const componentMap = definitions.components || {};
  const components = Object.keys(componentMap).reduce((map, key) => {
    const spec = componentMap[key];
    const template = spec.template || '';
    const Controller = spec.controller || function () {};
    const instance = new Controller();
    const methods = instance.methods || {};
    const data = instance.data || defaultData;
    // Fix: named `options` — the original reused `definitions` here, shadowing
    // the function parameter inside the callback.
    const options = { template, methods, data };
    map[key] = Vue.component(key, options);
    return map;
  }, {});
  const routes = routify(definitions.routes, components);
  return { components, routes };
}
// Fallback `data` factory for components whose controller supplies none:
// every call yields a fresh, empty state object.
function defaultData() {
  const emptyState = {};
  return emptyState;
}
#!perl
# Call Uber H3's geoToH3 via FFI: pack a GeoCoord struct and look up the cell
# index at the requested resolution.
use strict;
use warnings;
use FFI::CheckLib qw{find_lib};
use FFI::Platypus qw{};
use Data::Dumper qw{Dumper};
use Convert::Binary::C qw{};
use FFI::C;

# Locate and bind the H3 shared library.
my $libPath = find_lib(lib=>'h3');
my $ffiObj = FFI::Platypus->new(api=>1);
$ffiObj->lib($libPath);

# Mirror of H3's GeoCoord struct.
# NOTE(review): the H3 C API expects lat/lon in radians — confirm whether the
# degree values below need conversion before packing.
my $c = Convert::Binary::C->new->parse('
struct GeoCoord {
  double lat;
  double lon;
};
');

$ffiObj->attach('geoToH3',['GeoCoord', 'int'],'uint64');

my $lat = 39.0;
my $lon = -77.0;
my $level = 10;

# Fixes two bugs in the original pack call:
#  - Convert::Binary::C::pack requires the TYPE name as its first argument;
#  - the hash key was written as $lon (i.e. "-77") instead of the literal 'lon'.
my $ll = $c->pack('GeoCoord', {lat=>$lat, lon=>$lon});
print Dumper({ll=>$ll});

my $h3 = geoToH3($ll, $level);
print Dumper({h3=>$h3});
|
package com.gildedrose.item
import com.gildedrose.Item
import com.gildedrose.Quality
import com.gildedrose.bonus.QualityBonus
import com.gildedrose.bonus.QualityBonuses
/**
 * "Backstage passes" Gilded Rose item: quality rises in extra steps as the
 * concert approaches (bonus schedule below) and drops to the minimum once the
 * sell-by date has passed.
 */
data class BackstagePasses(
    override var name: String,
    override var sellIn: Int,
    override var quality: Int,
) : Item() {
    // Bonus steps: +3 within 5 days of the concert, +2 within 10 days.
    private val qualityBonuses = QualityBonuses(
        QualityBonus(bonus = 3, days = 5),
        QualityBonus(bonus = 2, days = 10),
    )

    override fun updateQuality() {
        increaseQualityMultipleTimes()
        decreaseSellIn()
        if (hasExpired) {
            dropQualityToMinimum()
        }
    }

    // Apply as many quality increases as the current sellIn window grants.
    // NOTE(review): the exact step count semantics live in
    // QualityBonuses.findBonus (defined elsewhere) — confirm there.
    private fun increaseQualityMultipleTimes() {
        repeat(qualityBonus()) { increaseQuality() }
    }

    private fun qualityBonus() = qualityBonuses.findBonus { days -> sellIn <= days }

    // After the concert, passes are worthless.
    private fun dropQualityToMinimum() {
        quality = Quality.MINIMUM
    }
}
|
# Install the statusbar helper scripts into /usr/local/bin.
dest_dir=/usr/local/bin

# Fix: -p so the command does not fail when the directory already exists
# (plain `mkdir /usr/local/bin` errors on virtually every system).
sudo mkdir -p "$dest_dir"

# Copy each script and make it executable; the loop replaces the original
# twelve duplicated cp/chmod pairs (same scripts, same modes).
for script in wifi internet nettraf dmenuunicode dmenumount dmenuumount \
              disk battery volume clock memory nusage; do
    sudo cp -i "$script" "$dest_dir"
    sudo chmod 775 "$dest_dir/$script"
done

echo "Done copying the scripts"
|
use thiserror::Error;
use solana_program::{msg, program_error::ProgramError};
/// Errors raised by the stream program.  Each variant carries a display
/// message via `thiserror` and converts into `ProgramError::Custom` (see the
/// `From` impl below) using the variant's declaration index as the code.
#[derive(Error, Debug, Copy, Clone)]
pub enum StreamError {
    /// A supplied public key could not be parsed.
    #[error("Failed to parse the pubkey")]
    PubKeyParseError,
    /// The provided admin account is not the expected one.
    #[error("Admin account invalid")]
    AdminAccountInvalid,
    /// The account balance cannot cover the requested operation.
    #[error("Not enough lamports in account")]
    NotEnoughLamports,
    /// The stream's start/end timestamps are inconsistent.
    #[error("Start time or end time for the stream is invalid")]
    InvalidStartOrEndTime,
    /// The receiver's token balance is insufficient for withdrawal.
    #[error("Receiver does not own enough tokens")]
    WithdrawError,
}
/// Log the error's display message and map it to a custom program error,
/// using the enum's discriminant (declaration order, 0-based) as the code.
impl From<StreamError> for ProgramError {
    fn from(e: StreamError) -> Self {
        msg!("{}", e);
        ProgramError::Custom(e as u32)
    }
}
|
#include "user_tree.h"
#include <iostream>
using namespace std;
// Allocate a fresh node holding `value` and assign it through `root`.
// The previous value of `root` is overwritten without being freed — callers
// own deallocation of any existing subtree.
// NOTE(review): left/right start as nullptr only if TreeNode's definition in
// user_tree.h value-initializes its pointers — confirm there.
void user_tree__create_node(TreeNodePtr &root, int value)
{
    root = new TreeNode();
    root->value = value;
}
// Print the tree in pre-order (node, left, right), one value per line.
void user_tree__preorder_traversal(TreeNodePtr root)
{
    if (root == nullptr)
    {
        return;
    }
    cout << root->value << endl;
    user_tree__preorder_traversal(root->left);
    user_tree__preorder_traversal(root->right);
}
// Print the tree in in-order (left, node, right), one value per line.
void user_tree__inorder_traversal(TreeNodePtr root)
{
    if (root == nullptr)
    {
        return;
    }
    user_tree__inorder_traversal(root->left);
    cout << root->value << endl;
    user_tree__inorder_traversal(root->right);
}
// Print the tree in post-order (left, right, node), one value per line.
void user_tree__postorder_traversal(TreeNodePtr root)
{
    if (root == nullptr)
    {
        return;
    }
    user_tree__postorder_traversal(root->left);
    user_tree__postorder_traversal(root->right);
    cout << root->value << endl;
}
|
using System;
using System.Collections.Generic;
using System.Linq;
using ARKit;
using CoreAnimation;
using CoreFoundation;
using CoreGraphics;
using Foundation;
using Newtonsoft.Json;
using OpenTK;
using SceneKit;
using UIKit;
namespace PlacingObjects
{
public class VirtualObjectManager : NSObject
{
// Load the catalog of placeable objects from the bundled VirtualObjects.json
// once, when the type is first touched.
static VirtualObjectManager()
{
    var jsonPath = NSBundle.MainBundle.PathForResource("VirtualObjects", "json");
    var jsonData = System.IO.File.ReadAllText(jsonPath);
    AvailableObjects = JsonConvert.DeserializeObject<List<VirtualObjectDefinition>>(jsonData);
}

// Catalog of object definitions available for placement (read-only to callers).
public static List<VirtualObjectDefinition> AvailableObjects { get; protected set; }

// Serial worker queue that position updates are dispatched onto.
DispatchQueue queue;

// Receives placement / transform notifications; may be null.
public IVirtualObjectManagerDelegate Delegate { get; set; }

// Objects currently placed in the scene.
public List<VirtualObject> VirtualObjects { get; protected set; } = new List<VirtualObject>();

// Most recently manipulated object; the fallback target for new gestures.
VirtualObject lastUsedObject = null;

// Gesture currently in progress, if any.
Gesture currentGesture = null;

public VirtualObjectManager(DispatchQueue queue)
{
    this.queue = queue;
}
/// <summary>
/// Move <paramref name="vObject"/> so it appears under the given screen point.
/// Hit testing runs on the main queue; the resulting position update is applied
/// on the serial worker queue.
/// </summary>
public void Translate(VirtualObject vObject, ARSCNView sceneView, CGPoint screenPos, bool instantly, bool infinitePlane)
{
    DispatchQueue.MainQueue.DispatchAsync(() =>
    {
        var result = WorldPositionFromScreenPosition(screenPos, sceneView, vObject.Position, infinitePlane);
        var newPosition = result.Item1;
        if (newPosition == null)
        {
            // Fix: always bail out when no world position was found.  The
            // original only returned when a delegate was attached and otherwise
            // fell through to dereference newPosition.Value below.
            this.Delegate?.CouldNotPlace(this, vObject);
            return;
        }
        var currentFrame = ViewController.CurrentFrame;
        if (currentFrame == null || currentFrame.Camera == null)
        {
            return;
        }
        var cameraTransform = currentFrame.Camera.Transform;
        queue.DispatchAsync(() => SetPosition(vObject, newPosition.Value, instantly, result.Item3, cameraTransform));
    });
}
/// <summary>Unload every placed object and clear the tracking list.</summary>
internal void RemoveAllVirtualObjects()
{
    VirtualObjects.ForEach(placedObject => placedObject.Unload());
    VirtualObjects.Clear();
}
// Route to the immediate placement path or the smoothed update path.
private void SetPosition(VirtualObject virtualObject, SCNVector3 position, bool instantly, bool filterPosition, NMatrix4 cameraTransform)
{
    if (!instantly)
    {
        UpdateVirtualObjectPosition(virtualObject, position, filterPosition, cameraTransform);
        return;
    }
    SetNewVirtualObjectPosition(virtualObject, position, cameraTransform);
}
/// <summary>
/// Smoothed position update: clamps the object to 10 m from the camera and,
/// when <paramref name="filterPosition"/> is set, averages the camera-to-object
/// distance over the last ten updates to reduce jitter.
/// </summary>
public void UpdateVirtualObjectPosition(VirtualObject virtualObject, SCNVector3 position, bool filterPosition, NMatrix4 cameraTransform)
{
    var cameraWorldPos = cameraTransform.Translation();
    var cameraToPosition = position.Subtract(cameraWorldPos);

    // Limit the distance of the object from the camera to a maximum of 10m
    if (cameraToPosition.LengthFast > 10)
    {
        cameraToPosition = cameraToPosition.Normalized() * 10;
    }

    // Compute the average distance of the object from the camera over the last ten
    // updates. If filterPosition is true, compute a new position for the object
    // with this average. Notice that the distance is applied to the vector from
    // the camera to the content, so it only affects the perceived distance of the
    // object - the averaging does _not_ make the content "lag".
    var hitTestResultDistance = cameraToPosition.LengthFast;
    virtualObject.RecentVirtualObjectDistances.Add(hitTestResultDistance);
    virtualObject.RecentVirtualObjectDistances.KeepLast(10);
    if (filterPosition)
    {
        var averageDistance = virtualObject.RecentVirtualObjectDistances.Average();
        var averagedDistancePos = cameraWorldPos + cameraToPosition.Normalized() * averageDistance;
        virtualObject.Position = averagedDistancePos;
    }
    else
    {
        virtualObject.Position = cameraWorldPos + cameraToPosition;
    }
}
// Immediate placement: put the object at the target (clamped to 10 m from the
// camera) and reset the distance-smoothing history.
private void SetNewVirtualObjectPosition(VirtualObject virtualObject, SCNVector3 position, NMatrix4 cameraTransform)
{
    var cameraWorldPos = cameraTransform.Translation();
    var offsetFromCamera = position.Subtract(cameraWorldPos);

    // Limit the distance of the object from the camera to a maximum of 10m
    if (offsetFromCamera.LengthFast > 10)
    {
        offsetFromCamera = offsetFromCamera.Normalized() * 10;
    }

    virtualObject.Position = cameraWorldPos + offsetFromCamera;
    // A fresh placement invalidates the smoothing history.
    virtualObject.RecentVirtualObjectDistances.Clear();
}
/// <summary>
/// For every placed object, drop it onto the detected plane when it hovers
/// within 5 cm of it and lies inside the plane's extent (with 10% tolerance).
/// </summary>
public void CheckIfObjectShouldMoveOntoPlane(ARPlaneAnchor anchor, SCNNode planeAnchorNode)
{
    foreach (var vo in VirtualObjects)
    {
        // Get the object's position in the plane's coordinate system.
        var objectPos = planeAnchorNode.ConvertPositionToNode(vo.Position, vo.ParentNode);
        if (Math.Abs(objectPos.Y) < float.Epsilon)
        {
            continue; // Fix: was `return`, which skipped all remaining objects.
        }
        // Add 10% tolerance to the corners of the plane.
        var tolerance = 0.1f;
        var minX = anchor.Center.X - anchor.Extent.X / 2f - anchor.Extent.X * tolerance;
        var maxX = anchor.Center.X + anchor.Extent.X / 2f + anchor.Extent.X * tolerance;
        var minZ = anchor.Center.Z - anchor.Extent.Z / 2f - anchor.Extent.Z * tolerance;
        var maxZ = anchor.Center.Z + anchor.Extent.Z / 2f + anchor.Extent.Z * tolerance;
        if (objectPos.X < minX || objectPos.X > maxX || objectPos.Z < minZ || objectPos.Z > maxZ)
        {
            continue; // Fix: was `return` — later objects may still be over the plane.
        }
        // Drop the object onto the plane if it is near it.
        var verticalAllowance = 0.05f;
        var epsilon = 0.001; // Do not bother updating if the difference is less than a mm.
        var distanceToPlane = Math.Abs(objectPos.Y);
        if (distanceToPlane > epsilon && distanceToPlane < verticalAllowance)
        {
            Delegate.DidMoveObjectOntoNearbyPlane(this, vo);
            SCNTransaction.Begin();
            SCNTransaction.AnimationDuration = distanceToPlane * 500; // Move 2mm per second
            SCNTransaction.AnimationTimingFunction = CAMediaTimingFunction.FromName(CAMediaTimingFunction.EaseInEaseOut);
            // NOTE(review): M32 as the plane's world-Y looks suspect (row/column
            // convention) — Apple's sample uses the transform's translation Y; confirm.
            vo.Position = new SCNVector3(vo.Position.X, anchor.Transform.M32, vo.Position.Z);
            SCNTransaction.Commit();
        }
    }
}
// Forward a touch-cancel event to the gesture in progress, if any.
internal void ReactToTouchesCancelled(NSSet touches, UIEvent evt)
{
    if (!VirtualObjects.Any())
    {
        return;
    }
    currentGesture = currentGesture?.UpdateGestureFromTouches(touches, TouchEventType.TouchCanceled);
}
// Advance the active gesture with the new touches; when the gesture tracks an
// object, remember it and notify the delegate that its transform changed.
private void MoveIfNecessary(NSSet touches, UIEvent evt, TouchEventType evtType)
{
    if (!VirtualObjects.Any())
    {
        return;
    }
    currentGesture = currentGesture?.UpdateGestureFromTouches(touches, evtType);
    var touchedObject = currentGesture?.LastUsedObject;
    if (touchedObject != null)
    {
        lastUsedObject = touchedObject;
        Delegate?.TransformDidChangeFor(this, touchedObject);
    }
}
// Touch-ended events terminate the in-flight move.
internal void ReactToTouchesEnded(NSSet touches, UIEvent evt)
{
    MoveIfNecessary(touches, evt, TouchEventType.TouchEnded);
}

// Touch-moved events continue the in-flight move.
internal void ReactToTouchesMoved(NSSet touches, UIEvent evt)
{
    MoveIfNecessary(touches, evt, TouchEventType.TouchMoved);
}
/// <summary>
/// Resolve a screen point to a world position using a cascade of hit tests.
/// Returns (position, plane anchor when one was hit, whether the position is
/// high-confidence and should be smoothed/filtered).
/// </summary>
internal (SCNVector3?, ARPlaneAnchor, Boolean) WorldPositionFromScreenPosition(CGPoint position, ARSCNView sceneView, SCNVector3? objectPos, bool infinitePlane = false)
{
    var dragOnInfinitePlanesEnabled = AppSettings.DragOnInfinitePlanes;

    // -------------------------------------------------------------------------------
    // 1. Always do a hit test against existing plane anchors first.
    //    (If any such anchors exist & only within their extents.)
    var planeHitTestResults = sceneView.HitTest(position, ARHitTestResultType.ExistingPlaneUsingExtent);
    var result = planeHitTestResults.FirstOrDefault();
    if (result != null)
    {
        var planeHitTestPosition = result.WorldTransform.Translation();
        var planeAnchor = result.Anchor;
        return (planeHitTestPosition, (ARPlaneAnchor)planeAnchor, true);
    }

    // -------------------------------------------------------------------------------
    // 2. Collect more information about the environment by hit testing against
    //    the feature point cloud, but do not return the result yet.
    SCNVector3? featureHitTestPosition = null;
    var highQualityFeatureHitTestResult = false;
    var highQualityfeatureHitTestResults = sceneView.HitTestWithFeatures(position, 18, 0.2, 2.0);
    if (highQualityfeatureHitTestResults.Count() > 0)
    {
        var highQualityFeatureHit = highQualityfeatureHitTestResults.First();
        featureHitTestPosition = highQualityFeatureHit.Position;
        highQualityFeatureHitTestResult = true;
    }

    // -------------------------------------------------------------------------------
    // 3. If desired or necessary (no good feature hit test result): Hit test
    //    against an infinite, horizontal plane (ignoring the real world).
    if ((infinitePlane && dragOnInfinitePlanesEnabled) || !highQualityFeatureHitTestResult)
    {
        if (objectPos.HasValue)
        {
            var pointOnInfinitePlane = sceneView.HitTestWithInfiniteHorizontalPlane(position, objectPos.Value);
            if (pointOnInfinitePlane != null)
            {
                return (pointOnInfinitePlane, null, true);
            }
        }
    }

    // -------------------------------------------------------------------------------
    // 4. If available, return the result of the hit test against high quality
    //    features if the hit tests against infinite planes were skipped or no
    //    infinite plane was hit.
    if (highQualityFeatureHitTestResult)
    {
        return (featureHitTestPosition, null, false);
    }

    // -------------------------------------------------------------------------------
    // 5. As a last resort, perform a second, unfiltered hit test against features.
    //    If there are no features in the scene, the result returned here will be nil.
    var unfilteredFeatureHitTestResults = sceneView.HitTestWithFeatures(position);
    if (unfilteredFeatureHitTestResults.Count() > 0)
    {
        var unfilteredFeaturesResult = unfilteredFeatureHitTestResults.First();
        return (unfilteredFeaturesResult.Position, null, false);
    }
    return (null, null, false);
}
// Starts a new gesture on the first touch (or feeds the touch into the
// gesture already in progress) and tracks the object being manipulated.
public void ReactToTouchesBegan(NSSet touches, UIEvent evt, ARSCNView scnView)
{
    if (!VirtualObjects.Any())
    {
        return;
    }

    if (currentGesture == null)
    {
        currentGesture = Gesture.StartGestureFromTouches(touches, scnView, lastUsedObject, this);
    }
    else
    {
        currentGesture = currentGesture.UpdateGestureFromTouches(touches, TouchEventType.TouchBegan);
    }

    // Remember which object the gesture grabbed so later events default to it.
    if (currentGesture != null && currentGesture.LastUsedObject != null)
    {
        lastUsedObject = currentGesture.LastUsedObject;
    }
}
// Registers the object, loads its assets, and places it in the scene at the
// given position, notifying the delegate before and after loading.
public void LoadVirtualObject(VirtualObject vo, SCNVector3 position, NMatrix4 cameraTransform)
{
    VirtualObjects.Add(vo);
    Delegate?.WillLoad(this, vo);

    vo.Load();

    // Immediately place the object in 3D space
    SetNewVirtualObjectPosition(vo, position, cameraTransform);
    lastUsedObject = vo;

    Delegate?.DidLoad(this, vo);
}
// Removes the placed instance of the object definition at `index` in the
// AvailableObjects catalogue, if one is currently placed.
public void RemoveVirtualObject(int index)
{
    if (index < 0 || index >= AvailableObjects.Count())
    {
        return;
    }

    var def = AvailableObjects[index];
    var vo = VirtualObjects.Where(vo2 => vo2.Definition.Equals(def)).FirstOrDefault();
    if (vo == null)
    {
        return;
    }

    UnloadVirtualObject(vo);
    // Remove the instance that was actually found. The previous code removed
    // VirtualObjects[index], but `index` indexes AvailableObjects, not
    // VirtualObjects, so the wrong instance could be removed (it also read a
    // dead local `pos`).
    VirtualObjects.Remove(vo);
}
// Unloads the object's resources and detaches it from the scene graph,
// resetting the last-used reference when it pointed at this object.
// NOTE(review): this runs before the caller removes `vo` from VirtualObjects,
// so the fallback below may reselect the very object being unloaded — confirm
// whether VirtualObjects[0] is the intended replacement.
private void UnloadVirtualObject(VirtualObject vo)
{
    vo.Unload();
    vo.RemoveFromParentNode();
    if (lastUsedObject == vo)
    {
        lastUsedObject = null;
        if (VirtualObjects.Count() > 1)
        {
            lastUsedObject = VirtualObjects[0];
        }
    }
}
}
}
|
// <Auto-Generated></Auto-Generated>
using System.Threading.Tasks;
namespace Cuture.Extensions.Modularity
{
/// <summary>
/// Lifecycle interface for <inheritdoc cref="IAppModule"/> - <inheritdoc cref="OnApplicationInitializationAsync"/>
/// </summary>
public interface IOnApplicationInitializationAsync
{
    /// <summary>
    /// Initializes the application (may include asynchronous work).
    /// </summary>
    /// <param name="context">Context of the application initialization phase.</param>
    /// <returns>A task that completes when initialization is finished.</returns>
    Task OnApplicationInitializationAsync(ApplicationInitializationContext context);
}
}
|
import React from 'react';
import config from 'src/config';
import ContentSection from './ContentSection';
// Renders the static Privacy Policy content inside a titled ContentSection.
// The app's domain name is interpolated from the runtime config.
function PrivacySection() {
  return (
    <ContentSection title="Privacy Policy">
      <div>
        <p>
          <strong>{config.appDomainName}</strong> will collect certain non-personally identify
          information about you as you use our site. We may use this data to better
          understand our users.
        </p>
        <p>
          We will also ask you to provide personal information, but you{'\''}ll always be able to opt out.
          If you give us personal information, we won{'\''}t do anything evil with it.
        </p>
        <p>We can also use cookies, but you can choose not to store these.</p>
        <p>
          We use Google Analytics,{' '}
          <a
            target="_blank"
            rel="noopener noreferrer"
            href="https://privacy.google.com"
          >
            Google{'\''}s privacy policy
          </a>.
        </p>
      </div>
    </ContentSection>
  );
}
|
package day1
import org.scalatest.{FunSpec, FunSuite, Matchers}
// Unit tests for Advent of Code 2017 day 1 ("Inverse Captcha"): the captcha
// sums every digit that matches the next digit in the circular sequence.
class InverseCaptchaTest extends FunSuite with Matchers {

  import InverseCaptcha.captcha

  test("Captcha '12' should return 0") {
    captcha("12") shouldBe 0
  }

  test("Captcha '1111' should return 4") {
    captcha("1111") shouldBe 4
  }

  test("Captcha '1122' should return 3") {
    captcha("1122") shouldBe 3
  }

  test("Captcha '91212129' should return 9") {
    captcha("91212129") shouldBe 9
  }
}
|
#include <bits/stdc++.h>
#define MAXN 1005
using namespace std;
// Fast modular exponentiation: returns (a^b) mod p by binary exponentiation,
// O(log b) multiplications.
inline long long fpow(long long a, long long b, long long p)
{
    long long result = 1;
    while (b > 0)
    {
        if (b & 1)
        {
            result = (result * a) % p;
        }
        a = (a * a) % p;
        b >>= 1;
    }
    return result;
}
// Reads grid dimensions w and h and prints 2^(w+h) mod 998244353.
int main()
{
    int w, h;
    scanf("%d %d", &w, &h);
    printf("%lld\n", fpow(2, w + h, 998244353LL));
    return 0;
}
{-# OPTIONS_GHC -fno-warn-orphans -fno-warn-name-shadowing #-}
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE TypeOperators #-}
{-# LANGUAGE NamedFieldPuns #-}
module Yage.Rendering.Pipeline.Deferred.GuiPass where
import Yage.Prelude
import Yage.Lens
import Yage.Viewport
import Yage.Scene hiding (toRenderEntity)
import Yage.Material
import Yage.Font
import Yage.Camera
import Yage.UI.GUI
import Yage.Rendering.Pipeline.Deferred.Common
|
import com.typesafe.sbt.SbtGit.GitKeys._
// Settings shared by every module in the build.
lazy val commonSettings = Seq(
  organization := "net.lullabyte",
  scalacOptions ++= Seq(
    "-Xlint",
    "-deprecation",
    "-Xfatal-warnings",
    "-feature"
  ),
  unmanagedSourceDirectories in Compile ++= Seq(
    baseDirectory.value.getParentFile / "shared" / "src" / "main" / "scala"
  ),
  scmInfo := Some(
    ScmInfo(
      url("http://github.com/lucidd/scala-js-chrome"),
      "scm:[email protected]:lucidd/scala-js-chrome.git"
    )
  ),
  developers := List(
    Developer(
      "lucidd",
      "Kevin Walter",
      "[email protected]",
      url("http://lullabyte.net")
    )
  ),
  licenses += "MIT" -> url("http://www.opensource.org/licenses/mit-license.html"),
  homepage := Some(url("http://github.com/lucidd/scala-js-chrome")),
  useGpg := true,
  useGitDescribe := true
)

// Sonatype publishing configuration. Previously this block was duplicated
// verbatim in `bindings`, `monixInterop` and `fs2Interop`.
lazy val publishSettings = Seq(
  publishMavenStyle := true,
  publishTo := {
    val nexus = "https://oss.sonatype.org/"
    if (isSnapshot.value)
      Some("snapshots" at nexus + "content/repositories/snapshots")
    else
      Some("releases" at nexus + "service/local/staging/deploy/maven2")
  }
)

lazy val commonPlugins = Seq(GitVersioning)

// Core Chrome API facade for Scala.js.
lazy val bindings = project.in(file("bindings")).
  settings(commonSettings: _*).
  settings(publishSettings: _*).
  settings(
    name := "scala-js-chrome",
    scalaVersion := "2.12.2",
    crossScalaVersions := Seq("2.10.6", "2.11.11", "2.12.2"),
    libraryDependencies ++= Seq(
      "org.scala-js" %%% "scalajs-dom" % "0.9.1"
    )
  ).
  enablePlugins(commonPlugins: _*).
  enablePlugins(ScalaJSPlugin)

// sbt plugin for packaging Chrome extensions; published to Bintray, not
// Sonatype, so it does not use publishSettings.
lazy val plugin = project.in(file("sbt-plugin")).
  settings(commonSettings: _*).
  settings(
    sbtPlugin := true,
    name := "sbt-chrome-plugin",
    libraryDependencies ++= {
      val circeVersion = "0.8.0"
      Seq(
        "io.circe" %% "circe-core" % circeVersion,
        "io.circe" %% "circe-generic" % circeVersion,
        "io.circe" %% "circe-parser" % circeVersion
      )
    },
    publishMavenStyle := false,
    bintrayRepository := "sbt-plugins",
    bintrayOrganization := None,
    addSbtPlugin("org.scala-js" % "sbt-scalajs" % "0.6.16")
  ).
  enablePlugins(commonPlugins: _*)

// Monix interop layer on top of the bindings.
lazy val monixInterop = project.in(file("interop/monix")).
  settings(commonSettings: _*).
  settings(publishSettings: _*).
  settings(
    name := "scala-js-chrome-monix",
    scalaVersion := "2.12.2",
    crossScalaVersions := Seq("2.10.6", "2.11.11", "2.12.2"),
    libraryDependencies ++= Seq(
      "io.monix" %%% "monix" % "2.2.4"
    )
  ).dependsOn(bindings)
  .enablePlugins(commonPlugins: _*)
  .enablePlugins(ScalaJSPlugin)

// fs2 interop layer on top of the bindings.
lazy val fs2Interop = project.in(file("interop/fs2")).
  settings(commonSettings: _*).
  settings(publishSettings: _*).
  settings(
    name := "scala-js-chrome-fs2",
    scalaVersion := "2.12.2",
    crossScalaVersions := Seq("2.11.11", "2.12.2"),
    libraryDependencies ++= Seq(
      "co.fs2" %%% "fs2-core" % "0.9.5"
    )
  ).dependsOn(bindings)
  .enablePlugins(commonPlugins: _*)
  .enablePlugins(ScalaJSPlugin)
# -*- coding: utf-8 -*-

module Xot

  # Mixin that lets an object override single methods on its own singleton
  # class at runtime ("hooks").
  module Hookable

    # Defines (or redefines) +name+ on the receiver's singleton class and
    # returns self for chaining.
    def hook(name, &block)
      c = class << self; self; end
      c.__send__ :define_method, name, &block
      self
    end

    # Replaces +name+ entirely with +block+; the original method is not called.
    def on(name, &block)
      hook name do |*a, &b|
        block.call(*a, &b)
      end
    end

    # Runs +block+ before the original method. Returning :skip from the block
    # suppresses the original call.
    def before(name, &block)
      hook name do |*a, &b|
        super(*a, &b) unless block.call(*a, &b) == :skip
      end
    end

    # Runs +block+ after the original method; the original's return value is
    # preserved.
    def after(name, &block)
      hook name do |*a, &b|
        ret = super(*a, &b)
        block.call(*a, &b)
        ret
      end
    end

  end# Hookable

end# Xot
package org.simple.clinic.facility
/** Outcome of attempting to pull the facility list from the server. */
sealed class FacilityPullResult {
  object Success : FacilityPullResult()
  object NetworkError : FacilityPullResult()
  object UnexpectedError : FacilityPullResult()
}
|
-- Registered users. New accounts start with 10,000.00 of cash.
CREATE TABLE users (
    id INTEGER PRIMARY KEY,
    username TEXT NOT NULL,
    hash TEXT NOT NULL, -- password hash (never plaintext)
    cash NUMERIC NOT NULL DEFAULT 10000.00
);
-- Usernames must be unique; the index also serves login lookups.
CREATE UNIQUE INDEX username ON users (username);

-- Current holdings: one row per (user, symbol) position.
CREATE TABLE users_shares (
    user_id INTEGER NOT NULL REFERENCES users(id),
    symbol TEXT NOT NULL,
    shares NUMERIC NOT NULL
);
CREATE INDEX user_id_shares ON users_shares (user_id);

-- Trade history. `type` presumably distinguishes buys from sells — confirm
-- the exact values against the application code.
CREATE TABLE transactions (
    id INTEGER PRIMARY KEY,
    user_id INTEGER NOT NULL REFERENCES users(id),
    symbol TEXT NOT NULL,
    shares NUMERIC NOT NULL,
    price NUMERIC NOT NULL,
    total NUMERIC NOT NULL,
    type TEXT NOT NULL
);
CREATE INDEX user_id_transactions ON transactions (user_id);
package org.http4s
package parser
import cats.data.NonEmptyList
import java.nio.charset.{Charset, StandardCharsets}
import org.http4s._
import org.http4s.headers.Origin
import org.http4s.internal.parboiled2._
/** Parsing support for the HTTP `Origin` request header. */
trait OriginHeader {

  /** Parses a raw header value into an [[Origin]]. */
  def ORIGIN(value: String): ParseResult[Origin] =
    new OriginParser(value).parse

  private class OriginParser(value: String)
      extends Http4sHeaderParser[Origin](value)
      with Rfc3986Parser {

    override def charset: Charset =
      StandardCharsets.ISO_8859_1

    // Top-level rule: either the "null" (or empty) origin or a host list.
    def entry: Rule1[Origin] = rule {
      nullEntry | hostListEntry
    }

    // The spec states that an Origin may be the string "null":
    // http://tools.ietf.org/html/rfc6454#section-7
    //
    // However, this MDN article states that it may be the empty string:
    // https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Origin
    //
    // Although the MDN article is possibly wrong,
    // it seems likely we could get either case,
    // so we read both as Origin.Null and re-serialize it as "null":
    def nullEntry: Rule1[Origin] = rule {
      (str("") ~ EOI | str("null") ~ EOI) ~> { () =>
        Origin.Null
      }
    }

    // One or more space-separated serialized origins.
    def hostListEntry: Rule1[Origin] = rule {
      (host ~ zeroOrMore(" " ~ host)) ~> { (head: Origin.Host, tail: collection.Seq[Origin.Host]) =>
        Origin.HostList(NonEmptyList(head, tail.toList))
      }
    }

    // A single serialized origin: scheme://host[:port].
    def host: Rule1[Origin.Host] = rule {
      (scheme ~ "://" ~ Host ~ Port) ~> { (s, h, p) =>
        Origin.Host(s, h, p)
      }
    }
  }
}
|
package store4s
import com.google.cloud.datastore.{Datastore => _, _}
import shapeless._
import shapeless.labelled._
import scala.jdk.CollectionConverters._
/** Typeclass encoding a Scala value of type T into a Datastore Value. */
trait ValueEncoder[T] { self =>
  def encode(t: T): Value[_]

  /** Derives an encoder for A by mapping A into T before encoding. */
  def contramap[A](f: A => T) = new ValueEncoder[A] {
    def encode(a: A) = self.encode(f(a))
  }
}
/** Built-in [[ValueEncoder]] instances for common types. */
object ValueEncoder {
  /** Summons an implicit encoder instance. */
  def apply[T](implicit enc: ValueEncoder[T]) = enc

  /** Builds an encoder from a plain encoding function. */
  def create[T](f: T => Value[_]) = new ValueEncoder[T] {
    def encode(t: T) = f(t)
  }

  implicit val blobEncoder = create(BlobValue.of)
  implicit val bytesEncoder = blobEncoder.contramap[Array[Byte]](Blob.copyFrom)
  implicit val booleanEncoder = create(BooleanValue.of)
  implicit val doubleEncoder = create(DoubleValue.of)
  // Types that have their own EntityEncoder are stored as embedded entities.
  implicit def entityEncoder[T](implicit encoder: EntityEncoder[T]) =
    create[T] { obj =>
      EntityValue.of(encoder.encodeEntity(obj, FullEntity.newBuilder()).build())
    }
  implicit val keyEncoder = create(KeyValue.of)
  implicit val latLngEncoder = create(LatLngValue.of)
  implicit def seqEncoder[T](implicit ve: ValueEncoder[T]) =
    create[Seq[T]](seq => ListValue.of(seq.map(t => ve.encode(t)).asJava))
  // None is stored as an explicit Datastore null value.
  implicit def optionEncoder[T](implicit ve: ValueEncoder[T]) =
    create[Option[T]] {
      case Some(t) => ve.encode(t)
      case None    => NullValue.of()
    }
  // Datastore has no 32-bit integer type; Ints are widened to Long.
  implicit val intEncoder = create((i: Int) => LongValue.of(i.toLong))
  implicit val longEncoder = create(LongValue.of)
  implicit val stringEncoder = create(StringValue.of)
  implicit val timestampEncoder = create(TimestampValue.of)
}
/** Typeclass writing all fields of A onto a Datastore entity builder. */
trait EntityEncoder[A] {
  def encodeEntity[B <: BaseEntity.Builder[_, B]](obj: A, eb: B): B
}
/** Shapeless-derived [[EntityEncoder]] instances for case classes. */
object EntityEncoder {
  /** Summons an implicit encoder instance. */
  def apply[A](implicit enc: EntityEncoder[A]) = enc

  // Base case of the HList induction: nothing left to write.
  implicit val hnilEncoder = new EntityEncoder[HNil] {
    def encodeEntity[B <: BaseEntity.Builder[_, B]](obj: HNil, eb: B): B = eb
  }

  // Inductive case: write the head field under its record label, then recurse
  // on the tail.
  implicit def hlistEncoder[K <: Symbol, H, T <: HList](implicit
      witness: Witness.Aux[K],
      hEncoder: ValueEncoder[H],
      tEncoder: EntityEncoder[T]
  ) = new EntityEncoder[FieldType[K, H] :: T] {
    def encodeEntity[B <: BaseEntity.Builder[_, B]](
        obj: FieldType[K, H] :: T,
        eb: B
    ): B = {
      val fieldName = witness.value.name
      tEncoder.encodeEntity(
        obj.tail,
        eb.set(fieldName, hEncoder.encode(obj.head))
      )
    }
  }

  // Entry point for case classes: convert to the labelled generic
  // representation and encode that.
  implicit def genericEncoder[A, R](implicit
      generic: LabelledGeneric.Aux[A, R],
      encoder: EntityEncoder[R]
  ) = new EntityEncoder[A] {
    def encodeEntity[B <: BaseEntity.Builder[_, B]](obj: A, eb: B): B = {
      encoder.encodeEntity(generic.to(obj), eb)
    }
  }
}
|
#include "shader.h"
#include "path.h"
#include <fstream>
// Reads the entire contents of SHADER_PATH/filename into a string.
// NOTE(review): if the file cannot be opened the stream yields no characters
// and an empty string is returned silently — confirm callers tolerate this.
std::string loadShaderString(std::string const& filename)
{
    std::string shaderPath = SHADER_PATH;
    std::string path = shaderPath + filename;
    std::ifstream inputFile(path);
    return std::string((std::istreambuf_iterator<char>(inputFile)), std::istreambuf_iterator<char>());
}
Shader::Shader(std::string const& vertexFilename, std::string const& fragmentFilename)
{
auto vertStr = loadShaderString(vertexFilename);
const GLchar* vertChStr = vertStr.c_str();
m_vertexShaderID = glCreateShader(GL_VERTEX_SHADER);
glShaderSource(m_vertexShaderID, 1, &vertChStr, NULL);
glCompileShader(m_vertexShaderID);
auto fragStr = loadShaderString(fragmentFilename);
const GLchar* fragChStr = fragStr.c_str();
m_fragmentShaderID = glCreateShader(GL_FRAGMENT_SHADER);
glShaderSource(m_fragmentShaderID, 1, &fragChStr, NULL);
glCompileShader(m_fragmentShaderID);
m_programID = glCreateProgram();
glAttachShader(m_programID, m_vertexShaderID);
glAttachShader(m_programID, m_fragmentShaderID);
glLinkProgram(m_programID);
}
// Detaches and deletes both shader objects and the program, unbinding first
// so the program is not deleted while current.
Shader::~Shader()
{
    unbind();
    glDetachShader(m_programID, m_vertexShaderID);
    glDetachShader(m_programID, m_fragmentShaderID);
    glDeleteShader(m_vertexShaderID);
    glDeleteShader(m_fragmentShaderID);
    glDeleteProgram(m_programID);
}
// Makes this program the current GL program.
void Shader::bind()
{
    glUseProgram(m_programID);
}

// Clears the current GL program binding.
void Shader::unbind()
{
    glUseProgram(0);
}

// Returns the GL program object handle.
GLuint Shader::getId()
{
    return m_programID;
}
|
using System;
using System.Threading;
using System.Threading.Tasks;
namespace NugetProxy.Protocol.Catalog
{
/// <summary>
/// A cursor that does not persist any state. Use this with a <see cref="CatalogProcessor"/>
/// to process all leafs each time <see cref="CatalogProcessor.ProcessAsync(CancellationToken)"/>
/// is called.
/// </summary>
public class NullCursor : ICursor
{
    /// <summary>Always returns null so processing starts from the beginning.</summary>
    public Task<DateTimeOffset?> GetAsync(CancellationToken cancellationToken = default)
    {
        return Task.FromResult<DateTimeOffset?>(null);
    }

    /// <summary>Discards the value; this cursor persists no state.</summary>
    public Task SetAsync(DateTimeOffset value, CancellationToken cancellationToken = default)
    {
        return Task.CompletedTask;
    }
}
|
package rorm
// RedisMode selects how the client connects: single node or cluster.
type RedisMode uint

const (
	_ RedisMode = iota
	// Normal is a single-node deployment.
	Normal
	// Cluster is a Redis Cluster deployment.
	Cluster
)

// SingleNodeDesc describes the connection parameters of one Redis node.
type SingleNodeDesc struct {
	URL      string
	Port     string
	DB       int
	Username string
	Password string
}

// Options holds the full client configuration.
type Options struct {
	Mode       RedisMode
	AddressMap map[string]*SingleNodeDesc // nodes keyed by alias
	ReadOnly   bool
}
// NewRedisOptions returns an empty single-node (Normal) configuration with no
// registered nodes. (Original comment: "初始化" = "initialize".)
func NewRedisOptions() *Options {
	return &Options{
		Mode:       Normal,
		ReadOnly:   false,
		AddressMap: make(map[string]*SingleNodeDesc),
	}
}
// NewDefaultOptions returns a configuration pointing at a local Redis
// (127.0.0.1:6379, DB 0) registered under the alias "default".
func NewDefaultOptions() *Options {
	node := &SingleNodeDesc{
		URL:  "127.0.0.1",
		Port: "6379",
		DB:   0,
	}
	return &Options{
		Mode: Normal,
		AddressMap: map[string]*SingleNodeDesc{
			"default": node,
		},
	}
}
// SetMode switches between Normal and Cluster mode and returns the receiver
// for chaining.
func (option *Options) SetMode(redisMode RedisMode) *Options {
	option.Mode = redisMode
	return option
}
// AddNode registers a Redis node under the given alias. Extra options are
// key/value pairs, e.g. AddNode("a", "127.0.0.1", "6379", 0, "UserName", "u",
// "Password", "p"). A random alias is generated when Alias is empty. Panics
// when the option list has an odd length or when more than one node is added
// in Normal (single-node) mode.
func (option *Options) AddNode(Alias string, URL string, Port string, DB int, options ...interface{}) *Options {
	if len(options) > 0 && len(options)%2 != 0 {
		panic("parameter options must be key value")
	}
	nodeDesc := SingleNodeDesc{
		URL:  URL,
		Port: Port,
		DB:   DB,
	}
	for i := 0; i < len(options); i = i + 2 {
		// The key sits at options[i], its value at options[i+1]. The previous
		// code assigned options[i] (the literal key string "UserName" or
		// "Password") as the credential value.
		switch options[i] {
		case "UserName":
			nodeDesc.Username = options[i+1].(string)
		case "Password":
			nodeDesc.Password = options[i+1].(string)
		}
	}
	alias := Alias
	if Alias == "" {
		alias = RandStringBytesMaskImprSrcUnsafe(6)
	}
	if option.Mode == Normal && len(option.AddressMap) > 1 {
		panic("redis is singleNode Mode.But addressList has more than one")
	}
	option.AddressMap[alias] = &nodeDesc
	return option
}
// SetReadOnly enables read-only commands on slave nodes. Panics in Normal
// (single-node) mode, where read-only routing does not apply.
func (option *Options) SetReadOnly(flag bool) *Options {
	if option.Mode == Normal {
		panic("normal mode can not be read only")
	}
	option.ReadOnly = flag
	return option
}
|
---
layout: default
modal-id: 1
date: 2020-8-11
img: certificate/cer1.jpg
alt: image-alt
project-date: September, 2019
client: none
category: TOEFL
description: 영어 능력 공인 성적으로 TOEFL 성적을 보유. (96 / 120)
---
|
package schema1
import (
"testing"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
vtesting "github.com/grafeas/voucher/testing"
)
// TestConfigFromManifest verifies that the image config can be extracted from
// a signed schema1 manifest without contacting a registry.
func TestConfigFromManifest(t *testing.T) {
	pk := vtesting.NewPrivateKey()
	newManifest := vtesting.NewTestSchema1SignedManifest(pk)

	// we can pass nil as the http.Client because schema1's config is stored in
	// the history fields. It's super weird.
	config, err := RequestConfig(nil, nil, newManifest)
	require.NoError(t, err)

	assert.NotNil(t, config)
	assert.Equal(t, "nobody", config.User)
}
|
package com.coursion.mediapickerlib
import android.Manifest
import android.content.Intent
import android.os.Bundle
import android.support.v7.app.AppCompatActivity
import kotlinx.android.synthetic.main.activity_main.*
import android.app.Activity
import android.content.pm.PackageManager
import android.content.res.ColorStateList
import android.graphics.BitmapFactory
import android.net.Uri
import android.os.Build
import android.support.design.widget.FloatingActionButton
import android.support.v4.content.ContextCompat
import android.support.v4.view.ViewCompat
import android.util.Log
import com.coursion.freakycoder.mediapicker.galleries.Gallery
import kotlinx.android.synthetic.main.content_main.*
import java.io.File
import java.io.FileNotFoundException
/**
 * Demo activity: launches the media picker from a FAB (after checking the
 * storage permission) and previews the first selected images.
 */
class MainActivity : AppCompatActivity() {

    // Request code for the media picker activity result.
    private val OPEN_MEDIA_PICKER = 1
    // Request code for the READ_EXTERNAL_STORAGE runtime permission.
    private val MY_PERMISSIONS_REQUEST_READ_EXTERNAL_STORAGE = 100

    override fun onCreate(savedInstanceState: Bundle?) {
        super.onCreate(savedInstanceState)
        setContentView(R.layout.activity_main)
        setSupportActionBar(toolbar)
        setButtonTint(fab, ContextCompat.getColorStateList(applicationContext, R.color.fabColor)!!)
        fab.setOnClickListener { view ->
            if (!permissionIfNeeded()) {
                val intent = Intent(this, Gallery::class.java)
                // Set the title
                intent.putExtra("title", "Select media")
                // Mode 1 for both images and videos selection, 2 for images only and 3 for videos!
                intent.putExtra("mode", 1)
                intent.putExtra("maxSelection", 3) // Optional
                startActivityForResult(intent, OPEN_MEDIA_PICKER)
            }
        }
    }

    // Applies the tint to the FAB using the framework API on Lollipop+ and the
    // compat shim below it.
    fun setButtonTint(button: FloatingActionButton, tint: ColorStateList) {
        if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP) {
            button.backgroundTintList = tint
        } else {
            ViewCompat.setBackgroundTintList(button, tint)
        }
    }

    // Requests READ_EXTERNAL_STORAGE when not yet granted. Returns true when a
    // permission request was started (caller should wait for the callback),
    // false when the permission is already available or not needed (< M).
    private fun permissionIfNeeded(): Boolean {
        if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M) {
            if (checkSelfPermission(Manifest.permission.READ_EXTERNAL_STORAGE)
                    != PackageManager.PERMISSION_GRANTED) {
                // Should we show an explanation?
                if (shouldShowRequestPermissionRationale(
                                Manifest.permission.READ_EXTERNAL_STORAGE)) {
                    // Explain to the user why we need to read the contacts
                }
                requestPermissions(arrayOf(Manifest.permission.READ_EXTERNAL_STORAGE),
                        MY_PERMISSIONS_REQUEST_READ_EXTERNAL_STORAGE)
                return true
            }
        }
        return false
    }

    override fun onActivityResult(requestCode: Int, resultCode: Int, data: Intent?) {
        super.onActivityResult(requestCode, resultCode, data)
        // Check which request we're responding to
        if (requestCode == OPEN_MEDIA_PICKER) {
            // Make sure the request was successful
            if (resultCode == Activity.RESULT_OK && data != null) {
                // getStringArrayListExtra returns a nullable platform type; the
                // extra may legitimately be absent, so guard instead of risking
                // an NPE on the unguarded dereference the old code performed.
                val selectionResult = data.getStringArrayListExtra("result") ?: return
                selectionResult.forEach {
                    try {
                        Log.d("MyApp", "Image Path : " + it)
                        val uriFromPath = Uri.fromFile(File(it))
                        Log.d("MyApp", "Image URI : " + uriFromPath)
                        // Convert URI to Bitmap
                        val bm = BitmapFactory.decodeStream(
                                contentResolver.openInputStream(uriFromPath))
                        image.setImageBitmap(bm)
                    } catch (e: FileNotFoundException) {
                        e.printStackTrace()
                    }
                }
            }
        }
    }
}
|
# GLOM Functionality
```@autodocs
Modules = [GPLinearODEMaker]
Pages = ["src/gp_functions.jl"]
```
|
Authors
===============================================================================
These are the people that have contributed to the project, in no particular
order:
* Alexandre Anriot <[email protected]>
* Adrien Nayrat <[email protected]>
* damien clochard <[email protected]>
* Guillaume Lelarge <[email protected]>
* Thibaut Madelaine <[email protected]>
* Nicolas Gollet <[email protected]>
* Nicolas Thauvin <[email protected]>
* Stefan Fercot <[email protected]>
* Thomas Reiss <[email protected]>
|
/*
* Copyright 2021 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package androidx.room.integration.kotlintestapp.test
import androidx.lifecycle.LiveData
import androidx.lifecycle.asFlow
import androidx.room.Dao
import androidx.room.Database
import androidx.room.Entity
import androidx.room.Insert
import androidx.room.PrimaryKey
import androidx.room.Query
import androidx.room.Room
import androidx.room.RoomDatabase
import androidx.room.integration.kotlintestapp.assumeKsp
import androidx.test.core.app.ApplicationProvider
import androidx.test.ext.junit.runners.AndroidJUnit4
import androidx.test.filters.SdkSuppress
import androidx.test.filters.SmallTest
import com.google.common.collect.ImmutableList
import com.google.common.truth.Truth.assertThat
import com.google.common.util.concurrent.ListenableFuture
import io.reactivex.Flowable
import io.reactivex.Observable
import kotlinx.coroutines.flow.Flow
import kotlinx.coroutines.flow.first
import kotlinx.coroutines.runBlocking
import org.junit.Before
import org.junit.Test
import org.junit.runner.RunWith
import java.util.Optional
/**
* This test matters in KSP specifically where we might use primitive adapter for non-null java
* primitives.
*/
@RunWith(AndroidJUnit4::class)
@SmallTest
class BoxedNonNullTypesTest {
    // Fresh in-memory database per test; no on-disk state.
    lateinit var db: MyDb

    @Before
    fun init() {
        db = Room.inMemoryDatabaseBuilder(
            ApplicationProvider.getApplicationContext(),
            MyDb::class.java
        ).build()
    }

    @Test
    fun list() {
        db.myDao().insert(MyEntity(3))
        assertThat(db.myDao().getAsList()).containsExactly(3L)
    }

    @Test
    fun list_nullable() {
        // Nullable-element lists are only handled correctly under KSP.
        assumeKsp()
        db.myDao().insert(MyNullableEntity(null), MyNullableEntity(3L))
        assertThat(db.myDao().getAsNullableList()).containsExactly(null, 3L)
    }

    @Test
    fun immutableList() {
        db.myDao().insert(MyEntity(4))
        assertThat(db.myDao().getAsImmutableList()).containsExactly(4L)
    }

    @Test
    @SdkSuppress(minSdkVersion = 24)
    fun javaOptional() {
        // Empty table maps to Optional.empty, a row to Optional.of.
        assertThat(db.myDao().getAsJavaOptional()).isEqualTo(
            Optional.empty<Long>()
        )
        db.myDao().insert(MyEntity(5))
        assertThat(db.myDao().getAsJavaOptional()).isEqualTo(
            Optional.of(5L)
        )
    }

    @Test
    fun guavaOptional() {
        assertThat(db.myDao().getAsGuavaOptional()).isEqualTo(
            com.google.common.base.Optional.absent<Long>()
        )
        db.myDao().insert(MyEntity(6))
        assertThat(db.myDao().getAsGuavaOptional()).isEqualTo(
            com.google.common.base.Optional.of(6L)
        )
    }

    @Test
    fun getAsLiveData() = runBlocking<Unit> {
        db.myDao().insert(MyEntity(7))
        assertThat(db.myDao().getAsLiveData().asFlow().first()).isEqualTo(7L)
    }

    @Test
    fun getAsLiveData_nullable() = runBlocking<Unit> {
        assumeKsp()
        db.myDao().insert(MyNullableEntity(null))
        assertThat(db.myDao().getAsNullableLiveData().asFlow().first()).isNull()
    }

    @Test
    fun getAsFlow() = runBlocking<Unit> {
        db.myDao().insert(MyEntity(8))
        assertThat(db.myDao().getAsFlow().first()).isEqualTo(8L)
    }

    @Test
    fun getAsFlow_nullable() = runBlocking<Unit> {
        assumeKsp()
        db.myDao().insert(MyNullableEntity(null))
        assertThat(db.myDao().getAsNullableFlow().first()).isNull()
    }

    @Test
    fun getAsRx2Observable() {
        db.myDao().insert(MyEntity(9))
        assertThat(db.myDao().getAsRx2Observable().blockingFirst()).isEqualTo(9L)
    }

    @Test
    fun getAsRx2Flowable() {
        db.myDao().insert(MyEntity(10))
        assertThat(db.myDao().getAsRx2Flowable().blockingFirst()).isEqualTo(10L)
    }

    @Test
    fun getAsRx3Observable() {
        db.myDao().insert(MyEntity(11))
        assertThat(db.myDao().getAsRx3Observable().blockingFirst()).isEqualTo(11L)
    }

    @Test
    fun getAsRx3Flowable() {
        db.myDao().insert(MyEntity(12))
        assertThat(db.myDao().getAsRx3Flowable().blockingFirst()).isEqualTo(12L)
    }

    @Test
    fun getAsListenableFuture() {
        db.myDao().insert(MyEntity(13))
        assertThat(
            db.myDao().getAsListenableFuture().get()
        ).isEqualTo(13L)
    }

    @Test
    fun getAsListenableFuture_nullable() {
        assumeKsp()
        db.myDao().insert(MyNullableEntity(null))
        assertThat(
            db.myDao().getAsNullableListenableFuture().get()
        ).isEqualTo(null)
    }

    // Entity with a non-null Long column (exercises primitive adapters).
    @Entity
    data class MyEntity(
        val value: Long,
        @PrimaryKey(autoGenerate = true) val id: Int = 0,
    )

    // Entity with a nullable Long column (exercises boxed adapters).
    @Entity
    data class MyNullableEntity(
        val value: Long?,
        @PrimaryKey(autoGenerate = true) val id: Int = 0,
    )

    @Database(
        entities = [MyEntity::class, MyNullableEntity::class],
        version = 1,
        exportSchema = false
    )
    abstract class MyDb : RoomDatabase() {
        abstract fun myDao(): MyDao
    }

    // One query method per supported reactive/async return wrapper.
    @Dao
    interface MyDao {
        @Query("SELECT value FROM MyEntity")
        fun getAsList(): List<Long>

        @Query("SELECT value FROM MyNullableEntity")
        fun getAsNullableList(): List<Long?>

        // immutable list does not allow nulls, hence no nullable test for it
        @Query("SELECT value FROM MyEntity")
        fun getAsImmutableList(): ImmutableList<Long>

        @Query("SELECT value FROM MyEntity LIMIT 1")
        fun getAsJavaOptional(): Optional<Long>

        @Query("SELECT value FROM MyEntity LIMIT 1")
        fun getAsGuavaOptional(): com.google.common.base.Optional<Long>

        @Query("SELECT value FROM MyEntity LIMIT 1")
        fun getAsLiveData(): LiveData<Long>

        @Query("SELECT value FROM MyNullableEntity LIMIT 1")
        fun getAsNullableLiveData(): LiveData<Long?>

        @Query("SELECT value FROM MyEntity LIMIT 1")
        fun getAsFlow(): Flow<Long>

        @Query("SELECT value FROM MyNullableEntity LIMIT 1")
        fun getAsNullableFlow(): Flow<Long?>

        @Query("SELECT value FROM MyEntity LIMIT 1")
        fun getAsRx2Observable(): Observable<Long>

        @Query("SELECT value FROM MyEntity LIMIT 1")
        fun getAsRx2Flowable(): Flowable<Long>

        @Query("SELECT value FROM MyEntity LIMIT 1")
        fun getAsRx3Observable(): io.reactivex.rxjava3.core.Observable<Long>

        @Query("SELECT value FROM MyEntity LIMIT 1")
        fun getAsRx3Flowable(): io.reactivex.rxjava3.core.Flowable<Long>

        @Query("SELECT value FROM MyEntity LIMIT 1")
        fun getAsListenableFuture(): ListenableFuture<Long>

        @Query("SELECT value FROM MyNullableEntity LIMIT 1")
        fun getAsNullableListenableFuture(): ListenableFuture<Long?>

        @Insert
        fun insert(vararg entities: MyEntity)

        @Insert
        fun insert(vararg entities: MyNullableEntity)
    }
}
using System;
using System.ComponentModel.DataAnnotations;
using MercadoLivre.Domain.Entities;
namespace MercadoLivre.Api.DTO
{
/// <summary>Request payload for posting a product review (opinion).</summary>
public class ProdutoOpiniaoReqDTO
{
    /// <summary>Rating given by the reviewer.</summary>
    [Required(ErrorMessage = "O campo {0} é obrigatório.")]
    public int Nota { get; set; }

    /// <summary>Short title of the review.</summary>
    [Required(ErrorMessage = "O campo {0} é obrigatório.")]
    public string Titulo { get; set; }

    /// <summary>Review body, capped at 500 characters.</summary>
    [Required(ErrorMessage = "O campo {0} é obrigatório.")]
    [MaxLength(500, ErrorMessage = "O campo {0} deve ter no máximo {1} caracteres.")]
    public string Descricao { get; set; }
}
} |
from dagster_gcp.gcs import FakeGCSBlob, FakeGCSBucket, FakeGCSClient
def test_fake_blob_read_write():
    """Round-trips a string and raw bytes through a fake blob."""
    bucket = FakeGCSBucket("my_bucket")
    blob = FakeGCSBlob("my_blob", bucket)

    # A freshly created fake blob reports that it exists.
    assert blob.exists()

    my_string = "this is a unit test"
    blob.upload_from_string(my_string)
    # download_as_bytes always yields bytes, so compare against the encoding.
    assert blob.download_as_bytes() == my_string.encode()

    my_bytes = b"these are some bytes"
    blob.upload_from_string(my_bytes)
    assert blob.download_as_bytes() == my_bytes
def test_blob_delete():
    """Deleting a blob removes only that blob from the bucket's registry."""
    bucket = FakeGCSBucket("my_bucket")
    foo = bucket.blob("foo")
    bar = bucket.blob("bar")

    foo.upload_from_string("foo")
    bar.upload_from_string("bar")
    assert "foo" in bucket.blobs.keys()
    assert "bar" in bucket.blobs.keys()

    foo.delete()
    assert "foo" not in bucket.blobs.keys()
    assert "bar" in bucket.blobs.keys()

    bar.delete()
    assert "bar" not in bucket.blobs.keys()
def test_bucket():
    """The bucket caches blob handles: the same name yields the same object."""
    bucket = FakeGCSBucket("my_bucket")
    assert bucket.exists()

    foo = bucket.blob("foo")
    bar = bucket.blob("bar")

    assert bucket.blob("foo") == foo
    assert bucket.blob("bar") == bar
def test_client_blobs():
    """list_blobs returns blobs in creation order and honors the prefix filter."""
    client = FakeGCSClient()
    foo = client.bucket("foo")
    assert client.bucket("foo") == foo

    bar = foo.blob("bar")
    assert [bar] == list(client.list_blobs("foo"))

    baz = foo.blob("baz/aaa")
    assert [bar, baz] == list(client.list_blobs("foo"))
    assert [baz] == list(client.list_blobs("foo", prefix="baz"))
    assert [] == list(client.list_blobs("foo", prefix="xyz"))
def test_client_buckets():  # renamed from "test_client_bucekts" (typo)
    """Buckets created through the client are listed in creation order."""
    client = FakeGCSClient()
    foo = client.bucket("foo")
    bar = client.bucket("bar")
    assert [foo, bar] == list(client.list_buckets())
|
require File.dirname(__FILE__) + '/../../spec_helper'
describe "File.directory?" do
before :each do
platform :mswin do
@dir = "C:\\"
@file = "C:\\winnt\\notepad.exe"
end
platform :not, :mswin do
@dir = "/"
@file = "/bin/ls"
end
end
after :each do
@dir = nil
end
it "return true if dir is a directory, otherwise return false" do
File.directory?(@dir).should == true
File.directory?(@file).should == false
end
it "raise an exception its the arguments are the worng type or number" do
should_raise(ArgumentError){ File.directory? }
should_raise(ArgumentError){ File.directory?(@dir, @file) }
should_raise(TypeError){ File.directory?(nil) }
end
end
|
#!/bin/bash
# Installs Apache Tomcat under $INSTALL_PATH (once) and prepares a separate
# CATALINA_BASE directory so multiple instances can share one binary install.

TOMCAT_VERSION=7.0.52
TOMCAT_FILE=apache-tomcat-$TOMCAT_VERSION
INSTALL_PATH=~/java

# -p: do not fail when the directory already exists.
mkdir -p "$INSTALL_PATH"
cd "$INSTALL_PATH" || exit 1

ENV_PATH=$INSTALL_PATH/custom-tomcat2

# The install is exposed through the "tomcat" symlink, which points at a
# directory; `-e` (not `-f`) is the correct existence test — with `-f` the
# check always failed and the install re-ran on every invocation.
if [ ! -e "$INSTALL_PATH/tomcat" ]; then
    echo "Tomcat not found at $INSTALL_PATH/tomcat"
    if [ ! -f "$TOMCAT_FILE.tar.gz" ]; then
        echo "Downloading Tomcat binary"
        wget http://mirrors.sonic.net/apache/tomcat/tomcat-7/v$TOMCAT_VERSION/bin/$TOMCAT_FILE.tar.gz
    fi
    echo "Extracting Tomcat binary to $INSTALL_PATH/$TOMCAT_FILE"
    tar -xf "$TOMCAT_FILE.tar.gz"
    ln -s "$INSTALL_PATH/$TOMCAT_FILE" "$INSTALL_PATH/tomcat"
else
    echo "Tomcat found at $INSTALL_PATH/tomcat"
fi

# Skeleton CATALINA_BASE layout plus a copy of the stock configuration.
mkdir -p "$ENV_PATH"/{bin,conf,logs,work,webapps,temp}
cp -v "$INSTALL_PATH"/tomcat/conf/server.xml "$INSTALL_PATH"/tomcat/conf/tomcat-users.xml "$ENV_PATH"/conf/

echo "Execute: export CATALINA_BASE=$ENV_PATH"
echo "Then run tomcat with command: $INSTALL_PATH/tomcat/bin/catalina.sh run"
package smallest_search
// SmallestSearch returns the smaller of `smallest` and the minimum element of
// slice. Implemented as a back-to-front scan (the original used tail
// recursion over the same traversal order).
func SmallestSearch(slice []int, smallest int) int {
	for i := len(slice) - 1; i >= 0; i-- {
		if slice[i] < smallest {
			smallest = slice[i]
		}
	}
	return smallest
}
|
import { testName } from '../../support';
import { VirtualMachineData } from '../../types/vm';
import { OS_IMAGES_NS, TEMPLATE } from '../../utils/const/index';
import { ProvisionSource } from '../../utils/const/provisionSource';
import { pvc } from '../../views/pvc';
import { virtualization } from '../../views/virtualization';
import { vm } from '../../views/vm';
// Disk images exercised by the upload form; they are created on the fly with
// `touch`, so only the file extension matters for these tests.
const imageFormats = ['/tmp/cirros.iso', '/tmp/cirros.gz', '/tmp/cirros.xz'];
// An extension the upload form is expected to flag as unsupported.
const invalidImage = '/tmp/cirros.txt';
// Option label shown in the form when the OS already has a data image.
const os = 'Red Hat Enterprise Linux 6.0 or higher - Default data image already exists';
const template = TEMPLATE.RHEL6;

// VM definition used by the "create VM from uploaded source" scenarios below.
const vmData: VirtualMachineData = {
  name: `pvc-test-vm-${testName}`,
  namespace: testName,
  template: template.name,
  sshEnable: false,
  startOnCreation: true,
  sourceAvailable: true,
};
// End-to-end coverage for PVC uploads, both through the web form and via CLI.
// NOTE(review): tests within each describe are order-dependent (later specs
// rely on resources created by earlier ones); do not reorder.
describe('kubevirt PVC upload', () => {
  before(() => {
    cy.Login();
    cy.visit('/');
    cy.createProject(testName);
  });

  after(() => {
    // Tear down everything the specs created: the golden-image DataVolume,
    // the test VM, the project namespace, and the local scratch images.
    cy.deleteResource({
      kind: 'DataVolume',
      metadata: {
        name: template.dvName,
        namespace: OS_IMAGES_NS,
      },
    });
    cy.deleteResource({
      kind: 'VirtualMachine',
      metadata: {
        name: vmData.name,
        namespace: vmData.namespace,
      },
    });
    cy.deleteResource({
      kind: 'Namespace',
      metadata: {
        name: testName,
      },
    });
    cy.exec('rm -fr /tmp/cirros.*');
  });

  describe('test PVC upload via form', () => {
    it('ID(CNV-4778) No warning message shows when image format is supported', () => {
      pvc.form.open();
      // Each supported extension must pass the form's file-type check.
      for (const img of imageFormats) {
        cy.exec(`touch ${img} || true`);
        cy.dropFile(img, img.split('/').pop(), '.pf-c-file-upload');
        cy.get('.pf-c-alert__title')
          .contains('File type extension')
          .should('not.exist');
      }
    });

    it('ID(CNV-4891) It shows a warning message when image format is not supported', () => {
      pvc.form.open();
      cy.exec(`touch ${invalidImage} || true`);
      cy.dropFile(invalidImage, invalidImage.split('/').pop(), '.pf-c-file-upload');
      cy.contains('File type extension').should('be.visible');
    });

    it('ID(CNV-5176) It shows an error when uploading data to golden OS again', () => {
      // Pre-create the golden-image DataVolume so the form sees a duplicate.
      cy.createDataVolume(template.dvName, OS_IMAGES_NS);
      pvc.form.open();
      pvc.form.selectOS(os);
      cy.get('.pf-c-alert__title')
        .contains('Operating system source already defined')
        .should('exist');
    });

    it('ID(CNV-5041) VM can be up after deleting the uploaded PVC', () => {
      vm.create(vmData);
      vm.stop(vmData);
      // only delete template pvc for ocs, hpp does not support this
      if (Cypress.env('STORAGE_CLASS') === 'ocs-storagecluster-ceph-rbd') {
        cy.deleteResource({
          kind: 'DataVolume',
          metadata: {
            name: template.dvName,
            namespace: OS_IMAGES_NS,
          },
        });
      }
      vm.start(vmData);
      vm.delete();
    });
  });

  describe('test PVC upload via CLI', () => {
    it('ID(CNV-5044) Verify boot source is available for template after upload via CLI', () => {
      // Download the source image only when it is not already cached locally.
      cy.exec(
        `test -f ${Cypress.env(
          'UPLOAD_IMG',
        )} || curl --fail -L ${ProvisionSource.URL.getSource()} -o ${Cypress.env('UPLOAD_IMG')}`,
        { timeout: 600000 },
      );
      cy.uploadFromCLI(template.dvName, OS_IMAGES_NS, Cypress.env('UPLOAD_IMG'), '1');
      virtualization.templates.visit();
      virtualization.templates.testSource(template.name, 'Unknown');
    });

    it('ID(CNV-5597) Verify create VM from the template whose source is uploaded via CLI', () => {
      vm.create(vmData);
      vm.stop(vmData);
      vm.delete();
    });

    it('ID(CNV-5598) Delete DV/PVC from CLI', () => {
      cy.deleteResource({
        kind: 'DataVolume',
        metadata: {
          name: template.dvName,
          namespace: OS_IMAGES_NS,
        },
      });
      // Once the source is gone the template should offer to add one again.
      virtualization.templates.visit();
      virtualization.templates.testSource(template.name, 'Add source');
    });
  });
});
|
export const description = `
queue submit validation tests.
`;
import { TestGroup } from '../../../framework/index.js';
import { ValidationTest } from './validation_test.js';
export const g = new TestGroup(ValidationTest);

// Validates that submitting a command buffer referencing a currently-mapped
// buffer is a validation error, and that submission succeeds again after unmap.
g.test('submitting with a mapped buffer is disallowed', async t => {
  const buffer = t.device.createBuffer({
    size: 4,
    usage: GPUBufferUsage.MAP_WRITE | GPUBufferUsage.COPY_SRC,
  });

  const targetBuffer = t.device.createBuffer({
    size: 4,
    usage: GPUBufferUsage.COPY_DST,
  });

  // A fresh command buffer is needed per submit; command buffers are single-use.
  const getCommandBuffer = (): GPUCommandBuffer => {
    const commandEncoder = t.device.createCommandEncoder();
    commandEncoder.copyBufferToBuffer(buffer, 0, targetBuffer, 0, 4);
    return commandEncoder.finish();
  };

  // Submitting when the buffer has never been mapped should succeed
  t.queue.submit([getCommandBuffer()]);

  // Map the buffer, submitting when the buffer is mapped should fail
  await buffer.mapWriteAsync();
  t.queue.submit([]);

  await t.expectValidationError(() => {
    t.queue.submit([getCommandBuffer()]);
  });

  // Unmap the buffer, queue submit should succeed
  buffer.unmap();
  t.queue.submit([getCommandBuffer()]);
});
|
using Weave
using Test
"""
    pljtest(source, resfile, doctype)

Weave `documents/<source>` into `documents/plotsjl/<resfile>` with the given
`doctype`, compare the produced file against the `<resfile>.ref` reference,
and remove the generated file afterwards.
"""
function pljtest(source, resfile, doctype)
    outdir = "documents/plotsjl"
    weave("documents/$source", out_path = "$outdir/$resfile", doctype = doctype)
    produced = read("$outdir/$resfile", String)
    reference = read("$outdir/$resfile.ref", String)
    @test produced == reference
    rm("$outdir/$resfile")
end
# Regression-test both supported output formats against reference files.
pljtest("plotsjl_test_gr.jmd", "plotsjl_test_gr.md", "pandoc")
pljtest("plotsjl_test_gr.jmd", "plotsjl_test_gr.tex", "tex")

# test cache with plots: weaving twice with cache=:all must produce
# byte-identical output (second run served from the cache).
isdir("documents/cache") && rm("documents/cache", recursive = true)

weave("documents/plotsjl_test_gr.jmd", cache=:all)
result = read("documents/plotsjl_test_gr.html", String)
rm("documents/plotsjl_test_gr.html")
weave("documents/plotsjl_test_gr.jmd", cache=:all)
cached_result = read("documents/plotsjl_test_gr.html", String)
@test result == cached_result
|
// Code generated by the FlatBuffers compiler. DO NOT EDIT.
package serialization
import "strconv"
// OrderUpdateType enumerates the states an order update message can carry.
// NOTE: this file is flatbuffers-generated ("DO NOT EDIT"); regenerate from
// the schema rather than editing values here.
type OrderUpdateType int8

const (
	OrderUpdateTypeUNKNOWN  OrderUpdateType = 0
	OrderUpdateTypeRECEIVED OrderUpdateType = 1
	OrderUpdateTypeOPEN     OrderUpdateType = 2
	OrderUpdateTypeDONE     OrderUpdateType = 3
	OrderUpdateTypeMATCH    OrderUpdateType = 4
	OrderUpdateTypeCHANGE   OrderUpdateType = 5
)

// EnumNamesOrderUpdateType maps each enum value to its schema name.
var EnumNamesOrderUpdateType = map[OrderUpdateType]string{
	OrderUpdateTypeUNKNOWN:  "UNKNOWN",
	OrderUpdateTypeRECEIVED: "RECEIVED",
	OrderUpdateTypeOPEN:     "OPEN",
	OrderUpdateTypeDONE:     "DONE",
	OrderUpdateTypeMATCH:    "MATCH",
	OrderUpdateTypeCHANGE:   "CHANGE",
}

// EnumValuesOrderUpdateType is the inverse of EnumNamesOrderUpdateType.
var EnumValuesOrderUpdateType = map[string]OrderUpdateType{
	"UNKNOWN":  OrderUpdateTypeUNKNOWN,
	"RECEIVED": OrderUpdateTypeRECEIVED,
	"OPEN":     OrderUpdateTypeOPEN,
	"DONE":     OrderUpdateTypeDONE,
	"MATCH":    OrderUpdateTypeMATCH,
	"CHANGE":   OrderUpdateTypeCHANGE,
}

// String returns the schema name for v, or a numeric fallback for
// out-of-range values.
func (v OrderUpdateType) String() string {
	if s, ok := EnumNamesOrderUpdateType[v]; ok {
		return s
	}
	return "OrderUpdateType(" + strconv.FormatInt(int64(v), 10) + ")"
}
|
package com.github.llmaximll.mystoryismyworld.presentation.settings.view
import android.content.Context
import android.view.LayoutInflater
import android.view.ViewGroup
import androidx.recyclerview.widget.DiffUtil
import androidx.recyclerview.widget.ListAdapter
import androidx.recyclerview.widget.RecyclerView.ViewHolder
import com.github.llmaximll.mystoryismyworld.R
import com.github.llmaximll.mystoryismyworld.databinding.ItemSettingsBinding
import com.github.llmaximll.mystoryismyworld.utils.showToast
/**
 * RecyclerView adapter for the settings screen. Delegates per-row rendering
 * and click wiring to [SettingsViewHolder].
 */
internal class SettingsAdapter(
    private val context: Context,
    private val onItemClick: (Settings) -> Unit,
    private val onAboutLongClick: () -> Unit
) : ListAdapter<Settings, SettingsViewHolder>(SettingsDiffCallback()) {

    override fun onCreateViewHolder(parent: ViewGroup, viewType: Int): SettingsViewHolder =
        SettingsViewHolder(
            ItemSettingsBinding.inflate(LayoutInflater.from(parent.context), parent, false)
        )

    override fun onBindViewHolder(holder: SettingsViewHolder, position: Int) {
        holder.bind(context, getItem(position), onItemClick, onAboutLongClick)
    }
}
/**
 * ViewHolder for a single settings row: icon, label, a click listener, and —
 * for [Settings.ABOUT] only — a long-click listener.
 */
internal class SettingsViewHolder(
    private val binding: ItemSettingsBinding
) : ViewHolder(binding.root) {

    /**
     * Binds [settings] to this row.
     *
     * @param context used to resolve the row label string resources
     * @param onItemClick invoked when the row is tapped
     * @param onAboutLongClick invoked when the ABOUT row is long-pressed
     */
    fun bind(
        context: Context,
        settings: Settings,
        onItemClick: (Settings) -> Unit,
        onAboutLongClick: () -> Unit
    ) {
        // Resolve the icon and label for this entry.
        val (drawableRes, text) = when (settings) {
            Settings.NOTIFICATIONS -> R.drawable.ic_baseline_notifications_24 to
                context.getString(R.string.fragment_settings_enum_notifications)
            Settings.APPEARANCE -> R.drawable.ic_baseline_appearance_24 to
                context.getString(R.string.fragment_settings_enum_appearance)
            Settings.SUPPORT -> R.drawable.ic_baseline_question_answer_24 to
                context.getString(R.string.fragment_settings_enum_support)
            Settings.ABOUT -> R.drawable.ic_baseline_question_mark_24 to
                context.getString(R.string.fragment_settings_enum_about)
        }
        binding.imageView.setImageResource(drawableRes)
        binding.textTextView.text = text
        setItemClickListener(settings, onItemClick)
        if (settings == Settings.ABOUT) {
            binding.root.setOnLongClickListener {
                onAboutLongClick()
                true
            }
        } else {
            // Fix: ViewHolders are recycled, so a row previously bound to ABOUT
            // kept its long-click listener. Clear it for every other row and
            // restore the default long-clickable state.
            binding.root.setOnLongClickListener(null)
            binding.root.isLongClickable = false
        }
    }

    private fun setItemClickListener(
        settings: Settings,
        onItemClick: (Settings) -> Unit
    ) {
        binding.root.setOnClickListener { onItemClick(settings) }
    }
}
/** DiffUtil callback for [Settings]; enum constants make both checks trivial. */
private class SettingsDiffCallback : DiffUtil.ItemCallback<Settings>() {

    override fun areItemsTheSame(oldItem: Settings, newItem: Settings): Boolean {
        return oldItem.ordinal == newItem.ordinal
    }

    override fun areContentsTheSame(oldItem: Settings, newItem: Settings): Boolean {
        return oldItem == newItem
    }
}
package net.autoreconnect.mixin;
import net.minecraft.client.MinecraftClient;
import net.minecraft.client.font.TextRenderer;
import net.minecraft.client.gui.screen.DisconnectedScreen;
import net.minecraft.client.util.Window;
import net.minecraft.client.util.math.MatrixStack;
import org.spongepowered.asm.mixin.Mixin;
import org.spongepowered.asm.mixin.injection.At;
import org.spongepowered.asm.mixin.injection.Inject;
import org.spongepowered.asm.mixin.injection.callback.CallbackInfo;
import org.spongepowered.asm.mixin.injection.callback.CallbackInfoReturnable;
import static net.autoreconnect.AutoReconnect.attempt;
import static net.autoreconnect.AutoReconnect.getCountdown;
/**
 * Mixin into the vanilla disconnect screen: lets Esc close the screen and
 * overlays the reconnect countdown text while rendering.
 */
@Mixin(DisconnectedScreen.class)
public class MixinDisconnectedScreen
{
    // Force the screen to be closable with Esc (vanilla returns false here).
    @Inject(at = @At("RETURN"), method = "shouldCloseOnEsc", cancellable = true)
    private void shouldCloseOnEsc(CallbackInfoReturnable<Boolean> info)
    {
        info.setReturnValue(true);
    }

    // Draw the reconnect status line after the vanilla screen has rendered.
    @Inject(at = @At("RETURN"), method = "render")
    private void render(MatrixStack matrices, int mouseX, int mouseY, float delta, CallbackInfo info)
    {
        Window window = MinecraftClient.getInstance().getWindow();
        TextRenderer renderer = MinecraftClient.getInstance().textRenderer;
        // attempt == -1 means reconnecting has been given up (see AutoReconnect.attempt).
        String text = attempt == -1 ? "Can not reconnect!" : "Reconnecting in " + getCountdown() + "...";
        // Centered horizontally, one third down the screen, drawn in red-orange.
        renderer.draw(matrices, text,
            (window.getScaledWidth() - renderer.getWidth(text)) / 2F,
            (window.getScaledHeight() - renderer.fontHeight) / 3F,
            0xFF4422);
    }
}
# -*- coding: utf-8 -*-
"""
Copyright (C) 2021 Stefano Gottardo (script.appcast)
Functions to create a new SQLite database
SPDX-License-Identifier: MIT
See LICENSES/MIT.md for more information.
"""
import sqlite3 as sql
import resources.lib.database.db_utils as db_utils
from resources.lib.helpers.logging import LOG
def create_database(db_file_path, db_filename):
    """Create a new, empty SQLite database file with the required tables.

    Args:
        db_file_path: Full filesystem path of the database file to create.
        db_filename: Database file name, used only for the log message.
    """
    LOG.debug('The SQLite database {} is empty, creating tables', db_filename)
    _create_local_database(db_file_path)
def _create_local_database(db_file_path):
"""Create a new local database"""
conn = sql.connect(db_file_path)
cur = conn.cursor()
table = str('CREATE TABLE config ('
'ID INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL,'
'Name TEXT UNIQUE NOT NULL,'
'Value TEXT);')
cur.execute(table)
if conn:
conn.close()
|
{-# LANGUAGE CPP #-}
-- !!! Testing Typeable instances
module Main(main) where
import Data.Dynamic
#if MIN_VERSION_base(4,10,0)
import Data.Typeable (TyCon, TypeRep, typeOf)
#endif
import Data.Array
import Data.Array.MArray
import Data.Array.ST
import Data.Array.IO
import Data.Array.Unboxed
import Data.Complex
import Data.Int
import Data.Word
import Data.IORef
import System.IO
import Control.Monad.ST
import System.Mem.StableName
import System.Mem.Weak
import Foreign.StablePtr
import Control.Exception
import Foreign.C.Types
main :: IO ()
-- Prints the TypeRep of each supported type; the output is compared against
-- a golden file, so the order and set of prints must not change.
main = do
    -- Tuples and functions
    print (typeOf (undefined :: [()]))
    print (typeOf (undefined :: ()))
    print (typeOf (undefined :: ((),())))
    print (typeOf (undefined :: ((),(),())))
    print (typeOf (undefined :: ((),(),(),())))
    print (typeOf (undefined :: ((),(),(),(),())))
    print (typeOf (undefined :: (() -> ())))
    print (typeOf (undefined :: (Array () ())))
    -- Primitive and base types
    print (typeOf (undefined :: Bool))
    print (typeOf (undefined :: Char))
    print (typeOf (undefined :: (Complex ())))
    print (typeOf (undefined :: Double))
    print (typeOf (undefined :: (Either () ())))
    print (typeOf (undefined :: Float))
    print (typeOf (undefined :: Handle))
    print (typeOf (undefined :: Int))
    print (typeOf (undefined :: Integer))
    print (typeOf (undefined :: IO ()))
    print (typeOf (undefined :: (Maybe ())))
    print (typeOf (undefined :: Ordering))
    print (typeOf (undefined :: Dynamic))
    print (typeOf (undefined :: (IORef ())))
    -- Sized integer types
    print (typeOf (undefined :: Int8))
    print (typeOf (undefined :: Int16))
    print (typeOf (undefined :: Int32))
    print (typeOf (undefined :: Int64))
    print (typeOf (undefined :: (ST () ())))
    print (typeOf (undefined :: (StableName ())))
    print (typeOf (undefined :: (StablePtr ())))
    print (typeOf (undefined :: TyCon))
    print (typeOf (undefined :: TypeRep))
    print (typeOf (undefined :: Word8))
    print (typeOf (undefined :: Word16))
    print (typeOf (undefined :: Word32))
    print (typeOf (undefined :: Word64))
    -- Exceptions
    print (typeOf (undefined :: ArithException))
    print (typeOf (undefined :: AsyncException))
    -- Mutable and unboxed arrays (StableName/StablePtr repeated in the
    -- original output; kept as-is to preserve the golden output)
    print (typeOf (undefined :: (IOArray () ())))
    print (typeOf (undefined :: (IOUArray () ())))
    print (typeOf (undefined :: (STArray () () ())))
    print (typeOf (undefined :: (STUArray () () ())))
    print (typeOf (undefined :: (StableName ())))
    print (typeOf (undefined :: (StablePtr ())))
    print (typeOf (undefined :: (UArray () ())))
    print (typeOf (undefined :: (Weak ())))
    -- Foreign C types
    print (typeOf (undefined :: CChar))
    print (typeOf (undefined :: CSChar))
    print (typeOf (undefined :: CUChar))
    print (typeOf (undefined :: CShort))
    print (typeOf (undefined :: CUShort))
    print (typeOf (undefined :: CInt))
    print (typeOf (undefined :: CUInt))
    print (typeOf (undefined :: CLong))
    print (typeOf (undefined :: CULong))
    print (typeOf (undefined :: CLLong))
    print (typeOf (undefined :: CULLong))
    print (typeOf (undefined :: CFloat))
    print (typeOf (undefined :: CDouble))
    print (typeOf (undefined :: CPtrdiff))
    print (typeOf (undefined :: CSize))
    print (typeOf (undefined :: CWchar))
    print (typeOf (undefined :: CSigAtomic))
    print (typeOf (undefined :: CClock))
    print (typeOf (undefined :: CTime))
"""
`PyPinYin` package to interface with Python's `pypinyin` through `PyCall`.
On loading, three functions are provided as generic functions:
+ pinyin
+ lazypinyin
+ hanzi2pinyin
To find documentation for those functions, one should go into
Julia's `?` REPL mode. README.md also provides many examples.
"""
module PyPinYin
include("./styles.jl")
export pinyin
export lazypinyin
export printpinyin
export hanzi2pinyin
using PyCall
function __init__()
    # Import the Python `pypinyin` entry points once at module load time;
    # `py"lazypy"` / `py"pinyins"` are referenced by `lazypinyin` and `pinyin`.
    py"""
    from pypinyin import lazy_pinyin as lazypy
    from pypinyin import pinyin as pinyins
    """
end
"""
lazypinyin(hanzi::AbstractString; # 汉字串
style::Int=NORMAL, # 声调风格
errors::String="default", # 如何处理没有拼音的字符(default/ignore/replace)
strict::Bool=true, # 是否严格遵照《汉语拼音方案》处理声母和韵母
v2u::Bool=false, # 是否用 `ü` 代替 `v`
tone5::Bool=false, # 是否用 5 标识轻声
sandhi::Bool=false) # 是否变调,nǐ hǎo -> ní hǎo
"""
function lazypinyin(hanzi::AbstractString;
style::Int=NORMAL,
errors::String="default",
strict::Bool=true,
v2u::Bool=false,
tone5::Bool=false,
sandhi::Bool=false)
return py"lazypy"(hanzi,
style=style,
errors=errors,
strict=strict,
v_to_u=v2u,
neutral_tone_with_five=tone5,
tone_sandhi=sandhi)
end
"""
pinyin(hanzi::AbstractString; # 汉字串
style::Int=TONE, # 声调风格
heteronym::Bool=false, # 多音字
errors::String="default" # 如何处理没有拼音的字符(default/ignore/replace)
strict::Bool=true, # 是否严格遵照《汉语拼音方案》处理声母和韵母
v2u::Bool=false, # 是否用 `ü` 代替 `v`
tone5::Bool=false) # 是否用 5 标识轻声
"""
function pinyin(hanzi::AbstractString;
style::Int=TONE,
heteronym::Bool=false,
errors::String="default",
strict::Bool=true,
v2u::Bool=false,
tone5::Bool=false)
return py"pinyins"(hanzi,
style=style,
heteronym=heteronym,
errors=errors,
strict=strict,
v_to_u=v2u,
neutral_tone_with_five=tone5)
end
"""
printpinyin(pinyins)
将汉字转拼音的结果输出到一行并以空格分开
"""
function printpinyin(pinyins)
n = length(pinyins)
for i = 1:n-1
print("$(pinyins[i]) ")
end
print(pinyins[n])
end
"""
hanzi2pinyin(hanzi::AbstractString) = lazypinyin(hanzi, style=TONEONTOP, v2u=true, sandhi=true)
常见转拼音需求的快捷函数
# Example
julia> hanzi2pinyin("你好啊Julia,战略!") |> printpinyin
ní hǎo a Julia, zhàn lüè !
"""
function hanzi2pinyin(hanzi::AbstractString)
return lazypinyin(hanzi, style=TONEONTOP, v2u=true, sandhi=true)
end
end # module
|
/*
Copyright The ORAS Authors.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
// Package remote provides a client to the remote registry.
// Reference: https://github.com/distribution/distribution
package remote
import (
"context"
"encoding/json"
"fmt"
"net/http"
"strconv"
"oras.land/oras-go/v2/errdef"
"oras.land/oras-go/v2/registry"
"oras.land/oras-go/v2/registry/remote/auth"
"oras.land/oras-go/v2/registry/remote/internal/errutil"
)
// RepositoryOptions is an alias of Repository to avoid name conflicts.
// It also hides all methods associated with Repository.
type RepositoryOptions Repository
// Registry is an HTTP client to a remote registry.
type Registry struct {
// RepositoryOptions contains common options for Registry and Repository.
// It is also used as a template for derived repositories.
RepositoryOptions
// RepositoryListPageSize specifies the page size when invoking the catalog
// API.
// If zero, the page size is determined by the remote registry.
// Reference: https://docs.docker.com/registry/spec/api/#catalog
RepositoryListPageSize int
}
// NewRegistry creates a client to the remote registry with the specified domain
// name.
// Example: localhost:5000
func NewRegistry(name string) (*Registry, error) {
	ref := registry.Reference{Registry: name}
	if err := ref.ValidateRegistry(); err != nil {
		return nil, err
	}
	reg := &Registry{}
	reg.RepositoryOptions.Reference = ref
	return reg, nil
}
// client returns an HTTP client used to access the remote registry.
// A default HTTP client is returned if the client is not configured.
func (r *Registry) client() Client {
	if r.Client == nil {
		return auth.DefaultClient
	}
	return r.Client
}
// Ping checks whether or not the registry implements Docker Registry API V2 or
// OCI Distribution Specification.
// Ping can be used to check authentication when an auth client is configured.
// References:
// - https://docs.docker.com/registry/spec/api/#base
// - https://github.com/opencontainers/distribution-spec/blob/main/spec.md#api
func (r *Registry) Ping(ctx context.Context) error {
	url := buildRegistryBaseURL(r.PlainHTTP, r.Reference)
	req, err := http.NewRequestWithContext(ctx, http.MethodGet, url, nil)
	if err != nil {
		return err
	}
	resp, err := r.client().Do(req)
	if err != nil {
		return err
	}
	defer resp.Body.Close()
	switch resp.StatusCode {
	case http.StatusOK:
		return nil
	case http.StatusNotFound:
		// 404 on the base endpoint: the remote does not implement the API.
		return errdef.ErrNotFound
	default:
		return errutil.ParseErrorResponse(resp)
	}
}
// Repositories lists the name of repositories available in the registry.
// See also `RepositoryListPageSize`.
// Reference: https://docs.docker.com/registry/spec/api/#catalog
func (r *Registry) Repositories(ctx context.Context, fn func(repos []string) error) error {
	ctx = auth.AppendScopes(ctx, auth.ScopeRegistryCatalog)
	url := buildRegistryCatalogURL(r.PlainHTTP, r.Reference)
	var err error
	// Walk the paginated catalog: repositories() returns the next page URL,
	// and signals the end of pagination with errNoLink.
	for err == nil {
		url, err = r.repositories(ctx, fn, url)
	}
	if err != errNoLink {
		return err
	}
	return nil
}
// repositories returns a single page of repository list with the next link.
func (r *Registry) repositories(ctx context.Context, fn func(repos []string) error, url string) (string, error) {
	req, err := http.NewRequestWithContext(ctx, http.MethodGet, url, nil)
	if err != nil {
		return "", err
	}
	// Ask the remote for a specific page size when one is configured.
	if r.RepositoryListPageSize > 0 {
		q := req.URL.Query()
		q.Set("n", strconv.Itoa(r.RepositoryListPageSize))
		req.URL.RawQuery = q.Encode()
	}
	resp, err := r.client().Do(req)
	if err != nil {
		return "", err
	}
	defer resp.Body.Close()
	if resp.StatusCode != http.StatusOK {
		return "", errutil.ParseErrorResponse(resp)
	}
	var page struct {
		Repositories []string `json:"repositories"`
	}
	// Cap the response size before decoding to guard against oversized payloads.
	lr := limitReader(resp.Body, r.MaxMetadataBytes)
	if err := json.NewDecoder(lr).Decode(&page); err != nil {
		return "", fmt.Errorf("%s %q: failed to decode response: %w", resp.Request.Method, resp.Request.URL, err)
	}
	if err := fn(page.Repositories); err != nil {
		return "", err
	}
	// parseLink yields the next page URL, or errNoLink on the last page.
	return parseLink(resp)
}
// Repository returns a repository reference by the given name.
func (r *Registry) Repository(ctx context.Context, name string) (registry.Repository, error) {
	ref := registry.Reference{Registry: r.Reference.Registry, Repository: name}
	if err := ref.ValidateRepository(); err != nil {
		return nil, err
	}
	// Derive the repository from the registry's common options template.
	derived := Repository(r.RepositoryOptions)
	derived.Reference = ref
	return &derived, nil
}
|
package io.provenance.scope.encryption.util
import java.io.FilterInputStream
import java.io.IOException
import java.io.InputStream
import java.security.MessageDigest
import javax.crypto.BadPaddingException
import javax.crypto.Cipher
import javax.crypto.IllegalBlockSizeException
/**
 * An [InputStream] decorator that simultaneously:
 *  1. serves the optional [header] bytes before the stream content,
 *  2. pipes the underlying bytes through [cipher], and
 *  3. feeds the *raw* (pre-cipher) bytes into [digest], so the plaintext hash
 *     is available via [hash] once the stream has been consumed.
 */
class HashingCipherInputStream(
    inputStream: InputStream,
    private val cipher: Cipher,
    private val digest: MessageDigest,
    private val header: ByteArray = ByteArray(0)
): FilterInputStream(inputStream) {
    // Set once the underlying stream is exhausted and cipher.doFinal() has run.
    private var done: Boolean = false
    // Raw buffer to read bytes from stream
    private val rawBuffer = ByteArray(32768)
    // Position marker for when to read out of header
    // NOTE(review): read(b, off, len) also advances pos past header.size in the
    // ciphered branch, while read() does not — the extra bookkeeping appears
    // unused once the header is consumed; confirm before relying on pos.
    private var pos = 0
    // Buffer to hold ciphered bytes, should be larger than rawBuffer for block size reasons
    private var cipheredBuffer = ByteArray(rawBuffer.size + 32)
    // Window [cipheredStart, cipheredEnd) of ciphered bytes not yet handed out.
    private var cipheredStart = 0
    private var cipheredEnd = 0

    override fun close() {
        `in`.close()
    }

    // mark/reset are unsupported: already-consumed bytes cannot be re-ciphered.
    override fun markSupported(): Boolean {
        return false
    }

    override fun read(): Int {
        if (pos >= header.size) {
            if (cipheredStart >= cipheredEnd) {
                // we loop for new data as the spec says we are blocking
                var i = 0
                while (i == 0) i = getMoreData()
                if (i == -1) return -1
            }
            return cipheredBuffer[cipheredStart++].toInt() and 0xff
        } else {
            // Bitwise and removed the one's complement (which makes 0xFF appear to be -1 when converted to int)
            return header[pos++].toInt() and 0xFF
        }
    }

    override fun read(b: ByteArray): Int {
        return read(b, 0, b.size)
    }

    override fun read(b: ByteArray?, off: Int, len: Int): Int {
        if (pos >= header.size) {
            if (cipheredStart >= cipheredEnd) {
                // we loop for new data as the spec says we are blocking
                var i = 0
                while (i == 0) i = getMoreData()
                if (i == -1) return -1
            }
            // NOTE(review): the len <= 0 short-circuit happens only after
            // blocking for data above; a zero-length read may still block.
            if (len <= 0) {
                return 0
            }
            var available = cipheredEnd - cipheredStart
            if (len < available) available = len
            if (b != null) {
                System.arraycopy(cipheredBuffer, cipheredStart, b, off, available)
            }
            cipheredStart += available
            pos += available
            return available
        } else {
            // Serve as much of the header as fits into the caller's request.
            val sizeLeftInHeader = header.size - pos
            val bytesToRead = if (sizeLeftInHeader > len) {
                len
            } else {
                sizeLeftInHeader
            }
            System.arraycopy(header, pos, b, off, bytesToRead)
            pos += bytesToRead
            return bytesToRead
        }
    }

    // Finalizes and returns the digest over all raw bytes read so far;
    // MessageDigest.digest() resets the digest as a side effect.
    fun hash(): ByteArray {
        return digest.digest()
    }

    // Refills cipheredBuffer from the underlying stream. Returns the number of
    // ciphered bytes produced, 0 when the cipher buffered everything, or -1 at EOF.
    private fun getMoreData(): Int {
        if (done) return -1
        val read = `in`.read(rawBuffer, 0, rawBuffer.size)
        if (read == -1) {
            // End of stream: flush the cipher's final block(s).
            done = true
            try {
                cipheredBuffer = cipher.doFinal()
                cipheredEnd = cipheredBuffer.size
            } catch (e: IllegalBlockSizeException) {
                // Zero the buffer so no partial ciphertext leaks via the exception path.
                cipheredBuffer.fill(0)
                throw IOException(e)
            } catch (e: BadPaddingException) {
                cipheredBuffer.fill(0)
                throw IOException(e)
            }
            cipheredStart = 0
            return cipheredEnd
        }
        try {
            cipheredEnd = cipher.update(rawBuffer, 0, read, cipheredBuffer)
            // Hash the raw (pre-cipher) bytes, matching what was ciphered above.
            digest.update(rawBuffer, 0, read)
        } catch (e: IllegalStateException) {
            cipheredBuffer.fill(0)
            throw e
        }
        cipheredStart = 0
        return cipheredEnd
    }
}
|
import {
BatchSpecWorkspaceResolutionState,
WorkspaceResolutionStatusResult,
PreviewBatchSpecWorkspaceFields,
BatchSpecWorkspacesResult,
BatchSpecImportingChangesetsResult,
PreviewBatchSpecImportingChangesetFields,
} from '../../../../graphql-operations'
// Builds a WorkspaceResolutionStatusResult for the given state, optionally
// carrying a failure message.
export const mockWorkspaceResolutionStatus = (
    status: BatchSpecWorkspaceResolutionState,
    error?: string
): WorkspaceResolutionStatusResult => {
    const workspaceResolution = {
        __typename: 'BatchSpecWorkspaceResolution' as const,
        state: status,
        failureMessage: error || null,
    }
    return {
        node: {
            __typename: 'BatchSpec',
            workspaceResolution,
        },
    }
}
// Builds a single workspace fixture. `fields` allows partial overrides.
// NOTE(review): `...fields` is spread before the explicit `repository` and
// `branch` keys, so those two objects below always win as wholes while still
// merging the caller's nested overrides (`fields.repository`, `fields.branch`).
export const mockWorkspace = (
    id: number,
    fields?: Partial<PreviewBatchSpecWorkspaceFields>
): PreviewBatchSpecWorkspaceFields => ({
    __typename: 'BatchSpecWorkspace',
    path: '/',
    searchResultPaths: ['/first-path'],
    cachedResultFound: false,
    ignored: false,
    unsupported: false,
    ...fields,
    repository: {
        __typename: 'Repository',
        id: `repo-${id}`,
        name: `github.com/my-org/repo-${id}`,
        url: 'superfake.com',
        defaultBranch: {
            __typename: 'GitRef',
            id: 'main-branch-id',
            ...fields?.repository?.defaultBranch,
        },
        ...fields?.repository,
    },
    branch: {
        __typename: 'GitRef',
        id: 'main-branch-id',
        abbrevName: 'main',
        displayName: 'main',
        ...fields?.branch,
        target: {
            __typename: 'GitObject',
            oid: 'asdf1234',
            ...fields?.branch?.target,
        },
        url: 'superfake.com',
    },
})
// Builds `count` workspace fixtures with ids 0..count-1.
export const mockWorkspaces = (count: number): PreviewBatchSpecWorkspaceFields[] =>
    Array.from({ length: count }, (_, id) => mockWorkspace(id))
// Builds a single visible imported-changeset fixture for the given id.
const mockImportingChangeset = (
    id: number
): PreviewBatchSpecImportingChangesetFields & { __typename: 'VisibleChangesetSpec' } => {
    const baseRepository = {
        name: `repo-${id}`,
        url: 'superfake.com',
    }
    return {
        __typename: 'VisibleChangesetSpec',
        id: `changeset-${id}`,
        description: {
            __typename: 'ExistingChangesetReference',
            externalID: `external-changeset-${id}`,
            baseRepository,
        },
    }
}
// Builds `count` imported-changeset fixtures with ids 0..count-1.
export const mockImportingChangesets = (
    count: number
): (PreviewBatchSpecImportingChangesetFields & {
    __typename: 'VisibleChangesetSpec'
})[] => Array.from({ length: count }, (_, id) => mockImportingChangeset(id))
// Builds a full BatchSpecWorkspacesResult page wrapping `workspacesCount`
// workspace fixtures; pagination is hard-coded to claim one more page.
export const mockBatchSpecWorkspaces = (workspacesCount: number): BatchSpecWorkspacesResult => ({
    node: {
        __typename: 'BatchSpec',
        workspaceResolution: {
            __typename: 'BatchSpecWorkspaceResolution',
            workspaces: {
                __typename: 'BatchSpecWorkspaceConnection',
                totalCount: workspacesCount,
                pageInfo: {
                    hasNextPage: true,
                    endCursor: 'end-cursor',
                },
                nodes: mockWorkspaces(workspacesCount),
            },
        },
    },
})
// Builds a full BatchSpecImportingChangesetsResult page wrapping `importsCount`
// fixtures; pagination is hard-coded to claim one more page.
export const mockBatchSpecImportingChangesets = (importsCount: number): BatchSpecImportingChangesetsResult => ({
    node: {
        __typename: 'BatchSpec',
        importingChangesets: {
            __typename: 'ChangesetSpecConnection',
            totalCount: importsCount,
            pageInfo: {
                hasNextPage: true,
                endCursor: 'end-cursor',
            },
            nodes: mockImportingChangesets(importsCount),
        },
    },
})
|
// Copyright (c) Microsoft. All rights reserved.
// Licensed under the MIT license. See LICENSE file in the project root for full license information.
using System;
using System.Collections.Generic;
using System.Text;
namespace Microsoft.MIDebugEngine
{
    /// <summary>
    /// Well-known GUIDs used by the MI debug engine.
    /// </summary>
    public static class EngineConstants
    {
        /// <summary>
        /// This is the engine GUID of the engine. It needs to be changed here and in the registration
        /// when creating a new engine.
        /// </summary>
        public static readonly Guid EngineId = new Guid("{ea6637c6-17df-45b5-a183-0951c54243bc}");

        /// <summary>GUID for the GDB engine variant (inferred from the name; confirm against registration).</summary>
        public static readonly Guid GdbEngine = new Guid("{91744D97-430F-42C1-9779-A5813EBD6AB2}");

        /// <summary>GUID for the LLDB engine variant (inferred from the name; confirm against registration).</summary>
        public static readonly Guid LldbEngine = new Guid("{5D630903-189D-4837-9785-699B05BEC2A9}");
    }
}
|
import moment from 'moment';
/**
 * Formats a date for display: dates more than one year old render as an
 * absolute, localized date (format string comes from the `dateOverAYear`
 * intl message); anything newer renders as a relative time ("3 days ago").
 *
 * @param {Date|string|number} date - value accepted by moment()
 * @param {object} intl - react-intl style object exposing formatMessage()
 * @returns {string} the formatted date text
 */
function formatDateString(date, intl) {
  const aYearAgo = moment().subtract(1, 'y');
  return moment(date).isBefore(aYearAgo)
    ? moment(date).format(intl.formatMessage({ id: 'dateOverAYear' }))
    : moment(date).fromNow();
}
export default {
formatDateString,
};
|
package test.processor;
import org.coffee.ioc.core.processor.Processor;
import test.ser.IHello;
/**
 * Test processor that logs before/after initialization and, when the managed
 * instance implements {@link IHello}, invokes its {@code say()} method.
 */
public class A implements Processor {

    /** Shared logic for beforeInit/afterInit: print the phase and greet. */
    private static void greetIfHello(String phase, Object instance) {
        if (instance instanceof IHello) {
            System.out.println(phase);
            IHello hello = (IHello) instance;
            hello.say();
        }
    }

    public Object beforeInit(Object instance) {
        greetIfHello("Before", instance);
        // Returning null preserves the original behavior of this processor.
        return null;
    }

    public Object afterInit(Object instance) {
        greetIfHello("After", instance);
        return null;
    }

    public void destroy(Object instance) {
        // No resources to release for this test processor.
    }
}
|
- Feature Name: `prior_art`
- Start Date: 2018-02-12
- RFC PR: [rust-lang/rfcs#2333](https://github.com/rust-lang/rfcs/pull/2333)
- Rust Issue: **self-executing**
# Summary
[summary]: #summary
Adds a *Prior art* section to the RFC template where RFC authors
may discuss the experience of other programming languages and their
communities with respect to what is being proposed. This section may
also discuss theoretical work such as papers.
# Motivation
[motivation]: #motivation
## Precedent has some importance
It is arguable whether or not precedent is important or whether proposals
should be considered solely on their own merits. This RFC argues that
precedent is important.
Precedent and in particular familiarity in and from other languages may
inform our choices in terms of naming, especially if that other language
is similar to Rust.
For additions to the standard library in particular, it should carry some
weight if a feature is supported in mainstream languages because the users
of those languages, which may also be rustaceans, are used to those features.
This is not to say that precedent alone is sufficient motivation for accepting
an RFC; but neither is it useless.
## Experiences from other languages are useful
This is the chief motivation of this RFC. By explicitly asking authors for
information about the similarity of their proposal to those in other languages,
we may get more information which aids us in better evaluating RFCs. Merely name
dropping that a language has a certain feature is not all - a discussion of the
experience the communities of the language in question has had is more useful.
A proposal need also not be a language or library proposal. If a proposal is
made for changes to how we work as a community, it can be especially valuable
how other communities have tackled a similar situation.
### Experiences are useful to the author themselves
During the process of writing an RFC, an author may change certain aspects
of the proposal from what they originally had in mind. They may tweak the RFC,
change certain aspects in a more radical way, and so on. Here, the benefit of
explicitly asking for and about prior art is that it makes the RFC author think
about the proposal in relation to other languages. In search for this
information, the author can come to new or better realizations about the
trade-offs, advantages, and drawbacks of their proposal. Thus, their RFC as
a whole is hopefully improved as a by-product.
## Papers can provide greater theoretical understanding
This RFC argues that it is valuable to us to be provided with papers or similar
that explain proposals and/or their theoretical foundations in greater detail
where such resources exist. This provides RFC readers with references if they
want a deeper understanding of an RFC. At the same time, this alleviates the
need to explain the minutiae of the theoretical background. The finer details
can instead be referred to the referred-to papers.
## An improved historical record of Rust for posterity
Finally, by writing down and documenting where our ideas came from,
we can better preserve the history and evolution of Rust for posterity.
While this is not very important right now, it will increase somewhat
in importance as time goes by.
# Guide-level explanation
[guide-level-explanation]: #guide-level-explanation
This Meta-RFC modifies the RFC template by adding a *Prior art* section
before the *Unresolved questions*. The newly introduced section is intended
to help authors reflect on the experience other languages have had with similar
and related concepts. This is meant to improve the RFC as a whole, but also
provide RFC readers with more details so that the proposal may be more fairly
and fully judged. The section also asks authors for other resources such as
papers where those exist. Finally, the section notes that precedent from other
languages on its own is not sufficient motivation to accept an RFC.
Please read the [reference-level-explanation] for exact details of what an RFC
author will see in the changed template.
# Reference-level explanation
[reference-level-explanation]: #reference-level-explanation
The implementation of this RFC consists of inserting the following
text to the RFC template before the section *Unresolved questions*:
> # Prior art
>
> Discuss prior art, both the good and the bad, in relation to this proposal.
> A few examples of what this can include are:
>
> - For language, library, cargo, tools, and compiler proposals:
> Does this feature exist in other programming languages and
> what experience have their communities had?
> - For community proposals: Is this done by some other community and what
> were their experiences with it?
> - For other teams: What lessons can we learn from what other communities
> have done here?
> - Papers: Are there any published papers or great posts that discuss this?
> If you have some relevant papers to refer to, this can serve as a more
> detailed theoretical background.
>
> This section is intended to encourage you as an author to think about
> the lessons from other languages, provide readers of your RFC with a
> fuller picture. If there is no prior art, that is fine - your ideas are
> interesting to us whether they are brand new or if it is an adaptation
> from other languages.
>
> Note that while precedent set by other languages is some motivation, it does
> not on its own motivate an RFC. Please also take into consideration that Rust
> sometimes intentionally diverges from common language features.
# Drawbacks
[drawbacks]: #drawbacks
This might encourage RFC authors into the thinking that just because a feature
exists in one language, it should also exist in Rust and that this can be the
sole argument. This RFC argues that the risk of this is small, and that with a
clear textual instruction in the RFC template, we can reduce it even further.
Another potential drawback is the risk that in a majority of cases, the prior
art section will simply be left empty with "N/A". Even if this is the case,
there will still be an improvement to the minority of RFCs that do include a
review of prior art. Furthermore, the changes to the template proposed
in this RFC are by no means irreversible. If we find out after some time that
this was a bad idea, we can always revert back to the way it was before.
Finally, a longer template risks making it harder to contribute to the
RFC process as an author as you are expected to fill in more sections.
Some people who don't know a lot of other languages may be intimidated into
thinking that they are expected to know a wide variety of languages and that
their contribution is not welcome otherwise. This drawback can be mitigated
by more clearly communicating that the RFC process is a collaborative effort.
If an author does not have prior art to offer up right away, other participants
in the RFC discussion may be able to provide such information which can then
be amended into the RFC.
# Rationale and alternatives
[alternatives]: #alternatives
If we don't change the template as proposed in this RFC, the downsides
are that we don't get the benefits enumerated within the [motivation].
As always, there is the simple alternative of not doing the changes proposed
in the RFC.
Other than that, we can come to the understanding that those that
want may include a prior art section if they wish, even if it is not
in the template. This is already the case - authors can always provide
extra information. The benefit of asking for the information explicitly
in the template is that more authors are likely to provide such information.
This is discussed more in the [motivation].
Finally, we can ask for information about prior art to be provided in each
section (motivation, guide-level explanation, etc.). This is however likely to
reduce the coherence and readability of RFCs. This RFC argues that it is better
that prior art be discussed in one coherent section. This is also similar to
how papers are structured in that they include a "related work" section.
# Prior art
[prior-art]: #prior-art
In many papers, a section entitled *Related work* is included which can
be likened to this section. To not drive readers away or be attention
stealing from the main contributions of a paper, it is usually recommended
that this section be placed near the end of papers. For the reasons mentioned,
this is a good idea - and so to achieve the same effect, the section you are
currently reading will be placed precisely where it is placed right now, that
is, before the *Unresolved questions* section, which we can liken to a
*Future work* section inside a paper.
A review of the proposal templates for [`C++`], [`python`], [`Java`], [`C#`],
[`Scala`], [`Haskell`], [`Swift`], and [`Go`] did not turn up such a section
within those communities templates. Some of these templates are quite similar
and have probably inspired each other. To the RFC author's knowledge, no other
mainstream programming language features a section such as this.
[`C++`]: https://isocpp.org/std/submit-a-proposal
[`python`]: https://github.com/python/peps/blob/master/pep-0001.txt
[`Java`]: http://openjdk.java.net/jeps/2
[`C#`]: https://github.com/dotnet/csharplang/blob/master/proposals/proposal-template.md
[`Haskell`]: https://github.com/ghc-proposals/ghc-proposals/blob/master/proposals/0000-template.rst
[`Scala`]: https://github.com/scala/docs.scala-lang/blob/master/_sips/sip-template.md
[`Go`]: https://github.com/golang/proposal/blob/master/design/TEMPLATE.md
[`Swift`]: https://github.com/apple/swift-evolution/blob/master/0000-template.md
# Unresolved questions
[unresolved]: #unresolved-questions
There are none as of yet.
What is important in this RFC is that we establish whether we want a
prior art section or not, and what it should contain in broad terms.
The exact language and wording can always be tweaked beyond this.
|
#!/usr/bin/perl -w
use strict;
use Bio::AlignIO;
use File::Spec;
use Getopt::Long;
use List::Util qw(sum);
use Bio::SeqIO;
use constant GAP => '-';
# Raw phylogenetic distances from the N. crassa reference:
#   index 0: n.crassa -> n.crassa
#   index 1: n.crassa -> n.tetrasperma (=2/3)
#   index 2: n.crassa -> n.discreta   (=1/3)
my @distance = (0, 1, 2);
my $Factor = sum(@distance);
# Normalize each raw distance into a similarity weight: 1 - d/sum(d).
@distance = map { 1 - $_ / $Factor } @distance;
# Algorithm
# -- process each alignment, write out score info (gap or identity #)
# in genomic coordinates
# -- can dump as a detailed per-site tab file or a wig file
# mercator is 0-based
# Default configuration; each value below can be overridden on the
# command line through GetOptions.
my $chrom_file = '/data/genomes/dna/neurospora_crassa_OR74A_7.fasta';
my $aln_file_name = 'output.mfa';            # alignment file name inside each mercator subdir
my $alnformat = 'fasta';                     # Bio::AlignIO format of those files
my $info = '3way';                           # label woven into track names and output files
my $outfile = 'wig/neurospora_crassa_OR74A_7'; # output path prefix
my $alndir = 'alignments/pecan_alignments';  # mercator output dir (must contain 'map')
my $debug = 0;
my $tab = 0; # write out report as TAB or WIG
my $ref_genome_index = 0;                    # column offset of the reference genome in the map file
GetOptions(
    'g|c|genome|chrom:s'=> \$chrom_file,
    'f|alnfile:s' => \$aln_file_name,
    'af|format:s' => \$alnformat,
    'v|verbose!' => \$debug,
    't|tab' => \$tab, # TAB or WIG flag
    'o|out|output:s' => \$outfile,
    'r|refidx|ref:i' => \$ref_genome_index,
    'd|desc:s' => \$info,
    'a|i|input:s' => \$alndir,
);
# Track names/descriptions for the two WIG tracks written below.
my $gapname = "$info\_gaps";
my $gapdesc = "$info gapped alignment calc";
my $idname = "$info\_identical";
my $iddesc = "$info identical alignment calc";
# Bail out early if the alignment directory is missing or unreadable.
unless( -d $alndir || -r $alndir ) {
    die("cannot open alignment dir '$alndir'\n");
}
# Read the reference genome FASTA once to record every chromosome's length
# (so a value can be emitted for each position later) and the original
# sequence ordering.
my (%chrom_lengths,@chrom_order);
{
    my $inseq = Bio::SeqIO->new(-format => 'fasta',
                                -file => $chrom_file);
    while( my $seq = $inseq->next_seq ) {
        $chrom_lengths{$seq->display_id} = $seq->length;
        push @chrom_order, $seq->display_id;
    }
}
# Parse the mercator 'map' file: each row starts with an alignment id
# followed by per-genome columns. The three columns at offset
# $ref_genome_index are taken as (chrom, start, end) for the reference
# genome — assumes one column per genome coordinate field; TODO confirm
# the map layout for multi-genome offsets. Mercator coordinates are 0-based.
my $mapfile = File::Spec->catfile($alndir,'map');
open(my $mapfh => $mapfile ) || die "$mapfile: $!";
my %chroms;
while(<$mapfh>) {
    my ($aln_id, @line) = split;
    my ($chrom,$start,$end) = map { $line[$ref_genome_index + $_] } 0..2;
    next if $chrom eq 'NA';   # reference genome absent from this alignment block
    push @{$chroms{$chrom}}, [$aln_id, $start,$end]
}
# Open output handles: either a single TAB-delimited report, or a pair of
# WIG tracks (gap score and identity score) with their track headers.
my ($gapfh,$idfh);
if( $tab ) {
    open($gapfh => ">$outfile.$info.tab") || die $!;
    print $gapfh join("\t", qw(CHROM POSITION
                               ALN_ID ALN_COL
                               ALLELES IDENTICAL GAPPED)),"\n";
} else {
    open($gapfh => ">$outfile.$info\_gap.wig") || die $!;
    open($idfh => ">$outfile.$info\_identity.wig") || die $!;
    printf $gapfh "track type=wiggle_0 name=\"%s\" description=\"%s\"\n",
        $gapname,$gapdesc;
    printf $idfh "track type=wiggle_0 name=\"%s\" description=\"%s\"\n",
        $idname,$iddesc;
}
# Main pass: for each chromosome (in FASTA order), score every alignment
# column that maps to a reference base, then emit one value per genomic
# position (1..chromosome length).
for my $chrom ( @chrom_order ) {
    my (%scores,%gaps,%info);
    unless( $tab ) {
        # One fixedStep section per chromosome; WIG positions are 1-based.
        printf $gapfh "fixedStep chrom=%s start=%s step=1\n",$chrom,1;
        printf $idfh "fixedStep chrom=%s start=%s step=1\n",$chrom,1;
    }
    for my $pos ( @{$chroms{$chrom}} ) {
        my ($aln_id, $start,$end) = @$pos;
        my $fname = File::Spec->catfile($alndir,$aln_id,$aln_file_name);
        my $alnio = Bio::AlignIO->new(-file => $fname,
                                      -format => $alnformat);
        if( my $aln = $alnio->next_aln ) {
            my $col_count = $aln->length;
            # simplified data structure, we're going to use substr
            # for speed I hope
            my (@seqs,@ids);
            for my $seq ( $aln->each_seq ) {
                push @seqs, $seq->seq;
                push @ids, $seq->display_id; # just to double check
                $seq = undef;   # free the Bio::Seq object early
            }
            undef $aln;
            # process each column
            # Convert the 0-based mercator start to a 1-based genomic coord.
            my $genome_index = $start+1;
            for( my $col_index = 0; $col_index < $col_count; $col_index++ ) {
                my $ref_base = uc(substr($seqs[$ref_genome_index],$col_index,1));
                if( $ref_base ne GAP ) { # we skip all ref-base gap columns
                                         # as these can't be written in ref coords
                    # iterate thru each sequence
                    my @debug = ($ref_base);
                    my $i = 0;
                    my ($score,$gap) = (0,0);
                    # Accumulate distance-weighted identity and gap scores
                    # over every non-reference sequence in this column.
                    for my $rest ( @seqs ) {
                        if( $i != $ref_genome_index ) { # that is NOT the ref
                            my $base = uc(substr($seqs[$i],$col_index,1));
                            if( $base eq $ref_base ) {
                                # identical
                                $score += $distance[$i];
                            } elsif( $base eq GAP ) {
                                $gap += $distance[$i];
                            }
                            push @debug, $base;
                        }
                        $i++;
                    }
                    $scores{$genome_index} = $score;
                    $gaps{$genome_index} = $gap;
                    if( $tab ) {
                        # Keep per-site provenance only in TAB mode (memory).
                        $info{$genome_index} = [ $aln_id,
                                                 $col_index,
                                                 join(",",@debug),
                        ];
                    }
                    $genome_index++;
                }
            }
        } else {
            warn("No alignments in $fname\n");
        }
        warn(" done with $chrom ($aln_id)\n");
    }
    warn("done with $chrom\n");
    # Emit every position; positions with no alignment default to
    # gap=1.00 / identity=0.00 in WIG mode.
    for( my $i = 1; $i <= $chrom_lengths{$chrom}; $i++ ) {
        if( $tab ) {
            if( exists $info{$i} ) {
                print $gapfh join("\t", $chrom, $i, @{$info{$i}},
                                  $scores{$i},$gaps{$i}),"\n";
            }
        } else {
            if( exists $gaps{$i} ) {
                # NOTE(review): the gap track writes 1 - gap_weight (i.e. the
                # non-gapped fraction) while absent positions write 1.00 —
                # confirm this polarity is intended.
                printf $gapfh "%.2f\n", 1 - $gaps{$i};
                printf $idfh "%.2f\n", $scores{$i};
            } else {
                print $gapfh "1.00\n";
                print $idfh "0.00\n";
            }
        }
    }
}
|
# opsmatic::handler
# Installs and configures the Opsmatic report and exception handler

# Shared Opsmatic setup lives in the common recipe.
include_recipe 'opsmatic::common'

# Install the handler gem into Chef's own Ruby so the `require` below can
# load it during this run. Version is pinned via node attribute.
chef_gem 'chef-handler-opsmatic' do
  action :upgrade
  version node['opsmatic']['handler_version']
end

require 'chef/handler/opsmatic'

# Register the handler. `action :nothing` plus `run_action(:enable)` forces
# enabling at compile time — presumably so this very run is also reported;
# TODO confirm that is the intent.
chef_handler 'Chef::Handler::Opsmatic' do
  source 'chef/handler/opsmatic'
  arguments [
    :integration_token => node['opsmatic']['integration_token'],
    :collector_url => node['opsmatic']['handler_endpoint'],
    :ssl_peer_verify => node['opsmatic']['handler_ssl_peer_verify']
  ]
  action :nothing
end.run_action(:enable)
|
// ObjectSecurity_TTest.cs - NUnit Test Cases for ObjectSecurity<T>
//
// Authors:
// James Bellinger ([email protected])
#if NET_4_0
using System;
using System.Security.AccessControl;
using System.Security.Principal;
using NUnit.Framework;
namespace MonoTests.System.Security.AccessControl
{
/// <summary>
/// Tests for ObjectSecurity&lt;T&gt;: an enum rights type works end to end,
/// while a non-enum (struct) rights type fails when the rights value is
/// converted.
/// Fixed: NUnit's Assert.AreEqual signature is (expected, actual); the
/// original passed the arguments reversed, which garbles failure messages.
/// </summary>
[TestFixture]
public class ObjectSecurity_TTest
{
	// Enum rights type — the supported case.
	enum WillWorkRights
	{
		Value = 1
	}

	class WillWorkSecurity : ObjectSecurity<WillWorkRights>
	{
		public WillWorkSecurity ()
			: base (false, ResourceType.Unknown)
		{
		}
	}

	// Struct rights type — not convertible to an integral rights value.
	struct WillFailRights
	{
	}

	class WillFailSecurity : ObjectSecurity<WillFailRights>
	{
		public WillFailSecurity ()
			: base (false, ResourceType.Unknown)
		{
		}
	}

	[Test]
	public void TypesAreCorrect ()
	{
		WillWorkSecurity security = new WillWorkSecurity ();
		Assert.AreEqual (typeof (WillWorkRights), security.AccessRightType);
		Assert.AreEqual (typeof (AccessRule<WillWorkRights>), security.AccessRuleType);
		Assert.AreEqual (typeof (AuditRule<WillWorkRights>), security.AuditRuleType);
	}

	[Test]
	public void WillWorkOKUsingAccessFactory ()
	{
		WillWorkSecurity security = new WillWorkSecurity ();
		SecurityIdentifier id = new SecurityIdentifier (WellKnownSidType.WorldSid, null);
		AccessRule<WillWorkRights> rule = (AccessRule<WillWorkRights>)
			security.AccessRuleFactory (id, 1, false,
						    InheritanceFlags.None, PropagationFlags.None,
						    AccessControlType.Allow);
		Assert.AreEqual (AccessControlType.Allow, rule.AccessControlType);
		Assert.AreEqual (id, rule.IdentityReference);
		Assert.AreEqual (InheritanceFlags.None, rule.InheritanceFlags);
		Assert.AreEqual (PropagationFlags.None, rule.PropagationFlags);
		Assert.AreEqual (WillWorkRights.Value, rule.Rights);
	}

	[Test]
	public void WillWorkOKUsingConstructor()
	{
		SecurityIdentifier id = new SecurityIdentifier (WellKnownSidType.WorldSid, null);
		AccessRule<WillWorkRights> rule = new AccessRule<WillWorkRights> (id, WillWorkRights.Value,
										  AccessControlType.Allow);
		Assert.AreEqual (AccessControlType.Allow, rule.AccessControlType);
		Assert.AreEqual (id, rule.IdentityReference);
		Assert.AreEqual (WillWorkRights.Value, rule.Rights);
	}

	[Test, ExpectedException (typeof (InvalidCastException))]
	public void WillFailFailsUsingFactoryOnGetter()
	{
		WillFailSecurity security = new WillFailSecurity ();
		SecurityIdentifier id = new SecurityIdentifier (WellKnownSidType.WorldSid, null);
		AccessRule<WillFailRights> rule = (AccessRule<WillFailRights>)
			security.AccessRuleFactory (id, 1, false,
						    InheritanceFlags.None, PropagationFlags.None,
						    AccessControlType.Allow);
		// Reading Rights performs the int -> struct conversion, which throws.
		WillFailRights rights = rule.Rights;
	}

	[Test, ExpectedException (typeof (InvalidCastException))]
	public void WillFailFailsUsingConstructor()
	{
		SecurityIdentifier id = new SecurityIdentifier (WellKnownSidType.WorldSid, null);
		// Construction itself performs the failing conversion.
		AccessRule<WillFailRights> rule = new AccessRule<WillFailRights> (id, new WillFailRights(),
										  AccessControlType.Allow);
	}
}
#endif
|
/* Hibernate, Relational Persistence for Idiomatic Java
*
* SPDX-License-Identifier: Apache-2.0
* Copyright: Red Hat Inc. and Hibernate Authors
*/
package org.hibernate.reactive.loader.collection.impl;
import org.hibernate.HibernateException;
import org.hibernate.engine.spi.LoadQueryInfluencers;
import org.hibernate.engine.spi.SessionFactoryImplementor;
import org.hibernate.engine.spi.SessionImplementor;
import org.hibernate.engine.spi.SharedSessionContractImplementor;
import org.hibernate.internal.util.collections.ArrayHelper;
import org.hibernate.persister.collection.QueryableCollection;
import org.hibernate.reactive.loader.collection.ReactiveCollectionInitializer;
import java.io.Serializable;
import java.util.concurrent.CompletionStage;
/**
 * A batching entity loader for {@link org.hibernate.loader.BatchFetchStyle#DYNAMIC}
 * which selects between a single-key {@link ReactiveCollectionInitializer} and a
 * batching {@link ReactiveDynamicBatchingCollectionInitializer} depending upon how many keys it's
 * given.
 *
 * @see org.hibernate.loader.collection.DynamicBatchingCollectionInitializerBuilder.DynamicBatchingCollectionInitializer
 */
public class ReactiveDynamicBatchingCollectionDelegator extends ReactiveCollectionLoader {

	private final int maxBatchSize;
	private final ReactiveCollectionLoader singleKeyLoader;
	private final ReactiveDynamicBatchingCollectionInitializer batchLoader;

	public ReactiveDynamicBatchingCollectionDelegator(
			QueryableCollection collectionPersister,
			int maxBatchSize,
			SessionFactoryImplementor factory,
			LoadQueryInfluencers influencers) {
		super( collectionPersister, factory, influencers );
		this.maxBatchSize = maxBatchSize;

		if ( collectionPersister.isOneToMany() ) {
			this.singleKeyLoader = new ReactiveOneToManyLoader( collectionPersister, 1, factory, influencers );
		}
		else {
			throw new UnsupportedOperationException();
//			this.singleKeyLoader = new ReactiveBasicCollectionLoader( collectionPersister, 1, factory, influencers );
		}

		this.batchLoader = new ReactiveDynamicBatchingCollectionInitializer( collectionPersister, factory, influencers );
	}

	@Override
	public void initialize(Serializable id, SharedSessionContractImplementor session) throws HibernateException {
		final Serializable[] idsToLoad = batchableIds( id, session );
		if ( idsToLoad == null ) {
			// 0 or 1 batchable ids: the plain single-key loader is cheaper
			singleKeyLoader.loadCollection( session, id, collectionPersister().getKeyType() );
			return;
		}
		batchLoader.doBatchedCollectionLoad( (SessionImplementor) session, idsToLoad, collectionPersister().getKeyType() );
	}

	@Override
	public CompletionStage<Void> reactiveInitialize(Serializable id, SharedSessionContractImplementor session) {
		final Serializable[] idsToLoad = batchableIds( id, session );
		if ( idsToLoad == null ) {
			return singleKeyLoader.reactiveLoadCollection( session, id, collectionPersister().getKeyType() );
		}
		return batchLoader.doBatchedCollectionLoad( (SessionImplementor) session, idsToLoad,
				collectionPersister().getKeyType() );
	}

	/**
	 * Collects the batchable collection keys for {@code id} from the batch
	 * fetch queue, trimmed to the non-null prefix. Returns {@code null} when
	 * batching is not worthwhile (zero or one key), in which case callers
	 * fall back to the single-key loader. Extracted because both
	 * {@link #initialize} and {@link #reactiveInitialize} duplicated this
	 * logic verbatim.
	 */
	private Serializable[] batchableIds(Serializable id, SharedSessionContractImplementor session) {
		// first, figure out how many batchable ids we have...
		final Serializable[] batch = session.getPersistenceContextInternal()
				.getBatchFetchQueue()
				.getCollectionBatch( collectionPersister(), id, maxBatchSize );
		final int numberOfIds = ArrayHelper.countNonNull( batch );
		if ( numberOfIds <= 1 ) {
			return null;
		}
		final Serializable[] idsToLoad = new Serializable[numberOfIds];
		System.arraycopy( batch, 0, idsToLoad, 0, numberOfIds );
		return idsToLoad;
	}
}
|
import 'package:flutter/material.dart';
import 'package:flutter/services.dart';
/// Page for editing the user id/name pair that backs the home-screen widget.
class UserPage extends StatefulWidget {
  // Initial value shown in the ID text field.
  final String userid;
  // Initial value shown in the NAME text field.
  final String author;

  const UserPage({Key key, @required this.userid, @required this.author})
      : super(key: key);

  @override
  _UserPageState createState() => _UserPageState();
}
class _UserPageState extends State<UserPage> {
  /// Channel to the native side that owns the home-screen widget data.
  static const MethodChannel _channel = MethodChannel("com.cc.ToDo.widgets");

  TextEditingController idController;
  TextEditingController nameController;

  @override
  void initState() {
    super.initState();
    idController = TextEditingController(text: widget.userid);
    nameController = TextEditingController(text: widget.author);
  }

  @override
  void dispose() {
    // Fixed: controllers must be released before super.dispose(); the
    // original called super.dispose() first, against Flutter's documented
    // teardown order.
    idController.dispose();
    nameController.dispose();
    super.dispose();
  }

  /// Pushes the current field values to the platform side and logs the reply.
  Future<void> _save() async {
    var res = await _channel.invokeMethod("updateWidgetData", {
      "userid": idController.text,
      "author": nameController.text
    });
    print(res);
    print(res.runtimeType);
  }

  @override
  Widget build(BuildContext context) {
    return Scaffold(
      appBar: AppBar(
        title: Text("用户中心"),
      ),
      body: Container(
        padding: const EdgeInsets.symmetric(horizontal: 24, vertical: 12),
        child: Column(
          children: [
            TextField(
                controller: idController,
                decoration:
                    InputDecoration(helperText: "点击可编辑ID", labelText: "ID")),
            TextField(
                controller: nameController,
                decoration: InputDecoration(
                    helperText: "点击可编辑NAME", labelText: "NAME")),
            const SizedBox(
              height: 52,
            ),
            ElevatedButton(onPressed: _save, child: Text("保存"))
          ],
        ),
      ),
    );
  }
}
|
using System.Collections;
using System.Collections.Generic;
using UnityEngine;
/// <summary>
/// Projectile behaviour: destroys itself on any trigger contact and removes
/// destroyable scenery / damages tanks it hits.
/// </summary>
public class Bullet : MonoBehaviour
{
    // Set only by the commented-out explosion code below, so currently this
    // countdown never runs. NOTE(review): confirm whether the explosion path
    // is meant to be re-enabled.
    private bool _exploding;
    private float _timeToDestroy;

    private void Update()
    {
        if (_exploding)
        {
            _timeToDestroy -= Time.deltaTime;
            if (_timeToDestroy <= 0f)
            {
                Destroy(gameObject);
            }
        }
    }

    void OnTriggerEnter(Collider other)
    {
        // The bullet is destroyed unconditionally on first contact.
        // NOTE(review): this runs before the _exploding guard below, so the
        // guard only skips the tag-based damage — confirm that is intended.
        Destroy(gameObject);

        // Flatten anything on the Buildings/Destroyable layers outright.
        if( other.gameObject.layer == LayerMask.NameToLayer("Buildings") ||
            other.gameObject.layer == LayerMask.NameToLayer("Destroyable"))
        {
            Destroy(other.gameObject);
        }

        if(_exploding)
        {
            return;
        }

        // Tag-based handling: tanks take a hit, anything else is hidden.
        if (other.gameObject.CompareTag("Destroyable"))
        {
            var tank = other.gameObject.GetComponent<Tank>();
            if(tank != null)
            {
                tank.OnHit();
            }
            else
            {
                other.gameObject.SetActive(false);
            }
        }

        // fire explosion and explode
        //GetComponentInChildren<ParticleSystem>().Play();
        //GetComponent<Rigidbody>().velocity = Vector3.zero;
        //_timeToDestroy = 0.5f;
        //_exploding = true;
    }
}
|
{-# LANGUAGE OverloadedStrings #-}
module Main where
import Data.Aeson (encode)
import qualified Data.ByteString.Lazy as BS
import Data.Foldable (toList)
import qualified Data.Map as Map
import Grammar
-- | Serialize the grammar below to a TextMate JSON grammar file.
main :: IO ()
main = BS.writeFile "output.tmGrammar.json" (encode grammar)
--------------------------------------------------------------------------------
-- | The top-level TextMate grammar for GCL (scope @source.gcl@,
-- file extension @.gcl@).
grammar :: Grammar
grammar =
  Grammar
    { grammarScopeName = "source.gcl",
      grammarFileTypes = ["gcl"],
      grammarFoldingStartMarker = Nothing,
      grammarFoldingStopMarker = Nothing,
      grammarFirstLineMatch = Nothing,
      grammarPatterns = topLevelPatterns,
      grammarRepository = repository
    }
-- | Patterns permitted only at the top-most level of a document:
-- the declaration rules, followed by everything that may nest.
topLevelPatterns :: [Reference]
topLevelPatterns = [ref var, ref con, ref let'] <> nestedPatterns
-- | Patterns that can appear anywhere: the statement rules first,
-- then the value rules (order preserved for match priority).
nestedPatterns :: [Reference]
nestedPatterns = map ref statementRules ++ values
  where
    statementRules =
      [ skip,
        abort,
        spec,
        assertion,
        loop,
        conditional,
        assignment,
        comment
      ]
-- | Every named rule keyed by its 'ruleID', so rules can be pulled in
-- by reference from pattern lists.
repository :: Repository
repository =
  Map.fromList $
    map
      (\rule -> (ruleID rule, rule))
      [ -- statements
        skip,
        abort,
        spec,
        assertion,
        loop,
        conditional,
        assignment,
        guardedCommand,
        -- comment
        comment,
        -- declarations
        con,
        var,
        let',
        -- values & expressions
        number,
        bool,
        -- types
        intType,
        boolType
      ]
--------------------------------------------------------------------------------
-- | Declarations
-- | @con@ declaration: the keyword opens the rule, which runs to end of
-- line and may contain type references.
con :: Rule
con =
  Rule
    { ruleID = "con",
      ruleBegin = captureWord "con" "keyword.control.con",
      ruleEnd = Just $ Capture "\\n" Map.empty,
      ruleMatch = Nothing,
      ruleName = Just "meta.declaration",
      ruleInclude = types,
      ruleContentName = Nothing
    }

-- | @var@ declaration: identical shape to 'con'.
var :: Rule
var =
  Rule
    { ruleID = "var",
      ruleBegin = captureWord "var" "keyword.control.var",
      ruleEnd = Just $ Capture "\\n" Map.empty,
      ruleMatch = Nothing,
      ruleName = Just "meta.declaration",
      ruleInclude = types,
      ruleContentName = Nothing
    }

-- | @let@ keyword, highlighted as a control keyword.
let' :: Rule
let' = match "let" "let" "keyword.control.let"
--------------------------------------------------------------------------------
-- | Comment
-- | Line comment: everything from @--@ to the end of the line.
comment :: Rule
comment =
  Rule
    { ruleID = "comment",
      ruleBegin = Nothing,
      ruleEnd = Nothing,
      ruleMatch =
        Just $
          Capture "(\\-\\-.*$)" $
            Map.fromList
              [ (1, "comment.line.double-dash")
              ],
      ruleName = Just "meta.comment.line",
      ruleInclude = [],
      ruleContentName = Nothing
    }
--------------------------------------------------------------------------------
-- | Statements
-- | @skip@ statement keyword.
skip :: Rule
skip = match "skip" "skip" "keyword.control.skip"

-- | @abort@ statement keyword.
abort :: Rule
abort = match "abort" "abort" "keyword.control.abort"

-- | Spec hole delimited by @{!@ and @!}@; contents are left unstyled.
spec :: Rule
spec =
  Rule
    { ruleID = "spec",
      ruleBegin = capture "\\{\\!" "punctuation.definition.quote.begin.markdown.gcl.spec.open",
      ruleEnd = capture "\\!\\}" "punctuation.definition.quote.begin.markdown.gcl.spec.close",
      ruleMatch = Nothing,
      ruleName = Just "meta.statement.spec",
      ruleInclude = [],
      ruleContentName = Nothing
    }

-- | Assertion delimited by braces; its body may contain value patterns.
assertion :: Rule
assertion =
  Rule
    { ruleID = "assertion",
      ruleBegin = capture "\\{" "support.other.parenthesis.regexp.gcl.assertion.open",
      ruleEnd = capture "\\}" "support.other.parenthesis.regexp.gcl.assertion.close",
      ruleMatch = Nothing,
      ruleName = Just "meta.statement.assertion",
      ruleInclude = values,
      ruleContentName = Nothing
    }
-- | @do ... od@ loop; body may contain guarded commands and any
-- nested pattern.
loop :: Rule
loop =
  Rule
    { ruleID = "loop",
      ruleBegin = captureWord "do" "keyword.control.loop",
      ruleEnd = captureWord "od" "keyword.control.loop",
      ruleMatch = Nothing,
      ruleName = Just "meta.statement.loop",
      ruleInclude = ref guardedCommand : nestedPatterns,
      ruleContentName = Nothing
    }

-- | @if ... fi@ conditional; same body rules as 'loop'.
conditional :: Rule
conditional =
  Rule
    { ruleID = "conditional",
      ruleBegin = captureWord "if" "keyword.control.conditional",
      ruleEnd = captureWord "fi" "keyword.control.conditional",
      ruleMatch = Nothing,
      ruleName = Just "meta.statement.conditional",
      ruleInclude = ref guardedCommand : nestedPatterns,
      ruleContentName = Nothing
    }

-- | The @:=@ assignment operator.
assignment :: Rule
assignment =
  Rule
    { ruleID = "assignment",
      ruleBegin = Nothing,
      ruleEnd = Nothing,
      ruleMatch =
        Just $
          Capture "(\\:\\=)" $
            Map.fromList
              [ (1, "keyword.control.assignment")
              ],
      ruleName = Just "meta.statement.assignment",
      ruleInclude = [],
      ruleContentName = Nothing
    }

-- | Guarded-command punctuation: @->@ (arrow) and @|@ (bar separator).
guardedCommand :: Rule
guardedCommand =
  Rule
    { ruleID = "guarded-command",
      ruleBegin = Nothing,
      ruleEnd = Nothing,
      ruleMatch =
        Just $
          Capture "(\\-\\>)|(\\|)" $
            Map.fromList
              [ (1, "punctuation.section.embedded.arrow"),
                (2, "punctuation.section.embedded.bar")
              ],
      ruleName = Just "meta.statement.guardedCommands",
      ruleInclude = [],
      ruleContentName = Nothing
    }
-- assignment :: Rule
-- assignment =
-- Rule
-- { ruleBegin = Nothing,
-- ruleEnd = Nothing,
-- ruleMatch = Just $ Capture "(:=)" $ Map.fromList [(1, "keyword.control.assigment")],
-- ruleName = Just "meta.statement.assignment",
-- ruleInclude = [],
-- ruleContentName = Just "meta.statement.assignment.values"
-- }
-- assignment :: Rule
-- assignment =
-- Rule
-- { ruleBegin =
-- Just $
-- Capture "(:=)" $
-- Map.fromList
-- [ (1, "keyword.control.assignment") ],
-- ruleEnd = Just $ Capture "(?=^\\S)" Map.empty,
-- ruleMatch = Nothing,
-- ruleName = Just "meta.statement.assignment",
-- ruleInclude = [],
-- ruleContentName = Just "invalid.meta.statement.assignment.values"
-- }
-- assignmentNames :: Rule
-- assignmentNames =
-- Rule
-- { ruleBegin = Nothing,
-- ruleEnd = Nothing,
-- ruleMatch = Just $ Capture ".+" Map.empty,
-- ruleName = Just "invalid.meta.assignment.names2",
-- ruleInclude = [],
-- ruleContentName = Nothing
-- }
-- declaration :: Rule
-- declaration =
-- Rule
-- { ruleBegin = Just $ Capture "^var" $ Map.fromList [(0, "storage.modifier")],
-- ruleEnd = Just $ Capture "(?=^\\S)" Map.empty,
-- ruleMatch = Nothing,
-- ruleName = Just "meta.declaration",
-- ruleInclude = [Ref "declaration-var"],
-- ruleContentName = Nothing
-- }
-- declarationVar :: Rule
-- declarationVar =
-- Rule
-- { ruleBegin = Just $ Capture "^var" $ Map.fromList [(0, "storage.modifier")],
-- ruleEnd = Just $ Capture "(?=^\\S)" Map.empty,
-- ruleMatch = Nothing,
-- ruleName = Just "meta.declaration",
-- ruleInclude = [Ref "built-in-type", Ref "declaration-var-names"],
-- ruleContentName = Nothing
-- }
--------------------------------------------------------------------------------
-- | Expressions & Values
-- | Value patterns usable inside expressions and assertions.
values :: [Reference]
values =
  map
    ref
    [ number,
      bool
    ]

-- | Integer literals.
number :: Rule
number =
  Rule
    { ruleID = "number",
      ruleBegin = Nothing,
      ruleEnd = Nothing,
      ruleMatch = Just (Capture "\\b[0-9]+\\b" $ Map.fromList [(0, "constant.numeric")]),
      ruleName = Just "constant.numeric",
      ruleInclude = [],
      ruleContentName = Nothing
    }

-- | Boolean literals @True@ and @False@.
bool :: Rule
bool =
  Rule
    { ruleID = "bool",
      ruleBegin = Nothing,
      ruleEnd = Nothing,
      ruleMatch = Just (Capture "\\b(True|False)\\b" $ Map.fromList [(0, "constant.language")]),
      ruleName = Just "constant.language",
      ruleInclude = [],
      ruleContentName = Nothing
    }
--------------------------------------------------------------------------------
-- | Types
-- | Appears in declarations
-- | Type references permitted inside declarations.
types :: [Reference]
types =
  map
    ref
    [ intType,
      boolType
    ]

-- | The built-in @Int@ type.
intType :: Rule
intType = match "intType" "Int" "entity.name.type.int"

-- | The built-in @Bool@ type.
-- Fixed: previously carried the Int scope ("entity.name.type.int"), an
-- apparent copy-paste slip; themes keying on the full scope name would
-- style Bool as Int.
boolType :: Rule
boolType = match "boolType" "Bool" "entity.name.type.bool"
|
import 'dart:async';
import 'dart:math' as math;
import 'package:flutter/material.dart';
import 'package:font_awesome_flutter/font_awesome_flutter.dart';
import 'package:netzpolitik_mobile/extensions/context_ext.dart';
import 'package:netzpolitik_mobile/models/article.dart';
import 'package:netzpolitik_mobile/persistence/article_dao.dart';
import 'package:netzpolitik_mobile/widgets/custom_views/wp_error_widget.dart';
import 'package:netzpolitik_mobile/widgets/custom_views/wp_grid_pagination.dart';
import 'package:netzpolitik_mobile/widgets/dashboard/articles/article_list_entry.dart';
import 'package:provider/provider.dart';
/// Dashboard section that lists the user's locally stored bookmarked articles.
class BookmarksWidget extends StatefulWidget {
  const BookmarksWidget();

  @override
  _BookmarksWidgetState createState() => _BookmarksWidgetState();
}
class _BookmarksWidgetState extends State<BookmarksWidget> implements ArticleDAOListener {

  late ArticleDAO _articleDAO;

  @override
  void initState() {
    _articleDAO = context.read<ArticleDAO>();
    // Re-render whenever bookmarks change elsewhere in the app.
    _articleDAO.listen(this);
    super.initState();
  }

  @override
  void dispose() {
    _articleDAO.unlisten(this);
    super.dispose();
  }

  @override
  Widget build(BuildContext context) {
    return FutureBuilder<List<Article>>(
      future: _articleDAO.getAll(),
      builder: (context, snapshot) {
        //loading
        if (snapshot.connectionState != ConnectionState.done) {
          return CircularProgressIndicator();
        }

        // Fixed: a failed future used to fall through to the "no bookmarks"
        // message; surface the loading error instead.
        if (snapshot.hasError) {
          return _buildErrorWidget(context);
        }

        final articles = snapshot.data;
        if (articles == null || articles.isEmpty) {
          return _buildEmptyWidget(context);
        } else {
          return _buildContent(context, articles);
        }
      },
    );
  }

  /// Shown when loading the bookmarks failed outright.
  Widget _buildErrorWidget(BuildContext context) {
    return Center(
      child: Padding(
        padding: const EdgeInsets.all(8.0),
        child: WPErrorWidget(
          text: context.getString('article_loading_error'),
          icon: FontAwesomeIcons.sadCry,
        ),
      ),
    );
  }

  /// Shown when the bookmark list is empty.
  Widget _buildEmptyWidget(BuildContext context) {
    return Center(
      child: Padding(
        padding: const EdgeInsets.all(8.0),
        child: WPErrorWidget(
          text: context.getString('no_bookmarks'),
          icon: FontAwesomeIcons.sadCry,
        ),
      ),
    );
  }

  /// Paginates the already-loaded list in pages of 10.
  Widget _buildContent(BuildContext context, List<Article> articles) {
    return WPGridPagination<Article>(
      itemBuilder: (position, article, isBig) => ArticleListEntry(article, isBig: isBig, identifier: 'bookmark'),
      pageBuilder: (listSize) => Future.value(articles.sublist(listSize, math.min(articles.length, listSize + 10))),
      errorLabel: context.getString('article_loading_error'),
    );
  }

  @override
  void onContentChanged() => setState(() {});
}
package io.smallibs.pilin.effect
import io.smallibs.pilin.abstractions.Monad
import io.smallibs.pilin.effect.Effects.Companion.handle
import io.smallibs.pilin.standard.continuation.Continuation.Companion.continuation
import io.smallibs.pilin.standard.continuation.Continuation.Companion.monad
import io.smallibs.pilin.standard.continuation.Continuation.ContinuationK
import io.smallibs.pilin.standard.continuation.Continuation.ContinuationK.Companion.invoke
import io.smallibs.pilin.type.App
import kotlinx.coroutines.runBlocking
import org.junit.Test
import kotlin.test.assertEquals
/**
 * Checks that a single algebraic effect (a console) can be declared, handled,
 * and interpreted through the continuation monad, producing effects in the
 * expected order.
 */
class SingleEffectTest {
    // The effect signature: an abstract console with a string writer and a
    // string reader, parameterised by the carrier type F.
    private class Console<F>(
        val printString: (String) -> App<F, Unit>,
        val readString: App<F, String>,
    ) : EffectHandler
    // Identity helper; wraps intermediate values so the effectful expressions
    // below stay ordinary function arguments.
    private fun <A> id(a: A): A {
        return a
    }
    // The effectful program: read a string, then print a greeting built from
    // it. `bind()` sequences the two console operations inside the monad.
    private fun <F> effects(monad: Monad.API<F>): Effects<Console<F>, App<F, Unit>> =
        handle { console ->
            monad `do` {
                val value = id(console.readString.bind())
                id(console.printString("Hello $value").bind())
            }
        }
    // A concrete console interpreted in the continuation monad over
    // List<String>: each operation prepends a trace entry before resuming.
    private fun console(): Console<ContinuationK<List<String>>> =
        Console(
            printString = { text ->
                continuation { k ->
                    listOf("printString($text)") + k(Unit)
                }
            },
            readString = continuation { k ->
                listOf("readStream(World)") + k("World")
            }
        )
    @Test
    fun shouldPerformEffect() {
        // Bind the program to the tracing console and run it with an empty
        // initial continuation; the trace must show read-then-print order.
        val handled = effects(monad<List<String>>()) with console()
        val traces = runBlocking { handled().invoke { listOf() } }
        assertEquals(listOf("readStream(World)", "printString(Hello World)"), traces)
    }
}
#!/bin/sh
# Abort the whole recipe as soon as any command exits non-zero.
set -e
#
# LXD images recipe: Composer
#
# Dependencies: curl, php, zsh
#
# Environment variables:
#
# - none
#
# Installs Composer globally, pins it to the stable channel and wires up
# shell completion plus root-friendly defaults in /root/.zsh-additions.
installComposer() {
    # Verify dependencies up front. Braces (not a subshell) so that `exit 1`
    # terminates the script directly instead of only leaving the subshell —
    # previously this relied on the top-level `set -e` to propagate the
    # failure. Diagnostics go to stderr where they belong.
    command -v curl > /dev/null || { echo "installComposer recipe requires curl, missing" >&2; exit 1; }
    command -v php > /dev/null || { echo "installComposer recipe requires php, missing" >&2; exit 1; }
    command -v zsh > /dev/null || { echo "installComposer recipe requires zsh, missing" >&2; exit 1; }
    # Install Composer. -f makes curl fail on HTTP errors instead of saving
    # the server's error page as the composer binary; -sS stays quiet but
    # still reports real errors.
    curl -fsS -o /usr/local/bin/composer https://getcomposer.org/composer.phar
    chmod +x /usr/local/bin/composer
    # Add binaries to path and prevent Composer warning about root user / xdebug
    export PATH="$PATH:/root/.composer/vendor/bin"
    export COMPOSER_ALLOW_SUPERUSER=1
    export COMPOSER_DISABLE_XDEBUG_WARN=1
    # Downgrade from latest to stable release
    composer selfupdate
    rm /root/.composer/*.phar
    # Add some default configuration
    composer global config minimum-stability dev
    composer global config prefer-stable true
    cat >> /root/.zsh-additions << "EOF"
export PATH="$PATH:/root/.composer/vendor/bin"
export COMPOSER_ALLOW_SUPERUSER=1
export COMPOSER_DISABLE_XDEBUG_WARN=1
alias composer='noglob composer'
alias cgr='noglob cgr'
source /root/.composer/completion
EOF
    # cgr installs global packages in isolation so their dependencies
    # cannot conflict with each other.
    composer --update-no-dev global require consolidation/cgr
    cgr --update-no-dev bamarni/symfony-console-autocomplete hirak/prestissimo
    symfony-autocomplete composer --shell zsh > /root/.composer/completion
}
|
# Allows for using keyword arguments with Struct instead of order dependent args
#
# Usage:
#
#   SomeStruct = KeywordStruct.new(:attribute_one, :attribute_two)
#   struct_instance = SomeStruct.new(attribute_one: 'hello', attribute_two: 'world')
module Core
  class KeywordStruct < Struct
    # Accepts the struct's members as keyword arguments. Members that are not
    # supplied default to nil, mirroring positional Struct behaviour.
    def initialize(**kwargs)
      ordered_values = members.map { |member| kwargs[member] }
      super(*ordered_values)
    end
  end
end
|
<?php

// This file is used inside of the router function which
// does not have the $app variable in its scope.
// So we call it here and not in the router function
// mainly to avoid a "Cannot start session when headers already sent" error.
global $app;

// Hoist the session settings out once instead of repeating the long array path.
$sessionConfig = $app['configs']['app']['session'];

// Start the session.
_session_start(
    $sessionConfig['cookie'],
    $sessionConfig['lifetime'],
    $sessionConfig['path'],
    $sessionConfig['domain'],
    $sessionConfig['secure'],
    $sessionConfig['http_only']
);
using IRTools, Test
using IRTools: Meta, TypedMeta, meta, typed_meta
# `f` is a @generated function, so looking up its IR exercises the generator
# code path as well as the plain method-table path covered by `gcd`.
@generated f(x) = :(x+x)
# Untyped (lowered) metadata lookup for both an ordinary and a generated method.
@test meta(Tuple{typeof(gcd),Int,Int}) isa Meta
@test meta(Tuple{typeof(f),Int}) isa Meta
# Typed metadata lookup for the same two signatures.
@test typed_meta(Tuple{typeof(gcd),Int,Int}) isa TypedMeta
@test typed_meta(Tuple{typeof(f),Int}) isa TypedMeta
<?php
// Block direct browser access; this file must only be loaded through index.php.
defined('BASEPATH') OR exit('No direct script access allowed');
/*
| -------------------------------------------------------------------------
| URI ROUTING
| -------------------------------------------------------------------------
| This file lets you re-map URI requests to specific controller functions.
|
| Typically there is a one-to-one relationship between a URL string
| and its corresponding controller class/method. The segments in a
| URL normally follow this pattern:
|
| example.com/class/method/id/
|
| In some instances, however, you may want to remap this relationship
| so that a different class/function is called than the one
| corresponding to the URL.
|
| Please see the user guide for complete details:
|
| https://codeigniter.com/user_guide/general/routing.html
|
| -------------------------------------------------------------------------
| RESERVED ROUTES
| -------------------------------------------------------------------------
|
| There are three reserved routes:
|
| $route['default_controller'] = 'welcome';
|
| This route indicates which controller class should be loaded if the
| URI contains no data. In the above example, the "welcome" class
| would be loaded.
|
| $route['404_override'] = 'errors/page_missing';
|
| This route will tell the Router which controller/method to use if those
| provided in the URL cannot be matched to a valid route.
|
| $route['translate_uri_dashes'] = FALSE;
|
| This is not exactly a route, but allows you to automatically route
| controller and method names that contain dashes. '-' isn't a valid
| class or method name character, so it requires translation.
| When you set this option to TRUE, it will replace ALL dashes in the
| controller and method URI segments.
|
| Examples: my-controller/index -> my_controller/index
| my-controller/my-method -> my_controller/my_method
*/
/* Normal listings */
$route['default_controller'] = 'site/landing';
$route['all_listing'] = 'site/product/all_listing/$1';
$route['explore_listing'] = 'site/product/explore_listing/$1';
$route['property'] = "site/rentals/mapview/$1";
$route['create-wishlist-category'] = "site/rentals/rentalwishlistcategoryAdd";
$route['booking/(:any)'] = "site/rentals/rental_guest_booking";
$route['contact-us'] = "site/cms/contactus_businesstravel";
/*Open - Dashboard Links*/
$route['trips/upcoming'] = "site/cms/dashboard_trips";
$route['trips/upcoming/(:any)'] = "site/cms/dashboard_trips/$1";
$route['trips/previous'] = "site/cms/dashboard_trips_prve";
$route['trips/previous/(:any)'] = "site/cms/dashboard_trips_prve/$1";
$route['dashboard'] = "site/user_settings/index";
$route['dashboard/(:any)'] = "site/user_settings/index/$1";
$route['settings'] = "site/user_settings/display_user_settings";
$route['photo-video'] = "site/user/change_profile_photo";
$route['account-payout'] = "site/cms/dashboard_account_payout";
$route['account-trans'] = "site/cms/dashboard_account_trans";
$route['account-trans/(:any)'] = "site/cms/dashboard_account_trans/$1";
$route['account-trans/(:any)/(:any)/(:any)'] = "site/cms/dashboard_account_trans/$1/$2/$3";
$route['settings/password'] = "site/user_settings/password_settings";
$route['settings/preferences'] = "site/user_settings/preferences_settings";
// FIX: this rule was declared twice in a row; the identical duplicate was
// removed (CodeIgniter keeps only one mapping per URI pattern anyway).
$route['settings/notifications'] = "site/user_settings/notifications_settings";
$route['account-privacy'] = "site/cms/dashboard_account_privacy";
$route['account-security'] = "site/cms/dashboard_account_security";
$route['account-setting'] = "site/cms/dashboard_account_setting";
$route['your-wallet'] = "site/cms/dashboard_account_wallet";
$route['inbox'] = "site/cms/med_message";
$route['inbox/(:any)'] = "site/cms/med_message/$1";
// NOTE(review): the second captured segment is not forwarded ($2 unused) —
// confirm the controller reads it from the URI directly.
$route['new_conversation/(:any)/(:any)'] = "site/cms/host_conversation/$1";
$route['verification'] = "site/user/verification";
$route['verification/(:any)'] = "site/user/verification/$1";
$route['invite'] = "site/cms/dashboard_invite";
$route['c/invite/(:any)'] = "site/cms/dashboard_invite_login";
$route['listing/(:any)'] = "site/cms/dashboard_listing/$1";
// NOTE(review): $2 is not forwarded here either — verify against the controller.
$route['listing/(:any)/(:any)'] = "site/cms/dashboard_listing/$1";
$route['order/(:any)'] = "site/order";
$route['order/(:any)/(:any)'] = "site/order";
$route['order/(:any)/(:any)/(:any)'] = "site/order";
$route['order/(:any)/(:any)/(:any)/(:any)'] = "site/order";
$route['host-payment-success/(:any)'] = "site/product/hostpayment_success/$1";
$route['display-review'] = "site/product/display_review";
$route['display-review/(:any)'] = "site/product/display_review/$1";
$route['display-review1'] = "site/product/display_review1";
$route['display-review1/(:any)'] = "site/product/display_review1/$1";
$route['display-dispute'] = "site/product/display_dispute";
$route['display-dispute/(:any)'] = "site/product/display_dispute/$1";
$route['display-dispute1'] = "site/product/display_dispute1";
$route['display-dispute1/(:any)'] = "site/product/display_dispute1/$1";
// NOTE(review): 'cancel-booking-dispute' maps to display_dispute2 while
// 'display-dispute2' maps to cancel_booking_dispute — the targets look
// swapped; confirm this cross-mapping is intentional before changing it.
$route['cancel-booking-dispute'] = "site/product/display_dispute2";
$route['cancel-booking-dispute/(:any)'] = "site/product/display_dispute2/$1";
$route['display-dispute2'] = "site/product/cancel_booking_dispute";
$route['display-dispute2/(:any)'] = "site/product/cancel_booking_dispute/$1";
$route['listing-reservation'] = "site/cms/dashboard_listing_reservation";
$route['listing-reservation/(:any)'] = "site/cms/dashboard_listing_reservation/$1";
$route['listing-passed-reservation'] = "site/cms/dashboard_listing_pass_reservation";
// FIX: the paginated variant previously dropped its captured segment; forward
// it like the sibling 'listing-reservation/(:any)' route above does (and like
// the analogous 'experience-passed-reservation/(:any)' route).
$route['listing-passed-reservation/(:any)'] = "site/cms/dashboard_listing_pass_reservation/$1";
$route['users/show/(:any)'] = "site/user_settings/user_profile";
$route['popular'] = "site/rentals/popular_list";
$route['user/(:any)/wishlists/(:any)/edit'] = "site/user/display_user_lists_edit";
$route['user/(:any)/wishlists/(:any)'] = "site/user/display_user_lists_home";
$route['users/(:any)/wishlists'] = "site/wishlists";
/*Close - Dashboard Links*/
/*Start -Cms Links*/
$route['pages/(:num)/(:any)'] = "site/cms/page_by_id";
$route['pages/(:any)'] = "site/cms";
$route['help'] = "site/help";
$route['help/(:any)'] = "site/help";
$route['help/(:any)/(:any)'] = "site/help";
$route['help/(:any)/(:any)/(:any)'] = "site/help";
$route['help/(:any)/(:any)/(:any)/(:any)'] = "site/help";
/*Close -Cms Links*/
/*Start- Language and Currency setting*/
$route['lang/(:any)'] = "site/product/language_change/$1";
$route['change-currency/(:any)'] = "site/product/change_currency/$1";
/*Close- Language and Currency setting*/
/* Login Routes */
$route['google-login'] = 'site/signupsignin/googleLogin';
$route['fb-login'] = 'site/signupsignin/FbLogin';
$route['fb-user-logout'] = 'site/signupsignin/Fblogout';
$route['user-logout'] = 'site/signupsignin/user_logout';
$route['linkedin-login'] = 'site/signupsignin/linkedInLogin';
/* Close-Login Routes */
$route['404_override'] = '';
$route['translate_uri_dashes'] = FALSE;
/* Property Listing Module */
$route['list_space'] = 'site/product/list_space';
$route['rental/(:any)'] = 'site/landing/display_product_detail/$1';
$route['manage_listing/(:any)'] = "site/product/manage_listing/$1";
$route['price_listing/(:any)'] = "site/product/price_listing/$1";
$route['update_price_listing/(:any)'] = "site/product/update_price_listing/$1";
$route['overview_listing/(:any)'] = "site/product/overview_listing/$1";
$route['insert_overview_listing/(:any)'] = "site/product/insert_overview_listing/$1";
$route['photos_listing/(:any)'] = "site/product/photos_listing/$1";
$route['photos_uploading'] = "site/product/photos_uploading";
$route['amenities_listing/(:any)'] = "site/product/amenities_listing/$1";
$route['space_listing/(:any)'] = "site/product/space_listing/$1";
$route['address_listing/(:any)'] = "site/product/address_listing/$1";
$route['cancel_policy/(:any)'] = "site/product/cancel_policy/$1";
$route['detail_list/(:any)'] = "site/product/detail_list/$1";
/* Exprience module */
$route['explore-experience'] = "site/experience/explore_experience/$1";
$route['experience/(:any)'] = "site/experience/dashboard_experience_listing";
$route['experience/(:any)/(:any)'] = "site/experience/dashboard_experience_listing";
$route['my_experience/upcoming'] = "site/experience/my_experience";
$route['my_experience/upcoming/(:any)'] = "site/experience/my_experience/$1";
$route['my_experience/previous'] = "site/experience/my_experience_prev";
$route['my_experience/previous/(:any)'] = "site/experience/my_experience_prev/$1";
$route['new_experience'] = "site/experience/new_experience";
$route['manage_experience'] = "site/experience/manage_experience";
$route['manage_experience/(:any)'] = "site/experience/manage_experience/$1";
$route['add_experience_new'] = "site/experience/add_experience_new";
// Step-by-step experience creation wizard: each route edits one section of
// the experience identified by the captured segment.
$route['experience_language_details/(:any)'] = "site/experience/experience_language_details/$1";
$route['experience_organization_details/(:any)'] = "site/experience/experience_organization_details/$1";
$route['experience_title/(:any)'] = "site/experience/experience_title/$1";
$route['tagline_experience/(:any)'] = "site/experience/tagline_experience/$1";
$route['what_we_do/(:any)'] = "site/experience/what_we_do/$1";
$route['where_we_will_be/(:any)'] = "site/experience/where_we_will_be/$1";
$route['what_you_will_provide/(:any)'] = "site/experience/what_you_will_provide/$1";
$route['notes_to_guest/(:any)'] = "site/experience/notes_to_guest/$1";
$route['about_exp_host/(:any)'] = "site/experience/about_exp_host/$1";
$route['group_size/(:any)'] = "site/experience/group_size/$1";
$route['price/(:any)'] = "site/experience/price/$1";
$route['finishing_toches/(:any)'] = "site/experience/finishing_toches/$1";
$route['experience_details/(:any)'] = "site/experience/experience_details/$1";
$route['schedule_experience/(:any)'] = "site/experience/schedule_experience/$1";
$route['experience_image/(:any)'] = "site/experience/experience_image/$1";
$route['location_details/(:any)'] = "site/experience/location_details/$1";
$route['guest_requirement/(:any)'] = "site/experience/guest_requirement/$1";
$route['experience_cancel_policy/(:any)'] = "site/experience/experience_cancel_policy/$1";
$route['view_experience/(:any)'] = "site/experience/view_experience/$1";
$route['experience_booking/(:any)'] = "site/experience/experience_guest_booking";
$route['Experience_Order/(:any)'] = "site/Experience_Order";
$route['Experience_Order/(:any)/(:any)'] = "site/Experience_Order";
$route['Experience_Order/(:any)/(:any)/(:any)'] = "site/Experience_Order";
$route['Experience_Order/(:any)/(:any)/(:any)/(:any)'] = "site/Experience_Order";
$route['experience-reservation'] = "site/experience/dashboard_listing_reservation";
$route['experience-reservation/(:any)'] = "site/experience/dashboard_listing_reservation/$1";
$route['experience-passed-reservation'] = "site/experience/dashboard_listing_pass_reservation";
$route['experience-passed-reservation/(:any)'] = "site/experience/dashboard_listing_pass_reservation/$1";
$route['experience-transactions'] = "site/experience/dashboard_account_trans";
$route['experience-transactions/(:any)'] = "site/experience/dashboard_account_trans/$1";
$route['experience-review'] = "site/experience/display_review";
$route['experience-review/(:any)'] = "site/experience/display_review/$1";
$route['experience-review1/(:any)'] = "site/experience/display_review1/$1";
$route['experience-review1'] = "site/experience/display_review1";
$route['experience-dispute'] = "site/experience/display_dispute";
$route['experience-dispute/(:any)'] = "site/experience/display_dispute/$1";
$route['experience-dispute1'] = "site/experience/display_dispute1";
$route['experience-dispute1/(:any)'] = "site/experience/display_dispute1/$1";
$route['experience-cancel_booking_dispute'] = "site/experience/cancel_booking_dispute";
$route['experience-cancel_booking_dispute/(:any)'] = "site/experience/cancel_booking_dispute/$1";
$route['experience_inbox'] = "site/experience/med_message";
$route['experience_inbox/(:any)'] = "site/experience/med_message/$1";
// NOTE(review): $2 is captured but not forwarded in the next two routes —
// confirm the controllers read the extra segment from the URI directly.
$route['experience_conversation/(:any)/(:any)'] = "site/experience/host_conversation/$1";
$route['guide-payment-success/(:any)/(:any)'] = "site/experience/guidepayment_success/$1";
/*Admin Routes*/
$route['admin'] = "admin/adminlogin";
|
/*
* Copyright 2019
* Ubiquitous Knowledge Processing (UKP) Lab and FG Language Technology
* Technische Universität Darmstadt
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package de.tudarmstadt.ukp.clarin.webanno.api.annotation.paging;
import org.apache.uima.cas.CAS;
import org.apache.wicket.ajax.AjaxRequestTarget;
import org.apache.wicket.markup.html.form.Form;
import org.apache.wicket.markup.html.form.NumberTextField;
import org.apache.wicket.markup.html.panel.Panel;
import org.apache.wicket.model.Model;
import org.apache.wicket.model.PropertyModel;
import org.wicketstuff.event.annotation.OnEvent;
import de.tudarmstadt.ukp.clarin.webanno.api.annotation.model.AnnotatorState;
import de.tudarmstadt.ukp.clarin.webanno.api.annotation.page.AnnotationPageBase;
import de.tudarmstadt.ukp.clarin.webanno.api.annotation.rendering.event.RenderAnnotationsEvent;
import de.tudarmstadt.ukp.clarin.webanno.support.lambda.ActionBarLink;
import de.tudarmstadt.ukp.clarin.webanno.support.lambda.LambdaAjaxSubmitLink;
import de.tudarmstadt.ukp.clarin.webanno.support.lambda.LambdaModel;
import wicket.contrib.input.events.EventType;
import wicket.contrib.input.events.InputBehavior;
import wicket.contrib.input.events.key.KeyType;
/**
 * Paging controls for the annotation editor: first / previous / next / last
 * page links (with PageUp/PageDown/Home/End key bindings) and a "go to unit"
 * text field. All navigation delegates to the {@link AnnotatorState} of the
 * owning {@link AnnotationPageBase} and then triggers a document refresh.
 */
public class DefaultPagingNavigator extends Panel
{
    private static final long serialVersionUID = -6315861062996783626L;
    // Page whose editor CAS and annotator state this navigator operates on.
    private AnnotationPageBase page;
    // Text field bound to the state's firstVisibleUnitIndex property.
    private NumberTextField<Integer> gotoPageTextField;
    // Where the viewport focuses after a page change (defaults to TOP).
    private FocusPosition defaultFocusPosition = FocusPosition.TOP;
    public DefaultPagingNavigator(String aId, AnnotationPageBase aPage)
    {
        super(aId);
        setOutputMarkupPlaceholderTag(true);
        page = aPage;
        Form<Void> gotoPageTextFieldForm = new Form<>("gotoPageTextFieldForm");
        gotoPageTextField = new NumberTextField<>("gotoPageText", Model.of(1), Integer.class);
        // Using a LambdaModel here because the model object in the page may change and we want to
        // always get the right one
        gotoPageTextField.setModel(
                PropertyModel.of(LambdaModel.of(() -> aPage.getModel()), "firstVisibleUnitIndex"));
        // FIXME minimum and maximum should be obtained from the annotator state
        gotoPageTextField.setMinimum(1);
        //gotoPageTextField.setMaximum(LambdaModel.of(() -> aPage.getModelObject().getUnitCount()));
        gotoPageTextField.setOutputMarkupId(true);
        gotoPageTextFieldForm.add(gotoPageTextField);
        // Pressing Enter in the text field submits via this link.
        LambdaAjaxSubmitLink gotoPageLink = new LambdaAjaxSubmitLink("gotoPageLink",
                gotoPageTextFieldForm, this::actionGotoPage);
        gotoPageTextFieldForm.setDefaultButton(gotoPageLink);
        gotoPageTextFieldForm.add(gotoPageLink);
        add(gotoPageTextFieldForm);
        // Navigation links, each with a keyboard shortcut bound to its click.
        add(new ActionBarLink("showNext", t -> actionShowNextPage(t))
                .add(new InputBehavior(new KeyType[] { KeyType.Page_down }, EventType.click)));
        add(new ActionBarLink("showPrevious", t -> actionShowPreviousPage(t))
                .add(new InputBehavior(new KeyType[] { KeyType.Page_up }, EventType.click)));
        add(new ActionBarLink("showFirst", t -> actionShowFirstPage(t))
                .add(new InputBehavior(new KeyType[] { KeyType.Home }, EventType.click)));
        add(new ActionBarLink("showLast", t -> actionShowLastPage(t))
                .add(new InputBehavior(new KeyType[] { KeyType.End }, EventType.click)));
    }
    public AnnotatorState getModelObject()
    {
        return page.getModelObject();
    }
    protected void actionShowPreviousPage(AjaxRequestTarget aTarget)
        throws Exception
    {
        CAS cas = page.getEditorCas();
        getModelObject().moveToPreviousPage(cas, defaultFocusPosition);
        page.actionRefreshDocument(aTarget);
    }
    protected void actionShowNextPage(AjaxRequestTarget aTarget)
        throws Exception
    {
        CAS cas = page.getEditorCas();
        getModelObject().moveToNextPage(cas, defaultFocusPosition);
        page.actionRefreshDocument(aTarget);
    }
    protected void actionShowFirstPage(AjaxRequestTarget aTarget)
        throws Exception
    {
        CAS cas = page.getEditorCas();
        getModelObject().moveToFirstPage(cas, defaultFocusPosition);
        page.actionRefreshDocument(aTarget);
    }
    protected void actionShowLastPage(AjaxRequestTarget aTarget)
        throws Exception
    {
        CAS cas = page.getEditorCas();
        getModelObject().moveToLastPage(cas, defaultFocusPosition);
        page.actionRefreshDocument(aTarget);
    }
    // Jumps to the unit number currently entered in the text field.
    private void actionGotoPage(AjaxRequestTarget aTarget, Form<?> aForm)
        throws Exception
    {
        CAS cas = page.getEditorCas();
        getModelObject().moveToUnit(cas, gotoPageTextField.getModelObject(), defaultFocusPosition);
        page.actionRefreshDocument(aTarget);
    }
    public void setDefaultFocusPosition(FocusPosition aPos)
    {
        defaultFocusPosition = aPos;
    }
    public FocusPosition getDefaultFocusPosition()
    {
        return defaultFocusPosition;
    }
    // Re-render the goto field whenever annotations are re-rendered so it
    // reflects the current first visible unit.
    @OnEvent
    public void onRenderAnnotations(RenderAnnotationsEvent aEvent)
    {
        aEvent.getRequestHandler().add(gotoPageTextField);
    }
}
|
<?php
namespace myfanclub\tests\core\model;
use myfanclub\core\events\MyfcEvent;
/**
 * Trivial MyfcEvent used by the test-suite: its handler simply returns the
 * first element of the parameter array it is given.
 */
class ExampleEvent implements MyfcEvent
{
    /**
     * Name under which this event class is identified.
     *
     * @return string
     */
    public static function className()
    {
        return 'ExampleEvent';
    }
    /**
     * Returns the first positional parameter unchanged.
     *
     * @param array $params event parameters; index 0 is expected to be present
     * @return mixed
     */
    public function handle($params = [])
    {
        return $params[0];
    }
}
|
class RemoveAttributesFromTypes < ActiveRecord::Migration
  # Drops the descriptive attribute columns from the types table.
  # NOTE(review): remove_column without the column type cannot be reversed
  # automatically on rollback — confirm whether rollback support is needed.
  def change
    %i[origin leaves caffeine pairing brew_time tasting_notes comments].each do |column|
      remove_column :types, column
    end
  end
end
using System;
using System.Collections.Generic;
using System.Windows;
using System.Windows.Input;
using Harmony;
namespace WpfUnit
{
/// <summary>
/// </summary>
/// <remarks>
/// Given the singleton nature of the <see cref="Mouse" /> class, you should NOT enable
/// parallel tests for your controls when using this class.
/// </remarks>
/// <example>
/// Each test method should create its own <see cref="TestMouse" /> instance in order
/// to not be influenced by previous test methods (that may have left keys in a pressed state).
/// </example>
public sealed class TestMouse
{
private static readonly Dictionary<IInputElement, Point> Positions;
static TestMouse()
{
Positions = new Dictionary<IInputElement, Point>();
AssemblySetup.EnsureIsPatched();
}
/// <summary>
/// Initializes the <see cref="Mouse" /> to its default state:
/// It's relative position to every <see cref="IInputElement" /> is (0, 0)
/// and all buttons are unpressed.
/// </summary>
public TestMouse()
{
Reset();
}
/// <summary>
/// Resets the <see cref="Mouse" /> to its original state.
/// </summary>
private void Reset()
{
Positions.Clear();
}
/// <summary>
/// Causes the mouse to change its relative position to the given element.
/// After this method has been called, <see cref="Mouse.GetPosition(IInputElement)" />
/// returns the given value.
/// </summary>
/// <param name="element"></param>
/// <param name="relativePosition"></param>
public void SetMousePositionRelativeTo(IInputElement element, Point relativePosition)
{
Positions[element] = relativePosition;
}
/// <summary>
/// Causes the mouse to change its relative position to the given element
/// and raises the <see cref="UIElement.MouseMoveEvent" /> on it.
/// </summary>
/// <param name="element"></param>
/// <param name="relativePosition"></param>
public void MoveRelativeTo(UIElement element, Point relativePosition)
{
SetMousePositionRelativeTo(element, relativePosition);
element.RaiseEvent(new MouseEventArgs(Mouse.PrimaryDevice, Environment.TickCount)
{
RoutedEvent = UIElement.MouseMoveEvent
});
}
/// <summary>
/// Raises the <see cref="UIElement.MouseLeftButtonDownEvent" />
/// followed by the <see cref="UIElement.MouseLeftButtonUpEvent" />.
/// </summary>
/// <param name="element"></param>
public void LeftClick(UIElement element)
{
element.RaiseEvent(new MouseButtonEventArgs(Mouse.PrimaryDevice, Environment.TickCount, MouseButton.Left)
{
RoutedEvent = UIElement.MouseLeftButtonDownEvent
});
element.RaiseEvent(new MouseButtonEventArgs(Mouse.PrimaryDevice, Environment.TickCount, MouseButton.Left)
{
RoutedEvent = UIElement.MouseLeftButtonUpEvent
});
}
/// <summary>
/// Causes the mouse to change its relative position to the given element
/// and raises the <see cref="UIElement.MouseMoveEvent" />, followed by the
/// <see cref="UIElement.MouseLeftButtonDownEvent" /> and the
/// <see cref="UIElement.MouseLeftButtonUpEvent" />.
/// </summary>
/// <param name="element"></param>
/// <param name="relativePosition"></param>
public void LeftClickAt(UIElement element, Point relativePosition)
{
MoveRelativeTo(element, relativePosition);
LeftClick(element);
}
/// <summary>
/// Raises the <see cref="UIElement.MouseRightButtonDownEvent" />
/// followed by the <see cref="UIElement.MouseRightButtonUpEvent" />.
/// </summary>
/// <param name="element"></param>
public void RightClick(UIElement element)
{
element.RaiseEvent(new MouseButtonEventArgs(Mouse.PrimaryDevice, Environment.TickCount, MouseButton.Right)
{
RoutedEvent = UIElement.MouseRightButtonDownEvent
});
element.RaiseEvent(new MouseButtonEventArgs(Mouse.PrimaryDevice, Environment.TickCount, MouseButton.Right)
{
RoutedEvent = UIElement.MouseRightButtonUpEvent
});
}
/// <summary>
/// Causes the mouse to change its relative position to the given element
/// and raises the <see cref="UIElement.MouseMoveEvent" />, followed by the
/// <see cref="UIElement.MouseRightButtonDownEvent" /> and the
/// <see cref="UIElement.MouseRightButtonUpEvent" />.
/// </summary>
/// <param name="element"></param>
/// <param name="relativePosition"></param>
public void RightClickAt(UIElement element, Point relativePosition)
{
MoveRelativeTo(element, relativePosition);
RightClick(element);
}
/// <summary>
/// Causes the <see cref="UIElement.MouseWheelEvent" /> to be raised on the given control.
/// </summary>
/// <param name="element"></param>
/// <param name="wheelDelta">The number of wheel ticks which have changed</param>
public void RotateMouseWheel(UIElement element, int wheelDelta)
{
element.RaiseEvent(new MouseWheelEventArgs(Mouse.PrimaryDevice, Environment.TickCount, wheelDelta)
{
RoutedEvent = UIElement.MouseWheelEvent
});
}
/// <summary>
/// Causes the <see cref="UIElement.MouseWheelEvent" /> to be raised on the given control.
/// </summary>
/// <param name="element"></param>
public void RotateMouseWheelUp(UIElement element)
{
RotateMouseWheel(element, 120);
}
/// <summary>
/// Causes the <see cref="UIElement.MouseWheelEvent" /> to be raised on the given control.
/// </summary>
/// <param name="element"></param>
public void RotateMouseWheelDown(UIElement element)
{
RotateMouseWheel(element, -120);
}
[HarmonyPatch(typeof(Mouse))]
[HarmonyPatch("GetPosition")]
static class PatchMouseGetPosition
{
private static void Postfix(IInputElement relativeTo, ref Point __result)
{
Positions.TryGetValue(relativeTo, out __result);
}
}
[HarmonyPatch(typeof(MouseEventArgs))]
[HarmonyPatch("GetPosition")]
sealed class PatchMouseEventArgsGetPosition
{
public static void Postfix(IInputElement relativeTo, ref Point __result)
{
Positions.TryGetValue(relativeTo, out __result);
}
}
}
} |
// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
#include <cstdlib>
#include <memory>
#include <utility>
#include "arrow/array.h"
#include "arrow/chunked_array.h"
#include "arrow/datum.h"
#include "arrow/extension_type.h"
#include "arrow/ipc/json_simple.h"
#include "arrow/python/gdb.h"
#include "arrow/record_batch.h"
#include "arrow/scalar.h"
#include "arrow/table.h"
#include "arrow/type.h"
#include "arrow/util/debug.h"
#include "arrow/util/decimal.h"
#include "arrow/util/key_value_metadata.h"
#include "arrow/util/logging.h"
#include "arrow/util/macros.h"
#include "arrow/util/optional.h"
#include "arrow/util/string_view.h"
#include "arrow/util/variant.h"
namespace arrow {
using ipc::internal::json::ArrayFromJSON;
using ipc::internal::json::ChunkedArrayFromJSON;
using ipc::internal::json::ScalarFromJSON;
namespace gdb {
// Add a nested `arrow` namespace to exercise type lookup from GDB (ARROW-15652)
namespace arrow {
// Referenced from TestSession() below so the symbol is emitted and GDB can
// resolve the nested arrow::arrow namespace during type-lookup tests.
void DummyFunction() {}
}  // namespace arrow
namespace {
// Minimal StatusDetail subclass so the GDB session can inspect a Status
// carrying a custom detail (see error_detail_status in TestSession below).
class CustomStatusDetail : public StatusDetail {
 public:
  const char* type_id() const override { return "custom-detail-id"; }
  std::string ToString() const override { return "This is a detail"; }
};
// Minimal extension type (16-byte fixed-size binary storage) used to exercise
// extension-type handling. Deserialize is intentionally unimplemented; the
// session only needs the extension name and the serialized form.
class UuidType : public ExtensionType {
 public:
  UuidType() : ExtensionType(fixed_size_binary(16)) {}
  std::string extension_name() const override { return "uuid"; }
  // Two UuidType instances are considered equal purely by extension name.
  bool ExtensionEquals(const ExtensionType& other) const override {
    return (other.extension_name() == this->extension_name());
  }
  std::shared_ptr<Array> MakeArray(std::shared_ptr<ArrayData> data) const override {
    return std::make_shared<ExtensionArray>(data);
  }
  Result<std::shared_ptr<DataType>> Deserialize(
      std::shared_ptr<DataType> storage_type,
      const std::string& serialized) const override {
    return Status::NotImplemented("");
  }
  std::string Serialize() const override { return "uuid-serialized"; }
};
// TODO migrate arrow::ipc::internal::json::ArrayFromJSON to Result<>?
// Builds an array from its JSON representation and returns a slice of it.
std::shared_ptr<Array> SliceArrayFromJSON(const std::shared_ptr<DataType>& ty,
                                          util::string_view json, int64_t offset = 0,
                                          int64_t length = -1) {
  std::shared_ptr<Array> array;
  ARROW_CHECK_OK(ArrayFromJSON(ty, json, &array));
  // A negative length means "slice to the end of the array".
  return length == -1 ? array->Slice(offset) : array->Slice(offset, length);
}
} // namespace
// Instantiates one local variable per Arrow type/value of interest, then
// traps into the debugger so pyarrow/tests/test_gdb.py can print each one
// and check the GDB pretty-printers. IMPORTANT: variable names here are
// referenced by name from the Python test file — do not rename them.
void TestSession() {
  // We define local variables for all types for which we want to test
  // pretty-printing.
  // Then, at the end of this function, we trap to the debugger, so that
  // test instrumentation can print values from this frame by interacting
  // with the debugger.
  // The test instrumentation is in pyarrow/tests/test_gdb.py
#ifdef __clang__
  _Pragma("clang diagnostic push");
  _Pragma("clang diagnostic ignored \"-Wunused-variable\"");
#elif defined(__GNUC__)
  _Pragma("GCC diagnostic push");
  _Pragma("GCC diagnostic ignored \"-Wunused-variable\"");
#endif
  arrow::DummyFunction();
  // Status & Result
  auto ok_status = Status::OK();
  auto error_status = Status::IOError("This is an error");
  auto error_detail_status =
      error_status.WithDetail(std::make_shared<CustomStatusDetail>());
  auto ok_result = Result<int>(42);
  auto error_result = Result<int>(error_status);
  auto error_detail_result = Result<int>(error_detail_status);
  // Optionals
  util::optional<int> int_optional{42};
  util::optional<int> null_int_optional{};
  // Variants
  using VariantType = util::Variant<int, bool, std::string>;
  VariantType int_variant{42};
  VariantType bool_variant{false};
  VariantType string_variant{std::string("hello")};
  // String views
  util::string_view string_view_empty{};
  util::string_view string_view_abc{"abc"};
  std::string special_chars = std::string("foo\"bar") + '\x00' + "\r\n\t\x1f";
  util::string_view string_view_special_chars(special_chars);
  std::string very_long = "abc" + std::string(5000, 'K') + "xyz";
  util::string_view string_view_very_long(very_long);
  // Buffers
  Buffer buffer_null{nullptr, 0};
  Buffer buffer_abc{string_view_abc};
  Buffer buffer_special_chars{string_view_special_chars};
  char mutable_array[3] = {'a', 'b', 'c'};
  MutableBuffer buffer_mutable{reinterpret_cast<uint8_t*>(mutable_array), 3};
  auto heap_buffer = std::make_shared<Buffer>(string_view_abc);
  auto heap_buffer_mutable = *AllocateBuffer(buffer_abc.size());
  memcpy(heap_buffer_mutable->mutable_data(), buffer_abc.data(), buffer_abc.size());
  // KeyValueMetadata
  auto empty_metadata = key_value_metadata({}, {});
  auto metadata = key_value_metadata(
      {"key_text", "key_binary"}, {"some value", std::string("z") + '\x00' + "\x1f\xff"});
  // Decimals
  Decimal128 decimal128_zero{};
  Decimal128 decimal128_pos{"98765432109876543210987654321098765432"};
  Decimal128 decimal128_neg{"-98765432109876543210987654321098765432"};
  BasicDecimal128 basic_decimal128_zero{};
  BasicDecimal128 basic_decimal128_pos{decimal128_pos.native_endian_array()};
  BasicDecimal128 basic_decimal128_neg{decimal128_neg.native_endian_array()};
  Decimal256 decimal256_zero{};
  Decimal256 decimal256_pos{
      "9876543210987654321098765432109876543210987654321098765432109876543210987654"};
  Decimal256 decimal256_neg{
      "-9876543210987654321098765432109876543210987654321098765432109876543210987654"};
  BasicDecimal256 basic_decimal256_zero{};
  BasicDecimal256 basic_decimal256_pos{decimal256_pos.native_endian_array()};
  BasicDecimal256 basic_decimal256_neg{decimal256_neg.native_endian_array()};
  // Data types
  NullType null_type;
  auto heap_null_type = null();
  BooleanType bool_type;
  auto heap_bool_type = boolean();
  Date32Type date32_type;
  Date64Type date64_type;
  Time32Type time_type_s(TimeUnit::SECOND);
  Time32Type time_type_ms(TimeUnit::MILLI);
  Time64Type time_type_us(TimeUnit::MICRO);
  Time64Type time_type_ns(TimeUnit::NANO);
  auto heap_time_type_ns = time64(TimeUnit::NANO);
  TimestampType timestamp_type_s(TimeUnit::SECOND);
  TimestampType timestamp_type_ms_timezone(TimeUnit::MILLI, "Europe/Paris");
  TimestampType timestamp_type_us(TimeUnit::MICRO);
  TimestampType timestamp_type_ns_timezone(TimeUnit::NANO, "Europe/Paris");
  auto heap_timestamp_type_ns_timezone = timestamp(TimeUnit::NANO, "Europe/Paris");
  DayTimeIntervalType day_time_interval_type;
  MonthIntervalType month_interval_type;
  MonthDayNanoIntervalType month_day_nano_interval_type;
  DurationType duration_type_s(TimeUnit::SECOND);
  DurationType duration_type_ns(TimeUnit::NANO);
  BinaryType binary_type;
  StringType string_type;
  LargeBinaryType large_binary_type;
  LargeStringType large_string_type;
  FixedSizeBinaryType fixed_size_binary_type(10);
  auto heap_fixed_size_binary_type = fixed_size_binary(10);
  Decimal128Type decimal128_type(16, 5);
  Decimal256Type decimal256_type(42, 12);
  auto heap_decimal128_type = decimal128(16, 5);
  ListType list_type(uint8());
  LargeListType large_list_type(large_utf8());
  auto heap_list_type = list(uint8());
  auto heap_large_list_type = large_list(large_utf8());
  FixedSizeListType fixed_size_list_type(float64(), 3);
  auto heap_fixed_size_list_type = fixed_size_list(float64(), 3);
  DictionaryType dict_type_unordered(int16(), utf8());
  DictionaryType dict_type_ordered(int16(), utf8(), /*ordered=*/true);
  auto heap_dict_type = dictionary(int16(), utf8());
  MapType map_type_unsorted(utf8(), binary());
  MapType map_type_sorted(utf8(), binary(), /*keys_sorted=*/true);
  auto heap_map_type = map(utf8(), binary());
  StructType struct_type_empty({});
  StructType struct_type(
      {field("ints", int8()), field("strs", utf8(), /*nullable=*/false)});
  auto heap_struct_type =
      struct_({field("ints", int8()), field("strs", utf8(), /*nullable=*/false)});
  std::vector<int8_t> union_type_codes({7, 42});
  FieldVector union_fields(
      {field("ints", int8()), field("strs", utf8(), /*nullable=*/false)});
  SparseUnionType sparse_union_type(union_fields, union_type_codes);
  DenseUnionType dense_union_type(union_fields, union_type_codes);
  UuidType uuid_type{};
  std::shared_ptr<DataType> heap_uuid_type = std::make_shared<UuidType>();
  // Schema
  auto schema_empty = schema({});
  auto schema_non_empty = schema({field("ints", int8()), field("strs", utf8())});
  auto schema_with_metadata = schema_non_empty->WithMetadata(
      key_value_metadata({"key1", "key2"}, {"value1", "value2"}));
  // Fields
  Field int_field("ints", int64());
  Field float_field("floats", float32(), /*nullable=*/false);
  auto heap_int_field = field("ints", int64());
  // Scalars
  NullScalar null_scalar;
  auto heap_null_scalar = MakeNullScalar(null());
  BooleanScalar bool_scalar_null{};
  BooleanScalar bool_scalar{true};
  auto heap_bool_scalar = *MakeScalar(boolean(), true);
  Int8Scalar int8_scalar_null{};
  UInt8Scalar uint8_scalar_null{};
  Int64Scalar int64_scalar_null{};
  UInt64Scalar uint64_scalar_null{};
  Int8Scalar int8_scalar{-42};
  UInt8Scalar uint8_scalar{234};
  Int64Scalar int64_scalar{-9223372036854775807LL - 1};
  UInt64Scalar uint64_scalar{18446744073709551615ULL};
  HalfFloatScalar half_float_scalar{48640};  // -1.5
  FloatScalar float_scalar{1.25f};
  DoubleScalar double_scalar{2.5};
  Time32Scalar time_scalar_s{100, TimeUnit::SECOND};
  Time32Scalar time_scalar_ms{1000, TimeUnit::MILLI};
  Time64Scalar time_scalar_us{10000, TimeUnit::MICRO};
  Time64Scalar time_scalar_ns{100000, TimeUnit::NANO};
  Time64Scalar time_scalar_null{time64(TimeUnit::NANO)};
  DurationScalar duration_scalar_s{-100, TimeUnit::SECOND};
  DurationScalar duration_scalar_ms{-1000, TimeUnit::MILLI};
  DurationScalar duration_scalar_us{-10000, TimeUnit::MICRO};
  DurationScalar duration_scalar_ns{-100000, TimeUnit::NANO};
  DurationScalar duration_scalar_null{duration(TimeUnit::NANO)};
  TimestampScalar timestamp_scalar_s{12345, timestamp(TimeUnit::SECOND)};
  TimestampScalar timestamp_scalar_ms{-123456, timestamp(TimeUnit::MILLI)};
  TimestampScalar timestamp_scalar_us{1234567, timestamp(TimeUnit::MICRO)};
  TimestampScalar timestamp_scalar_ns{-12345678, timestamp(TimeUnit::NANO)};
  TimestampScalar timestamp_scalar_null{timestamp(TimeUnit::NANO)};
  TimestampScalar timestamp_scalar_s_tz{12345,
                                        timestamp(TimeUnit::SECOND, "Europe/Paris")};
  TimestampScalar timestamp_scalar_ms_tz{-123456,
                                         timestamp(TimeUnit::MILLI, "Europe/Paris")};
  TimestampScalar timestamp_scalar_us_tz{1234567,
                                         timestamp(TimeUnit::MICRO, "Europe/Paris")};
  TimestampScalar timestamp_scalar_ns_tz{-12345678,
                                         timestamp(TimeUnit::NANO, "Europe/Paris")};
  TimestampScalar timestamp_scalar_null_tz{timestamp(TimeUnit::NANO, "Europe/Paris")};
  MonthIntervalScalar month_interval_scalar{23};
  MonthIntervalScalar month_interval_scalar_null{};
  DayTimeIntervalScalar day_time_interval_scalar{{23, -456}};
  DayTimeIntervalScalar day_time_interval_scalar_null{};
  MonthDayNanoIntervalScalar month_day_nano_interval_scalar{{1, 23, -456}};
  MonthDayNanoIntervalScalar month_day_nano_interval_scalar_null{};
  Date32Scalar date32_scalar{23};
  Date32Scalar date32_scalar_null{};
  Date64Scalar date64_scalar{45 * 86400000LL};
  Date64Scalar date64_scalar_null{};
  Decimal128Scalar decimal128_scalar_pos_scale_pos{Decimal128("1234567"),
                                                   decimal128(10, 4)};
  Decimal128Scalar decimal128_scalar_pos_scale_neg{Decimal128("-1234567"),
                                                   decimal128(10, 4)};
  Decimal128Scalar decimal128_scalar_neg_scale_pos{Decimal128("1234567"),
                                                   decimal128(10, -4)};
  Decimal128Scalar decimal128_scalar_neg_scale_neg{Decimal128("-1234567"),
                                                   decimal128(10, -4)};
  Decimal128Scalar decimal128_scalar_null{decimal128(10, 4)};
  auto heap_decimal128_scalar = *MakeScalar(decimal128(10, 4), Decimal128("1234567"));
  Decimal256Scalar decimal256_scalar_pos_scale_pos{
      Decimal256("1234567890123456789012345678901234567890123456"), decimal256(50, 4)};
  Decimal256Scalar decimal256_scalar_pos_scale_neg{
      Decimal256("-1234567890123456789012345678901234567890123456"), decimal256(50, 4)};
  Decimal256Scalar decimal256_scalar_neg_scale_pos{
      Decimal256("1234567890123456789012345678901234567890123456"), decimal256(50, -4)};
  Decimal256Scalar decimal256_scalar_neg_scale_neg{
      Decimal256("-1234567890123456789012345678901234567890123456"), decimal256(50, -4)};
  Decimal256Scalar decimal256_scalar_null{decimal256(50, 4)};
  auto heap_decimal256_scalar = *MakeScalar(
      decimal256(50, 4), Decimal256("1234567890123456789012345678901234567890123456"));
  BinaryScalar binary_scalar_null{};
  BinaryScalar binary_scalar_unallocated{std::shared_ptr<Buffer>{nullptr}};
  BinaryScalar binary_scalar_empty{Buffer::FromString("")};
  BinaryScalar binary_scalar_abc{Buffer::FromString("abc")};
  BinaryScalar binary_scalar_bytes{
      Buffer::FromString(std::string() + '\x00' + "\x1f\xff")};
  StringScalar string_scalar_null{};
  StringScalar string_scalar_unallocated{std::shared_ptr<Buffer>{nullptr}};
  StringScalar string_scalar_empty{Buffer::FromString("")};
  StringScalar string_scalar_hehe{Buffer::FromString("héhé")};
  StringScalar string_scalar_invalid_chars{
      Buffer::FromString(std::string("abc") + '\x00' + "def\xffghi")};
  LargeBinaryScalar large_binary_scalar_abc{Buffer::FromString("abc")};
  LargeStringScalar large_string_scalar_hehe{Buffer::FromString("héhé")};
  FixedSizeBinaryScalar fixed_size_binary_scalar{Buffer::FromString("abc"),
                                                 fixed_size_binary(3)};
  FixedSizeBinaryScalar fixed_size_binary_scalar_null{fixed_size_binary(3)};
  std::shared_ptr<Array> dict_array;
  ARROW_CHECK_OK(ArrayFromJSON(utf8(), R"(["foo", "bar", "quux"])", &dict_array));
  DictionaryScalar dict_scalar{{std::make_shared<Int8Scalar>(42), dict_array},
                               dictionary(int8(), utf8())};
  DictionaryScalar dict_scalar_null{dictionary(int8(), utf8())};
  std::shared_ptr<Array> list_value_array;
  ARROW_CHECK_OK(ArrayFromJSON(int32(), R"([4, 5, 6])", &list_value_array));
  ListScalar list_scalar{list_value_array};
  ListScalar list_scalar_null{list(int32())};
  LargeListScalar large_list_scalar{list_value_array};
  LargeListScalar large_list_scalar_null{large_list(int32())};
  FixedSizeListScalar fixed_size_list_scalar{list_value_array};
  FixedSizeListScalar fixed_size_list_scalar_null{fixed_size_list(int32(), 3)};
  auto struct_scalar_type = struct_({field("ints", int32()), field("strs", utf8())});
  StructScalar struct_scalar{
      ScalarVector{MakeScalar(int32_t(42)), MakeScalar("some text")}, struct_scalar_type};
  StructScalar struct_scalar_null{struct_scalar_type};
  auto sparse_union_scalar_type =
      sparse_union(FieldVector{field("ints", int32()), field("strs", utf8())}, {7, 42});
  auto dense_union_scalar_type =
      dense_union(FieldVector{field("ints", int32()), field("strs", utf8())}, {7, 42});
  SparseUnionScalar sparse_union_scalar{MakeScalar(int32_t(43)), 7,
                                        sparse_union_scalar_type};
  SparseUnionScalar sparse_union_scalar_null{7, sparse_union_scalar_type};
  DenseUnionScalar dense_union_scalar{MakeScalar(int32_t(43)), 7,
                                      dense_union_scalar_type};
  DenseUnionScalar dense_union_scalar_null{7, dense_union_scalar_type};
  auto extension_scalar_type = std::make_shared<UuidType>();
  ExtensionScalar extension_scalar{
      std::make_shared<FixedSizeBinaryScalar>(Buffer::FromString("0123456789abcdef"),
                                              extension_scalar_type->storage_type()),
      extension_scalar_type};
  ExtensionScalar extension_scalar_null{extension_scalar_type};
  std::shared_ptr<Scalar> heap_map_scalar;
  ARROW_CHECK_OK(
      ScalarFromJSON(map(utf8(), int32()), R"([["a", 5], ["b", 6]])", &heap_map_scalar));
  auto heap_map_scalar_null = MakeNullScalar(heap_map_scalar->type);
  // Array and ArrayData
  auto heap_null_array = SliceArrayFromJSON(null(), "[null, null]");
  auto heap_int32_array = SliceArrayFromJSON(int32(), "[-5, 6, null, 42]");
  ArrayData int32_array_data{*heap_int32_array->data()};
  Int32Array int32_array{heap_int32_array->data()->Copy()};
  auto heap_int32_array_no_nulls = SliceArrayFromJSON(int32(), "[-5, 6, 3, 42]");
  const char* json_int32_array = "[-1, 2, -3, 4, null, -5, 6, -7, 8, null, -9, -10]";
  auto heap_int32_array_sliced_1_9 = SliceArrayFromJSON(int32(), json_int32_array, 1, 9);
  auto heap_int32_array_sliced_2_6 = SliceArrayFromJSON(int32(), json_int32_array, 2, 6);
  auto heap_int32_array_sliced_8_4 = SliceArrayFromJSON(int32(), json_int32_array, 8, 4);
  auto heap_int32_array_sliced_empty =
      SliceArrayFromJSON(int32(), json_int32_array, 6, 0);
  const char* json_bool_array =
      "[false, false, true, true, null, null, false, false, true, true, "
      "null, null, false, false, true, true, null, null]";
  auto heap_bool_array = SliceArrayFromJSON(boolean(), json_bool_array);
  auto heap_bool_array_sliced_1_9 = SliceArrayFromJSON(boolean(), json_bool_array, 1, 9);
  auto heap_bool_array_sliced_2_6 = SliceArrayFromJSON(boolean(), json_bool_array, 2, 6);
  auto heap_bool_array_sliced_empty =
      SliceArrayFromJSON(boolean(), json_bool_array, 6, 0);
  auto heap_list_array = SliceArrayFromJSON(list(int64()), "[[1, 2], null, []]");
  ListArray list_array{heap_list_array->data()};
  const char* json_double_array = "[-1.5, null]";
  auto heap_double_array = SliceArrayFromJSON(float64(), json_double_array);
  // float16 has no JSON parser; build as uint16 then reinterpret (view) as float16.
  const char* json_float16_array = "[0, 48640]";
  auto heap_float16_array =
      *SliceArrayFromJSON(uint16(), json_float16_array)->View(float16());
  auto heap_date32_array =
      SliceArrayFromJSON(date32(), "[0, null, 18336, -9004, -719162, -719163]");
  auto heap_date64_array = SliceArrayFromJSON(
      date64(), "[1584230400000, -777945600000, -62135596800000, -62135683200000, 123]");
  const char* json_time_array = "[null, -123, 456]";
  auto heap_time32_array_s =
      SliceArrayFromJSON(time32(TimeUnit::SECOND), json_time_array);
  auto heap_time32_array_ms =
      SliceArrayFromJSON(time32(TimeUnit::MILLI), json_time_array);
  auto heap_time64_array_us =
      SliceArrayFromJSON(time64(TimeUnit::MICRO), json_time_array);
  auto heap_time64_array_ns = SliceArrayFromJSON(time64(TimeUnit::NANO), json_time_array);
  auto heap_month_interval_array =
      SliceArrayFromJSON(month_interval(), "[123, -456, null]");
  auto heap_day_time_interval_array =
      SliceArrayFromJSON(day_time_interval(), "[[1, -600], null]");
  auto heap_month_day_nano_interval_array =
      SliceArrayFromJSON(month_day_nano_interval(), "[[1, -600, 5000], null]");
  const char* json_duration_array = "[null, -1234567890123456789]";
  auto heap_duration_array_s =
      SliceArrayFromJSON(duration(TimeUnit::SECOND), json_duration_array);
  auto heap_duration_array_ns =
      SliceArrayFromJSON(duration(TimeUnit::NANO), json_duration_array);
  auto heap_timestamp_array_s = SliceArrayFromJSON(
      timestamp(TimeUnit::SECOND),
      R"([null, "1970-01-01 00:00:00", "1900-02-28 12:34:56", "3989-07-14 00:00:00"])");
  auto heap_timestamp_array_ms = SliceArrayFromJSON(
      timestamp(TimeUnit::MILLI),
      R"([null, "1900-02-28 12:34:56.123", "3989-07-14 00:00:00.789"])");
  auto heap_timestamp_array_us = SliceArrayFromJSON(
      timestamp(TimeUnit::MICRO),
      R"([null, "1900-02-28 12:34:56.654321", "3989-07-14 00:00:00.456789"])");
  auto heap_timestamp_array_ns = SliceArrayFromJSON(
      timestamp(TimeUnit::NANO), R"([null, "1900-02-28 12:34:56.987654321"])");
  auto heap_decimal128_array = SliceArrayFromJSON(
      decimal128(30, 6),
      R"([null, "-1234567890123456789.012345", "1234567890123456789.012345"])");
  auto heap_decimal256_array = SliceArrayFromJSON(
      decimal256(50, 6), R"([null, "-123456789012345678901234567890123456789.012345"])");
  auto heap_decimal128_array_sliced = heap_decimal128_array->Slice(1, 1);
  auto heap_fixed_size_binary_array =
      SliceArrayFromJSON(fixed_size_binary(3), "[null, \"abc\", \"\\u0000\\u001f\xff\"]");
  auto heap_fixed_size_binary_array_zero_width =
      SliceArrayFromJSON(fixed_size_binary(0), R"([null, ""])");
  auto heap_fixed_size_binary_array_sliced = heap_fixed_size_binary_array->Slice(1, 1);
  const char* json_binary_array = "[null, \"abcd\", \"\\u0000\\u001f\xff\"]";
  auto heap_binary_array = SliceArrayFromJSON(binary(), json_binary_array);
  auto heap_large_binary_array = SliceArrayFromJSON(large_binary(), json_binary_array);
  const char* json_string_array = "[null, \"héhé\", \"invalid \xff char\"]";
  auto heap_string_array = SliceArrayFromJSON(utf8(), json_string_array);
  auto heap_large_string_array = SliceArrayFromJSON(large_utf8(), json_string_array);
  auto heap_binary_array_sliced = heap_binary_array->Slice(1, 1);
  // ChunkedArray
  ArrayVector array_chunks(2);
  ARROW_CHECK_OK(ArrayFromJSON(int32(), "[1, 2]", &array_chunks[0]));
  ARROW_CHECK_OK(ArrayFromJSON(int32(), "[3, null, 4]", &array_chunks[1]));
  ChunkedArray chunked_array{array_chunks};
  // RecordBatch
  auto batch_schema = schema({field("ints", int32()), field("strs", utf8())});
  ArrayVector batch_columns{2};
  ARROW_CHECK_OK(ArrayFromJSON(int32(), "[1, 2, 3]", &batch_columns[0]));
  ARROW_CHECK_OK(ArrayFromJSON(utf8(), R"(["abc", null, "def"])", &batch_columns[1]));
  auto batch = RecordBatch::Make(batch_schema, /*num_rows=*/3, batch_columns);
  auto batch_with_metadata = batch->ReplaceSchemaMetadata(
      key_value_metadata({"key1", "key2", "key3"}, {"value1", "value2", "value3"}));
  // Table
  ChunkedArrayVector table_columns{2};
  ARROW_CHECK_OK(
      ChunkedArrayFromJSON(int32(), {"[1, 2, 3]", "[4, 5]"}, &table_columns[0]));
  ARROW_CHECK_OK(ChunkedArrayFromJSON(
      utf8(), {R"(["abc", null])", R"(["def"])", R"(["ghi", "jkl"])"},
      &table_columns[1]));
  auto table = Table::Make(batch_schema, table_columns);
  // Datum
  Datum empty_datum{};
  Datum scalar_datum{MakeNullScalar(boolean())};
  Datum array_datum{heap_int32_array};
  Datum chunked_array_datum{chunked_array};
  Datum batch_datum{batch};
  Datum table_datum{table};
#ifdef __clang__
  _Pragma("clang diagnostic pop");
#elif defined(__GNUC__)
  _Pragma("GCC diagnostic pop");
#endif
  // Hook into debugger
  ::arrow::internal::DebugTrap();
}
} // namespace gdb
} // namespace arrow
|
---
first_name: Peter
full_name: Peter Karman
last_name: Karman
name: pkarman
redirect_from: "/team/pkarman/"
published: true
---
|
// file : libbuild2/dist/rule.hxx -*- C++ -*-
// license : MIT; see accompanying LICENSE file
#ifndef LIBBUILD2_DIST_RULE_HXX
#define LIBBUILD2_DIST_RULE_HXX
#include <libbuild2/types.hxx>
#include <libbuild2/utility.hxx>
#include <libbuild2/rule.hxx>
#include <libbuild2/action.hxx>
#include <libbuild2/target.hxx>
namespace build2
{
namespace dist
{
// This is the default rule that simply matches all the prerequisites.
//
// A custom rule (usually the same as perform_update) may be necessary to
// establish group links (so that we see the dist variable set on a group)
// or to see through non-see-through groups (like lib{}; see the
// bin::lib_rule for an example). Note that in the latter case the rule
// should "see" all its members for the dist case.
//
class rule: public simple_rule
{
public:
  rule () {}

  // Match any target for the dist operation.
  virtual bool
  match (action, target&, const string&) const override;

  // Return a recipe that matches all the target's prerequisites.
  virtual recipe
  apply (action, target&) const override;
};
}
}
#endif // LIBBUILD2_DIST_RULE_HXX
|
package com.mass.util;
import java.lang.annotation.*;
/**
 * Marks a field as required. Validation code can reflect on this annotation
 * at runtime ({@link RetentionPolicy#RUNTIME}) to reject missing fields.
 *
 * @author huiqiang
 * @since 2018-05-22
 */
@Documented
@Inherited
@Target(ElementType.FIELD)
@Retention(RetentionPolicy.RUNTIME)
public @interface NotNull {
    // Default error message (Chinese: roughly "field required").
    // Left untranslated: it is a runtime value, not documentation.
    String message() default "有字段";
}
|
// constants
// Authenticated request headers. Built lazily (a function, not a constant)
// so the token is read from localStorage at request time, picking up a
// login that happened after this module was loaded.
const headers = () => ({
  'Content-Type': 'application/json',
  Accepts: 'application/json',
  Authorization: localStorage.getItem('token')
});

// Signup happens before any token exists, so no Authorization header here.
const signup_headers = {
  'Content-Type': 'application/json',
  Accepts: 'application/json'
};

const URL_ROOT = 'http://localhost:3001';
const API_ROOT = `${URL_ROOT}/api/v1`;
// auth fetches
// Create an account; the server expects a { user: {...} } payload.
const signup = async (signupBody) => {
  const res = await fetch(`${URL_ROOT}/signup`, {
    method: 'POST',
    headers: signup_headers,
    body: JSON.stringify({ user: signupBody })
  });
  return res.json();
};

// Exchange credentials for a session/token.
const login = async (email, password) => {
  const res = await fetch(`${URL_ROOT}/login`, {
    method: 'POST',
    headers: headers(),
    body: JSON.stringify({ email, password })
  });
  return res.json();
};

// Fetch the user associated with the stored token.
const getLoggedInUser = async () => {
  const res = await fetch(`${URL_ROOT}/current_user`, { headers: headers() });
  return res.json();
};
// users
// List all users.
const getUsers = async () => {
  const res = await fetch(`${API_ROOT}/users`, { headers: headers() });
  return res.json();
};

// Fetch a single user by id.
const getUser = async (uid) => {
  const res = await fetch(`${API_ROOT}/users/${uid}`, { headers: headers() });
  return res.json();
};

// Partially update a user; server expects { user: {...} }.
const patchUser = async (uid, patchUserBody) => {
  const res = await fetch(`${API_ROOT}/users/${uid}`, {
    method: 'PATCH',
    headers: headers(),
    body: JSON.stringify({ user: patchUserBody })
  });
  return res.json();
};
// follows
// Create a follower -> followed connection.
const postFollow = async (follower_id, followed_id) => {
  const res = await fetch(`${API_ROOT}/connections`, {
    method: 'POST',
    headers: headers(),
    body: JSON.stringify({ follower_id, followed_id })
  });
  return res.json();
};

// Remove a follower -> followed connection.
const postUnfollow = async (follower_id, followed_id) => {
  const res = await fetch(`${API_ROOT}/unfollow`, {
    method: 'POST',
    headers: headers(),
    body: JSON.stringify({ follower_id, followed_id })
  });
  return res.json();
};

// Users that `uid` follows.
const getFollowing = async (uid) => {
  const res = await fetch(`${API_ROOT}/users/${uid}/following`, { headers: headers() });
  return res.json();
};

// Users that follow `uid`.
const getFollowers = async (uid) => {
  const res = await fetch(`${API_ROOT}/users/${uid}/followers`, { headers: headers() });
  return res.json();
};
// flaps
// List all flaps.
const getFlaps = async () => {
  const res = await fetch(`${API_ROOT}/flaps`, { headers: headers() });
  return res.json();
};

// Fetch a single flap by id.
const getFlap = async (fid) => {
  const res = await fetch(`${API_ROOT}/flaps/${fid}`, { headers: headers() });
  return res.json();
};

// Create a flap; `parent` links a reply to its parent flap.
const postFlap = async (content, user_id, parent) => {
  const res = await fetch(`${API_ROOT}/flaps`, {
    method: 'POST',
    headers: headers(),
    body: JSON.stringify({ content, user_id, parent })
  });
  return res.json();
};

// PATCH a flap with no body.
// NOTE(review): presumably the server toggles some flag (e.g. a like)
// based on the authenticated user — confirm against the API.
const patchFlap = async (fid) => {
  const res = await fetch(`${API_ROOT}/flaps/${fid}`, {
    method: 'PATCH',
    headers: headers()
  });
  return res.json();
};
// Public API surface, grouped by resource:
// api.auth.login(...), api.flaps.getFlaps(), api.users.getUser(id), ...
export default {
  auth: {
    signup,
    login,
    getLoggedInUser
  },
  flaps: {
    getFlaps,
    getFlap,
    postFlap,
    patchFlap
  },
  users: {
    getUsers,
    getUser,
    patchUser,
  },
  follows: {
    postFollow,
    postUnfollow,
    getFollowing,
    getFollowers
  }
};
|
var _ = require('lodash'),
http = require('http');
// HTTP server component: builds stores -> models -> views -> controllers
// from conventional directories under `cwd`, installs middleware, and
// serves the express app over plain http.
module.exports = require('./component').extend({
  // Builds controller components from <cwd>/controller/, wiring in the
  // express app, models, views, logger and a cross-controller lookup.
  // Triggers the middleware/model/view build chain first.
  buildControllers: function() {
    this.initUse();
    this.buildModels();
    this.buildViews();
    var dir = this.cwd + '/controller/';
    this.controllers = this.initComponentsFromPath(dir, this.controllers, {
      express: this.express,
      cwd: this.cwd,
      models: this.models,
      views: this.views,
      logger: this.logger,
      // Lets one controller reach another by name after all are built.
      getController: function(name) {
        return this.controllers[name];
      }.bind(this),
    });
    return;
  },
  // Builds model components from <cwd>/model/, after the stores they use.
  buildModels: function() {
    this.buildStores();
    var dir = this.cwd + '/model/';
    this.models = this.initComponentsFromPath(dir, this.models, { _stores: this.stores });
  },
  // Builds view components from <cwd>/view/.
  buildViews: function() {
    var dir = this.cwd + '/view/';
    this.views = this.initComponentsFromPath(dir, this.views);
  },
  // Builds store components from <cwd>/store/.
  buildStores: function() {
    var dir = this.cwd + '/store/';
    this.stores = this.initComponentsFromPath(dir, this.stores);
  },
  // Starts the HTTP server on the configured host/port.
  // NOTE(review): the listen callback's `err` parameter is never set by
  // Node's server.listen — errors arrive via the 'error' event above.
  run: function() {
    var server = http.createServer(this.express);
    server.on('error', function(e) {
      if (e.code === 'EADDRINUSE') this.logger.fatal('port %s already in use', this.listen.port);
    }.bind(this));
    server.listen(this.listen.port, this.listen.host, function(err) {
      this.logger.info('listening on http://' + [this.listen.host, this.listen.port].join(':'));
    }.bind(this));
  },
  // Installs the configured `use` middlewares into express, exactly once
  // (guarded by the _initUse flag).
  initUse: function() {
    if (this._initUse) return;
    else this._initUse = true;
    var middlewares = this.initComponentsWith(this.use);
    _.values(middlewares).forEach(function(middleware) {
      this.express.use(middleware);
    }, this);
  }
}, {
  attributes: {
    // Controllers are built lazily via buildControllers on first access.
    controllers: {
      initializer: 'buildControllers',
      default: function() {
        return {};
      }
    },
    models: {
      default: function() {
        return {};
      }
    },
    views: {
      default: function() {
        return {};
      }
    },
    stores: {
      default: function() {
        return {};
      }
    },
    // Host/port the HTTP server binds to.
    listen: {
      required: true,
      default: function() {
        return {
          host: 'localhost',
          port: 3000,
        }
      },
    },
    // Express middlewares (component specs) installed by initUse.
    use: {
      default: function() {
        return []
      },
    },
    express: {
      required: true,
      default: function() {
        return require('express')();
      },
    },
  }
});
|
// Package log provides a logger. The logger currently wraps sirupsen/logrus's
// Logger but it could be easily replaced.
package log
import (
"io"
"github.com/sirupsen/logrus"
)
// Logger is used to log error, warning and info messages
type Logger interface {
	Error(...interface{})
	Errorf(string, ...interface{})
	Warning(...interface{})
	Warningf(string, ...interface{})
	Info(...interface{})
	Infof(string, ...interface{})
	// PrefixLogger returns a sub-logger whose messages carry the given prefix.
	PrefixLogger(string) Logger
	// SetLevel sets the minimum level at which messages are emitted.
	SetLevel(Level)
}

// logger implements Logger by wrapping a logrus.Logger.
type logger struct {
	logger *logrus.Logger
}
// New creates a new logger that writes to the given output.
func New(output io.Writer) Logger {
	base := logrus.New()
	base.Out = output
	return &logger{logger: base}
}
// PrefixLogger returns a sub-logger that uses a prefix
// (delegates to newPrefixLogger, defined elsewhere in this package).
func (l *logger) PrefixLogger(prefix string) Logger {
	return newPrefixLogger(l, prefix)
}
// SetLevel sets the logging level, translating our Level values to
// logrus levels. Unknown values fall back to Info, as before.
func (l *logger) SetLevel(level Level) {
	target := logrus.InfoLevel
	if mapped, ok := map[Level]logrus.Level{
		ErrorLevel:   logrus.ErrorLevel,
		WarningLevel: logrus.WarnLevel,
		InfoLevel:    logrus.InfoLevel,
	}[level]; ok {
		target = mapped
	}
	l.logger.SetLevel(target)
}
// Error logs an error message
func (l *logger) Error(args ...interface{}) {
	l.logger.Error(args...)
}

// Errorf logs an error message with the given format
func (l *logger) Errorf(format string, args ...interface{}) {
	l.logger.Errorf(format, args...)
}

// Warning logs a warning message
func (l *logger) Warning(args ...interface{}) {
	l.logger.Warning(args...)
}

// Warningf logs a warning message with the given format
func (l *logger) Warningf(format string, args ...interface{}) {
	l.logger.Warningf(format, args...)
}

// Info logs an info message
func (l *logger) Info(args ...interface{}) {
	l.logger.Info(args...)
}

// Infof logs an info message with the given format
func (l *logger) Infof(format string, args ...interface{}) {
	l.logger.Infof(format, args...)
}
|
import Icon from "@components/@core/icon";
import loadable from "@loadable/component";
import { getIcons } from "@utils/getIcons";
import React from "react";
import IconsPageLoading from "./loading";
export default function IconSetViewer({ icon }) {
const IconSet = loadable.lib(() => getIcons(icon.id));
return (
<>
<h2>Icons</h2>
<IconSet fallback={<IconsPageLoading />}>
{({ default: icons }) => (
<div className="icons">
{Object.keys(icons).map(name => (
<Icon key={name} icon={icons[name]} name={name} />
))}
</div>
)}
</IconSet>
</>
);
}
|
// GENERATED CODE - DO NOT MODIFY BY HAND
part of 'applied_gift_cards.dart';
// **************************************************************************
// JsonSerializableGenerator
// **************************************************************************
// Deserializes an _AppliedGiftCards from its JSON map.
// (Generated by json_serializable — will be overwritten on regeneration.)
_$_AppliedGiftCards _$_$_AppliedGiftCardsFromJson(Map<String, dynamic> json) {
  return _$_AppliedGiftCards(
    amountUsedV2:
        PriceV2.fromJson(json['amountUsedV2'] as Map<String, dynamic>),
    balanceV2: PriceV2.fromJson(json['balanceV2'] as Map<String, dynamic>),
    id: json['id'] as String,
  );
}

// Serializes an _AppliedGiftCards instance back to a JSON map.
Map<String, dynamic> _$_$_AppliedGiftCardsToJson(
        _$_AppliedGiftCards instance) =>
    <String, dynamic>{
      'amountUsedV2': instance.amountUsedV2,
      'balanceV2': instance.balanceV2,
      'id': instance.id,
    };
|
# == Schema Information
#
# Table name: patient_physiologicals
#
# id :bigint(8) not null, primary key
# patient_id :bigint(8)
# other_diseases :text
# continuing_medication :text
# previous_surgeries :text
# hospitalization :text
# first_menstruation :text
# complaints :text
# gestation :text
# children :text
# abortion :text
# created_at :datetime not null
# updated_at :datetime not null
#
# ActiveRecord model for a patient's physiological history record
# (columns documented in the schema annotation above).
class Patient::Physiological < ApplicationRecord
end
|
use std::sync::mpsc::{Sender, Receiver};
use std::sync::mpsc;
use crate::lib::Solver;
use crate::lib::intcode_computer;
pub(crate) struct Day5Solver {}

impl Solver for Day5Solver {
    /// Runs the day-5 Intcode program, feeding it the system ID
    /// (1 for part one, 5 for part two) and returning the first
    /// output as a string.
    fn solve(&self, lines: Vec<String>, part_two: bool) -> String {
        // Parse directly into the buffer we mutate; the original parsed
        // into `orig_program` and then made a needless clone of it.
        let mut program: Vec<i128> = intcode_computer::read_program(&lines[0]);
        let (input_sender, input_receiver): (Sender<i128>, Receiver<i128>) = mpsc::channel();
        let (output_sender, output_receiver): (Sender<i128>, Receiver<i128>) = mpsc::channel();
        // System ID consumed by the program's single input instruction.
        let system_id: i128 = if part_two { 5 } else { 1 };
        input_sender.send(system_id).ok();
        intcode_computer::run_program(input_receiver, output_sender, &mut program);
        output_receiver.recv().unwrap().to_string()
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    use crate::lib::test_solver;

    /// Parses `program_raw[0]` as an Intcode program, runs it with the
    /// given `input`, and asserts the first output equals `expected_output`.
    fn test_program(input: i128, program_raw: &[&str], expected_output: i128) {
        let mut program: Vec<i128> = program_raw[0]
            .split(',')
            .map(|s| s.parse::<i128>().unwrap())
            .collect();
        let (input_sender, input_receiver): (Sender<i128>, Receiver<i128>) = mpsc::channel();
        let (output_sender, output_receiver): (Sender<i128>, Receiver<i128>) = mpsc::channel();
        // Fail loudly if the send cannot happen instead of silently dropping
        // the input (the original discarded this Result, triggering a
        // `must_use` warning and hiding channel errors).
        input_sender.send(input).unwrap();
        intcode_computer::run_program(input_receiver, output_sender, &mut program);
        let output = output_receiver.recv().unwrap();
        assert_eq!(output, expected_output);
    }

    #[test]
    fn test_run_program() {
        test_program(42, &["3,0,4,0,99"], 42);
    }

    #[test]
    fn test_part_two_examples() {
        // Position-mode equals / less-than comparisons against 8.
        test_program(8, &["3,9,8,9,10,9,4,9,99,-1,8"], 1);
        test_program(7, &["3,9,8,9,10,9,4,9,99,-1,8"], 0);
        test_program(7, &["3,9,7,9,10,9,4,9,99,-1,8"], 1);
        test_program(9, &["3,9,7,9,10,9,4,9,99,-1,8"], 0);
        // Immediate-mode equals / less-than comparisons against 8.
        test_program(8, &["3,3,1108,-1,8,3,4,3,99"], 1);
        test_program(7, &["3,3,1108,-1,8,3,4,3,99"], 0);
        test_program(7, &["3,3,1107,-1,8,3,4,3,99"], 1);
        test_program(9, &["3,3,1107,-1,8,3,4,3,99"], 0);
    }
}
import 'dart:math';
import 'dart:ui' as ui;
import 'package:flutter/material.dart';
import 'named_image.dart';
import 'painter_presets.dart';
class SpinwheelPainter extends CustomPainter {
  /// List of menu options (strings, or images when [_isImageList] is true).
  final List<dynamic> _items;

  /// Number of sectors on the wheel.
  final int _itemCount;

  /// Boolean that determines whether the spinner should rotate clockwise or
  /// counter-clockwise.
  final bool _clockwise;

  /// Drives the wheel's rotation; its value scales the per-step angle.
  final Animation _rotationAnimation;

  /// The index of the sector that acts as the selection sector (highlighted).
  final int _selectSector;

  /// Extra rotation (radians) applied on top of the base orientation.
  final double _orientation;

  // Same functionality for below fields as in SpinnerWheel.
  // Each may be null; setPresets() fills in defaults.
  Paint _wheelPaint;
  Paint _borderPaint;
  Paint _sectorDividerPaint;
  Paint _centerPiecePaint;
  Paint _highlightPaint;
  Paint _shutterPaint;

  // Feature toggles for the optional wheel decorations.
  final bool _shouldDrawDividers;
  final bool _shouldDrawBorder;
  final bool _shouldDrawCenterPiece;
  final bool _shouldHighlight;
  final bool _hideOthers;
  final bool _highlightWhileRotating;
  final bool _isImageList;

  // Angle occupied by each sector.
  final double _sectorAngle;

  // Decoded images, only populated when [_isImageList] is true.
  List<ui.Image> _loadedImages;

  /// Positional constructor; computes the per-sector angle from the item
  /// count and fills any null Paint with a preset.
  SpinwheelPainter(
    this._itemCount,
    this._isImageList,
    this._items,
    this._loadedImages,
    this._clockwise,
    this._rotationAnimation,
    this._selectSector,
    this._shouldDrawDividers,
    this._shouldDrawBorder,
    this._shouldDrawCenterPiece,
    this._hideOthers,
    this._shouldHighlight,
    this._highlightWhileRotating,
    this._wheelPaint,
    this._borderPaint,
    this._sectorDividerPaint,
    this._centerPiecePaint,
    this._highlightPaint,
    this._shutterPaint,
    this._orientation,
  ) : _sectorAngle = 2 * pi / _itemCount {
    setPresets();
  }
  /// Replaces any Paint the caller left null with the corresponding
  /// preset from [PainterPresets]; caller-supplied paints are kept.
  void setPresets() {
    PainterPresets presets = PainterPresets();
    _wheelPaint = _wheelPaint ?? presets.wheelPaintPreset;
    _borderPaint = _borderPaint ?? presets.borderPaintPreset;
    _sectorDividerPaint =
        _sectorDividerPaint ?? presets.sectorDividerPaintPreset;
    _centerPiecePaint = _centerPiecePaint ?? presets.centerPiecePaintPreset;
    _highlightPaint = _highlightPaint ?? presets.highlightPaintPreset;
    _shutterPaint = _shutterPaint ?? presets.shutterPaintPreset;
  }
@override
void paint(Canvas canvas, Size size) {
if (_isImageList && _loadedImages.length != _items.length) return;
// Calculating common constraints, offsets and angles.
final width = size.width;
final height = size.height;
final radius = width / 2;
final circleCenter = Offset(width / 2, height / 2);
// Angles at which each consecutive sector will be drawn on the wheel.
final sectorOffsetAngles = [
for (int i = 0; i < _itemCount; i++) _sectorAngle * i
];
// Angular offset for each string of text in the provided list of options.
// 2pi radian (360°) is temporarily added for calculating textRotation of the
// last element in the list of options.
sectorOffsetAngles.add(2 * pi);
final itemRotations = [
for (int i = 0; i < _itemCount; i++)
(sectorOffsetAngles[i] + sectorOffsetAngles[i + 1]) / 2
];
sectorOffsetAngles.remove(2 * pi);
// Value used for rotation animation.
double rot = _sectorAngle;
// If counter-clockwise, reverse the direction of rotation.
if (!_clockwise) {
rot = -_sectorAngle;
}
// Function where most of the painting occurs.
paintSpinner(canvas, width, height, radius, circleCenter, rot,
sectorOffsetAngles, itemRotations);
}
paintSpinner(
Canvas canvas,
double width,
double height,
double radius,
Offset circleCenter,
double rot,
List<double> sectorOffsetAngles,
List<double> itemRotations) {
canvas.save();
// Painting the big circle/wheel.
drawWheel(canvas, radius, circleCenter);
if (_shouldDrawBorder) drawBorder(canvas, radius, circleCenter);
// This line of code animates the rotation of the wheel
// by taking in the animation (param) value and multiplying it
// with the sector angle. As it is multiplied by a fixed angle,
// the rotation is locked to only multiples of this angle.
// Rotating to an appropriate orientation.
rotateCanvas(canvas, radius, pi * 1.5);
// Custom orientation provided by user.
rotateCanvas(canvas, radius, _orientation);
// Rotation animation takes place here.
rotateCanvas(canvas, radius, rot * _rotationAnimation.value);
// Drawing components according to settings provided.
if (_isImageList && _loadedImages != null)
drawImages(
canvas, radius, circleCenter, sectorOffsetAngles, itemRotations);
else if (!_isImageList)
drawTexts(canvas, radius, circleCenter, itemRotations);
if (_shouldDrawDividers)
drawSectorDividers(canvas, radius, circleCenter, sectorOffsetAngles);
canvas.restore();
rotateCanvas(canvas, radius, pi * 1.5);
rotateCanvas(canvas, radius, _orientation);
if (_shouldHighlight)
drawSelectionSector(canvas, radius, circleCenter, sectorOffsetAngles);
if (_hideOthers)
drawShutter(canvas, radius, circleCenter, sectorOffsetAngles);
if (_shouldDrawCenterPiece) drawCenterPiece(canvas, radius, circleCenter);
}
// Function used to rotate the canvas i.e, the wheel about its center.
// As the default rotate method does not
// provide a way to specify the pivot of rotation, the canvas is translated
// to and from the pivot offset which is the center of the wheel.
void rotateCanvas(Canvas canvas, double radius, double angle) {
canvas.translate(radius, radius);
canvas.rotate(angle);
canvas.translate(-radius, -radius);
}
void drawWheel(Canvas canvas, double radius, Offset circleCenter) {
canvas.drawCircle(circleCenter, radius, _wheelPaint);
}
void drawBorder(Canvas canvas, double radius, Offset circleCenter) {
canvas.drawCircle(circleCenter, radius, _borderPaint);
}
drawImages(Canvas canvas, double radius, Offset circleCenter,
List<double> sectorOffsetAngles, List<double> itemRotations) {
for (var i = 0; i < _itemCount; i++) {
canvas.save();
// Clipper in the shape of a sector.
Path clip = Path();
clip.moveTo(circleCenter.dx, circleCenter.dy);
clip.arcTo(Rect.fromCircle(center: circleCenter, radius: radius),
sectorOffsetAngles[i], _sectorAngle, false);
clip.lineTo(circleCenter.dx, circleCenter.dy);
canvas.clipPath(clip);
// This rotation is necessary to appropriately clip the images.
rotateCanvas(canvas, radius, pi * 1.5);
rotateCanvas(canvas, radius, itemRotations[i]);
paintLoadedImage(canvas, radius, circleCenter, _loadedImages[i],
(_items[i] as NamedImage));
canvas.restore();
}
}
void paintLoadedImage(Canvas canvas, double radius, Offset circleCenter,
ui.Image image, NamedImage imgInfo) {
paintImage(
canvas: canvas,
rect: Rect.fromCenter(
center: Offset(radius * imgInfo.offsetX, radius * imgInfo.offsetY),
width: radius * 2,
height: radius * 2),
image: image,
colorFilter: imgInfo.filter,
fit: BoxFit.scaleDown,
scale: 0.4,
);
}
void drawTexts(Canvas canvas, double radius, Offset circleCenter,
List<double> itemRotations) {
for (var i = 0; i < _itemCount; i++) {
rotateCanvas(canvas, radius, itemRotations[i]);
paintText(
canvas,
radius,
Offset(circleCenter.dx + (radius * 2) / 10,
circleCenter.dy - (radius * 2) / 15),
_items[i],
);
rotateCanvas(canvas, radius, -itemRotations[i]);
}
}
void paintText(
Canvas canvas,
double radius,
Offset offset,
String text,
) {
// Painter that paints the text on to the canvas.
var textPainter = TextPainter(
maxLines: 1,
text: TextSpan(
text: text,
style: TextStyle(
fontSize: radius / 5,
color: Colors.black,
fontWeight: FontWeight.bold)),
textDirection: TextDirection.ltr,
textAlign: TextAlign.center,
textWidthBasis: TextWidthBasis.longestLine,
);
textPainter.layout(minWidth: 0, maxWidth: radius / 1.25);
textPainter.paint(canvas, offset);
}
void drawSectorDividers(Canvas canvas, double radius, Offset circleCenter,
List<double> sectorOffsetAngles) {
for (var i = 0; i < _itemCount; i++) {
rotateCanvas(canvas, radius, sectorOffsetAngles[i]);
canvas.drawLine(
circleCenter, Offset(radius * 2, radius), _sectorDividerPaint);
rotateCanvas(canvas, radius, -sectorOffsetAngles[i]);
}
}
void drawSelectionSector(
Canvas canvas,
double radius,
Offset circleCenter,
List<double> sectorOffsetAngles,
) {
if (_highlightWhileRotating ||
_rotationAnimation.status != AnimationStatus.forward) {
canvas.drawArc(
Rect.fromCircle(center: circleCenter, radius: radius),
sectorOffsetAngles[_selectSector],
_sectorAngle,
true,
_highlightPaint);
}
}
void drawShutter(Canvas canvas, double radius, Offset circleCenter,
List<double> sectorOffsetAngles) {
double shutterStartAngle;
double sweepAngle = _sectorAngle * (_itemCount - 1);
if (_selectSector + 1 < _itemCount)
shutterStartAngle = _sectorAngle * (_selectSector + 1);
else
shutterStartAngle = 0;
canvas.drawArc(
Rect.fromCircle(center: circleCenter, radius: radius),
shutterStartAngle,
sweepAngle,
true,
_shutterPaint,
);
}
void drawCenterPiece(Canvas canvas, double radius, Offset circleCenter) {
canvas.drawCircle(circleCenter, radius * 0.1, _centerPiecePaint);
}
@override
bool shouldRepaint(SpinwheelPainter oldDelegate) {
return true;
}
}
|
=head1 LICENSE
Copyright [1999-2014] Wellcome Trust Sanger Institute and the EMBL-European Bioinformatics Institute
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
=cut
=head1 CONTACT
Please email comments or questions to the public Ensembl
developers list at <http://lists.ensembl.org/mailman/listinfo/dev>.
Questions may also be sent to the Ensembl help desk at
<http://www.ensembl.org/Help/Contact>.
=cut
# Ensembl module for Bio::EnsEMBL::Variation::Genotype
#
#
=head1 NAME
Bio::EnsEMBL::Variation::Genotype - Abstract base class representing a genotype
=head1 SYNOPSIS
print $genotype->variation()->name(), "\n";
print $genotype->allele1(), '/', $genotype->allele2(), "\n";
=head1 DESCRIPTION
This is an abstract base class representing a genotype. Specific types of
genotype are represented by subclasses such as IndividualGenotype and
PopulationGenotype.
=head1 METHODS
=cut
use strict;
use warnings;
package Bio::EnsEMBL::Variation::Genotype;
use Bio::EnsEMBL::Storable;
use Bio::EnsEMBL::Utils::Scalar qw(assert_ref);
use Bio::EnsEMBL::Variation::Utils::Sequence qw(strain_ambiguity_code);
use vars qw(@ISA $AUTOLOAD);
@ISA = qw(Bio::EnsEMBL::Storable);
# Fast constructor: blesses the supplied hashref directly into the class
# without copying or validating its contents.
sub new_fast {
  my ($class, $hashref) = @_;
  return bless($hashref, $class);
}
=head2 allele
Args : int $index
string $new_allele (optional)
Examples : $allele1 = $genotype->allele(1);
$allele2 = $genotype->allele2();
Description: Getter/Setter for one of the alleles that compose this genotype.
Can be called as $genotype->allele(1), or via AUTOLOAD as
$genotype->allele1()
Returntype : string
Exceptions : none
Caller : general
Status : Stable
=cut
# Getter/setter for a single allele by 1-based index. With no index (or an
# index below 1) it defaults to allele 1. Passing a defined third argument
# stores a new allele at that position before returning it.
sub allele {
  my ($self, $index, $new_allele) = @_;

  # Normalise the index: default to 1, then convert to a 0-based position.
  $index = 1 unless defined($index) && $index >= 1;
  my $pos = $index - 1;

  # Setter behaviour when a replacement allele was supplied.
  $self->{genotype}->[$pos] = $new_allele if defined($new_allele);

  my $current = $self->{genotype}->[$pos];
  return defined($current) ? $current : undef;
}
=head2 genotype
Examples : @alleles = @{$genotype->genotype};
Description: Getter for the genotype as an arrayref of alleles
Returntype : arrayref of strings
Exceptions : none
Caller : general
Status : Stable
=cut
# Getter for the raw genotype arrayref stored on the object.
sub genotype {
  my $self = shift;
  return $self->{genotype};
}
=head2 genotype_string
Arg [1] : (optional) bool $sort
Examples : $genotype_string = $genotype->genotype_string;
Description: Gets the genotype as a '|'-separated string. Pass "1" as first
argument to alphabetically sort genotype.
Returntype : string
Exceptions : none
Caller : general
Status : Stable
=cut
# Returns the genotype as a pipe-separated string, e.g. "A|G". Passing a
# defined first argument sorts the alleles alphabetically first.
sub genotype_string {
  my ($self, $sort) = @_;

  my @alleles = @{ $self->genotype || [] };
  @alleles = sort @alleles if defined($sort);

  return join('|', @alleles);
}
=head2 variation
Arg [1] : (optional) Bio::EnsEMBL::Variation::Variation $var
Example : $var = $genotype->variation();
Description: Getter/Setter for the Variation object associated with this genotype
Returntype : Bio::EnsEMBL::Variation::Variation
Exceptions : throw on bad argument
Caller : general
Status : Stable
=cut
sub variation {
  my $self = shift;

  # Setter: validate that the argument is a Variation (or undef) and store it.
  if(@_) {
    my $v = shift;
    if(defined($v) && (!ref($v) || !$v->isa('Bio::EnsEMBL::Variation::Variation'))) {
      throw('Bio::EnsEMBL::Variation::Variation argument expected');
    }
    return $self->{variation} = $v;
  }

  # Lazy-load: if only the internal dbID is present, fetch the Variation
  # through the adaptor the first time it is requested, then cache it.
  # NOTE(review): silently returns undef if the adaptor or fetch fails —
  # callers must cope with an undefined return.
  if(!defined($self->{variation}) && defined($self->{_variation_id})) {
    my $va = $self->adaptor->db->get_VariationAdaptor;

    if(defined($va)) {
      my $v = $va->fetch_by_dbID($self->{_variation_id});

      if(defined($v)) {
        $self->{variation} = $v;
      }
    }
  }

  return $self->{'variation'};
}
=head2 subsnp
Arg [1] : string $newval (optional)
The new value to set the subsnp attribute to
Example : print $genotype->subsnp();
Description: Getter/Setter for the subsnp attribute.
Returntype : string
Exceptions : none
Caller : general
Status : Stable
=cut
# Getter/setter for the subSNP identifier. The value is stored as given,
# but is always returned with an 'ss' prefix (added if missing).
sub subsnp {
  my $self = shift;

  $self->{'subsnp'} = shift if @_;

  my $id = $self->{'subsnp'};
  $id = 'ss' . $id if defined($id) && $id !~ /^ss/;

  return $id;
}
=head2 subsnp_handle
Arg [1] : string $newval (optional)
The new value to set the subsnp_handle attribute to
Example : print $genotype->subsnp_handle();
Description: Getter/Setter for the subsnp_handle attribute.
Returntype : string
Exceptions : none
Caller : general
Status : Stable
=cut
sub subsnp_handle{
  my $self = shift;
  my $handle = shift;

  # if changing handle
  if(defined($handle)) {
    $self->{'subsnp_handle'} = $handle;
  }
  # Lazy-load: fetch the handle via the adaptor the first time it is
  # requested, caching it on the object for subsequent calls.
  elsif (!defined($self->{'subsnp_handle'})) {
    # Check that this allele has an adaptor attached
    assert_ref($self->adaptor(),'Bio::EnsEMBL::Variation::DBSQL::BaseGenotypeAdaptor');

    $self->{'subsnp_handle'} = $self->adaptor->get_subsnp_handle($self);
  }

  return $self->{'subsnp_handle'};
}
=head2 ambiguity_code
Example : print $genotype->ambiguity_code();
Description: Get the ambiguity code for this genotype
Returntype : string
Exceptions : none
Caller : general
Status : Stable
=cut
# Delegates to strain_ambiguity_code() using this genotype's string form.
sub ambiguity_code {
  my $self = shift;
  return strain_ambiguity_code($self->genotype_string);
}
=head2 phased
Example : $p = $genotype->phased()
Description: Getter for the phased status of this genotype.
Returntype : int
Exceptions : none
Caller : general
Status : Stable
=cut
# Read-only accessor for the phased flag of this genotype.
sub phased {
  my $self = shift;
  return $self->{phased};
}
sub AUTOLOAD {
  my $self = shift;

  # Strip the fully-qualified package prefix, leaving the bare method name.
  my $method = $AUTOLOAD;
  $method =~ s/.*:://;

  # Calls like allele1(), allele_2(), etc. are forwarded to allele() with
  # the numeric suffix prepended to the argument list as the allele index.
  if($method =~ /(allele)\_?(\d+)/) {
    $method = $1;
    unshift @_, $2;
  }

  else {
    # Any other unknown method (including DESTROY) is silently ignored.
    return;
  }

  return $self->$method(@_);
}
1;
|
<?php
namespace App\Engines\ParserEngine\Modules\Interface;
use App\Engines\ParserEngine;
use App\Engines\ParserEngine\Parsers\ArchiveParser;
interface XmlInterface
{
    /**
     * Parse the XML content held by the given archive parser and return
     * the populated ParserEngine instance.
     */
    public static function parse(ArchiveParser $parser): ParserEngine;
}
|
-- 1.
-- Modify the `fibs` function to only return the first 20 Fibonacci numbers.

-- | The infinite Fibonacci sequence, built by summing the sequence with
-- its own tail.
fibs :: [Integer]
fibs = 1 : 1 : zipWith (+) fibs (tail fibs)

-- | Just the first 20 Fibonacci numbers.
first20Fibs :: [Integer]
first20Fibs = take 20 fibs

-- 2.
-- Modify `fibs` to return the Fibonacci numbers that are less than 100.

-- | Every Fibonacci number below 100.
lessThan100Fibs :: [Integer]
lessThan100Fibs = takeWhile (< 100) fibs

-- 3.
-- Write the `factorial` function as a scan.

-- | The first 20 factorials (1!, 2!, ...), expressed as a running
-- product over the naturals via scanl1.
factorial :: [Integer]
factorial = take 20 (scanl1 (*) [1 ..])
|
#!/usr/bin/env bash
# Abort immediately if any command fails.
set -e

# If the selected subproject ships its own installer, run it in a subshell
# (so the cd does not leak); otherwise build and install BNFC from source.
# Assumes SUBPROJECT is set by the CI environment — TODO confirm.
if [ -f "${SUBPROJECT}/install.sh" ]; then
  (cd "${SUBPROJECT}"; ./install.sh)
else
  git clone https://github.com/BNFC/bnfc.git
  cd bnfc/source
  sudo cabal install --global
fi
var http = require('http');
var app = require('./app');

// Resolve the listening port once: prefer the PORT environment variable,
// falling back to 3000 for local development.
// Fixes: the original was a syntax error — `app.set('port' process.env...)`
// was missing the comma between arguments, and a stray `}` preceded
// server.listen.
var port = process.env.PORT || 3000;
app.set('port', port);

// Wrap the Express app in an HTTP server and start listening.
var server = http.createServer(app);
server.listen(port);
|
// @doc
// https://github.com/artemii235/developer-docs/blob/mm/docs/basic-docs/atomic-swap-dex/dex-api.md#cancel_order
/** Request payload for the `cancel_order` RPC. */
interface CancelOrderType {
  uuid: string
}

export default function cancelOrderFactory() {
  return {
    /**
     * Cancel a previously placed order by UUID, issuing a `cancel_order`
     * call through the host object's private API channel.
     */
    cancelOrder(params: CancelOrderType) {
      const serverparams = { ...params, method: 'cancel_order' };
      return this.privateCall(serverparams);
    }
  };
}
|
#!/bin/sh
# Exit immediately if any command fails.
set -e

# Ensure the log directory exists before redirecting stderr into it.
mkdir -p /radium_data/logs

# Launch radium bound to all interfaces on port 8080, persisting state
# under /radium_data and capturing diagnostics in stderr.log.
/radium/radium --data-dir /radium_data -b 0.0.0.0:8080 2>/radium_data/logs/stderr.log
# Have an athletic mindset.
The last play is over.
You can't do anything about it.
Feeling bad about it or replaying it in your head over and over does not help anyone.
Feel bad about it for 60 seconds if you must.
Then, focus on what you can do now.
|
// Copyright (c) Microsoft. All rights reserved.
// Licensed under the MIT license. See LICENSE file in the project root for full license information.
using System;
using System.Collections.Concurrent;
using FluentAssertions;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.Its.Domain.Tests;
using Microsoft.Its.Recipes;
using NCrunch.Framework;
using NUnit.Framework;
using Test.Domain.Ordering;
using static Microsoft.Its.Domain.Tests.CurrentConfiguration;
using static Microsoft.Its.Domain.Sql.Tests.TestDatabases;
using static Microsoft.Its.Domain.Tests.NonEventSourcedCommandTarget;
namespace Microsoft.Its.Domain.Sql.Tests
{
/// <summary>
/// Integration tests for the SQL-backed command scheduler clock: creating
/// and advancing named clocks, retry behavior on failures and concurrency
/// exceptions, and coordination between the scheduler clock and the
/// domain clock.
/// </summary>
[NUnit.Framework.Category("Command scheduling")]
[TestFixture]
[UseSqlStorageForScheduledCommands]
[UseSqlEventStore]
[UseInMemoryCommandTargetStore]
[DisableCommandAuthorization]
[ExclusivelyUses("ItsCqrsTestsEventStore", "ItsCqrsTestsReadModels", "ItsCqrsTestsCommandScheduler")]
public class SqlCommandSchedulerClockTests : SchedulerClockTests
{
    /// <summary>
    /// Creates a named scheduler clock starting at the given time via the
    /// current configuration's clock repository.
    /// </summary>
    protected static CommandScheduler.Clock CreateClock(string named, DateTimeOffset startTime) =>
        (CommandScheduler.Clock) Configuration
                                     .Current
                                     .SchedulerClockRepository()
                                     .CreateClock(named, startTime);

    /// <summary>
    /// Reads the Attempts counter for the scheduled command targeting the
    /// given aggregate directly from the scheduler database, or 0 if none.
    /// </summary>
    protected int GetScheduledCommandNumberOfAttempts(Guid aggregateId)
    {
        using (var db = Configuration.Current.CommandSchedulerDbContext())
        {
            return db.ScheduledCommands
                     .SingleOrDefault(c => c.AggregateId == aggregateId)
                     .IfNotNull()
                     .Then(c => c.Attempts)
                     .ElseDefault();
        }
    }

    /// <summary>
    /// Swaps in an Order repository whose event store context appends an
    /// extra event on every access, forcing a ConcurrencyException whenever
    /// the scheduler tries to deliver a command to the given order.
    /// </summary>
    private void TriggerConcurrencyExceptionOnOrderCommands(Guid orderId)
    {
        Func<EventStoreDbContext> eventStoreContext = () =>
        {
            // quick, add a new event in order to trigger a concurrency exception at the moment the scheduler tries to apply the command
            var repository = new SqlEventSourcedRepository<Order>();
            var o = repository.GetLatest(orderId).Result;
            o.Apply(new Annotate<Order>("triggering a concurrency exception", Any.Guid().ToString()));
            repository.Save(o).Wait();
            return EventStoreDbContext();
        };

        var orderRepository = new SqlEventSourcedRepository<Order>(createEventStoreDbContext: eventStoreContext);

        Configuration.Current.UseDependency<IEventSourcedRepository<Order>>(_ => orderRepository);
    }

    /// <summary>
    /// Restores the default Order repository, ending the artificial
    /// concurrency failures set up by TriggerConcurrencyExceptionOnOrderCommands.
    /// </summary>
    private void StopTriggeringConcurrencyExceptions() =>
        Configuration.Current.UseDependency<IEventSourcedRepository<Order>>(_ => new SqlEventSourcedRepository<Order>());

    [Test]
    public override void A_clock_cannot_be_moved_to_a_prior_time()
    {
        // arrange
        var name = Any.AlphanumericString(8, 8);
        CreateClock(name, DateTimeOffset.UtcNow);

        // act
        Action moveBackwards = () => AdvanceClock(DateTimeOffset.UtcNow.Subtract(TimeSpan.FromSeconds(5)), name).Wait();

        // assert
        moveBackwards.ShouldThrow<InvalidOperationException>()
                     .And
                     .Message
                     .Should()
                     .Contain("A clock cannot be moved backward");
    }

    [Test]
    public override void Two_clocks_cannot_be_created_having_the_same_name()
    {
        var name = Any.CamelCaseName();
        CreateClock(name, DateTimeOffset.UtcNow);

        Action createAgain = () =>
            CreateClock(name, DateTimeOffset.UtcNow.AddDays(1));

        createAgain.ShouldThrow<ConcurrencyException>()
                   .And
                   .Message
                   .Should()
                   .Contain($"A clock named '{name}' already exists");
    }

    [Test]
    public async Task When_a_clock_is_advanced_then_unassociated_commands_are_not_triggered()
    {
        // arrange
        var clockOne = CreateClock(Any.CamelCaseName(), Clock.Now());
        var clockTwo = CreateClock(Any.CamelCaseName(), Clock.Now());

        var deliveryAttempts = new ConcurrentBag<IScheduledCommand>();
        Configuration.Current.TraceScheduledCommands(onDelivering: command => { deliveryAttempts.Add(command); });

        await Schedule(
            new CreateCommandTarget(Any.CamelCaseName()),
            Clock.Now().AddDays(1),
            clock: clockOne);
        await Schedule(
            new CreateCommandTarget(Any.CamelCaseName()),
            Clock.Now().AddDays(1),
            clock: clockTwo);

        // act
        await AdvanceClock(TimeSpan.FromDays(2), clockOne.Name);

        //assert
        deliveryAttempts
            .Should().HaveCount(1)
            .And
            .OnlyContain(c => ((CommandScheduler.Clock) c.Clock).Name == clockOne.Name);
    }

    [Test]
    public override async Task When_a_scheduler_clock_is_advanced_then_the_domain_clock_is_coordinated_to_the_scheduler_clock_for_events_written_as_a_result()
    {
        // arrange
        var targetId = Any.CamelCaseName();
        var scheduledCreationTime = Clock.Now().AddDays(1);
        await Schedule(new CreateCommandTarget(targetId), scheduledCreationTime);

        // act
        await AdvanceClock(by: TimeSpan.FromDays(7));

        //assert
        var target = await Get<NonEventSourcedCommandTarget>(targetId);
        target.Should().NotBeNull();
        target.CreatedTime.Should().Be(scheduledCreationTime);
    }

    [Test]
    public async Task When_a_scheduled_command_fails_and_the_clock_is_advanced_again_then_it_can_be_retried()
    {
        // arrange
        var target = new NonEventSourcedCommandTarget { IsValid = false };

        await Save(target);

        await Schedule(
            target.Id,
            new TestCommand(),
            Clock.Now().AddDays(10));

        // act
        await AdvanceClock(TimeSpan.FromDays(10.1));

        target.CommandsFailed
              .Should()
              .HaveCount(1);
        target
            .CommandsEnacted
            .Should()
            .HaveCount(0);

        target.IsValid = true;

        await AdvanceClock(TimeSpan.FromHours(1));

        target
            .CommandsFailed
            .Should()
            .HaveCount(1);
        target
            .CommandsEnacted
            .Should()
            .HaveCount(1);
    }

    [Test]
    public async Task When_a_scheduled_command_fails_due_to_a_concurrency_exception_then_it_is_retried_by_default()
    {
        var order = CommandSchedulingTests_EventSourced.CreateOrder();
        await Save(order);

        TriggerConcurrencyExceptionOnOrderCommands(order.Id);

        await Schedule(order.Id, new Cancel());

        // Each clock advance re-attempts delivery, which fails again, so the
        // Attempts counter climbs by one per iteration.
        for (var i = 1; i < 6; i++)
        {
            Console.WriteLine("Advancing clock");

            GetScheduledCommandNumberOfAttempts(order.Id)
                .Should()
                .Be(i);

            await AdvanceClock(by: TimeSpan.FromDays(20));
        }
    }

    [Test]
    public async Task When_a_scheduled_command_fails_due_to_a_concurrency_exception_then_commands_that_its_handler_scheduled_are_not_duplicated()
    {
        var order = CommandSchedulingTests_EventSourced.CreateOrder();
        await Save(new CustomerAccount(order.CustomerId).Apply(new ChangeEmailAddress(Any.Email())));
        await Save(order);

        TriggerConcurrencyExceptionOnOrderCommands(order.Id);

        await Schedule(order.Id, new Cancel());

        for (var i = 1; i < 3; i++)
        {
            await AdvanceClock(by: TimeSpan.FromDays(1));
        }

        StopTriggeringConcurrencyExceptions();

        await AdvanceClock(by: TimeSpan.FromDays(1));

        await SchedulerWorkComplete();

        var customer = await Get<CustomerAccount>(order.CustomerId);

        customer.Events()
                .OfType<CustomerAccount.OrderCancelationConfirmationEmailSent>()
                .Count()
                .Should()
                .Be(1);
    }

    [Test]
    public async Task When_a_scheduled_command_fails_due_to_a_concurrency_exception_then_it_is_not_marked_as_applied()
    {
        // arrange
        var order = CommandSchedulingTests_EventSourced.CreateOrder();
        var ship = new Ship();

        order.Apply(ship);

        order.Apply(new ChargeCreditCardOn
        {
            Amount = 10,
            ChargeDate = Clock.Now().AddDays(10)
        });

        await Save(order);

        TriggerConcurrencyExceptionOnOrderCommands(order.Id);

        // act
        await AdvanceClock(by: TimeSpan.FromDays(20));

        // assert
        using (var db = CommandSchedulerDbContext())
        {
            // make sure we actually triggered a concurrency exception
            db.Errors
              .Where(e => e.ScheduledCommand.AggregateId == order.Id)
              .ToArray()
              .Should()
              .Contain(e => e.Error.Contains("ConcurrencyException"));

            var scheduledCommand = db.ScheduledCommands.Single(c => c.AggregateId == order.Id);

            scheduledCommand.AppliedTime.Should().NotHaveValue();
            scheduledCommand.Attempts.Should().Be(1);
        }
    }

    [Test]
    public async Task When_two_different_callers_advance_the_same_clock_at_the_same_time_then_commands_are_only_run_once()
    {
        // arrange
        var order = CommandSchedulingTests_EventSourced.CreateOrder();
        // The barrier holds both callers at the delivery interception point so
        // they attempt to deliver the same command concurrently.
        var barrier = new Barrier(2);

        // act
        order.Apply(new ShipOn(Clock.Now().AddDays(5)));
        await Save(order);
        var eventCount = order.Events().Count();

        var orderScheduler = Configuration.Current
                                          .CommandDeliverer<Order>()
                                          .InterceptDeliver(async (c, next) =>
                                          {
                                              barrier.SignalAndWait(TimeSpan.FromSeconds(10));
                                              await next(c);
                                          });
        Configuration.Current.UseDependency(_ => orderScheduler);

        var caller1 = Task.Run(() => AdvanceClock(TimeSpan.FromDays(10)));
        var caller2 = Task.Run(() => AdvanceClock(TimeSpan.FromDays(10)));

        Task.WaitAll(caller1, caller2);

        (await Get<Order>(order.Id))
            .Events()
            .Count()
            .Should()
            .Be(eventCount + 1, "the scheduled command should only be applied once");
    }

    [Test]
    public async Task When_a_clock_is_advanced_then_resulting_SuccessfulCommands_are_included_in_the_result()
    {
        // arrange
        var shipmentId = Any.AlphanumericString(8, 8);
        var customerAccountId = Any.Guid();
        await Save(new CustomerAccount(customerAccountId)
                       .Apply(new ChangeEmailAddress(Any.Email())));

        var order = CommandSchedulingTests_EventSourced.CreateOrder(customerAccountId: customerAccountId);

        order.Apply(new ShipOn(shipDate: Clock.Now().AddMonths(1).Date)
        {
            ShipmentId = shipmentId
        });
        await Save(order);

        // act
        var result = await AdvanceClock(to: Clock.Now().AddMonths(2));

        //assert
        result.SuccessfulCommands
              .Should()
              .ContainSingle(_ => _.ScheduledCommand
                                   .IfTypeIs<IScheduledCommand<Order>>()
                                   .Then(c => c.TargetId == order.Id.ToString())
                                   .ElseDefault());
    }

    [Test]
    public async Task When_a_command_schedules_another_command_on_a_specific_clock_the_new_command_is_on_the_same_clock()
    {
        // arrange
        var targetId = Any.Guid();
        var nextCommandId = Any.CamelCaseName();
        var theFirst = DateTimeOffset.Parse("2012-01-01");
        var theThird = theFirst.AddDays(2);
        var theFourth = theThird.AddDays(1);
        var customClock = CreateClock(Any.CamelCaseName(), theFirst);

        var delivered = new ConcurrentBag<IScheduledCommand>();

        Configuration.Current
                     .TraceScheduledCommands(
                         onDelivered: command =>
                         {
                             delivered.Add(command);
                         });

        var target = new CommandSchedulerTestAggregate(targetId);

        await Save(target);

        await Schedule(
            targetId,
            new CommandSchedulerTestAggregate.CommandThatSchedulesAnotherCommand
            {
                NextCommand = new CommandSchedulerTestAggregate.Command
                {
                    CommandId = nextCommandId
                },
                NextCommandDueTime = theFourth
            },
            theThird,
            clock: customClock);

        // act
        await AdvanceClock(theThird, customClock.Name);
        await AdvanceClock(theFourth, customClock.Name);

        //assert
        delivered.Should().HaveCount(2);

        delivered
            .OfType<ScheduledCommand<CommandSchedulerTestAggregate>>()
            .Select(_ => _.Command)
            .OfType<CommandSchedulerTestAggregate.Command>()
            .Should()
            .Contain(c => c.CommandId == nextCommandId);
    }
}
} |
import ImperativeBase from './ImperativeBase'
// fallback to experimental CSS transform if browser doesn't have it (fix for Safari 9)
if (typeof document.createElement('div').style.transform == 'undefined') {
	if (typeof CSSStyleDeclaration !== 'undefined') {
		// doesn't exist in Jest+@skatejs/ssr environment
		// Alias the standard `transform` property to the prefixed
		// `webkitTransform`, so the rest of the codebase can always use the
		// unprefixed name.
		Object.defineProperty(CSSStyleDeclaration.prototype, 'transform', {
			set(value) {
				this.webkitTransform = value
			},
			get() {
				return this.webkitTransform
			},
			enumerable: true,
		})
	}
}
/**
 * Manages a DOM element. Exposes a set of recommended APIs for working with
 * DOM efficiently. Currently doesn't do much yet...
 */
export default class ElementOperations {
	constructor(private __element: ImperativeBase) {}

	/**
	 * Attach `child`'s DOM element under the managed element when the two
	 * are not already connected in the DOM.
	 */
	connectChildElement(child: ImperativeBase) {
		if (
			// When using the imperative API, this statement is
			// true, so the DOM elements need to be connected.
			!child.parentNode ||
			// This condition is irrelevant when strictly using the
			// imperative API. However, it is possible that when
			// using the HTML API that the HTML-API node can be placed
			// somewhere that isn't another HTML-API node, and the
			// imperative Node can be gotten and used to add the
			// node to another imperative Node. In this case, the
			// HTML-API node will be added to the proper HTMLparent.
			(child.parentElement && child.parentElement !== this.__element)

			// When an HTML-API node is already child of the
			// relevant parent, or it is child of a shadow root of
			// the relevant parent, there there's nothing to do,
			// everything is already as expected, so the following
			// conditional body is skipped.
		) {
			this.__add(child)
		}
	}

	/** Detach `child`'s DOM element from the managed element, if attached. */
	disconnectChildElement(child: ImperativeBase) {
		// If DeclarativeBase#remove was called first, we don't need to
		// call this again.
		if (!child.parentNode) return

		this.__remove(child)
	}

	// Whether the element should currently be rendered (drives `display`).
	private __shouldRender = false

	/**
	 * Push the current opacity and size onto the DOM element's style, but
	 * only while the element is set to render.
	 */
	applyImperativeNodeProperties() {
		if (!this.__shouldRender) return

		this.__applyOpacity()
		this.__applySize()
	}

	// Setter: records the flag immediately, but defers the `display` style
	// change to the next animation frame.
	set shouldRender(shouldRender: boolean) {
		this.__shouldRender = shouldRender
		requestAnimationFrame(() => {
			this.__applyStyle('display', shouldRender ? 'block' : 'none')
		})
	}
	get shouldRender(): boolean {
		return this.__shouldRender
	}

	// Append the child's DOM element to the managed element.
	private __add(child: ImperativeBase) {
		this.__element.appendChild(child)
	}

	private __remove(child: ImperativeBase) {
		// This conditional check is needed incase the element was already
		// removed from the HTML-API side.
		if (child.parentNode === this.__element) this.__element.removeChild(child)
	}

	// Apply the element's calculated width/height as CSS pixel styles.
	private __applySize() {
		const {x, y} = this.__element.calculatedSize

		this.__applyStyle('width', `${x}px`)
		this.__applyStyle('height', `${y}px`)

		// NOTE: we ignore the Z axis on elements, since they are flat.
	}

	// Apply the element's opacity as a CSS style.
	private __applyOpacity() {
		this.__applyStyle('opacity', this.__element.opacity)
	}

	/**
	 * Apply a style property to the element.
	 *
	 * @param {string} property The CSS property we will apply.
	 * @param {string} value The value the CSS property will have.
	 */
	private __applyStyle(property: string, value: string) {
		this.__element.style.setProperty(property, value)
	}
}
export {ElementOperations}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.