import { noop } from "lodash";
import { IGraphBehavior } from "./Graph.types";
import { Ref, Selection, Zoom } from "./Graph.types.internal";
export class GraphBehavior implements IGraphBehavior {
public zoomBy: (k: number) => void;
public resetZoom: () => void;
constructor() {
this.zoomBy = noop;
this.resetZoom = noop;
}
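  // Wires the zoom actions to the underlying zoom behavior (d3-style
  // scaleBy/scaleTo/translateTo). Until this is called, zoomBy and
  // resetZoom remain no-ops.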
public setupZoomBehavior(
zoomSelection: Selection,
zoomRef: Ref<Zoom | undefined>
): void {
this.zoomBy = (k: number) => {
zoomRef.current?.scaleBy(zoomSelection, k);
};
this.resetZoom = () => {
zoomRef.current?.scaleTo(zoomSelection, 1);
zoomRef.current?.translateTo(zoomSelection, 0, 0, [0, 0]);
};
}
}
|
<?php
namespace CaptainHook\App\Integration;
use CaptainHook\App\Plugin\Hook\PreserveWorkingTree;
class PreserveWorkingTreePluginTest extends IntegrationTestCase
{
public function testMoveWorkingTreeChangesBeforeHook(): void
{
$repoPath = $this->setUpRepository();
$this->setConfig($repoPath, 'plugins', [
['plugin' => '\\CaptainHook\\App\\Plugin\\Hook\\PreserveWorkingTree'],
['plugin' => '\\CaptainHook\\App\\Integration\\Plugin\\PostCheckoutEnvCheck'],
]);
$this->enableHook($repoPath, 'pre-commit', [
['action' => 'echo "this is an action"'],
['action' => 'git status --porcelain=v1'],
]);
$this->enableHook($repoPath, 'post-checkout');
// Commit our changes to captainhook.json.
$this->mustRunInShell(['git', 'commit', '-m', 'Update captainhook.json', 'captainhook.json'], $repoPath);
// Create a file and stage it.
$this->filesystem()->touch($repoPath . '/foo.txt');
$this->mustRunInShell(['git', 'add', 'foo.txt'], $repoPath);
// Make changes to the working tree that aren't staged.
$this->filesystem()->appendToFile(
$repoPath . '/README.md',
"\nWorking tree changes that aren't staged.\n"
);
// Look at `git status` to see the changes.
$statusResult = $this->runInShell(['git', 'status', '--porcelain=v1'], $repoPath);
$this->assertStringContainsString(' M README.md', $statusResult->getStdout());
$this->assertStringContainsString('A foo.txt', $statusResult->getStdout());
// Ensure the skip post-checkout environment variable is not set before committing.
$envResult = $this->runInShell(['env'], $repoPath);
$this->assertStringNotContainsString(PreserveWorkingTree::SKIP_POST_CHECKOUT_VAR, $envResult->getStdout());
// Commit the file that's staged in the index.
$commitResult = $this->runInShell(['git', 'commit', '-m', 'Add foo.txt'], $repoPath);
// Output from actions appears in STDERR, so let's check it instead of STDOUT.
// One of our actions is `git status`, so we want to assert that we do
// not see the working tree changes listed, since they should have been
// cached and cleared from the working tree.
$this->assertStringContainsString('this is an action', $commitResult->getStderr());
$this->assertStringNotContainsString(' M README.md', $commitResult->getStderr());
// Since we have post-checkout enabled, and our pre-commit hook executes
// `git checkout`, we want to test our post-commit hook plugin creates a
// file with the environment variables dumped to it and that the skip
// post-checkout env var is one of them.
$this->assertStringContainsString(
PreserveWorkingTree::SKIP_POST_CHECKOUT_VAR,
file_get_contents($repoPath . '/env.txt')
);
// Look at `git status` again for the things we expect to see (or not).
$statusResult = $this->runInShell(['git', 'status', '--porcelain=v1'], $repoPath);
$this->assertStringContainsString(' M README.md', $statusResult->getStdout());
$this->assertStringNotContainsString('A foo.txt', $statusResult->getStdout());
// Ensure the skip post-checkout environment variable is not set after committing.
$envResult = $this->runInShell(['env'], $repoPath);
$this->assertStringNotContainsString(PreserveWorkingTree::SKIP_POST_CHECKOUT_VAR, $envResult->getStdout());
}
}
|
#!/bin/bash
# Get an updated config.sub and config.guess
cp -r ${BUILD_PREFIX}/share/libtool/build-aux/config.* .
if [[ "${target_platform}" == osx-* ]]; then
# turn off Werror for clang ...
sed -i.bak -E "s/-Werror/-Wno-error/" configure.ac
fi
bash ./autogen.sh
# https://github.com/json-c/json-c/issues/406
export CPPFLAGS="${CPPFLAGS/-DNDEBUG/}"
./configure --prefix=$PREFIX --host=$HOST --build=$BUILD
make ${VERBOSE_AT}
make check ${VERBOSE_AT}
make install
|
from typing import Dict, List, Optional, Union
from config_manager import base_configuration
from rama import rama_config_template
class RamaConfig(base_configuration.BaseConfiguration):
    def __init__(self, config: Union[str, Dict], changes: Optional[List[Dict]] = None) -> None:
        # Avoid a mutable default argument: fall back to a fresh list per call.
        super().__init__(
            configuration=config,
            template=rama_config_template.RamaConfigTemplate.base_template,
            changes=changes or [],
        )
self._validate_config()
def _validate_config(self) -> None:
"""Check for non-trivial associations in config.
Raises:
AssertionError: if any rules are broken by config.
"""
pass
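# Hypothetical usage sketch (the path and override keys are illustrative only):
#   config = RamaConfig("configs/rama.yaml", changes=[{"seed": 1}])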
|
// code-examples/Rounding/match-deep-pair-script.scala
class Role
case object Manager extends Role
case object Developer extends Role
case class Person(name: String, age: Int, role: Role)
val alice = new Person("Alice", 25, Developer)
val bob = new Person("Bob", 32, Manager)
val charlie = new Person("Charlie", 32, Developer)
for (item <- Map(1 -> alice, 2 -> bob, 3 -> charlie)) {
item match {
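    // `p @ Person(...)` binds the whole matched Person to `p` while also
    // destructuring it, so each case can test the role and still use `p`.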
case (id, p @ Person(_, _, Manager)) => print(p + " is overpaid.\n")
case (id, p @ Person(_, _, _)) => print(p + " is underpaid.\n")
}
}
|
<?php defined('BASEPATH') OR exit('No direct script access allowed');
/**
* Settings Plugin
*
* Allows settings to be used in content tags.
*
* @author PyroCMS Dev Team
* @package PyroCMS\Core\Modules\Settings\Plugins
*/
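// Example tag usage in a theme (Lex syntax, assuming a `site_name` setting exists):
// {{ settings:site_name }}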
class Plugin_Settings extends Plugin
{
/**
* Load a variable
*
* Magic method to get the setting.
*
 * @param string $name The setting name to fetch.
 * @param array  $data Tag attributes (unused).
* @return string
*/
function __call($name, $data)
{
return $this->settings->item($name);
}
}
/* End of file plugin.php */
|
using CoodeshPharmaIncAPI.Models;
using CoodeshPharmaIncAPI.Models.Extensions;
using Microsoft.EntityFrameworkCore;
using Microsoft.EntityFrameworkCore.Metadata.Builders;
using System;
namespace CoodeshPharmaIncAPI.Database.Configurations
{
public class UserConfiguration : IEntityTypeConfiguration<User>
{
public void Configure(EntityTypeBuilder<User> builder)
{
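            // Each related entity below is mapped as a separate one-to-one
            // relationship through a shadow FK column on User; DeleteBehavior.SetNull
            // keeps the User row when the related row is deleted, which requires
            // the FK columns to be nullable.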
builder
.Property(t => t.Gender)
.HasColumnType("nvarchar(12)");
builder
.Property(t => t.Birthday)
.HasConversion(t => (DateTime)t, t => t)
.HasColumnType("date");
builder
.Property(t => t.Registered)
.HasConversion(t => (DateTime)t, t => t)
.HasColumnType("datetime");
builder
.Property(t => t.Nationality)
.HasColumnType("nvarchar(8)");
builder
.HasOne(t => t.Name)
.WithOne()
.HasForeignKey<User>("NameId")
.OnDelete(DeleteBehavior.SetNull);
builder
.HasOne(t => t.Contact)
.WithOne()
.HasForeignKey<User>("ContactId")
.OnDelete(DeleteBehavior.SetNull);
builder
.HasOne(t => t.Login)
.WithOne()
.HasForeignKey<User>("LoginId")
.OnDelete(DeleteBehavior.SetNull);
builder
.HasOne(t => t.Location)
.WithOne()
.HasForeignKey<User>("LocationId")
.OnDelete(DeleteBehavior.SetNull);
builder
.HasOne(t => t.Picture)
.WithOne()
.HasForeignKey<User>("PictureId")
.OnDelete(DeleteBehavior.SetNull);
builder
.Property(t => t.Imported_T)
.HasColumnName("imported_t")
.HasDefaultValueSql("GETDATE()");
builder
.Property(t => t.Status)
.HasColumnName("status")
.HasConversion(t => t.GetName(), t => t.GetStatus());
}
}
}
|
const errors = require('./errors');
/**
* @constructor Smsglobal
*
* @param {string} key smsglobal api key
* @param {string} secret smsglobal api secret
*/
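// Usage note: the instanceof guard below makes `new` optional, so both
// `new Smsglobal(key, secret)` and `Smsglobal(key, secret)` work.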
const Smsglobal = function (key, secret) {
if (!(this instanceof Smsglobal)) {
return new Smsglobal(key, secret);
}
this.key = key || process.env.SMSGLOBAL_API_KEY;
this.secret = secret || process.env.SMSGLOBAL_API_SECRET;
// TODO: Remove env variable and create client with credentials
if (key && secret) {
process.env['SMSGLOBAL_API_KEY'] = key;
process.env['SMSGLOBAL_API_SECRET'] = secret;
}
// throw if credentials were supplied neither via the constructor nor the environment
if (this.key === undefined || this.secret === undefined) {
throw new Error(errors.smsglobal);
}
};
Smsglobal.prototype.sms = require('./api/Sms');
Smsglobal.prototype.otp = require('./api/Otp');
module.exports = Smsglobal;
|
module BraintreeRails
class IndividualDetailsValidator < Validator
Validations = [
[:first_name, :last_name, :email, :date_of_birth, :address, :presence => true]
]
def validate(individual)
validate_association(individual, :address)
end
end
end
|
module Main where
data Binario = Zero | Um deriving (Show, Eq)
binList :: [Binario] -> [Int]
binList xs = [if x == Um then 1 else 0 | x <- xs]
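-- For example: binList [Zero, Um] == [0, 1]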
main :: IO ()
main = do
print $ binList [Zero, Um, Zero, Zero, Um]
|
<?php
/**
* Lets you include a nested group of fields inside a template.
* This control gives you more flexibility over form layout.
*
* Note: the child fields within a field group aren't rendered using FieldHolder(). Instead,
* SmallFieldHolder() is called, which just prefixes $Field with a <label> tag, if the Title is set.
*
* <b>Usage</b>
*
* <code>
* FieldGroup::create(
* FieldGroup::create(
* HeaderField::create('FieldGroup 1'),
* TextField::create('Firstname')
* ),
* FieldGroup::create(
* HeaderField::create('FieldGroup 2'),
* TextField::create('Surname')
* )
* )
* </code>
*
* <b>Adding to existing FieldGroup instances</b>
*
* <code>
* function getCMSFields() {
* $fields = parent::getCMSFields();
*
* $fields->addFieldToTab(
* 'Root.Main',
* FieldGroup::create(
* TimeField::create("StartTime","What's the start time?"),
* TimeField::create("EndTime","What's the end time?")
* ),
* 'Content'
* );
*
* return $fields;
*
* }
* </code>
*
* <b>Setting a title to a FieldGroup</b>
*
* <code>
* $fields->addFieldToTab("Root.Main",
* FieldGroup::create(
 * TimeField::create("StartTime", "What's the start time?"),
 * TimeField::create("EndTime", "What's the end time?")
* )->setTitle('Time')
* );
* </code>
*
* @package forms
* @subpackage fields-structural
*/
class FieldGroup extends CompositeField {
protected $zebra;
public function __construct($arg1 = null, $arg2 = null) {
if(is_array($arg1) || is_a($arg1, 'FieldList')) {
$fields = $arg1;
} else if(is_array($arg2) || is_a($arg2, 'FieldList')) {
$this->title = $arg1;
$fields = $arg2;
} else {
$fields = func_get_args();
if(!is_object(reset($fields))) $this->title = array_shift($fields);
}
parent::__construct($fields);
}
/**
* Returns the name (ID) for the element.
* In some cases the FieldGroup doesn't have a title, but we still want
 * the ID / name to be set. This code generates the ID from the nested children.
*/
public function Name(){
if(!$this->title) {
$fs = $this->FieldList();
$compositeTitle = '';
$count = 0;
foreach($fs as $subfield){
$compositeTitle .= $subfield->getName();
if($subfield->getName()) $count++;
}
/** @skipUpgrade */
if($count == 1) $compositeTitle .= 'Group';
return preg_replace("/[^a-zA-Z0-9]+/", "", $compositeTitle);
}
return preg_replace("/[^a-zA-Z0-9]+/", "", $this->title);
}
/**
* Set an odd/even class
*
* @param string $zebra one of odd or even.
*/
public function setZebra($zebra) {
if($zebra == 'odd' || $zebra == 'even') $this->zebra = $zebra;
else user_error("setZebra passed '$zebra'. It should be passed 'odd' or 'even'", E_USER_WARNING);
return $this;
}
/**
* @return string
*/
public function getZebra() {
return $this->zebra;
}
/**
* @return string
*/
public function Message() {
		$fs = array();
		$message = array();
		$this->collateDataFields($fs);
		foreach($fs as $subfield) {
			if($m = $subfield->Message()) $message[] = rtrim($m, ".");
		}
		return $message ? implode(", ", $message) . "." : "";
}
/**
* @return string
*/
public function MessageType() {
		$fs = array();
		$messageTypes = array();
		$this->collateDataFields($fs);
		foreach($fs as $subfield) {
			if($m = $subfield->MessageType()) $messageTypes[] = $m;
		}
		return $messageTypes ? implode(". ", $messageTypes) : "";
}
public function php($data) {
return;
}
}
|
#!/bin/bash
EXE=fp_rci.exe
#EXE=fp_rci_byte
CALLS_FILE=calls.oic
CALLS_NOSLICE=calls.oic.noslice
SCRIPTS_DIR=/home/jan/research/relay-race/scripts
CG_DIR=$1
LOG=$CG_DIR/log
LOGNOSLICE=$CG_DIR/lognoslice
PWD=`pwd`
TIMEOUT=21600 # 6 hours timeout
POLLTIME=5 # seconds between check for timeout
# copy-paste of timeout checker...
limit_time()
{
limit=$1
proc_id=$2
poll_time=$3
i=0
while [ "$i" -lt "$limit" ]
do
sleep $poll_time
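    # kill -0 sends no signal; it only probes whether the process still exists.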
kill -0 $proc_id > /dev/null 2>&1
if [ $? -ne "0" ]; then # already terminated
return 0
fi
i=$((i+$poll_time))
done
kill -9 $proc_id > /dev/null 2>&1
return 1
}
check_time_for()
{
pid=$1
log=$2
limit_time $TIMEOUT $pid $POLLTIME
if [ $? -ne "0" ]; then
echo "TIMED OUT!!!" >> $log
return 0
fi
}
date > $LOG
echo "incrementing generation number"
$SCRIPTS_DIR/next_num.py gen_num.txt
echo "running funptr analysis w/ option $@ AND SLICE (CI, FI also)" | tee --append $LOG
nice -n 15 ./$EXE -r -u jan -cg $@ -o $CALLS_FILE -testCIFI -offw 2>&1 | tee --append $LOG &
check_time_for $! $LOG
nice -n 15 ./scc_stats.exe -cg $CG_DIR/$CALLS_FILE 2>&1 | tee --append $LOG
date >> $LOG
date > $LOGNOSLICE
echo "incrementing generation number"
$SCRIPTS_DIR/next_num.py gen_num.txt
echo "running funptr analysis w/ option $@ AND NO SLICE" | tee --append $LOGNOSLICE
nice -n 15 ./$EXE -r -u jan -cg $@ -o $CALLS_NOSLICE -noNFP 2>&1 | tee --append $LOGNOSLICE &
check_time_for $! $LOGNOSLICE
nice -n 15 ./scc_stats.exe -cg $CG_DIR/$CALLS_NOSLICE 2>&1 | tee --append $LOGNOSLICE
date >> $LOGNOSLICE
echo "generating scc plot" | tee --append $LOG
./scc_compare.exe -cg $CG_DIR/ -o $1/ 2>&1 | tee --append $LOG
(cd $1; $SCRIPTS_DIR/plot.gp)
echo `pwd`
date >> $LOG
$SCRIPTS_DIR/contextstats.sh $LOG > $CG_DIR/log.cstats
$SCRIPTS_DIR/contextstats.sh $LOGNOSLICE > $CG_DIR/logno.cstats
tar -cjvf $CG_DIR/results.tar.bz2 $CG_DIR/calls.* $CG_DIR/*.dat $CG_DIR/log* $CG_DIR/*.ps
|
using Genie, Genie.Router
using Genie.Renderer, Genie.Renderer.Html, Genie.Renderer.Json
using JSON
using SwagUI
swagger_document = JSON.parsefile("./swagger.json")
urls = Array{Dict{String, Any}, 1}()
url1 = Dict{String, Any}()
url1["url"] = "https://petstore.swagger.io/v2/swagger.json"
url1["name"] = "Spec1"
url2 = Dict{String, Any}()
url2["url"] = "https://petstore.swagger.io/v2/swagger.json"
url2["name"] = "Spec2"
push!(urls, url1)
push!(urls, url2)
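# Note: `urls` only takes effect if the "urls" swagger option below is
# uncommented; with a single "url" set, the two specs above are ignored.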
options = Options()
# options.custom_css = ".swagger-ui .topbar { display: none }"
options.show_explorer = true
# options.swagger_options["validatorUrl"] = nothing
options.swagger_options["url"] = "https://petstore.swagger.io/v2/swagger.json"
# options.swagger_options["urls"] = urls
route("/docs") do
render_swagger(nothing, options=options)
# render_swagger(swagger_document, options=options)
end
up(8001, async = false)
|
/**
* @jest-environment jsdom
*/
import { render, fireEvent } from '@testing-library/svelte'
import { get } from 'svelte/store'
import Component from '../test/Component.svelte';
import { counter } from '../test/counterStore';
describe('Integration with Svelte', () => {
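  // `counter` is a module-level Svelte store, so its value persists across
  // the tests below; the assertions therefore depend on test order.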
test('Should render component without breaking', () => {
const { container } = render(Component)
expect(container).toBeInstanceOf(HTMLBodyElement)
})
test('Should increment the counter', async () => {
const { getByText } = render(Component)
const button = getByText('+')
await fireEvent.click(button)
expect(get(counter)).toBe(1)
expect(getByText('1')).toBeInTheDocument()
})
test('Should decrement the counter', async () => {
const { getByText } = render(Component)
const button = getByText('-')
await fireEvent.click(button)
expect(get(counter)).toBe(0)
expect(getByText('0')).toBeInTheDocument()
})
test('Should not reset the counter', async () => {
const { getByText } = render(Component)
const button = getByText('reset')
await fireEvent.click(button)
expect(get(counter)).toBe(0)
expect(getByText('0')).toBeInTheDocument()
})
})
|
<!-- Module Name and description are required -->
# Helm Tiller Helper Module
Provides a helper used by the Helm provider, that sets up a namespace, service
account, and permissions for Tiller to run on.
<!-- Compatibility section is optional -->
## Compatibility
This module is compatible with Terraform `<= 0.12.0`
This module is compatible with Helm provider version `0.7.0` - it may or may
not work with higher versions.
<!-- Usage section is required -->
## Usage
<!-- NOTE: Examples should go into an `/examples` directory, with a link here
along the following lines:
There are multiple examples included in the [examples](./examples/) folder but
simple usage is as follows:
-->
```hcl
module "your_custom_name_for_your_instance_of_this_module" {
  source = "git@github.com:thesis/terraform-helm-tiller-helper.git"
tiller_namespace_name = "your-namespace-for-tiller"
}
provider "helm" {
version = "= x.y.z"
kubernetes {
host = "url-of-kubernetes-host"
token = "reference-to-your-client-config-access-token"
cluster_ca_certificate = "reference-to-your-cluster-ca-certificate"
}
tiller_image = "gcr.io/kubernetes-helm/tiller:v2.11.0"
service_account = "${module.helm_provider_helper.tiller_service_account}"
override = ["spec.template.spec.automountserviceaccounttoken=true"]
namespace = "${module.helm_provider_helper.tiller_namespace}"
install_tiller = true
}
```
<!-- BEGINNING OF PRE-COMMIT-TERRAFORM DOCS HOOK -->
## Inputs
| Name | Description | Type | Default | Required |
|------|-------------|:----:|:-----:|:-----:|
| tiller\_namespace\_name | The name of the namespace you want tiller to live in. | string | `""` | no |
## Outputs
| Name | Description |
|------|-------------|
| tiller\_namespace | The tiller namespace name. |
| tiller\_service\_account | The tiller service account name. |
<!-- END OF PRE-COMMIT-TERRAFORM DOCS HOOK -->
<!-- Notes section is optional -->
## Notes
Note: The values for Kubernetes `token` and `cluster_ca_certificate` should not
be stored in plain text in your Terraform config, or committed to source control.
We recommend sourcing the token from your client config data:
`token = "${data.google_client_config.default.access_token}"`
and decoding the certificate from the associated Kubernetes module's cluster:
`cluster_ca_certificate = "${base64decode(module.gke_cluster.cluster_ca_certificate)}"`
<!-- License is required -->
## License
See [LICENSE](./LICENSE).
|
#ifndef CC_TYPES_H
#define CC_TYPES_H
namespace cc
{
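// Note: the widths of `int` and `long long` are platform-dependent; these
// aliases follow common conventions rather than guaranteeing exact widths.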
using CC_INT32t = int;
using CC_UINT32t = unsigned int;
using CC_BOOL = bool;
using CC_LONG64 = long long;
}
#endif // CC_TYPES_H
|
import { Assert, outputLink, suite, test, testCurve, testCurveISInfos, testCurveTransform, testISTs } from './manager';
import {arrayFromFunction, arraySamples, DEG, lerp, M4, Matrix, V, V3, Vector, VV} from 'ts3dutils';
import {
AABB2,
BezierCurve,
breakDownPPCurves,
Curve,
curvePoint,
curvePointMF,
Edge,
EllipseCurve,
EllipsoidSurface,
followAlgorithm2d,
MathFunctionR2R,
NURBS,
P3,
ParabolaCurve,
PlaneSurface,
parabola4Projection,
} from '..';
import { PI, sin } from '../src/math';
suite('EllipseCurve', () => {
const curve = EllipseCurve.UNIT.shearX(2, 1);
test('withBounds', assert => {
const newCurve = curve.withBounds(1, 2);
assert.equal(newCurve.tMin, 1);
assert.equal(newCurve.tMax, 2);
});
test('testCurve', assert => {
testCurve(assert, EllipseCurve.UNIT);
testCurve(assert, curve);
});
test('UNIT.shearX(2, 2)', assert => testCurve(assert, EllipseCurve.UNIT.shearX(2, 2)));
test('isTsWithPlane', assert => {
const plane = new P3(V(2, 7, 1).unit(), 2);
testISTs(assert, curve.scale(1, 3, 1), plane, 2);
});
//test('rightAngled', assert => {
// const curveRA = curve.rightAngled()
// assert.ok(curveRA.f1.isPerpendicularTo(curveRA.f2))
// assert.ok(curveRA.isColinearTo(curve))
// arrayRange(-10, 10, 1).forEach(t => assert.ok(curveRA.containsPoint(curve.at(t))))
//},
test('isTsWithSurface(EllipsoidSurface)', assert => {
const s = EllipsoidSurface.sphere(5);
const c = new EllipseCurve(V(5, 2), V3.Z.negated(), V(-1, 1.2246467991473532e-16, 0), 0, PI);
testISTs(assert, c, s, 2);
});
test('isTsWithSurface(PlaneSurface)', assert => {
const c = EllipseCurve.UNIT.translate(1.2, -1);
const s = new PlaneSurface(P3.ZX);
testISTs(assert, c, s, 1);
});
test('isTsWithSurface(PlaneSurface) 2', assert => {
const c = EllipseCurve.UNIT;
const s = P3.YZ.translate(0.5, 0);
testISTs(assert, c, s, 1);
});
test('distanceToPoint', assert => {
const curve = EllipseCurve.forAB(10, 15);
const p = V(12, 12);
const closestT = curve.closestTToPoint(p);
const pDist = curve.at(closestT).distanceTo(p);
const EPS = 0.001;
assert.push(
pDist < curve.at(closestT - EPS).distanceTo(p),
curve.at(closestT - EPS).distanceTo(p),
'> ' + pDist,
'' + (pDist - curve.at(closestT - EPS).distanceTo(p)) + ' larger',
);
assert.push(
pDist < curve.at(closestT + EPS).distanceTo(p),
curve.at(closestT + EPS).distanceTo(p),
'> ' + pDist,
);
});
test('isColinearTo', assert => {
assert.ok(EllipseCurve.forAB(1, 2).isColinearTo(EllipseCurve.forAB(1, -2)));
});
const c1 = EllipseCurve.semicircle(5);
test('isInfosWithEllipse', assert => {
const c1 = EllipseCurve.semicircle(5),
c2 = EllipseCurve.semicircle(5, V(3, 0));
testCurveISInfos(assert, c1, c2, 1, 'c1 c2');
const verticalEllipse = new EllipseCurve(V(2, 0), V(1, 1), V(1, 10));
testCurveISInfos(assert, c1, verticalEllipse, 2, 'c1 verticalEllipse');
const verticalEllipse2 = new EllipseCurve(V(10, 2), V(1, 1), V(1, 10));
testCurveISInfos(assert, c1, verticalEllipse2, 0, 'c1 verticalEllipse2');
const smallEllipse = EllipseCurve.forAB(2, 3);
testCurveISInfos(assert, c1, smallEllipse, 0, 'c1 smallEllipse');
});
test('c1 test', assert => {
		const c2 = new EllipseCurve(V(6, 1, 0), V(3, 1, 0), V(4, 0, 0));
		testCurveISInfos(assert, c1, c2, 1, 'c1 test');
});
test('isInfosWithBezier2D', assert => {
const ell = EllipseCurve.forAB(3, 1);
const bez = BezierCurve.graphXY(2, -3, -3, 2, -2, 3);
testCurveISInfos(assert, ell, bez, 3);
});
test('transform4', assert => {
const c = EllipseCurve.UNIT.withBounds(0, 3);
const ps = arrayFromFunction(128, (i, l) => c.at(lerp(c.tMin, c.tMax, i / (l - 1))));
const p3 = new P3(V3.X, 2);
const proj1 = M4.projectPlanePoint(V(-2, 0, -1), p3);
testCurveTransform(assert, c, proj1);
});
test('transform4 2', assert => {
const c = EllipseCurve.UNIT;
const ps = arrayFromFunction(128, (i, l) => c.at(lerp(c.tMin, c.tMax, i / (l - 1))));
const mv = M4.scale(0.5, 1, 1)
.rotateZ(20 * DEG)
.translate(0, -2)
.rotateX(90 * DEG);
const perspective = M4.perspective(45, 1, 1, 2).times(mv);
//const m4 = mv
//const m4 = M4.product(M4.rotateX(90* DEG), perspective,M4.rotateX(-90* DEG))
const m4 = perspective;
testCurveTransform(assert, c, m4);
});
test('transform4 3', assert => {
const c = new EllipseCurve(V3.O, V3.X, V3.Y, -3, 2).translate(1, -4, 0);
const m = M4.product(M4.rotateX(90 * DEG), M4.perspective(45, 1, 2, 5), M4.rotateX(-90 * DEG));
testCurveTransform(assert, c, m);
});
test('transform4 4', assert => {
const c = new EllipseCurve(V(1, 0, -3), V3.X, V(0, 6.123233995736766e-17, 1), 0, 3.141592653589793);
const m = M4.perspective(45, 1, 2, 4);
testCurveTransform(assert, c, m);
});
test('transform4 map parabola onto itself', assert => {
//const c = new EllipseCurve(V3.O, V3.X, V3.Y, -3, 3)
const c = ParabolaCurve.XY.withBounds(0.2, 2);
const m = M4.product(
M4.translate(0, -1),
// prettier-ignore
new M4(
1, 0, 0, 0,
0, 1, 0, 0,
0, 0, 1, 0,
0, 1, 0, -1
),
M4.translate(0, 1),
);
console.log(m.str);
testCurveTransform(assert, c, m);
});
test('transform4 map parabola onto line', assert => {
//const c = new EllipseCurve(V3.O, V3.X, V3.Y, -3, 3)
const c = ParabolaCurve.XY.withBounds(-2, 2);
const m =
new M4(
0, 0, 0, 1,
0, 1, 0, 0,
0, 0, 1, 0,
1, 0, 0, 0
);
const m2 = M4.translate(m.m[3], m.m[7], m.m[11]).transform(m);
const P = M4.product(m2.as3x3().inversed());
console.log(m.rank());
console.log('P');
console.log(P.str);
console.log('m2');
console.log(m2.str);
console.log('P * m');
console.log(P.times(m).str);
console.log();
// prettier-ignore
//const m = M4.product(M4.translate(0, -1),
// new M4(
// 1, 0, 0, 0,
// 0, 1, 0, 0,
// 0, 0, 1, 0,
// 0, 1, 0, -1
//),M4.translate(0, 1))
//console.log(m.str)
testCurveTransform(assert, c, m);
});
test('transform4 at(0) on vanishing plane', assert => {
const c = new EllipseCurve(V3.O, V3.X, V3.Y, 0.2, 3);
const m = M4.perspectivePlane(P3.YZ.translate(1, 0, 0));
testCurveTransform(assert, c, m);
});
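	// Debug helpers (unused in the tests above) for inspecting homogeneous
	// coordinates: p42/p43 perform a perspective divide onto a unit plane.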
function p4(p: Vector) {
const [xw, yw, zw, w] = p.v
return new V3(xw, yw, w)
}
function p42(p: Vector) {
const [xw, yw, zw, w] = p.v
return new V3(xw / w, yw / w, 1)
}
function p43(p: V3) {
const [x, y, z] = p
return new V3(x / z, y / z, 1)
}
test('transform4 fooooo', assert => {
const c = new ParabolaCurve(V3.O, V3.X, V3.Y, -2, 2);
//const m = M4.translate(0, 1, 1);
const m = M4.IDENTITY;
//const cm = c.transform(m);
const p = M4.permutation4(2, 3) as M4;
const pm = m.times(p)
const ss = arraySamples(c.tMin, c.tMax, 16).flatMap(t => (p => [p, p.div(p.z)])(pm.transformPoint(c.at(t))));
console.log(pm.str)
outputLink(assert, {
edges: [c, parabola4Projection(pm, c.tMin, c.tMax)].map(c => Edge.forCurveAndTs(c)),
drPs: ss,
drLines: ss,
});
});
});
|
#! /data/data/com.termux/files/usr/bin/bash
clear
echo " _____ _ _ "
echo " | __ \ ( ) | "
echo " | | | | ___ _ __ |/| |_ "
echo " | | | |/ _ \| '_ \ | __| "
echo " | |__| | (_) | | | | | |_ "
echo " |_____/ \___/|_| |_| \__| "
echo
echo
echo " ____ "
echo " | _ \ "
echo " | |_) | ___ ______ _ _ "
echo " | _ < / _ \ | ____| (_) | "
echo " | |_) | __/ | |____ ___| | "
echo " |____/ \___| | __\ \ / / | | "
echo " | |___\ V /| | | "
echo " |______\_/ |_|_| "
echo
read -p "Use this tool for educational purpose only press enter to continue " Enter
echo
clear
echo
echo " 1. Root Terminal "
echo
echo "English "
echo
echo "Version 1.1"
read -p "Enter your choice : " choice
apt update && apt upgrade -y
apt install wget -y
apt install openssl-tool -y
apt install proot -y
apt install bash -y
apt install nano -y
apt install neofetch -y
termux-setup-storage
cd /data/data/com.termux/files/usr/etc/
cp bash.bashrc bash.bashrc.bak
mkdir /data/data/com.termux/files/usr/etc/Root
cd /data/data/com.termux/files/usr/etc/Root
wget https://raw.githubusercontent.com/EXALAB/AnLinux-Resources/master/Scripts/Installer/Kali/kali.sh
bash kali.sh
echo $Enter
clear
neofetch
echo
if [ "$choice" = "1" ];
then
echo " bash /data/data/com.termux/files/usr/etc/Root/start-kali.sh " >> /data/data/com.termux/files/usr/etc/bash.bashrc
echo
echo "Restart your termux to become root user "
fi
echo
echo "Root credits to Anlinux"
echo
echo
echo "contact me on :"
echo "Whatsapp : 08994422616"
echo "Discord : Hacker.Data#3344 "
echo
echo
echo "Youtube : Hacker Data "
echo "https://youtube.com/channel/UCVLcpH8Juykpn6TTpN5eEAA "
|
package org.casualmiracles.finance.models
import org.scalatest.FunSuite
import org.scalatest.Matchers
class ValuatorSuite extends FunSuite with Matchers {
test("Eso Model for American Call on Stock/Hoadley") {
val input = ContractParameters(
"eso", "americancallonstock", 29.0, 30.0, 0.05, 0.30, 0.0, 40.0/365.0, 6,
false, false)
val output = Valuator.valuateContract(input)
println(output)
output.pr.get.unPr(0)(0) should be ( 1.8751879170094086)
}
test("Eso Model for American Call on Stock/Antony Banks") {
val input = ContractParameters(
"eso", "americancallonstock", 100.0, 100.0, 0.05, 0.25, 0.04, 1, 5,
true, false)
val output = Valuator.valuateContract(input)
output.pr.get.unPr(0)(0) should be ( 10.499751521439398)
}
}
|
/*-
* See the file LICENSE for redistribution information.
*
* Copyright (c) 2002-2010 Oracle. All rights reserved.
*
* $Id: DuplicateEntryException.java,v 1.4 2010/01/04 15:51:00 cwl Exp $
*/
package com.sleepycat.je.dbi;
/**
* Exception to indicate that an entry is already present in a node.
*/
@SuppressWarnings("serial")
class DuplicateEntryException extends RuntimeException {
DuplicateEntryException() {
super();
}
DuplicateEntryException(String message) {
super(message);
}
}
|
# run full update function
run-full-update-basic-setup() {
sudo apt-get update -y
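  # Preview the upgrade first (--assume-no answers "no", so nothing is
  # installed yet), then apply it for real.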
sudo apt-get -u upgrade --assume-no
sudo apt-get upgrade -y
sudo apt-get autoremove -y
}
|
require 'terminal_output/styling/controls/sgr'
require 'terminal_output/styling/controls/color/mode/rgb/gradient'
require 'terminal_output/styling/controls/color/mode/rgb/scale'
require 'terminal_output/styling/controls/color'
require 'terminal_output/styling/controls/color/standard'
require 'terminal_output/styling/controls/color/high_intensity'
require 'terminal_output/styling/controls/color/eight_bit'
require 'terminal_output/styling/controls/color/eight_bit/standard'
require 'terminal_output/styling/controls/color/eight_bit/high_intensity'
require 'terminal_output/styling/controls/color/eight_bit/rgb'
require 'terminal_output/styling/controls/color/eight_bit/greyscale'
require 'terminal_output/styling/controls/color/twenty_four_bit'
require 'terminal_output/styling/controls/color/twenty_four_bit/unabridged'
require 'terminal_output/styling/controls/color/twenty_four_bit/abridged'
require 'terminal_output/styling/controls/style'
require 'terminal_output/styling/controls/trait'
require 'terminal_output/styling/controls/trait/underline'
require 'terminal_output/styling/controls/trait/blink'
require 'terminal_output/styling/controls/trait/strikethrough'
require 'terminal_output/styling/controls/trait/overline'
require 'terminal_output/styling/controls/trait/font/weight'
require 'terminal_output/styling/controls/trait/font/slant'
require 'terminal_output/styling/controls/trait/font/fraktur'
require 'terminal_output/styling/controls/trait/color/reverse_video'
require 'terminal_output/styling/controls/trait/color/foreground'
require 'terminal_output/styling/controls/trait/color/foreground/transparent'
require 'terminal_output/styling/controls/trait/color/background'
require 'terminal_output/styling/controls/text'
require 'terminal_output/styling/controls/io'
require 'terminal_output/styling/controls/writer'
|
---
title: "Changing the World Through Video Games"
date: 2020-07-31
---
Hello! It's Brendan, and I am just learning about how video games can change the world!
Video games can change the world for good, not just for evil.
The first topic is social innovation and collaboration!
Video games can enable the world to have more creators in it.
|
package com.vitanov.multiimagepicker.FishBunMod.java.com.sangcomz.fishbun.datasource
import android.content.ContentResolver
import android.database.Cursor
import android.net.Uri
import android.provider.MediaStore
import android.provider.MediaStore.Images.Media.*
import android.util.Log
import com.vitanov.multiimagepicker.FishBunMod.java.com.sangcomz.fishbun.MimeType
import com.vitanov.multiimagepicker.FishBunMod.java.com.sangcomz.fishbun.ext.equalsMimeType
import com.vitanov.multiimagepicker.FishBunMod.java.com.sangcomz.fishbun.ui.album.model.Album
import com.vitanov.multiimagepicker.FishBunMod.java.com.sangcomz.fishbun.ui.album.model.AlbumMetaData
import com.vitanov.multiimagepicker.FishBunMod.java.com.sangcomz.fishbun.util.future.CallableFutureTask
import java.io.File
import java.io.InputStream
import java.lang.Exception
import java.util.*
import java.util.concurrent.Callable
import kotlin.collections.LinkedHashMap
class ImageDataSourceImpl(private val contentResolver: ContentResolver) : ImageDataSource {
private val addedPathList = arrayListOf<Uri>()
override fun getAlbumList(
allViewTitle: String, exceptMimeTypeList: List<MimeType>,
specifyFolderList: List<String>
): CallableFutureTask<List<Album>> {
return CallableFutureTask(Callable<List<Album>> {
val albumDataMap = LinkedHashMap<Long, AlbumData>()
val orderBy = "$_ID DESC"
val projection = arrayOf(_ID, BUCKET_DISPLAY_NAME, MIME_TYPE, BUCKET_ID, SIZE)
val c = contentResolver.query(EXTERNAL_CONTENT_URI, projection, null, null, orderBy)
var totalCount = 0
var allViewThumbnailPath: Uri = Uri.EMPTY
c?.let {
while (c.moveToNext()) {
val bucketId = c.getInt(c.getColumnIndex(BUCKET_ID)).toLong()
val bucketDisplayName =
c.getString(c.getColumnIndex(BUCKET_DISPLAY_NAME)) ?: continue
val bucketMimeType = c.getString(c.getColumnIndex(MIME_TYPE)) ?: continue
val imgId = c.getInt(c.getColumnIndex(MediaStore.MediaColumns._ID))
val size = c.getInt(c.getColumnIndex(MediaStore.MediaColumns.SIZE))
if (isExceptImage(
bucketMimeType,
bucketDisplayName,
exceptMimeTypeList,
specifyFolderList
)
) continue
val albumData = albumDataMap[bucketId]
Log.i("Image", "Size: $size")
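// MediaStore can contain zero-byte placeholder entries; skip them.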
if (size <= 0) continue
if (albumData == null) {
val imagePath =
Uri.withAppendedPath(EXTERNAL_CONTENT_URI, "" + imgId)
albumDataMap[bucketId] =
AlbumData(
bucketDisplayName,
imagePath,
1
)
if (allViewThumbnailPath == Uri.EMPTY) allViewThumbnailPath = imagePath
} else {
albumData.imageCount++
}
totalCount++
}
c.close()
}
if (totalCount == 0) albumDataMap.clear()
val albumList = ArrayList<Album>()
if (!isNotContainsSpecifyFolderList(specifyFolderList, allViewTitle)
&& albumDataMap.isNotEmpty()
)
albumList.add(
0, Album(
0,
allViewTitle,
AlbumMetaData(
totalCount,
allViewThumbnailPath.toString()
)
)
)
albumDataMap.map {
val value = it.value
Album(
it.key,
value.displayName,
AlbumMetaData(
value.imageCount,
value.thumbnailPath.toString()
)
)
}.also {
albumList.addAll(it)
}
albumList
})
}
override fun getAllBucketImageUri(
bucketId: Long,
exceptMimeTypeList: List<MimeType>,
specifyFolderList: List<String>
): CallableFutureTask<List<Uri>> {
return CallableFutureTask(Callable<List<Uri>> {
val imageUris = arrayListOf<Uri>()
val selection = "$BUCKET_ID = ?"
val bucketId: String = bucketId.toString()
val sort = "$_ID DESC"
val selectionArgs = arrayOf(bucketId)
val images = EXTERNAL_CONTENT_URI
val c = if (bucketId != "0") {
contentResolver.query(images, null, selection, selectionArgs, sort)
} else {
contentResolver.query(images, null, null, null, sort)
}
c?.let {
try {
if (c.moveToFirst()) {
do {
val mimeType = c.getString(c.getColumnIndex(MIME_TYPE)) ?: continue
val folderName = c.getString(c.getColumnIndex(BUCKET_DISPLAY_NAME)) ?: continue
if (isExceptMimeType(exceptMimeTypeList, mimeType)
|| isNotContainsSpecifyFolderList(specifyFolderList, folderName)
) continue
val imgId = c.getInt(c.getColumnIndex(MediaStore.MediaColumns._ID))
val size = c.getInt(c.getColumnIndex(MediaStore.MediaColumns.SIZE))
val path = Uri.withAppendedPath(EXTERNAL_CONTENT_URI, "" + imgId)
if (isValidImage(size)) {
Log.i("Image", "Image Exist");
imageUris.add(path)
} else {
Log.i("Image", "Image Doesn't Exist");
}
} while (c.moveToNext())
}
} finally {
if (!c.isClosed) c.close()
}
}
imageUris
})
}
private fun isValidImage(size: Int): Boolean {
return size > 0
}
override fun getAlbumMetaData(
bucketId: Long,
exceptMimeTypeList: List<MimeType>,
specifyFolderList: List<String>
): CallableFutureTask<AlbumMetaData> {
return CallableFutureTask(Callable<AlbumMetaData> {
val selection = "$BUCKET_ID = ?"
val bucketId: String = bucketId.toString()
val sort = "$_ID DESC"
val selectionArgs = arrayOf(bucketId)
val images = EXTERNAL_CONTENT_URI
val c = if (bucketId != "0") {
contentResolver.query(images, null, selection, selectionArgs, sort)
} else {
contentResolver.query(images, null, null, null, sort)
}
var count = 0
var thumbnailPath: Uri = Uri.EMPTY
c?.let {
try {
if (c.moveToFirst()) {
do {
val mimeType = c.getString(c.getColumnIndex(MIME_TYPE)) ?: continue
val folderName =
c.getString(c.getColumnIndex(BUCKET_DISPLAY_NAME)) ?: continue
if (isExceptMimeType(exceptMimeTypeList, mimeType)
|| isNotContainsSpecifyFolderList(specifyFolderList, folderName)
) continue
val imgId = c.getInt(c.getColumnIndex(MediaStore.MediaColumns._ID))
if (thumbnailPath == Uri.EMPTY) {
thumbnailPath =
Uri.withAppendedPath(EXTERNAL_CONTENT_URI, "" + imgId)
}
count++
} while (c.moveToNext())
}
} finally {
if (!c.isClosed) c.close()
}
}
AlbumMetaData(count, thumbnailPath.toString())
})
}
override fun getDirectoryPath(bucketId: Long): CallableFutureTask<String> {
return CallableFutureTask(Callable<String> {
var path = ""
val selection = "$BUCKET_ID = ?"
val bucketId: String = bucketId.toString()
val selectionArgs = arrayOf(bucketId)
val images = EXTERNAL_CONTENT_URI
val c = if (bucketId != "0") {
contentResolver.query(images, null, selection, selectionArgs, null)
} else {
contentResolver.query(images, null, null, null, null)
}
c?.let {
try {
if (c.moveToFirst()) {
path = getPathDir(
c.getString(c.getColumnIndex(DATA)),
c.getString(c.getColumnIndex(DISPLAY_NAME))
)
}
} finally {
if (!c.isClosed) c.close()
}
}
path
})
}
override fun addAddedPath(addedImage: Uri) {
addedPathList.add(addedImage)
}
override fun addAllAddedPath(addedImagePathList: List<Uri>) {
addedPathList.addAll(addedImagePathList)
}
override fun getAddedPathList(): List<Uri> {
return addedPathList
}
private fun getPathDir(path: String, fileName: String): String {
return path.replace("/$fileName", "")
}
private fun isExceptMimeType(
mimeTypes: List<MimeType>,
mimeType: String
): Boolean {
for (type in mimeTypes) {
if (type.equalsMimeType(mimeType)) return true
}
return false
}
private fun isNotContainsSpecifyFolderList(
specifyFolderList: List<String>,
displayBundleName: String
): Boolean {
return if (specifyFolderList.isEmpty()) false
else !specifyFolderList.contains(displayBundleName)
}
private fun isExceptImage(
bucketMimeType: String,
bucketDisplayName: String,
exceptMimeTypeList: List<MimeType>,
specifyFolderList: List<String>
) = (isExceptMimeType(exceptMimeTypeList, bucketMimeType)
|| isNotContainsSpecifyFolderList(specifyFolderList, bucketDisplayName)
)
private data class AlbumData(
val displayName: String,
val thumbnailPath: Uri,
var imageCount: Int
)
}
|
import {
CreateTask,
DeleteTask,
LoadTasks,
UpdateStatusTask,
} from '@/domain/usecases';
import faker from '@faker-js/faker';
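// Test doubles ("spies") for the task use cases: each records the params it
// received and returns a canned result for assertions.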
export class CreateTaskSpy implements CreateTask {
params = {};
async create(params: CreateTask.Params): Promise<CreateTask.Result> {
this.params = params;
}
}
export class LoadTasksSpy implements LoadTasks {
userId = '';
result = [
{
id: faker.datatype.uuid(),
title: faker.random.word(),
description: faker.random.word(),
finished: false,
},
{
id: faker.datatype.uuid(),
title: faker.random.word(),
description: faker.random.word(),
finished: true,
},
] as LoadTasks.Result[];
async loadByUserId(userId: string): Promise<LoadTasks.Result[]> {
this.userId = userId;
return this.result;
}
}
export class UpdateStatusTaskSpy implements UpdateStatusTask {
params = {} as UpdateStatusTask.Params;
result = true;
async update(
params: UpdateStatusTask.Params
): Promise<UpdateStatusTask.Result> {
this.params = params;
return this.result;
}
}
export class DeleteTaskSpy implements DeleteTask {
params = {};
result = true;
async delete(params: DeleteTask.Params): Promise<DeleteTask.Result> {
this.params = params;
return this.result;
}
}
|
#include "systemc.h"
SC_MODULE(TestInitializer)
{
sc_in_clk clk; // The mandatory clock, as this is synchronous logic.
sc_out<bool> rst; // The mandatory reset, as this is synchronous logic.
sc_out<bool> start; // Input used to signal that it is OK to start the masters.
sc_in<bool> done; // Output used to signal that the masters are done sending.
void init();
SC_CTOR(TestInitializer)
{
// Register init() as a clocked thread, sensitive to the rising clock edge.
SC_CTHREAD(init, clk.pos());
}
};
|
%%--------------------------------------------------------------------
%%
%% %CopyrightBegin%
%%
%% Copyright Ericsson AB 1999-2009. All Rights Reserved.
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
%% You may obtain a copy of the License at
%%
%% http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing, software
%% distributed under the License is distributed on an "AS IS" BASIS,
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
%%
%% %CopyrightEnd%
%%
%%
%%--------------------------------------------------------------------
%% File: orber_iiop_inrequest.erl
%%
%% Description:
%% This file contains the handling of incoming requests
%%
%%-----------------------------------------------------------------
-module(orber_iiop_inrequest).
-include_lib("orber/src/orber_iiop.hrl").
-include_lib("orber/include/corba.hrl").
-include_lib("orber/include/orber_pi.hrl").
%%-----------------------------------------------------------------
%% External exports
%%-----------------------------------------------------------------
-export([start/5, start_fragment_collector/8]).
%%-----------------------------------------------------------------
%% Internal exports
%%-----------------------------------------------------------------
-export([handle_message/5, fragment_collector/8]).
%%-----------------------------------------------------------------
%% Macros
%%-----------------------------------------------------------------
-define(DEBUG_LEVEL, 8).
%%-----------------------------------------------------------------
%% External interface functions
%%-----------------------------------------------------------------
start(GIOPHdr, Message, Type, Socket, Env) ->
spawn_link(orber_iiop_inrequest, handle_message,
[GIOPHdr, Message, Type, Socket, Env]).
start_fragment_collector(GIOPHdr, Message, Type, Socket, ReqId, Proxy, MaxFrags, Env) ->
spawn_link(orber_iiop_inrequest, fragment_collector,
[GIOPHdr, Message, Type, Socket, ReqId, Proxy, MaxFrags, Env]).
%%-----------------------------------------------------------------
%% Internal functions
%%-----------------------------------------------------------------
%%-----------------------------------------------------------------
%% Func: fragment_collector/4
%%-----------------------------------------------------------------
fragment_collector(GIOPHdr, Bytes, SocketType, Socket, ReqId, Proxy, MaxFrags, Env) ->
case catch collect(Proxy, [], GIOPHdr#giop_message.byte_order, ReqId,
MaxFrags, 0) of
{ok, Buffer} ->
NewGIOP = GIOPHdr#giop_message{message =
    list_to_binary([GIOPHdr#giop_message.message|Buffer])},
%% NOTE, the third argument to dec_message_header must be complete
%% message (i.e. AllBytes), otherwise we cannot handle indirection.
case handle_message(NewGIOP, list_to_binary([Bytes| Buffer]),
SocketType, Socket, Env) of
message_error ->
Proxy ! {message_error, self(), ReqId},
ok;
_ ->
ok
end;
ok ->
ok;
{'EXCEPTION', E} ->
Proxy ! {message_error, self(), ReqId},
Reply = marshal_exception(Env, ReqId, E, enc_reply),
orber_socket:write(SocketType, Socket, Reply)
end.
collect(_Proxy, _Buffer, _ByteOrder, _ReqId, MaxFrags, MaxFrags) ->
orber:dbg("[~p] ~p:collect(~p)~nMax fragments limit reached.",
[?LINE, ?MODULE, MaxFrags], ?DEBUG_LEVEL),
{'EXCEPTION', #'IMP_LIMIT'{completion_status=?COMPLETED_NO}};
collect(Proxy, Buffer, ByteOrder, ReqId, MaxFrags, FragCounter) ->
receive
{Proxy, #giop_message{byte_order = ByteOrder,
message = Message,
fragments = true} = GIOPHdr} ->
{_, #fragment_header{request_id=ReqId}, FragBody, _, _} =
cdr_decode:dec_message_header(null, GIOPHdr, Message),
collect(Proxy, [FragBody | Buffer], ByteOrder, ReqId,
MaxFrags, FragCounter+1);
{Proxy, #giop_message{byte_order = ByteOrder,
message = Message,
fragments = false} = GIOPHdr} ->
{_, #fragment_header{request_id=ReqId}, FragBody, _, _} =
cdr_decode:dec_message_header(null, GIOPHdr, Message),
{ok, lists:reverse([FragBody | Buffer])};
{Proxy, GIOPHdr, _Data, _} ->
orber:dbg("[~p] orber_iiop_inrequest:collect(~p, ~p)~n"
"Incorrect Fragment. Might be different byteorder.",
[?LINE, ByteOrder, GIOPHdr], ?DEBUG_LEVEL),
{'EXCEPTION', #'MARSHAL'{completion_status=?COMPLETED_NO}};
{Proxy, cancel_request_header} ->
ok;
Other ->
orber:dbg("[~p] ~p:collect(~p)~n"
"Unable to collect all fragments: ~p",
[?LINE, ?MODULE, Buffer, Other], ?DEBUG_LEVEL),
{'EXCEPTION', #'MARSHAL'{completion_status=?COMPLETED_NO}}
end.
%%-----------------------------------------------------------------
%% Func: handle_message/4
%%-----------------------------------------------------------------
handle_message(GIOPHdr, Message, SocketType, Socket, Env) ->
%% Warning. We shouldn't set the flags like this here. But, for now, we'll
%% do it due to performance reasons.
put(oe_orber_flags, Env#giop_env.flags),
case catch cdr_decode:dec_message_header(null, GIOPHdr, Message) of
Hdr when is_record(Hdr, cancel_request_header) ->
%% We just skip this message for the moment; the standard requires that
%% the client handles the reply anyway.
message_error;
{location_forward, Object, ReqId, Version, OldObj} ->
Reply = call_interceptors_out(Env#giop_env{version = Version},
ReqId, [Object], OldObj,
'location_forward',
"location_forward",
{{'tk_objref', "", ""}, [],[]}),
orber_socket:write(SocketType, Socket, Reply);
{object_forward, Object, ReqId, Version, _OldObj} ->
Reply = handle_locate_request(Env#giop_env{version = Version},
{object_forward, Object, ReqId}),
orber_socket:write(SocketType, Socket, Reply);
{Version, Hdr} when is_record(Hdr, locate_request_header) ->
Reply = handle_locate_request(Env#giop_env{version = Version}, Hdr),
orber_socket:write(SocketType, Socket, Reply);
{Version, ReqHdr, Rest, Len, ByteOrder} when is_record(ReqHdr, request_header) ->
handle_request(Env#giop_env{version = Version}, ReqHdr, Rest, Len,
ByteOrder, SocketType, Socket, Message);
Other ->
%% This clause takes care of all erroneous messages.
orber:dbg("[~p] orber_iiop_inrequest:handle_message(~p)~n"
"Decoding Msg Header failed: ~p",
[?LINE, Message, Other], ?DEBUG_LEVEL),
Reply = cdr_encode:enc_message_error(Env),
orber_socket:write(SocketType, Socket, Reply),
message_error
end.
send_reply(oneway, _SocketType, _Socket) ->
ok;
send_reply(Reply, SocketType, Socket) ->
orber_socket:write(SocketType, Socket, Reply).
%%-----------------------------------------------------------------
%% Func: handle_request
%%-----------------------------------------------------------------
handle_request(#giop_env{interceptors = false} = Env, ReqHdr, Rest, Len, ByteOrder,
SocketType, Socket, Message) ->
NewEnv = check_context(ReqHdr#request_header.service_context, [], Env),
case decode_body(NewEnv, ReqHdr, Rest, Len, ByteOrder, Message, enc_reply) of
{error, E} ->
orber_socket:write(SocketType, Socket, E);
{NewEnv2, Hdr, Par, TypeCodes} ->
Result = invoke_request(Hdr, Par, SocketType, TypeCodes, Env),
Reply = evaluate(NewEnv2, Hdr, Result, TypeCodes,
enc_reply, 'no_exception'),
send_reply(Reply, SocketType, Socket)
end;
handle_request(Env, ReqHdr, Rest, Len, ByteOrder, SocketType, Socket, Message) ->
NewEnv = check_context(ReqHdr#request_header.service_context, [], Env),
case catch call_interceptors(SocketType, NewEnv, ReqHdr,
Rest, Len, ByteOrder, Message) of
{error, E} ->
%% Failed to decode body.
orber_socket:write(SocketType, Socket, E);
{'EXCEPTION', Exc} ->
orber:dbg("[~p] orber_iiop_inrequest:handle_message(~p)~n"
"Invoking the interceptors resulted in: ~p",
[?LINE, Message, Exc], ?DEBUG_LEVEL),
Reply = marshal_exception(NewEnv,
ReqHdr#request_header.request_id,
Exc, enc_reply),
orber_socket:write(SocketType, Socket, Reply);
{'EXIT', R} ->
orber:dbg("[~p] orber_iiop_inrequest:handle_message(~p)~n"
"Invoking the interceptors resulted in: ~p",
[?LINE, ReqHdr, R], ?DEBUG_LEVEL),
Reply = marshal_exception(NewEnv,
ReqHdr#request_header.request_id,
#'MARSHAL'{completion_status=?COMPLETED_MAYBE},
enc_reply),
orber_socket:write(SocketType, Socket, Reply);
Reply ->
send_reply(Reply, SocketType, Socket)
end.
check_context([], [], Env) ->
Env;
check_context([], Acc, Env) ->
Env#giop_env{ctx = Acc};
check_context([#'CSI_SASContextBody'
{label = ?CSI_MsgType_MTEstablishContext,
value = #'CSI_EstablishContext'
{client_context_id = _Id,
authorization_token = _AuthToken,
identity_token = _IdToken,
client_authentication_token = _CAuthToken}}|Rest], Acc, Env) ->
check_context(Rest, [#'IOP_ServiceContext'
{context_id=?IOP_SecurityAttributeService,
context_data = #'CSI_SASContextBody'
{label = ?CSI_MsgType_MTCompleteEstablishContext,
value = #'CSI_CompleteEstablishContext'
{client_context_id = 0,
context_stateful = false,
final_context_token = [0,255]}}}|Acc], Env);
check_context([_|Rest], Acc, Env) ->
check_context(Rest, Acc, Env).
%%-----------------------------------------------------------------
%% Func: call_interceptors
%%-----------------------------------------------------------------
call_interceptors(SocketType, #giop_env{interceptors = {native, Ref, PIs},
ctx = Ctx} = Env,
ReqHdr, Rest, Len, ByteOrder, Msg) ->
NewRest = orber_pi:in_request_enc(PIs, ReqHdr, Ref, Rest),
case decode_body(Env, ReqHdr, NewRest, Len, ByteOrder, Msg, enc_reply) of
{NewEnv, Hdr, Par, TypeCodes} ->
NewPar = orber_pi:in_request(PIs, ReqHdr, Ref, Par),
ResultInv = invoke_request(Hdr, NewPar, SocketType, TypeCodes, NewEnv),
Result = orber_pi:out_reply(PIs, ReqHdr, Ref, ResultInv, Ctx),
case evaluate(NewEnv, ReqHdr, Result, TypeCodes, enc_reply_split,
'no_exception') of
{ReplyHdr, Reply, HdrL, _BodyL, Flags} ->
NewReply = orber_pi:out_reply_enc(PIs, ReqHdr, Ref, Reply, Ctx),
MessSize = HdrL+size(NewReply),
cdr_encode:enc_giop_message_header(NewEnv, 'reply', Flags,
MessSize, [ReplyHdr|NewReply]);
Other ->
Other
end;
Other ->
Other
end;
call_interceptors(SocketType, #giop_env{interceptors = {portable, _PIs}} = Env,
ReqHdr, Rest, Len, ByteOrder, Msg) ->
case decode_body(Env, ReqHdr, Rest, Len, ByteOrder, Msg, enc_reply) of
{NewEnv, Hdr, Par, TypeCodes} ->
Result = invoke_request(Hdr, Par, SocketType, TypeCodes, NewEnv),
evaluate(NewEnv, ReqHdr, Result, TypeCodes, enc_reply, 'no_exception');
Other ->
Other
end.
%%-----------------------------------------------------------------
%% Func: call_interceptors_out
%%-----------------------------------------------------------------
call_interceptors_out(#giop_env{interceptors = {native, Ref, PIs}, ctx = Ctx} = Env,
ReqId, Result, Obj, Type, Operation, TypeCodes) ->
ReqHdr = #request_header{object_key = Obj,
service_context = Ctx,
response_expected = true,
request_id = ReqId,
operation = Operation},
NewResult = (catch orber_pi:out_reply(PIs, ReqHdr, Ref, Result, Ctx)),
{ReplyHdr, Reply, HdrL, _BodyL, Flags} =
evaluate(Env, ReqHdr, NewResult, TypeCodes, enc_reply_split, Type),
NewReply =
case catch orber_pi:out_reply_enc(PIs, ReqHdr, Ref, Reply, Ctx) of
{'EXCEPTION', Exception} ->
%% Since evaluate don't need TypeCodes or Status no need to supply
%% them.
evaluate(Env, ReqHdr, {'EXCEPTION', Exception}, undefined,
enc_reply_split, undefined);
{'EXIT', E} ->
orber:dbg("[~p] orber_iiop_inrequest:handle_location_forward(~p)~n"
"Resulted in exit: ~p", [?LINE, PIs, E], ?DEBUG_LEVEL),
marshal_exception(Env, ReqId,
#'MARSHAL'{completion_status=?COMPLETED_NO},
enc_reply);
R ->
R
end,
MessSize = HdrL+size(NewReply),
cdr_encode:enc_giop_message_header(Env, 'reply', Flags, MessSize,
[ReplyHdr|NewReply]);
call_interceptors_out(#giop_env{interceptors = {portable, _PIs}} = Env,
ReqId, Result, _Obj, Type, _, TypeCodes) ->
Hdr = #request_header{response_expected = true,
request_id = ReqId},
evaluate(Env, Hdr, Result, TypeCodes, enc_reply, Type);
call_interceptors_out(Env, ReqId, Result, _Obj, Type, _, TypeCodes) ->
Hdr = #request_header{response_expected = true,
request_id = ReqId},
evaluate(Env, Hdr, Result, TypeCodes, enc_reply, Type).
%%-----------------------------------------------------------------
%% Func: decode_body/2
%%-----------------------------------------------------------------
decode_body(#giop_env{version = Version} = Env, ReqHdr, Rest, Len,
ByteOrder, Message, Func) ->
case catch cdr_decode:dec_request_body(Version, ReqHdr, Rest, Len,
ByteOrder, Message) of
{NewVersion, ReqHdr, Par, TypeCodes} ->
{Env#giop_env{version = NewVersion}, ReqHdr, Par, TypeCodes};
{'EXCEPTION', E} ->
orber:dbg("[~p] orber_iiop_inrequest:decode_body(~p, ~p)~n"
"Failed decoding request body: ~p",
[?LINE, ReqHdr, Message, E], ?DEBUG_LEVEL),
{error, marshal_exception(Env, ReqHdr#request_header.request_id,
E, Func)};
Other ->
%% This clause takes care of all erroneous messages.
orber:dbg("[~p] orber_iiop_inrequest:decode_body(~p, ~p)~n"
"Failed decoding request body: ~p",
[?LINE, ReqHdr, Message, Other], ?DEBUG_LEVEL),
{error, marshal_exception(Env, ReqHdr#request_header.request_id,
#'MARSHAL'{completion_status=?COMPLETED_NO},
Func)}
end.
%%-----------------------------------------------------------------
%% Func: handle_locate_request/2
%%-----------------------------------------------------------------
handle_locate_request(Env, {object_forward, Object, ReqId}) ->
case catch cdr_encode:enc_locate_reply(
Env#giop_env{request_id = ReqId,
tc = {'tk_objref', "", ""},
result = Object,
reply_status = 'object_forward'}) of
{'EXCEPTION', Exception} ->
orber:dbg("[~p] orber_iiop_inrequest:handle_locate_request(object_forward)~n"
"Raised the exception: ~p", [?LINE, Exception], ?DEBUG_LEVEL),
marshal_locate_exception(Env, ReqId, Exception);
{'EXIT', E} ->
orber:dbg("[~p] orber_iiop_inrequest:handle_locate_request(object_forward)~n"
"Resulted in exit: ~p", [?LINE, E], ?DEBUG_LEVEL),
marshal_locate_exception(Env, ReqId,
#'MARSHAL'{completion_status=?COMPLETED_NO});
R ->
R
end;
handle_locate_request(Env, Hdr) ->
Location = orber_objectkeys:check(Hdr#locate_request_header.object_key),
case catch cdr_encode:enc_locate_reply(
Env#giop_env{request_id = Hdr#locate_request_header.request_id,
reply_status = Location}) of
{'EXCEPTION', Exception} ->
orber:dbg("[~p] orber_iiop_inrequest:handle_locate_request(~p)~n"
"Raised the exception: ~p",
[?LINE, Location, Exception], ?DEBUG_LEVEL),
marshal_locate_exception(Env, Hdr#locate_request_header.request_id, Exception);
{'EXIT', E} ->
orber:dbg("[~p] orber_iiop_inrequest:handle_locate_request(~p)~n"
"Resulted in exit: ~p", [?LINE, Location, E], ?DEBUG_LEVEL),
marshal_locate_exception(Env, Hdr#locate_request_header.request_id,
#'MARSHAL'{completion_status=?COMPLETED_NO});
R ->
R
end.
%%-----------------------------------------------------------------
%% Func: invoke_request/2
%%-----------------------------------------------------------------
invoke_request(Hdr, Par, normal, TypeCodes, #giop_env{iiop_ssl_port = SSLPort,
partial_security = PartialSec}) ->
Result =
case SSLPort of
-1 ->
corba:request_from_iiop(Hdr#request_header.object_key,
Hdr#request_header.operation,
Par, [], Hdr#request_header.response_expected,
Hdr#request_header.service_context);
_ ->
case Hdr#request_header.object_key of
{_,registered,orber_init,_,_,_} ->
corba:request_from_iiop(Hdr#request_header.object_key,
Hdr#request_header.operation,
Par, [],
Hdr#request_header.response_expected,
Hdr#request_header.service_context);
{_,_,_,_,_,Flags} when PartialSec == true,
?ORB_FLAG_TEST(Flags, ?ORB_NO_SECURITY) == true ->
corba:request_from_iiop(Hdr#request_header.object_key,
Hdr#request_header.operation,
Par, [],
Hdr#request_header.response_expected,
Hdr#request_header.service_context);
_ ->
orber:dbg("[~p] orber_iiop_inrequest:invoke_request(~p)~n"
"SSL do not permit",
[?LINE, Hdr#request_header.object_key], ?DEBUG_LEVEL),
{'EXCEPTION', #'NO_PERMISSION'{completion_status=?COMPLETED_NO}}
end
end,
result_to_list(Result, TypeCodes);
invoke_request(Hdr, Par, ssl, TypeCodes, _) ->
Result = corba:request_from_iiop(Hdr#request_header.object_key,
Hdr#request_header.operation,
Par, [], Hdr#request_header.response_expected,
Hdr#request_header.service_context),
result_to_list(Result, TypeCodes).
%%-----------------------------------------------------------------
%% Func: evaluate/4
%%-----------------------------------------------------------------
evaluate(_, Hdr,_,_,_,_) when Hdr#request_header.response_expected == 'false' ->
oneway;
evaluate(Env, Hdr, _, _, Func, _)
when Hdr#request_header.response_expected == 'true_oneway' ->
%% Special case which only occurs when using IIOP-1.2
cdr_encode:Func(Env#giop_env{request_id = Hdr#request_header.request_id,
reply_status = 'no_exception',
tc = {tk_null,[],[]}, result = null});
evaluate(Env, Hdr, {'EXCEPTION', Exc}, _, Func, _) ->
%% The exception can be user defined. Hence, we must check the result.
case catch marshal_exception(Env, Hdr#request_header.request_id, Exc, Func) of
{'EXCEPTION', Exception} ->
orber:dbg("[~p] orber_iiop_inrequest:evaluate(~p)~n"
"Encoding (reply) exception: ~p",
[?LINE, Hdr, Exception], ?DEBUG_LEVEL),
marshal_exception(Env, Hdr#request_header.request_id, Exception, Func);
{'EXIT', E} ->
orber:dbg("[~p] orber_iiop_inrequest:evaluate(~p)~n"
"Encode (reply) resulted in: ~p",
[?LINE, Hdr, E], ?DEBUG_LEVEL),
marshal_exception(Env, Hdr#request_header.request_id,
#'MARSHAL'{completion_status=?COMPLETED_YES}, Func);
R ->
R
end;
evaluate(#giop_env{version = {1,2}} = Env, Hdr, {'location_forward_perm', NewIOR}, _,
Func, _)->
case catch cdr_encode:Func(#giop_env{version = {1,2},
request_id = Hdr#request_header.request_id,
reply_status = 'location_forward_perm',
tc = {{'tk_objref', "", ""}, [],[]},
result = NewIOR}) of
{'EXCEPTION', Exception} ->
orber:dbg("[~p] orber_iiop_inrequest:evaluate(~p) " ++
"Encoding (reply) exception: ~p",
[?LINE, Hdr, Exception], ?DEBUG_LEVEL),
marshal_exception(Env, Hdr#request_header.request_id, Exception, Func);
{'EXIT', E} ->
orber:dbg("[~p] orber_iiop_inrequest:evaluate(~p) " ++
"Encode (reply) resulted in: ~p",
[?LINE, Hdr, E], ?DEBUG_LEVEL),
marshal_exception(Env, Hdr#request_header.request_id,
#'MARSHAL'{completion_status=?COMPLETED_YES}, Func);
R ->
R
end;
evaluate(Env, Hdr, [Res |OutPar], TypeCodes, Func, Type) ->
case catch cdr_encode:Func(Env#giop_env{request_id = Hdr#request_header.request_id,
reply_status = Type,
tc = TypeCodes, result = Res,
parameters = OutPar}) of
{'EXCEPTION', Exception} ->
orber:dbg("[~p] orber_iiop_inrequest:evaluate(~p, ~p, ~p)~n"
"Encode exception: ~p",
[?LINE, Hdr, Res, OutPar, Exception], ?DEBUG_LEVEL),
marshal_exception(Env, Hdr#request_header.request_id, Exception, Func);
{'EXIT', E} ->
orber:dbg("[~p] orber_iiop_inrequest:evaluate(~p, ~p, ~p)~n"
"Encode exit: ~p",
[?LINE, Hdr, Res, OutPar, E], ?DEBUG_LEVEL),
marshal_exception(Env, Hdr#request_header.request_id,
#'MARSHAL'{completion_status=?COMPLETED_YES}, Func);
R ->
R
end;
evaluate(Env, Hdr, What, TypeCodes, Func, _) ->
orber:dbg("[~p] orber_iiop_inrequest:evaluate(~p)~n"
"Bad reply: ~p~n"
"Should be: ~p~n"
"GIOP Env : ~p", [?LINE, Hdr, What, TypeCodes, Env], ?DEBUG_LEVEL),
marshal_exception(Env, Hdr#request_header.request_id,
#'INTERNAL'{completion_status=?COMPLETED_MAYBE}, Func).
%%-----------------------------------------------------------------
%% Utility Functions
%%-----------------------------------------------------------------
result_to_list({'oe_location_forward_perm', NewIOR}, _) ->
{'location_forward_perm', NewIOR};
result_to_list({'EXCEPTION', E}, _) ->
{'EXCEPTION', E};
result_to_list(Result, {_TkRes, _, []}) ->
[Result];
result_to_list(Result, {_TkRes, _, _TkOut}) ->
tuple_to_list(Result).
marshal_exception(Env, Id, Exception, Func) ->
{TypeOfException, ExceptionTypeCode, NewExc} =
orber_exceptions:get_def(Exception),
cdr_encode:Func(Env#giop_env{request_id = Id,
reply_status = TypeOfException,
tc = {ExceptionTypeCode, [], []},
result = NewExc}).
marshal_locate_exception(#giop_env{version = {1,2}} = Env, Id, Exception) ->
case orber_exceptions:get_def(Exception) of
{?SYSTEM_EXCEPTION, ExceptionTypeCode, NewExc} ->
cdr_encode:enc_locate_reply(
Env#giop_env{request_id = Id,
reply_status = 'loc_system_exception',
tc = ExceptionTypeCode, result = NewExc});
_ ->
%% This case is impossible (i.e. Orber only throws system
%% exceptions). But to be on the safe side...
marshal_locate_exception(Env, Id, #'MARSHAL'
{completion_status=?COMPLETED_YES})
end;
marshal_locate_exception(Env, _Id, _Exception) ->
%% There is no way to define an exception for IIOP-1.0/1.1 in a
%% locate_reply.
cdr_encode:enc_message_error(Env).
|
use curl::easy::{Easy2, Handler, WriteError};
use core::time::Duration;
struct Collector(Vec<u8>);
impl Handler for Collector {
fn write(&mut self, data: &[u8]) -> Result<usize, WriteError> {
self.0.extend_from_slice(data);
Ok(data.len())
}
}
fn main() {
    // Probe each network interface and report which ones have connectivity.
    let interfaces = vec!["eth0", "wlan0", "wwan0"];
    for interface in interfaces {
let mut easy = Easy2::new(Collector(Vec::new()));
easy.url("http://www.google.com").unwrap();
easy.interface(interface).unwrap();
easy.timeout(Duration::from_secs(10)).unwrap();
match easy.perform() {
Ok(_) => println!("Interface {} connected", interface),
Err(_) => println!("Interface {} without connection", interface),
}
}
}
|
# coding: utf-8
import gym
import numpy as np
from cached_property import cached_property
class BaseEnv(gym.Env):
def __init__(
self,
dt,
dtype=np.float32,
name="BaseEnv"
):
super().__init__()
self.name = name
self.dt = dt
self.dtype = dtype
self._act_low = 0
self._act_high = 1
def step(self, action):
raise NotImplementedError()
def reset(self):
raise NotImplementedError()
@cached_property
def action_size(self):
return self.action_space.high.size
@cached_property
def observation_size(self):
return self.observation_space.high.size
@staticmethod
def generate_space(low, high):
        high = np.array(high) if isinstance(high, list) else high
        low = np.array(low) if isinstance(low, list) else low
return gym.spaces.Box(high=high, low=low)
def clip_action(self, action):
return np.clip(action, self._act_low, self._act_high)
|
import { Observable } from '../../internal/Observable';
import { shareReplay } from '../../internal/patching/operator/shareReplay';
// v4-backwards-compatibility
Observable.prototype.shareReplay_persisted = shareReplay;
Observable.prototype.shareReplay = function<T>(this: Observable<T>, count: number): Observable<T> {
return this.publishReplay(count).refCount();
};
declare module '../../internal/Observable' {
interface Observable<T> {
// v4-backwards-compatibility
shareReplay: typeof shareReplay;
shareReplay_persisted: typeof shareReplay;
}
}
|
package com.gapps.oneone.utils.views.dialog
import android.app.Dialog
import android.content.Context
import android.graphics.Color
import android.graphics.drawable.ColorDrawable
import android.text.InputType
import android.view.Window
import com.gapps.oneone.R
import com.gapps.oneone.utils.extensions.gone
import com.gapps.oneone.utils.extensions.visible
import kotlinx.android.synthetic.main.dialog_oo_sp_editor.*
import java.util.*
class SpEditorDialog(
context: Context,
private val key: String?,
private val type: String?,
private val value: String?,
private val ok: String?,
private val cancel: String?,
private val okAction: ((String?, String?, String?) -> Unit)? = null,
private val cancelAction: (() -> Unit)? = null
) : Dialog(context) {
init {
requestWindowFeature(Window.FEATURE_NO_TITLE)
setContentView(R.layout.dialog_oo_sp_editor)
window?.setBackgroundDrawable(ColorDrawable(Color.TRANSPARENT))
setCancelable(true)
initViews()
}
private fun initViews() {
editEntry(type)
dialog_title.apply {
if (key == null) {
gone()
} else {
text = key
}
}
ok_btn.apply {
if (ok == null) {
gone()
} else {
setOnClickListener {
onOkClick()
cancel()
}
text = ok
}
}
cancel_btn.apply {
if (cancel == null) {
gone()
} else {
setOnClickListener {
cancelAction?.invoke()
cancel()
}
text = cancel
}
}
}
private fun onOkClick() {
val value = when (type?.toLowerCase(Locale.ENGLISH)) {
"boolean" -> if (radio_true.isChecked) "true" else "false"
else -> text_editor?.text?.toString()
}
okAction?.invoke(type, key, value)
}
private fun editEntry(type: String?) {
type ?: return
when (type.toLowerCase(Locale.ENGLISH)) {
"string" -> editEntryAsString()
"long" -> editEntryAsLong()
"integer" -> editEntryAsInt()
"float" -> editEntryAsFloat()
"boolean" -> editEntryAsBoolean()
}
}
private fun editEntryAsFloat() {
boolean_editor.gone()
text_editor.apply {
visible()
inputType = InputType.TYPE_CLASS_NUMBER or InputType.TYPE_NUMBER_FLAG_DECIMAL or InputType.TYPE_NUMBER_FLAG_SIGNED
setText(value)
setSelection(value?.length ?: 0)
}
}
private fun editEntryAsBoolean() {
boolean_editor.visible()
text_editor.gone()
if (value?.toLowerCase(Locale.ENGLISH) == "true") {
boolean_editor.check(R.id.radio_true)
} else {
boolean_editor.check(R.id.radio_false)
}
}
private fun editEntryAsInt() {
boolean_editor.gone()
text_editor.apply {
visible()
inputType = InputType.TYPE_CLASS_NUMBER or InputType.TYPE_NUMBER_FLAG_SIGNED
setText(value)
setSelection(value?.length ?: 0)
}
}
private fun editEntryAsLong() {
boolean_editor.gone()
text_editor.apply {
visible()
inputType = InputType.TYPE_CLASS_NUMBER or InputType.TYPE_NUMBER_FLAG_SIGNED
setText(value)
setSelection(value?.length ?: 0)
}
}
private fun editEntryAsString() {
boolean_editor.gone()
text_editor.apply {
visible()
inputType = InputType.TYPE_CLASS_TEXT or InputType.TYPE_TEXT_FLAG_NO_SUGGESTIONS
setText(value)
setSelection(value?.length ?: 0)
}
}
} |
using System;
using System.Collections.Generic;
using System.Data;
using System.Data.OracleClient;
using System.Linq;
using System.Web;
using VAdvantage.DataBase;
using VIS.DataContracts;
namespace VIS.Helpers
{
/// <summary>
    /// Helps fetch data from the data server and return JSON-compatible class objects
/// </summary>
public class SqlHelper
{
/// <summary>
        /// fetch data from the data server and create a list of JTable objects
        /// </summary>
        /// <param name="sqlIn">input parameter</param>
        /// <returns>null on error, otherwise a list of JTable objects</returns>
public List<JTable> ExecuteJDataSet(SqlParamsIn sqlIn)
{
try
{
List<JTable> outO = new List<JTable>();
JTable obj = null;
DataSet ds = null;
ds = ExecuteDataSet(sqlIn);
if (ds == null || ds.Tables.Count < 1)
return null;
// bool singleTable = ds.Tables.Count >= 0;
//StringBuilder tables = new StringBuilder("[");
// StringBuilder columns = new StringBuilder(!singleTable ? "[{" : "{");
int MAX_ROWS = 10001;
for (int table = 0; table < ds.Tables.Count; table++)
{
obj = new JTable();
var dt = ds.Tables[table];
obj.name = dt.TableName;
obj.records = ds.Tables[table].Rows.Count;
obj.page = sqlIn.page;
obj.total = 1;
for (int column = 0; column < dt.Columns.Count; column++)
{
var cc = new JColumn();
cc.index = column;
cc.name = dt.Columns[column].ColumnName.ToLower();
cc.type = dt.Columns[column].DataType.ToString().ToLower();
obj.columns.Add(cc);
}
int count = dt.Rows.Count;
for (int row = 0; row < count; row++)
{
if (row > MAX_ROWS)
{
break;
}
JRow r = new JRow();
r.id = row;
for (int column = 0; column < dt.Columns.Count; column++)
{
//var c = new Dictionary<string,object>();
//c[dt.Columns[column].ColumnName.ToLower()] = dt.Rows[row][column];
r.cells[dt.Columns[column].ColumnName.ToLower()] = dt.Rows[row][column];
//rows.Append(dt.Columns[column].ColumnName).Append(":").Append(dt.Rows[row][column]);
}
obj.rows.Add(r);
}
outO.Add(obj);
}
ds.Tables.Clear();
ds.Dispose();
return outO;
}
catch
{
return null;
}
}
public DataSet ExecuteDataSet(SqlParamsIn sqlIn)
{
if (String.IsNullOrEmpty(sqlIn.sql))
{
return null;
}
string sql = sqlIn.sql;
bool doPaging = sqlIn.pageSize > 0;
SqlParams[] paramIn = sqlIn.param == null ? null : sqlIn.param.ToArray();
Trx trxName = null;
string[] tables = sql.Split('~');
DataSet ds = new DataSet();
int i = 0;
foreach (string table in tables)
{
string tableName = "Table" + i;
DataSet dsTemp = null;
if (!doPaging)
{
dsTemp = VIS.DBase.DB.ExecuteDataset(table, paramIn, trxName);
if (dsTemp != null && dsTemp.Tables.Count > 0)
{
DataTable data = dsTemp.Tables[0];
dsTemp.Tables.Remove(data);
data.TableName = tableName;
ds.Tables.Add(data);
}
i++;
// ds = VAdvantage.DataBase.DB.SetUtcDateTime(ds);
}
else //Paging
{
ds = VIS.DBase.DB.ExecuteDatasetPaging(sql, sqlIn.page, sqlIn.pageSize);
}
}
if (ds == null || ds.Tables.Count < 1)
return null;
ds = VAdvantage.DataBase.DB.SetUtcDateTime(ds);
return ds;
}
public DataSet ExecuteDataSet(string sql)
{
//string trxName = null;
DataSet ds = null;
ds = VIS.DBase.DB.ExecuteDataset(sql, null);
if (ds == null || ds.Tables.Count < 1)
return null;
ds = VAdvantage.DataBase.DB.SetUtcDateTime(ds);
return ds;
}
public List<int> ExecuteNonQueries(string sql, List<List<SqlParams>> param)
{
List<int> result = new List<int>();
string[] queries = sql.Split('/');
for (int i = 0; i < queries.Count(); i++)
{
SqlParams[] paramIn = (param == null || param.Count < (i + 1) || param[i] == null) ? null : param[i].ToArray();
result.Add(VIS.DBase.DB.ExecuteQuery(queries[i], paramIn, null));
}
return result;
}
public int ExecuteNonQuery(SqlParamsIn sqlIn)
{
SqlParams[] paramIn = sqlIn.param == null ? null : sqlIn.param.ToArray();
return VIS.DBase.DB.ExecuteQuery(sqlIn.sql, paramIn, null);
}
}
} |
-- Copyright 2004-2019 H2 Group. Multiple-Licensed under the MPL 2.0,
-- and the EPL 1.0 (http://h2database.com/html/license.html).
-- Initial Developer: H2 Group
--
CREATE TABLE TEST(B1 VARBINARY, B2 BINARY VARYING, B3 BINARY, B4 RAW, B5 BYTEA, B6 LONG RAW, B7 LONGVARBINARY);
> ok
SELECT COLUMN_NAME, DATA_TYPE, TYPE_NAME, COLUMN_TYPE FROM INFORMATION_SCHEMA.COLUMNS
WHERE TABLE_NAME = 'TEST' ORDER BY ORDINAL_POSITION;
> COLUMN_NAME DATA_TYPE TYPE_NAME COLUMN_TYPE
> ----------- --------- --------- --------------
> B1 -3 VARBINARY VARBINARY
> B2 -3 VARBINARY BINARY VARYING
> B3 -3 VARBINARY BINARY
> B4 -3 VARBINARY RAW
> B5 -3 VARBINARY BYTEA
> B6 -3 VARBINARY LONG RAW
> B7 -3 VARBINARY LONGVARBINARY
> rows (ordered): 7
DROP TABLE TEST;
> ok
|
package com.sadik.hrmscf.entities.concretes;
import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
import lombok.AllArgsConstructor;
import lombok.Data;
import lombok.NoArgsConstructor;
import javax.persistence.*;
@Data
@Entity
@Table(name ="languages")
@JsonIgnoreProperties({"hibernateLazyInitializer", "handler", "curriculumVitae"})
@NoArgsConstructor
@AllArgsConstructor
public class Language {
@Id
@GeneratedValue(strategy = GenerationType.IDENTITY)
@Column(name="id")
    private int id;
    @Column(name="language")
    private String language;
    @Column(name="level")
    private String level;
@ManyToOne
@JoinColumn(name = "curriculum_id")
private CurriculumVitae curriculumVitae;
}
|
package userprint
import (
"code.cloudfoundry.org/cli/cf/models"
"code.cloudfoundry.org/cli/plugin/models"
)
type pluginPrinter struct {
roles []models.Role
userLister func(spaceGUID string, role models.Role) ([]models.UserFields, error)
users userCollection
printer func([]userWithRoles)
}
type userCollection map[string]userWithRoles
type userWithRoles struct {
models.UserFields
Roles []models.Role
}
func NewOrgUsersPluginPrinter(
pluginModel *[]plugin_models.GetOrgUsers_Model,
userLister func(guid string, role models.Role) ([]models.UserFields, error),
roles []models.Role,
) *pluginPrinter {
return &pluginPrinter{
users: make(userCollection),
userLister: userLister,
roles: roles,
printer: func(users []userWithRoles) {
var orgUsers []plugin_models.GetOrgUsers_Model
for _, user := range users {
orgUsers = append(orgUsers, plugin_models.GetOrgUsers_Model{
Guid: user.GUID,
Username: user.Username,
IsAdmin: user.IsAdmin,
Roles: rolesToString(user.Roles),
})
}
*pluginModel = orgUsers
},
}
}
func NewSpaceUsersPluginPrinter(
pluginModel *[]plugin_models.GetSpaceUsers_Model,
userLister func(guid string, role models.Role) ([]models.UserFields, error),
roles []models.Role,
) *pluginPrinter {
return &pluginPrinter{
users: make(userCollection),
userLister: userLister,
roles: roles,
printer: func(users []userWithRoles) {
var spaceUsers []plugin_models.GetSpaceUsers_Model
for _, user := range users {
spaceUsers = append(spaceUsers, plugin_models.GetSpaceUsers_Model{
Guid: user.GUID,
Username: user.Username,
IsAdmin: user.IsAdmin,
Roles: rolesToString(user.Roles),
})
}
*pluginModel = spaceUsers
},
}
}
func (p *pluginPrinter) PrintUsers(guid string, username string) {
for _, role := range p.roles {
users, _ := p.userLister(guid, role)
for _, user := range users {
p.users.storeAppendingRole(role, user.Username, user.GUID, user.IsAdmin)
}
}
p.printer(p.users.all())
}
func (coll userCollection) storeAppendingRole(role models.Role, username string, guid string, isAdmin bool) {
u := coll[username]
u.Roles = append(u.Roles, role)
u.Username = username
u.GUID = guid
u.IsAdmin = isAdmin
coll[username] = u
}
func (coll userCollection) all() (output []userWithRoles) {
for _, u := range coll {
output = append(output, u)
}
return output
}
func rolesToString(roles []models.Role) []string {
var rolesStr []string
for _, role := range roles {
rolesStr = append(rolesStr, role.ToString())
}
return rolesStr
}
|
/**
* Created by Alicia on 18/04/2016.
*/
var patata = "cruda";
function cuece() {
    // Logs "la patata está  undefined": the `var patata` declaration below
    // is hoisted to the top of this function and shadows the outer variable,
    // but its assignment only happens after the console.log call.
    console.log('la patata está ', patata);
    var patata = 'cocida';
}
cuece(); |
<?php
/**
 * curl library
*
* @license MIT
* @author espada [email protected]
*/
namespace Zd\upload\request;
class Curl
{
/**
* curl get
*
* @param string $url
* @param array $options
* @return mixed
*/
public static function get($url, $options = [])
{
$ch = \curl_init();
        curl_setopt($ch, CURLOPT_TIMEOUT, 30); // set request timeout (seconds)
curl_setopt($ch, CURLOPT_URL, $url);
curl_setopt($ch, CURLOPT_RETURNTRANSFER, TRUE);
if ($options) {
foreach ($options as $key => $value) {
curl_setopt($ch, $key, $value);
}
}
$result = curl_exec($ch);
curl_close($ch);
return $result;
}
/**
* curl post
*
* @param string $url
* @param array $options
* @return mixed
*/
public static function post($url, $data, $options = [])
{
$ch = curl_init();
        curl_setopt($ch, CURLOPT_TIMEOUT, 30); // set request timeout (seconds)
curl_setopt($ch, CURLOPT_URL, $url);
curl_setopt($ch, CURLOPT_RETURNTRANSFER, TRUE);
curl_setopt($ch, CURLOPT_POST, 1);
if ($options) {
foreach ($options as $key => $value) {
curl_setopt($ch, $key, $value);
}
}
curl_setopt($ch, CURLOPT_POSTFIELDS, $data);
$result = curl_exec($ch);
curl_close($ch);
return $result;
}
    /**
     * post json
     *
     * @param string $url
     * @param string $data
     * @param array $options
     * @return mixed
     */
    public static function postJson($url, $data, $options = [])
    {
        // Merge the JSON headers into the caller's options instead of
        // discarding $options (the original ignored this parameter).
        $options[CURLOPT_HTTPHEADER] = [
            'Content-Type: application/json',
            'Content-Length: ' . strlen($data)
        ];
        return self::post($url, $data, $options);
    }
/**
* post xml
*
* @param string $url
* @param string $xml
* @return array
*/
public static function postXml($url, $xml, $options = [])
{
$options[CURLOPT_SSL_VERIFYPEER] = true;
$options[CURLOPT_SSL_VERIFYHOST] = 2;
$result = self::post($url, $xml, $options);
$result = json_decode(json_encode(simplexml_load_string($result, 'SimpleXMLElement', LIBXML_NOCDATA)), true);
return $result;
}
} |
using eScape.Core.Page;
using eScapeLLC.UWP.Charts;
using System;
using System.Diagnostics;
using System.Threading;
using Windows.UI.Xaml;
using Windows.UI.Xaml.Media;
using Windows.UI.Xaml.Navigation;
using Yacc.Demo.VM;
namespace Yacc.Demo.Pages {
public sealed partial class Chart1 : BasicPage {
public override string PageTitle => "Demo Chart";
public int CurrentChildCount { get; set; }
Timer cctimer;
public Chart1() {
this.InitializeComponent();
cctimer = new Timer(Timer_Callback, this, 1000, 1000);
}
/// <summary>
/// Update the child count so the viewer can see how recycling levels out.
/// </summary>
/// <param name="ox"></param>
async void Timer_Callback(object ox) {
await Dispatcher.RunAsync(Windows.UI.Core.CoreDispatcherPriority.Normal, () => {
// the chart has one child, which is a Canvas
var child = VisualTreeHelper.GetChild(chart, 0);
// everything lives in the canvas on one level
CurrentChildCount = VisualTreeHelper.GetChildrenCount(child);
// tell x:Bind
Bindings.Update();
});
}
protected override object InitializeDataContext(NavigationEventArgs e) {
var vm = new ObservationsVM(Dispatcher, new[] {
new Observation("Group 1", -0.5, 1),
new Observation("Group 2", 3, 10),
new Observation("Group 3", 2, 5),
new Observation("Group 4", 3, -10),
new Observation("Group 5", 4, -3.75),
new Observation("Group 6", -5.25, 0.5)
});
return vm;
}
protected override void DataContextReleased(NavigatingCancelEventArgs ncea) {
base.DataContextReleased(ncea);
if (cctimer != null) {
try {
cctimer.Change(Timeout.Infinite, Timeout.Infinite);
cctimer.Dispose();
} finally {
cctimer = null;
}
}
}
private void Add_item_Click(object sender, Windows.UI.Xaml.RoutedEventArgs e) {
(DataContext as ObservationsVM).AddTail();
}
private void Remove_head_Click(object sender, Windows.UI.Xaml.RoutedEventArgs e) {
(DataContext as ObservationsVM).RemoveHead();
}
private void Add_and_remove_head_Click(object sender, Windows.UI.Xaml.RoutedEventArgs e) {
(DataContext as ObservationsVM).AddAndRemoveHead();
}
private void Remove_tail_Click(object sender, RoutedEventArgs e) {
(DataContext as ObservationsVM).RemoveTail();
}
private void Add_head_Click(object sender, RoutedEventArgs e) {
(DataContext as ObservationsVM).AddHead();
}
private void Chart_ChartError(Chart sender, ChartErrorEventArgs args) {
foreach(var ev in args.Results) {
Debug.WriteLine($"chart error {ev.Source}\t{String.Join(",", ev.MemberNames)}: {ev.ErrorMessage}");
}
}
}
}
|
import { Column, Entity, PrimaryColumn } from 'typeorm';
import { AbstractEntity } from 'shared/core';
import { Currency, EmploymentType, Level, Technology } from '../../domain/types';
@Entity('offers')
export class OfferEntity extends AbstractEntity {
@PrimaryColumn()
offer_id: string;
@Column()
title: string;
@Column()
description: string;
@Column()
technology: Technology;
@Column()
level: Level;
@Column()
employment_type: EmploymentType;
@Column()
city_name: string;
@Column()
street_name: string;
@Column()
price_min: number;
@Column()
price_max: number;
@Column()
currency: Currency;
@Column('decimal')
longitude: number;
@Column('decimal')
latitude: number;
@Column('text', { array: true })
must_have: string[];
@Column('text', { array: true })
nice_to_have: string[];
}
|
package indexer
import (
"encoding/csv"
"os"
"os/signal"
"github.com/sirupsen/logrus"
"gopkg.in/src-d/core-retrieval.v0/model"
"gopkg.in/src-d/core-retrieval.v0/repository"
"gopkg.in/src-d/go-kallax.v1"
)
// Index writes all the processed repositories in the given store to the
// given output CSV file.
func Index(
store *model.RepositoryStore,
txer repository.RootedTransactioner,
outputFile string,
workers int,
limit uint64,
offset uint64,
list []string,
) {
f, err := createOutputFile(outputFile)
if err != nil {
logrus.WithField("file", outputFile).WithField("err", err).
Fatal("unable to create file")
}
defer f.Close()
w := csv.NewWriter(f)
if err := w.Write(csvHeader); err != nil {
logrus.WithField("err", err).Fatal("unable to write csv header")
}
w.Flush()
rs, total, err := getResultSet(store, limit, offset, list)
if err != nil {
logrus.WithField("err", err).Fatal("unable to get result set")
}
signals := make(chan os.Signal, 1)
signal.Notify(signals, os.Interrupt)
repos := processRepos(workers, txer, rs)
var processed int
for {
select {
case repo, ok := <-repos:
if !ok {
logrus.WithFields(logrus.Fields{
"processed": processed,
"failed": total - int64(processed),
"total": total,
}).Info("finished processing all repositories")
return
}
logrus.WithField("repo", repo.URL).Debug("writing record to CSV")
if err := w.Write(repo.toRecord()); err != nil {
logrus.WithFields(logrus.Fields{
"err": err,
"repo": repo.URL,
}).Fatal("unable to write csv record")
}
w.Flush()
processed++
case <-signals:
logrus.Warn("received an interrupt signal, stopping")
return
}
}
}
func createOutputFile(outputFile string) (*os.File, error) {
if _, err := os.Stat(outputFile); err != nil && !os.IsNotExist(err) {
return nil, err
} else if err == nil {
logrus.WithField("file", outputFile).Warn("file exists, it will be deleted")
if err := os.Remove(outputFile); err != nil {
return nil, err
}
}
return os.Create(outputFile)
}
func getResultSet(
store *model.RepositoryStore,
limit, offset uint64,
list []string,
) (*model.RepositoryResultSet, int64, error) {
query := model.NewRepositoryQuery().
FindByStatus(model.Fetched).
WithReferences(nil)
var repos = make([]interface{}, len(list))
for i, r := range list {
repos[i] = r
}
if len(list) > 0 {
query = query.Where(kallax.ArrayOverlap(
model.Schema.Repository.Endpoints,
repos...,
))
}
if limit > 0 {
query = query.Limit(limit)
}
if offset > 0 {
query = query.Offset(offset)
}
total, err := store.Count(query)
if err != nil {
return nil, 0, err
}
rs, err := store.Find(query.Order(kallax.Asc(model.Schema.Repository.ID)))
if err != nil {
return nil, 0, err
}
return rs, total, nil
}
|
package com.aws.dynamodb
object DynamoKeyAttribute extends Enumeration {
type Types = DynamoKeyAttribute.Value
val String: Types = Value("S")
val Number: Types = Value("N")
val Binary: Types = Value("B")
}
|
//9, 16 -> 9, 28 AllowLoadtime == true
package p;
class S {
public static S instance= new S();
int f(){
int v= instance.f();
return 0;
}
} |
package edu.learn.jpa.repos
import org.springframework.data.repository.CrudRepository
import org.springframework.data.repository.NoRepositoryBean
import org.springframework.data.repository.PagingAndSortingRepository
@NoRepositoryBean
interface GenericRepo<T, ID> : CrudRepository<T, ID>, PagingAndSortingRepository<T, ID>
|
package repl
import scala.collection.mutable.ListBuffer
class Repl {
private[this] val submissionHistory: ListBuffer[String] = ListBuffer()
  private[this] var submissionHistoryIndex: Int = _
private[this] var done: Boolean = _
  def evaluateMetaCommand(text: String) = {
    println(s"Invalid command ${text}")
  }
  def evaluateSubmission(text: String) = ???
def run(): Unit = {
while (true) {
val text: String = ""
if (text == null || text.isEmpty)
return
if (!text.contains('\n') && text.startsWith("#"))
evalateMetaCommand(text)
else
evalateSubmission(text)
submissionHistory += text
submissionHistroyIndex = 0
}
}
sealed class SubmissionView(val lineRenderer: ListBuffer[String],
val submissionDocument: ListBuffer[String]) {
var cursorTop: Int = _
var renderedLineCount: Int = _
var currentLine: Int = _
var currentCharacter: Int = _
def render(): Unit = {}
}
}
|
# frozen_string_literal: true
module Tianguis
class Product
attr_accessor :name, :state, :category, :kind
attr_reader :quality, :variant
def initialize
yield(self) if block_given?
end
def to_h
{
name: name,
quality: quality,
variant: variant.to_h,
state: state,
category: category,
kind: kind
}
end
def quality=(value)
@quality = calc_quality(value)
end
def variant=(value)
@variant = Variant.new(value)
end
private
def calc_quality(value)
case value.downcase
when /primera/
1
when /segunda/
2
when /tercera/
3
else
0
end
end
end
end
|
package me.anatoliy57.chunit.core;
import com.laytonsmith.core.Globals;
import com.laytonsmith.core.constructs.CNull;
import com.laytonsmith.core.natives.interfaces.Mixed;
import me.anatoliy57.chunit.util.ReflectionUtils;
import java.util.Map;
public class OriginalGlobals {
private static Map<String, Mixed> originalGlobals;
public static void InitOriginalGlobal() {
Map<String, Mixed> globalsMap = ReflectionUtils.GetGlobalsMap();
if(!(globalsMap instanceof ExtendGlobals)) {
originalGlobals = globalsMap;
}
}
public static void SetGlobal(String name, Mixed value) {
synchronized (Globals.class) {
if (value instanceof CNull) {
originalGlobals.remove(name);
} else {
originalGlobals.put(name, value);
}
}
}
public static Mixed GetGlobalConstruct(String name) {
synchronized (Globals.class) {
return originalGlobals.getOrDefault(name, CNull.NULL);
}
}
public static Map<String, Mixed> GetGlobals() {
return originalGlobals;
}
}
|
package khats
trait ApplicativeKlass[T[_[_], _]] extends ApplicativeK[T] with ApplyKlass[T] with PointedK[T] {
final def applicativek: ApplicativeK[T] = this
} |
1 31 63 116 129 158 200 210
2 26 41 49 97 216
3 8 77 84 107 112 121
4 48 91 139 180
5 98 119
6 4 105 117 149 188 193 195
7 34 173 219
8 181 197
9 75 142 183
10 154 155 156 193
11 29 205
12 7 67 92 99 131
13 58 108 114 115 191 209
14
15 1 174
16 73 91 198
17 171 173 223
18 109 126
19 72
20 9 159
21 23 212
22 124 156 214
23 61 75 112 162 205 214 220
24 177
25 44 149 192
26 31 42 64 80 177
27 95 142 154 220
28 175 198
29 80 100 115 146 209
30 10 67 157 222
31 2 15 203 209
32 91 136 155 202
33 53 66 171
34 6 101 159 169
35 83 107 159 190 207 224
36 84 145 211
37 42 76 131 159 168 209
38 21 22 57 96 169 218
39 27 104 127 179
40 35 41 98
41 83
42 3 136 171 177
43 131 141 206
44 21 36 56 95 108 178
45 13 64 90 142 215
46 8 74 89 102 197 204
47 27 184
48 5 24 40 86 117 224
49 111
50 29 69 84 131
51 14 69 220
52 41 42 121 170
53 5 11 213 216
54 160
55 158
56 23 53 64 92
57 54 103 161
58 7
59 3 27 81 148
60 7 16 154 195 197
61 41 73 148 171 203 209 218 223
62 34 80 98 116 227
63 15 21 31 36 90 142 158 202
64 34 54 219
65 80 158 196
66 2 6 9 36 72 87 207
67 10 26 74
68 130
69 228
70 79 101 227
71 21
72 52 189
73 14 37 68 83 98 174 207 210 218
74 93 108 140 143 192
75 20 92 97 136 153 162 176
76 12
77 89 170 177
78 67 83 169 196
79 11 35 65 108 114
80 55 133 151 212 218
81 90 112 189 222
82 54 88 206
83 3 50 63 209
84 141 177
85 155 177
86 32 116
87 118 120 152 158 214
88 7 50 194
89 102 200
90 51 54 98
91 94 139 175
92 29 97 202
93 40 177
94 59 62 67 151 195
95 28 48 90 160 222
96 29 39 115 123 187
97 66 119 125 127 202 225
98 91
99 46 98 136
100 3 45 58 67 139 145 186 229
101 46 65 69 93 99 112 190
102
103 72 131 189 199
104 179
105 17 111 147
106 30 42 98
107 57
108 7 33 48 51 124 200
109 85 160 203
110 24 96 158 222
111 18 104 123 130 140 162 214
112 87 94
113 52 58 135 226
114 18 62 75 87 163 193
115 140 200 225
116 33 52 89 126 204
117
118 74 132 159 211
119 7 80 113 139 212 229
120 38
121 27 56 117 131
122 39 45 52 93 194 195
123 9 42 151 171
124
125 64 103
126 15 31 175
127 109
128 107 122
129 29 116 224
130 80
131 74 161 181 194
132 24 40 96 144 176 193 228
133 39 41 143 153 226
134 24 50 181
135 23 37 48 71 122
136 39 51 69 199
137 6 28 188 216
138 8 162 188 192 223
139 22 111 195 205
140 31 59 102 141 157
141 116 122 131 149
142 10 34 44 103 114 186
143 95 175
144 2 166 187 215
145 19 133 165 171
146 36 70 116
147 14 54 72 142 166 208
148 137 138
149 108
150 25 215
151 30 37 44
152 95 106 138 139
153 13 83 111 174
154 11 176 199 202
155 89 196
156 19 134 145 166 186 205
157 173 209 210 222
158 2 91 198
159 44 112 120 167
160 77 103 178
161 117 155
162
163 3 20 79 128 160 186
164 48 104
165 12 68 221
166 24 63 161
167 77
168 17 133 196
169 48 129 138
170 139 160 194
171 6 16 34
172 15 24 160 186
173 49 97 106 188
174 54 133 154 155
175 155
176 66 94 127 201 230
177 51 117
178 25 63 94 103 204 229
179 49 80 81 97 133 141
180 10 202
181 137 151 154 199
182 53 59 66 79 109
183 12 48 55 89 103 225
184 41 153 190 214
185 140
186 11 20 70 188 197
187 181
188 1 20 64 144
189 112 157
190 54 84 103 201
191 136
192
193 64 111 184 185 219
194 86 211
195 161 173
196 6 88 209
197 5 163 205
198 45 106 107 113 221 223
199 36 149 210
200 20 60 64 100 123 142 149
201 70 71 117
202 28 64 80 116 157 162 204
203 32 139 163 184 199 218
204 34 139 225
205 29 96 160 165 199
206 57 100 133
207 74 162 164 203
208 133
209 114
210 70 135
211 60 64 91
212 46 184 192 204
213 24 73 92 186 188
214 6 25 230
215 96 115 122
216 190
217 176
218 74 79
219 50 130 211 213 225
220 70 101 114 210
221 6
222 56 139
223 31 37 115 170 174 193 217
224 87
225 163 166 184 187
226 63 161
227
228 67 96 108
229 217
230 70 141 183 |
export { default as S3Service } from './S3Service';
export { default as ElasticTranscoderService } from './ElasticTranscoderService';
|
import axios from "axios";
import dotenv from "dotenv";
import _ from "lodash";
import { getUrl } from "./query";
dotenv.config();
const { HOST } = process.env;
/**
* POST /admin/api/2020-07/fulfillment_orders/{fulfillment_order_id}/fulfillment_request.json
Sends a fulfillment request
POST /admin/api/2020-07/fulfillment_orders/{fulfillment_order_id}/fulfillment_request/accept.json
Accepts a fulfillment request
POST /admin/api/2020-07/fulfillment_orders/{fulfillment_order_id}/fulfillment_request/reject.json
Rejects a fulfillment request
*/
export const createFulfillmentRequest = async (
fulfillmentOrderId,
changeset
) => {
//changeset = {message: "Fulfill this ASAP please."};
  const result = await axios.post(
`${getUrl()}/fulfillment_orders/${fulfillmentOrderId}/fulfillment_request.json`,
{
fulfillment_request: changeset,
}
);
return result.data.submitted_fulfillment_order;
};
export const acceptFulfillmentRequest = async (
fulfillmentOrderId,
changeset
) => {
//changeset = {message: "Ok to fulfill your request."};
  const result = await axios.post(
`${getUrl()}/fulfillment_orders/${fulfillmentOrderId}/fulfillment_request/accept.json`,
{
fulfillment_request: changeset,
}
);
return result.data.fulfillment_order;
};
export const rejectFulfillmentRequest = async (
fulfillmentOrderId,
changeset
) => {
//changeset = {message: "Not OK to fulfill your request."};
  const result = await axios.post(
`${getUrl()}/fulfillment_orders/${fulfillmentOrderId}/fulfillment_request/reject.json`,
{
fulfillment_request: changeset,
}
);
return result.data.fulfillment_order;
};
|
# PoET 1.0 Specification
# Introduction
<!--
Licensed under Creative Commons Attribution 4.0 International License
https://creativecommons.org/licenses/by/4.0/
-->
The Proof of Elapsed Time (PoET) Consensus method offers a solution to
the Byzantine Generals Problem that utilizes a "trusted execution
environment" to improve on the efficiency of present solutions such as
Proof-of-Work. The initial reference implementation of PoET released to
Hyperledger was written for an abstract TEE to keep it flexible to any
TEE implementation. This specification defines a concrete implementation
for SGX. The following presentation assumes the use of Intel SGX as the
trusted execution environment.
At a high-level, PoET stochastically elects individual peers to execute
requests at a given target rate. Individual peers sample an
exponentially distributed random variable and wait for an amount of time
dictated by the sample. The peer with the smallest sample wins the
election. Cheating is prevented through the use of a trusted execution
environment, identity verification and blacklisting based on asymmetric
key cryptography, and an additional set of election policies.
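To make the election mechanics concrete, here is a minimal Python sketch of
the lottery idea (the function and peer names are illustrative, not part of
the spec): every peer draws an exponentially distributed wait time and the
smallest sample wins. In PoET the sampling happens inside the enclave so the
wait can be verified; the sketch only shows the statistics.

```python
import random

def sample_wait_time(local_mean, minimum_wait=1.0):
    # random.expovariate(1/mean) draws an exponentially distributed sample;
    # a minimum wait keeps the duration bounded away from zero.
    return minimum_wait + random.expovariate(1.0 / local_mean)

# Each peer samples independently; the smallest duration wins the election.
peers = ["validator-a", "validator-b", "validator-c"]
waits = {peer: sample_wait_time(local_mean=20.0) for peer in peers}
winner = min(waits, key=waits.get)
print(f"{winner} wins after waiting {waits[winner]:.2f}s")
```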
For the purpose of achieving distributed consensus efficiently, a good
lottery function has several characteristics:
> - Fairness: The function should distribute leader election across
> the broadest possible population of participants.
> - Investment: The cost of controlling the leader election process
> should be proportional to the value gained from it.
> - Verification: It should be relatively simple for all participants
> to verify that the leader was legitimately selected.
PoET is designed to achieve these goals using new secure CPU
instructions which are becoming widely available in consumer and
enterprise processors. PoET uses these features to ensure the safety and
randomness of the leader election process without requiring the costly
investment of power and specialized hardware inherent in most "proof"
algorithms.
Sawtooth includes an implementation which simulates the secure
instructions. This should make it easier for the community to work with
the software but also forgoes Byzantine fault tolerance.
PoET essentially works as follows:
1. Every validator requests a wait time from an enclave (a trusted
function).
2. The validator with the shortest wait time for a particular
transaction block is elected the leader.
3. One function, such as "CreateTimer", creates a timer for a
transaction block that is guaranteed to have been created by the
enclave.
4. Another function, such as "CheckTimer", verifies that the timer was
created by the enclave. If the timer has expired, this function
creates an attestation that can be used to verify that validator did
wait the allotted time before claiming the leadership role.
The PoET leader election algorithm meets the criteria for a good lottery
algorithm. It randomly distributes leadership election across the entire
population of validators with distribution that is similar to what is
provided by other lottery algorithms. The probability of election is
proportional to the resources contributed (in this case, resources are
general purpose processors with a trusted execution environment). An
attestation of execution provides information for verifying that the
certificate was created within the enclave (and that the validator
waited the allotted time). Further, the low cost of participation
increases the likelihood that the population of validators will be
large, increasing the robustness of the consensus algorithm.
# Definitions
The following terms are used throughout the PoET spec and are defined
here for reference.
Enclave
: A protected area in an application's address space which provides
confidentiality and integrity even in the presence of privileged
malware.
The term can also be used to refer to a specific enclave that has
been initialized with a specific code and data.
Basename
: A service provider base name. In our context the service provider
entity is the distributed ledger network. Each distinct network
should have its own Basename and Service Provider ID (see EPID and
IAS specifications).
EPID
: An anonymous credential system. See E. Brickell and Jiangtao Li:
"Enhanced Privacy ID from Bilinear Pairing for Hardware
Authentication and Attestation". IEEE International Conference on
Social Computing / IEEE International Conference on Privacy,
Security, Risk and Trust. 2010.
EPID Pseudonym
: Pseudonym of an SGX platform used in linkable quotes. It is part of
the IAS attestation response according to IAS API specifications. It
is computed as a function of the service Basename (validator network
in our case) and the device's EPID private key.
PPK, PSK
: PoET ECC public and private key created by the PoET enclave.
IAS Report Key
: IAS public key used to sign attestation reports as specified in the
current IAS API Guide.
PSEmanifest
: Platform Services Enclave manifest. It is part of an SGX quote for
enclaves using Platform Services like Trusted Time and Monotonic
Counters.
AEP
: Attestation evidence payload sent to IAS (see IAS API
specifications). Contains JSON encodings of the quote, an optional
PSEmanifest, and an optional nonce.
AVR
: Attestation verification report, the response to a quote attestation
request from the IAS. It is verified with the IAS Report Key. It
contains a copy of the input AEP.
$WaitCertId_{n}$
: The $n$-th or most recent WaitCertificate digest. We assume $n \geq 0$
represents the current number of blocks in the ledger. WaitCertId is
a function of the contents of the Wait Certificate. For instance the
SHA256 digest of the WaitCertificate ECDSA signature.
OPK, OSK
: Originator ECDSA public and private key. These are the higher level
ECDSA keys a validator uses to sign messages.
OPKhash
: SHA256 digest of OPK
blockDigest
: ECDSA signature with OSK of SHA256 digest of transaction block that
the validator wants to commit.
localMean
: Estimated wait time local mean.
MCID
: SGX Monotonic Counter identifier.
SealKey
: The SGX enclave Seal Key. It is used by the SGX `sgx_seal_data()`
and `sgx_unseal_data()` functions.
PoetSealKey
: The PoET SGX enclave Seal Key. It must be obtained through the SGX
SDK `sgx_get_key()` function, passing a fixed 32 byte constant as the
`key_id` argument.
PoET_MRENCLAVE
: Public MRENCLAVE (see SGX SDK documentation) value of valid PoET SGX
enclave.
$T_{WT}$
: WaitTimer timeout in seconds. A validator has at most $T_{WT}$
seconds to consume a WaitTimer, namely obtain a WaitCertificate on
it after the WaitTimer itself has expired.
$K$
: Number of blocks a validator can commit before having to sign-up
with a fresh PPK.
$c$
: The \"sign-up delay\", i.e., number of blocks a validator has to
wait after sign-up before starting to participate in elections.
minDuration
: Minimum duration for a WaitTimer.
# P2P PoET SGX Enclave Specifications
The P2P PoET SGX enclave uses the following data structures:
WaitTimer {
double requestTime
double duration
byte[32] WaitCertId:sub:`n`
double localMean
}
WaitCertificate {
WaitTimer waitTimer
byte[32] nonce
byte[] blockDigest
}
It uses the following global variables:
WaitTimer activeWT # The unique active WaitTimer object
byte[64] PPK
byte[64] PSK
MCID # SGX Monotonic Counter Identifier
It exports the following functions:
## `generateSignUpData(OPKhash)`
**Returns**
``` console
byte[64] PPK
byte[432] report # SGX Report Data Structure
byte[256] PSEmanifest
byte[672] sealedSignUpData # (PPK, PSK, MCID) tuple encrypted with SealKey
```
**Parameters**
``` console
byte[32] OPKhash # SHA256 digest of OPK
```
**Description**
1. Generate fresh ECC key pair (PPK, PSK)
2. Create monotonic counter and save its identifier as MCID.
3. Use the SGX `sgx_seal_data()` function to encrypt (PPK, PSK, MCID)
with SealKey (using MRENCLAVE policy)
$sealedSignupData = \textnormal{AES-GCM}_{SealKey} (PPK | PSK | MCID)$
4. Create SGX enclave report, store `SHA256(OPKhash|PPK)` in
`report_data` field.
5. Get SGX PSE manifest: PSEManifest.
6. Save (PPK, PSK, MCID) as global variables within the enclave.
7. Set active WaitTimer instance activeWT to NULL.
8. Return (PPK, report, PSEmanifest, sealedSignUpData).
> **Implementation Note:** Normally, there is a maximum number of
> monotonic counters that can be created. One way to deal with this
> limitation is to destroy a previously created monotonic counter if this
> is not the first time the `generateSignUpData` function was called.
## `unsealSignUpData(sealedSignUpData)`
**Returns**
``` console
byte[64] PPK
```
**Parameters**
``` console
byte[672] sealedSignUpData # (PPK, PSK, MCID) tuple encrypted with SealKey
```
**Description**
1. Use the `sgx_unseal_data()` function to decrypt sealedSignUpData
into (PPK, PSK, MCID) with SealKey (using MRENCLAVE policy).
2. Save (PPK, PSK, MCID) as global variables within the enclave.
3. Set global active WaitTimer instance activeWT to NULL.
4. Return PPK
## `createWaitTimer(localMean, WaitCertId_n)`
**Returns**
``` console
WaitTimer waitTimer
byte[64] signature # ECDSA PSK signature of waitTimer
```
**Parameters**
``` console
double localMean # Estimated wait time local mean
byte[32] WaitCertId_n # SHA256 digest of WaitCertificate owner's ECDSA
# signature
```
**Description**
1. Increment monotonic counter MCID and store value in global variable
counterValue.
2. Compute
$tag = \textnormal{AES-CMAC}_{PoetSealKey} (WaitCertId_{n})$.
3. Convert lowest 64-bits of tag into double precision number in
$[0, 1]$: tagd.
4. Compute $duration = minimumDuration - localMean * log(tagd)$.
5. Set requestTime equal to SGX Trusted Time value.
6. Create WaitTimer object
$waitTimer = WaitTimer(requestTime, duration,
WaitCertId_{n}, localMean)$.
7. Compute ECDSA signature of waitTimer using PSK: $signature =
ECDSA_{PSK} (waitTimer)$.
8. Set global active WaitTimer instance activeWT equal to waitTimer.
9. Return (waitTimer, signature).
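As a sanity check on steps 2-4 above, the duration computation can be
sketched outside the enclave. This is a Python sketch, not the enclave code:
the AES-CMAC tag is stood in for by a fixed byte string, and which eight
bytes form the "lowest 64 bits" is an implementation detail assumed here.

```python
import math
import struct

def duration_from_tag(tag, local_mean, minimum_duration):
    # Interpret the lowest 64 bits of the tag as an unsigned integer and
    # normalize it into (0, 1]; adding 1 avoids log(0).
    low64 = struct.unpack("<Q", tag[:8])[0]
    tagd = (low64 + 1) / 2.0**64
    # duration = minimumDuration - localMean * log(tagd); since
    # log(tagd) <= 0, the duration is always >= minimumDuration.
    return minimum_duration - local_mean * math.log(tagd)

# A real implementation would use AES-CMAC over WaitCertId_n keyed with
# PoetSealKey; a fixed stand-in tag is used here.
print(duration_from_tag(b"\x42" * 16, local_mean=20.0, minimum_duration=1.0))
```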
## `createWaitCertificate(blockDigest)`
**Returns**
``` console
WaitCertificate waitCertificate
byte[64] signature # ECDSA PSK signature of waitCertificate
```
**Parameters**
``` console
byte[] blockDigest # ECDSA signature with originator private key of SHA256
# digest of transaction block that the validator wants
# to commit
```
**Description**
1. If activeWT is equal to NULL, exit.
2. Read monotonic counter MCID and compare its value to global variable
counterValue. If values do not match, exit.
3. Read SGX Trusted time into variable currentTime. If currentTime is
smaller than $waitTimer.requestTime + waitTimer.duration$, exit (the
duration has not elapsed yet).
4. If currentTime is larger than $waitTimer.requestTime +
waitTimer.duration+T_{WT}$, exit.
5. Generate random nonce.
6. Create WaitCertificate object $waitCertificate =
WaitCertificate(waitTimer, nonce, blockDigest)$.
7. Compute ECDSA signature of waitCertificate using PSK: $signature =
ECDSA_{PSK} (waitCertificate)$.
8. Set activeWT to NULL.
9. Return (waitCertificate, signature).
### Sign-up Phase
A participant joins as a validator by downloading the PoET SGX enclave
and a SPID certificate for the blockchain. The client side of the
validator runs the following sign-up procedure:
1. Start PoET SGX enclave: ENC.
2. Generate sign-up data:
$(PPK, report, PSEmanifest, sealedSignUpData) =
\textnormal{ENC.generateSignUpData(OPKhash)}$ The `report_data` (512
bits) field in the report body includes the SHA256 digest of
(OPKhash \| PPK).
3. Ask SGX Quoting Enclave (QE) for linkable quote on the report (using
   the validator network's Basename).
4. If Self Attestation is enabled in IAS API: request attestation of
linkable quote and PSE manifest to IAS. The AEP sent to IAS must
contain:
- isvEnclaveQuote: base64 encoded quote
- pseManifest: base64 encoded PSEmanifest
- nonce: $WaitCertId_{n}$
The IAS sends back a signed AVR containing a copy of the input AEP
and the EPID Pseudonym.
5. If Self Attestation is enabled in IAS API: broadcast self-attested
join request, (OPK, PPK, AEP, AVR) to known participants.
6. If Self Attestation is NOT enabled in IAS API: broadcast join
request, (OPK, PPK, quote, PSEmanifest) to known participants.
A validator has to wait for $c$ blocks to be published on the distributed
ledger before participating in an election.
The server side of the validator runs the following sign-up procedure:
1. Wait for a join request.
2. Upon arrival of a join request do the verification:
If the join request is self attested (Self Attestation is enabled in
IAS API): (OPK, PPK, AEP, AVR)
a. Verify AVR legitimacy using IAS Report Key and therefore quote
legitimacy.
b. Verify the `report_data` field within the quote contains the
SHA256 digest of (OPKhash \| PPK).
c. Verify the nonce in the AVR is equal to $WaitCertId_{n}$, namely
the digest of the most recently committed block. It may be that
the sender has not seen $WaitCertId_{n}$ yet and could be
sending $WaitCertId_{n'}$ where $n'<n$. In this case the sender
        should be urged to update his/her view of the ledger by
        appending the new blocks and retrying. It could also happen that
the receiving validator has not seen $WaitCertId_{n}$ in which
case he/she should try to update his/her view of the ledger and
verify again.
d. Verify MRENCLAVE value within quote is equal to PoET_MRENCLAVE
(there could be more than one allowed value).
e. Verify PSE Manifest SHA256 digest in AVR is equal to SHA256
digest of PSEmanifest in AEP.
f. Verify basename in the quote is equal to distributed ledger
Basename.
g. Verify attributes field in the quote has the allowed value
(normally the enclave must be in initialized state and not be a
debug enclave).
If the join request is not self attested (Self Attestation is NOT
enabled in IAS API): (OPK, PPK, quote, PSEmanifest)
a. Create AEP with quote and PSEmanifest :
- isvEnclaveQuote: base64 encoded quote
- pseManifest: base64 encoded PSEmanifest
b. Send AEP to IAS. The IAS sends back a signed AVR.
c. Verify received AVR attests to validity of both quote and
PSEmanifest and save EPID Pseudonym.
d. Verify `report_data` field within the quote contains the SHA256
digest of (OPKhash \| PPK).
e. Verify MRENCLAVE value within quote is equal to PoET_MRENCLAVE
(there could be more than one allowed value).
f. Verify basename in the quote is equal to distributed ledger
Basename.
g. Verify attributes field in the quote has the allowed value
(normally the enclave must be in initialized state and not be a
debug enclave).
If the verification fails, exit.
If the verification succeeds but the SGX platform identified by the
EPID Pseudonym in the quote has already signed up, ignore the join
request, exit.
If the verification succeeds:
a. Pass sign-up certificate of new participant (OPK, EPID
Pseudonym, PPK, current $WaitCertId_{n}$ to upper layers for
registration in EndPoint registry.
b. Goto 1.
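The binding checked in step b above (step d in the non-self-attested path)
is a plain hash comparison; a Python sketch of that check follows, where the
byte layouts are assumptions for illustration:

```python
import hashlib

def expected_report_data(opk: bytes, ppk: bytes) -> bytes:
    # report_data embeds SHA256(OPKhash | PPK), where OPKhash = SHA256(OPK).
    opk_hash = hashlib.sha256(opk).digest()
    return hashlib.sha256(opk_hash + ppk).digest()

def quote_binds_keys(report_data: bytes, opk: bytes, ppk: bytes) -> bool:
    # The 512-bit report_data field carries the 256-bit digest; placing it
    # in the first 32 bytes is an assumption of this sketch.
    return report_data[:32] == expected_report_data(opk, ppk)
```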
### Election Phase
Assume the identifier of the most recent valid block is
$WaitCertId_{n}$. Broadcast messages are signed by a validator with
his/her PPK. To participate in the election phase a validator runs the
following procedure on the client side:
1. Start the PoET SGX enclave: ENC.
2. Read the sealedSignUpData from disk and load it into enclave:
$ENC.\textnormal{unsealSignUpData}(sealedSignUpData)$
3. Call $(waitTimer, signature) = ENC.createWaitTimer(localMean,
WaitCertId_{n})$.
4. Wait waitTimer.duration seconds.
5. Call $(waitCertificate, signature) =
ENC.createWaitCertificate(blockDigest)$.
6. If the `createWaitCertificate()` call is successful, broadcast
(waitCertificate, signature, block, OPK, PPK) where block is the
transaction block identified by blockDigest.
On the server side a validator waits for incoming (waitCertificate,
signature, block, OPK, PPK) tuples. When one is received the following
validity checks are performed:
1. Verify the PPK and OPK belong to a registered validator by checking
the EndPoint registry.
2. Verify the signature is valid using the sender's PPK.
3. Verify the PPK was used by sender to commit less than $K$ blocks by
checking EndPoint registry (otherwise sender needs to re-sign).
4. Verify the waitCertificate.waitTimer.localMean is correct by
comparing against localMean computed locally.
5. Verify the waitCertificate.blockDigest is a valid ECDSA signature of
the SHA256 hash of block using OPK.
6. Verify the sender has been winning elections according to the
expected distribution (see z-test documentation).
7. Verify the sender signed up at least $c$ committed blocks ago, i.e.,
respected the $c$ block start-up delay.
A valid waitCertificate is passed to the upper ledger layer and the
waitCertificate with the lowest value of
waitCertificate.waitTimer.duration determines the election winner.
### Revocation
Two mechanisms are put in place to blacklist validators whose EPID key
has been revoked by IAS. The first one affects each validator
periodically, although infrequently. The second one is an asynchronous
revocation check that each validator could perform on other validators'
EPID keys at any time.
1. **Periodic regeneration of PPK** a validator whose EPID key has been
revoked by the IAS would not be able to obtain any valid AVR and
therefore would be prevented from signing-up. Forcing validators to
periodically re-sign with a fresh sign-up certificate leaves
validators whose EPID keys have been revoked out of the system.
Validators have to re-sign after they commit $K$ blocks and if they
do not they are considered revoked.
2. **Asynchronous sign-up quote verification** A validator can (at any
time) ask IAS for attestation on a quote that another validator used
to sign-up to check if his/her EPID key has been revoked since. If
so the returned AVR will indicate that the key is revoked. A
validator who obtains such an AVR from IAS can broadcast it in a
blacklisting transaction, so that all the validators can check the
veracity of the AVR and proceed with the blacklisting. To limit the
use of blacklisting transactions as a means to thwart liveness for
malicious validators one can control the rate at which they can be
committed in different ways:
- A certain number of participation tokens needs to be burned to
commit a blacklisting transaction.
- A validator can commit a blacklisting transaction only once
he/she wins one or more elections.
- A validator who commits a certain number of non-legit
blacklisting transactions is blacklisted.
### Security Considerations
1. $T_{WT}$ **motivation**: A validator has at most $T_{WT}$ seconds to
consume a WaitTimer, namely obtain a WaitCertificate on it after the
    WaitTimer itself has expired. This constraint prevents the following:
    when there are no transactions to build a block for some time,
    several validators might hold back after waiting out their WaitTimers
    and generate their WaitCertificates only once enough transactions are
    available. At that point they would all send out their
    WaitCertificates at once, generating a lot of traffic and possibly
    inducing forks. The timeout mitigates this problem.
2. **Enclave compromise:** a compromised SGX platform that is able to
arbitrarily win elections cannot affect the correctness of the
system, but can hinder progress by publishing void transactions.
This problem is mitigated by limiting the frequency with which a
validator (identified by his/her PPK) can win elections in a given
time frame (see z-test documentation).
3. **WaitTimer duration manipulation:**
    a. Imposing a $c$ block participation delay after sign-up prevents
       validators from generating different pairs of OPK, PPK and
       picking the one that would result in the lowest value of the
       next WaitTimer duration as follows:
i. Generate as many PPK,PSK pairs and therefore monotonic
counters as possible.
ii. Do not sign up but use all the enclaves (each using a
different PPK, PSK and MCID) to create a WaitTimer every
time a new block is committed until a very low duration is
obtained (good chance of winning the election). Then collect
all the different waitCertIds.
       iii. Ask each enclave to create the next waitTimer, whose
            duration depends on each of the different winning
            waitCertIds. Choose the PPK of the enclave giving the
            lowest next duration and sign up with that.
       iv. As a result an attacker can win the first election (with
           high probability) and can chain the above 3 steps to get a
           good chance of winning several elections in a row.
b. The nonce field in WaitCertificate is set to a random value so
that a validator does not have control over the resulting
$WaitCertId_{n}$. A validator winning an election could
otherwise try different blockDigest input values to
createWaitCertificate and broadcast the WaitCertificate whose
$WaitCertId_{n}$ results in the lowest duration of his/her next
WaitTimer.
c. The call `createWaitTimer()` in step 1 of the election phase
(client side) is bound to the subsequent call to
`createWaitCertificate()` by the internal state of the PoET
enclave. More precisely only one call to
`createWaitCertificate()` is allowed after a call to
`createWaitTimer()` (and the duration has elapsed) as the value
of the global active WaitTimer object activeWT is set to null at
the end of `createWaitCertificate()` so that subsequent calls
would fail. Therefore only one transaction block (identified by
the input parameter blockDigest) can be attached to a
WaitCertificate object. This prevents a malicious user from
creating multiple WaitCertificates (each with a different nonce)
resulting in different WaitCertId digests without re-creating a
WaitTimer (and waiting for its duration) each time. It follows
that as long as the duration of WaitTimer is not too small a
malicious validator who wins the current election has very
limited control over the duration of his/hers next WaitTimer.
d. The check on the Monotonic Counter value guarantees only one
enclave instance can obtain a WaitCertificate after the
WaitTimer duration elapses. This again prevents a malicious user
from running multiple instances of the enclave to create
multiple WaitCertificates (each with a different nonce)
resulting in different WaitCertId digests and selecting the one
that would result in the lowest duration for a new WaitTimer.
e. A monotonic counter with id MCID is created at the same time PPK
and PSK are generated and the triple (MCID, PPK, PSK) is
encrypted using AES-GCM with the Seal Key and saved in permanent
storage. A malicious validator cannot run multiple enclave
instances (before signing up) to create multiple monotonic
counters without being forced to commit to using only one
eventually. As a monotonic counter is bound to PPK, PSK through
the AES-GCM encryption with the Seal Key, when a validator
signs-up with a PPK it automatically commits to using the
monotonic counter that was created along with PPK, PSK.
4. **Sign-up AEP replay:** the use of the nonce field in the AEP, which
is set equal to $WaitCertId_{n}$, is used to prevent the replay of
old AEPs.
### Comments on Multi-user or Multi-ledger SGX Enclave Service
It is possible to use the same enclave for multiple users or ledgers by
providing username and ledgername as input parameters to
`generateSignUpData()` and `unsealSignUpData()`. Then the sign-up tuple
(username, ledgername, PPK, PSK, MCID) is sealed to disk, with username
and ledgername used to generate the filename. Anytime a user
authenticates to the service the latter can have the enclave unseal and
use the sign-up tuple from the file corresponding to that user (and
ledger).
# Population Size and Local Mean Computation
**Parameters**:
1. targetWaitTime: the desired average wait time. This depends on the
network diameter and is selected to minimize the probability of a
collision.
2. initialWaitTime: the initial wait time used in the bootstrapping
phase until the ledger contains sampleLength blocks.
3. sampleLength: number of blocks that need to be on the ledger to
finish the bootstrapping phase
4. minimumWaitTime: a lower bound on the wait time.
The population size is computed as follows:
1. $sumMeans = 0$
2. $sumWaits = 0$
3. **foreach** wait certificate $wc$ stored on the ledger:
   $sumWaits += wc\textrm{.duration}-\textrm{minimumWaitTime}$
   $sumMeans += wc\textrm{.localMean}$
4. $populationSize = sumMeans / sumWaits$
Assuming $b$ is the number of blocks currently claimed, the local mean
is computed as follows:
1. if $b < \textrm{sampleLength}$ then
   $ratio = 1.0\cdot b / \textrm{sampleLength}$ and
   $\textrm{localMean} = \textrm{targetWaitTime}\cdot (1 - ratio^2) + \textrm{initialWaitTime}\cdot ratio^2$.
2. else $\textrm{localMean} = \textrm{targetWaitTime}\cdot
   \textrm{populationSize}$
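Both computations above translate directly into code; this is a small Python
sketch in which the attribute names on the wait-certificate objects are
assumed for illustration:

```python
def population_size(wait_certificates, minimum_wait_time):
    # populationSize = sumMeans / sumWaits over all certificates on the ledger.
    sum_waits = sum(wc.duration - minimum_wait_time for wc in wait_certificates)
    sum_means = sum(wc.local_mean for wc in wait_certificates)
    return sum_means / sum_waits

def local_mean(b, sample_length, target_wait_time, initial_wait_time, population):
    # Bootstrapping phase: blend initialWaitTime and targetWaitTime
    # quadratically until sampleLength blocks are on the ledger.
    if b < sample_length:
        ratio = b / sample_length
        return target_wait_time * (1 - ratio**2) + initial_wait_time * ratio**2
    return target_wait_time * population
```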
# z-test
A z-test is used to test the hypothesis that a validator won elections
at a higher average rate than expected.
**Parameters**:
1. zmax: test value, it measures the maximum deviation from the
   expected mean. It is selected so that the desired significance
   level $\alpha$ is obtained. Example configurations are:
   a. $\textrm{zmax}=1.645 \rightarrow \alpha=0.05$
   b. $\textrm{zmax}=2.325 \rightarrow \alpha=0.01$
   c. $\textrm{zmax}=2.575 \rightarrow \alpha=0.005$
   d. $\textrm{zmax}=3.075 \rightarrow \alpha=0.001$
2. testValidatorId: the validator identifier under test.
3. blockArray: an array containing pairs of validator identifier and
estimated population size. Each pair represents one published
transaction block.
4. minObservedWins: minimum number of election wins that needs to be
observed for the identifier under test.
The z-test is computed as follows:
```
observedWins = expectedWins = blockCount = 0
foreach block = (validatorId, populationEstimate) in blockArray:
    blockCount += 1
    expectedWins += 1 / populationEstimate
    if validatorId is equal to testValidatorId:
        observedWins += 1
        if observedWins > minObservedWins and observedWins > expectedWins:
            p = expectedWins / blockCount
            σ = sqrt(blockCount * p * (1.0 - p))
            z = (observedWins - expectedWins) / σ
            if z > zmax:
                return False
return True
```
If the z-test fails (False is returned) then the validator under test
won elections at a higher average rate than expected.
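A runnable Python sketch of the same test, assuming `block_array` is an iterable of `(validator_id, population_estimate)` pairs:
```python
import math

def passes_z_test(block_array, test_validator_id, zmax, min_observed_wins):
    """Return False if the validator won at a significantly higher rate than expected."""
    observed_wins = 0.0
    expected_wins = 0.0
    block_count = 0
    for validator_id, population_estimate in block_array:
        block_count += 1
        expected_wins += 1.0 / population_estimate
        if validator_id == test_validator_id:
            observed_wins += 1
            if observed_wins > min_observed_wins and observed_wins > expected_wins:
                p = expected_wins / block_count
                sigma = math.sqrt(block_count * p * (1.0 - p))
                z = (observed_wins - expected_wins) / sigma
                if z > zmax:
                    return False
    return True
```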
|
# Build example for an express, reactjs env
> An example of a client and server environment for ReactJS in ES6
> (gulp, webpack, react, flux, bootstrap 4, sass, babel)
### Install
```
cd client
npm install
```
> Run for dev ([localhost:8080](http://localhost:8080)):
```
gulp
```
> For the final build:
```
gulp build
``` |
#include <brisbane/brisbane_openmp.h>
#ifdef __cplusplus
extern "C" {
#endif
typedef struct {
__global float * Z;
float A;
__global float * X;
__global float * Y;
} brisbane_openmp_saxpy_args;
brisbane_openmp_saxpy_args saxpy_args;
static int brisbane_openmp_saxpy_setarg(int idx, size_t size, void* value) {
switch (idx) {
case 1: memcpy(&saxpy_args.A, value, size); break;
default: return BRISBANE_ERR;
}
return BRISBANE_OK;
}
static int brisbane_openmp_saxpy_setmem(int idx, void* mem) {
switch (idx) {
case 0: saxpy_args.Z = (__global float *__restrict) mem; break;
case 2: saxpy_args.X = (__global float *__restrict) mem; break;
case 3: saxpy_args.Y = (__global float *__restrict) mem; break;
default: return BRISBANE_ERR;
}
return BRISBANE_OK;
}
#include "kernel.openmp.h"
int brisbane_openmp_kernel(const char* name) {
brisbane_openmp_lock();
if (strcmp(name, "saxpy") == 0) {
brisbane_openmp_kernel_idx = 0;
return BRISBANE_OK;
}
return BRISBANE_ERR;
}
int brisbane_openmp_setarg(int idx, size_t size, void* value) {
switch (brisbane_openmp_kernel_idx) {
case 0: return brisbane_openmp_saxpy_setarg(idx, size, value);
}
return BRISBANE_ERR;
}
int brisbane_openmp_setmem(int idx, void* mem) {
switch (brisbane_openmp_kernel_idx) {
case 0: return brisbane_openmp_saxpy_setmem(idx, mem);
}
return BRISBANE_ERR;
}
int brisbane_openmp_launch(int dim, size_t off, size_t ndr) {
switch (brisbane_openmp_kernel_idx) {
case 0: saxpy(saxpy_args.Z, saxpy_args.A, saxpy_args.X, saxpy_args.Y, off, ndr); break;
}
brisbane_openmp_unlock();
return BRISBANE_OK;
}
#ifdef __cplusplus
} /* end of extern "C" */
#endif
|
package com.vvt.events;
public enum FxRecipientType {
TO(0),
CC(1),
BCC(2);
private int mNumber;
FxRecipientType(int number){
this.mNumber = number;
}
public int getNumber() {
return this.mNumber;
}
} |
class FormAttachment < ActiveRecord::Base
mount_uploader :file, AttachmentUploader
belongs_to :form
end
|
require 'spec_helper'
require 'fix_db_schema_conflicts/autocorrect_configuration'
RSpec.describe FixDbSchemaConflicts::AutocorrectConfiguration do
subject(:autocorrect_config) { described_class }
it 'for versions 0.79.0 and above' do
installed_rubocop(version: '0.79.0')
expect(autocorrect_config.load).to eq('.rubocop_schema.79.yml')
end
def installed_rubocop(version:)
allow(Gem).to receive_message_chain(:loaded_specs, :[], :version)
.and_return(Gem::Version.new(version))
end
end
|
; Tests for the Funnel Shift intrinsics fshl/fshr(a, b, c) which
; - concatenate 'a' and 'b'
; - shift the result by an amount 'c' (left or right)
; - return the top or bottom half of result (depending on direction)
;
; This file consists of a sequence of tests of the form
; ```
; T = llvm_fshl(a, b, c);
; C = T != r;
; if C {
; klee_abort();
; }
; ```
; where the constants a, b, c and r are copied from the constants
; used in the LLVM testfile llvm/test/Analysis/ConstantFolding/funnel-shift.ll
; REQUIRES: geq-llvm-7.0
; RUN: %llvmas %s -o=%t.bc
; RUN: rm -rf %t.klee-out
; RUN: %klee -exit-on-error --output-dir=%t.klee-out --optimize=false %t.bc
target datalayout = "e-m:e-i64:64-f80:128-n8:16:32:64-S128"
target triple = "x86_64-pc-linux-gnu"
declare i32 @llvm.fshl.i32(i32, i32, i32)
declare i32 @llvm.fshr.i32(i32, i32, i32)
declare i7 @llvm.fshl.i7(i7, i7, i7)
declare i7 @llvm.fshr.i7(i7, i7, i7)
declare void @klee_abort()
; Function Attrs: noinline nounwind optnone uwtable
define dso_local i32 @main() #0 {
; T1: extract(concat(0x12345678, 0xABCDEF01) << 5) = 0x468ACF15
%T1 = call i32 @llvm.fshl.i32(i32 305419896, i32 2882400001, i32 5)
%C1 = icmp ne i32 %T1, 1183502101
br i1 %C1, label %FAIL1, label %OK1
FAIL1:
call void @klee_abort()
unreachable
OK1:
; T2: extract(concat(0x12345678, 0xABCDEF01) >> 5) = 0xC55E6F78
; Try an oversized shift (37) to test modulo functionality.
%T2 = call i32 @llvm.fshr.i32(i32 305419896, i32 2882400001, i32 37)
%C2 = icmp ne i32 %T2, 3311300472
br i1 %C2, label %FAIL2, label %OK2
FAIL2:
call void @klee_abort()
unreachable
OK2:
; T3: extract(concat(0b1110000, 0b1111111) << 2) = 0b1000011
; Use a weird type.
; Try an oversized shift (9) to test modulo functionality.
%T3 = call i7 @llvm.fshl.i7(i7 112, i7 127, i7 9)
%C3 = icmp ne i7 %T3, 67
br i1 %C3, label %FAIL3, label %OK3
FAIL3:
call void @klee_abort()
unreachable
OK3:
; T4: extract(concat(0b1110000, 0b1111111) >> 2) = 0b0011111
; Try an oversized shift (16) to test modulo functionality.
%T4 = call i7 @llvm.fshr.i7(i7 112, i7 127, i7 16)
%C4 = icmp ne i7 %T4, 31
br i1 %C4, label %FAIL4, label %OK4
FAIL4:
call void @klee_abort()
unreachable
OK4:
ret i32 0
}
|
from ctypes import c_int, c_char
from warnings import warn
import openmc.exceptions as exc
from . import _dll
def _error_handler(err, func, args):
"""Raise exception according to error code."""
# Get error code corresponding to global constant.
def errcode(s):
return c_int.in_dll(_dll, s).value
# Get error message set by OpenMC library
errmsg = (c_char*256).in_dll(_dll, 'openmc_err_msg')
msg = errmsg.value.decode()
# Raise exception type corresponding to error code
if err == errcode('OPENMC_E_ALLOCATE'):
raise exc.AllocationError(msg)
elif err == errcode('OPENMC_E_OUT_OF_BOUNDS'):
raise exc.OutOfBoundsError(msg)
elif err == errcode('OPENMC_E_INVALID_ARGUMENT'):
raise exc.InvalidArgumentError(msg)
elif err == errcode('OPENMC_E_INVALID_TYPE'):
raise exc.InvalidTypeError(msg)
elif err == errcode('OPENMC_E_INVALID_ID'):
raise exc.InvalidIDError(msg)
elif err == errcode('OPENMC_E_GEOMETRY'):
raise exc.GeometryError(msg)
elif err == errcode('OPENMC_E_DATA'):
raise exc.DataError(msg)
elif err == errcode('OPENMC_E_PHYSICS'):
raise exc.PhysicsError(msg)
elif err == errcode('OPENMC_E_WARNING'):
warn(msg)
elif err < 0:
if not msg:
msg = "Unknown error encountered (code {}).".format(err)
raise exc.OpenMCError(msg)
|
#!/bin/sh
PACKAGE_VERSION=$(jq -r .version package.json)
GITHUB_ENDPOINT="${GITHUB_API_URL}/repos/${GITHUB_REPOSITORY}/git/ref/tags/v${PACKAGE_VERSION}"
GITHUB_ENDPOINT_STATUS=$(
curl \
-LI $GITHUB_ENDPOINT \
-o /dev/null \
-w '%{http_code}\n' \
-sH "Authorization: token ${GITHUB_TOKEN}" \
)
if [ "$GITHUB_ENDPOINT_STATUS" = "404" ]
then
echo "This version doesn't exist"
elif [ "$GITHUB_ENDPOINT_STATUS" = "200" ]
then
echo "This version already exists"
exit 1
else
echo "There was a problem connecting to GitHub"
exit 1
fi
|
using System;
namespace Lesson_2
{
class Program
{
static void Main(string[] args)
{
Console.WriteLine("====Workers-1====");
Workers workers1 = new Workers("Michael","Scott",123456,"Manager");
workers1.workersInfo();
Console.WriteLine("====Workers-2====");
Workers workers2 = new Workers();
workers2.Name = "Dwight";
workers2.LastName = "Schrute";
workers2.Id = 654321;
workers2.Department = "Sales Manager";
workers2.workersInfo();
}
}
class Workers
{
public string Name;
public string LastName;
public int Id;
public string Department;
public Workers(string name, string lastName, int id, string department)
{
Name = name;
LastName = lastName;
Id = id;
Department = department;
}
public Workers(){}
public void workersInfo()
{
Console.WriteLine("Worker's Name : {0}" , Name);
Console.WriteLine("Worker's Lastname : {0}" , LastName);
Console.WriteLine("Worker's Id : {0}" , Id);
Console.WriteLine("Worker's Department : {0}" , Department);
}
}
}
|
<?php
/*
* This file is part of the Sylius package.
*
* (c) Paweł Jędrzejewski
*
* For the full copyright and license information, please view the LICENSE
* file that was distributed with this source code.
*/
namespace Sylius\Bundle\OrderBundle\Form\Type;
use Symfony\Component\Form\AbstractType;
use Symfony\Component\OptionsResolver\OptionsResolverInterface;
use Sylius\Component\Order\Model\OrderInterface;
/**
* Order state choice type.
*
* @author Aram Alipoor <[email protected]>
*/
class OrderStateChoiceType extends AbstractType
{
public function setDefaultOptions(OptionsResolverInterface $resolver)
{
$resolver->setDefaults(array(
'choices' => array(
OrderInterface::STATE_CART => 'sylius.order.state.checkout',
OrderInterface::STATE_CART_LOCKED => 'sylius.order.state.cart_locked',
OrderInterface::STATE_PENDING => 'sylius.order.state.ordered',
OrderInterface::STATE_CONFIRMED => 'sylius.order.state.order_confimed',
OrderInterface::STATE_SHIPPED => 'sylius.order.state.shipped',
OrderInterface::STATE_ABANDONED => 'sylius.order.state.abandoned',
OrderInterface::STATE_CANCELLED => 'sylius.order.state.cancelled',
OrderInterface::STATE_RETURNED => 'sylius.order.state.returned',
)
));
}
/**
* {@inheritdoc}
*/
public function getParent()
{
return 'choice';
}
/**
* {@inheritdoc}
*/
public function getName()
{
return 'sylius_order_state_choice';
}
}
|
/*
* @Author: wangli
* @Date: 2020-07-19 13:23:29
* @LastEditors: 王利
* @LastEditTime: 2020-12-09 10:32:04
*/
import Layout from "@/layout";
const sysRouter = {
path: "/sys",
component: Layout,
redirect: "/sys/dictionary",
name: "SYS",
meta: {
title: "系统管理",
icon: "table"
},
children: [
{
path: "dictionary",
component: () => import("@SYS/pages/dictionary"),
name: "Dictionary",
meta: { title: "数据字典", keepAlive: true }
}
]
};
export default sysRouter;
|
<?hh // strict
// Copyright 2004-present Facebook. All Rights Reserved.
class C<T as arraykey> {}
function f(mixed $x): void {
if ($x is C<_>) {
// We expect $x to be C<T#1> where T#1 has the same bounds
// as T in C's declaration
hh_show($x);
}
}
|
/**
* The failsafe route that catches everything.
*
 * This should always be last in a routes array.
*/
'use strict';
// Import dependencies
const chalk = require('chalk');
const Route = require('../Route.js');
const Printable = require('../Printable');
// Define the route options.
const route = {
name: 'no-route',
test: () => true,
run: () => [
new Printable('error', 'Command not found!'),
new Printable('raw', `It's 'Levi-O-sa', not 'Levio-sA'. Try '${chalk.bold('fidelius -h')}' for help.\n`),
],
};
// Export the route.
module.exports = new Route(route);
|
// The entry file of your WebAssembly module.
import { Console, CommandLine } from "as-wasi";
export function _start(): void {
// Parse command line arguments
let commandLine = new CommandLine();
let args: Array<string> = commandLine.all();
if (args.length < 2) {
Console.log('Please pass an argument to echo');
return;
}
Console.log(args[1]);
}
|
# Wrapping a primitive
* wrap a primitive when I want to use the value as an object
* to put into a collection
* wrapping a value
```java
int i = 288;
Integer iWrap = new Integer(i);
```
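* note: the `Integer(int)` constructor is deprecated since Java 9; `Integer.valueOf(i)` is preferred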
* unwrapping a value
```java
int unWrapped = iWrap.intValue();
```
* Autoboxing from Java5.0
```java
ArrayList<Integer> listOfNumbers = new ArrayList<>();
listOfNumbers.add(3); // autoboxing
int num = listOfNumbers.get(0); // unboxing
```
* For all the cases below, one can use a reference to a wrapper, or a primitive of the matching type
* Method arguments
* `void takeNumber(Integer i)` or `void takeNumber(int i)`
* return values
* `int giveNumber() {return x;}` or `Integer giveNumber() {return x;}`
* Operations on numbers
* `Integer i = new Integer(3); i++;` or `Integer k = i + 3;`
* assignments
* `Double d = x;`
### wrapper methods
* Wrapper classes have static utility methods
```java
int x = Integer.parseInt("2");
int x = Integer.parseInt("two"); // NumberFormatException!!!
double d = Double.parseDouble("42.24");
boolean b = Boolean.parseBoolean("True"); // ignores the case of the characters
```
|
/*
* Copyright (C) 2013 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef MEDIA_ADAPTER_H
#define MEDIA_ADAPTER_H
#include <media/stagefright/foundation/ABase.h>
#include <media/stagefright/MediaSource.h>
#include <media/stagefright/MediaBuffer.h>
#include <media/stagefright/MetaData.h>
#include <utils/threads.h>
namespace android {
// Convert the MediaMuxer's push model into MPEG4Writer's pull model.
// Used only by the MediaMuxer for now.
struct MediaAdapter : public MediaSource, public MediaBufferObserver {
public:
// MetaData is used to set the format and returned at getFormat.
MediaAdapter(const sp<MetaData> &meta);
virtual ~MediaAdapter();
/////////////////////////////////////////////////
// Inherited functions from MediaSource
/////////////////////////////////////////////////
virtual status_t start(MetaData *params = NULL);
virtual status_t stop();
virtual sp<MetaData> getFormat();
virtual status_t read(
MediaBuffer **buffer, const ReadOptions *options = NULL);
/////////////////////////////////////////////////
// Inherited functions from MediaBufferObserver
/////////////////////////////////////////////////
virtual void signalBufferReturned(MediaBuffer *buffer);
/////////////////////////////////////////////////
// Non-inherited functions:
/////////////////////////////////////////////////
// pushBuffer() will wait for the read() finish, and read() will have a
// deep copy, such that after pushBuffer return, the buffer can be re-used.
status_t pushBuffer(MediaBuffer *buffer);
private:
Mutex mAdapterLock;
// Make sure the read() wait for the incoming buffer.
Condition mBufferReadCond;
// Make sure the pushBuffer() wait for the current buffer consumed.
Condition mBufferReturnedCond;
MediaBuffer *mCurrentMediaBuffer;
bool mStarted;
sp<MetaData> mOutputFormat;
DISALLOW_EVIL_CONSTRUCTORS(MediaAdapter);
};
} // namespace android
#endif // MEDIA_ADAPTER_H
|
/**
* Project Name: com.qiniu.sdkdemo
* File Name: Basic.java
* Package Name: com.qiniu.sdkdemo
* Date Time: 06/11/2017 6:14 PM
* Copyright (c) 2017, xxx_xxx All Rights Reserved.
*/
package com.qiniu.sdkdemo;
import com.qiniu.common.Zone;
import com.qiniu.http.Response;
import com.qiniu.storage.UploadManager;
import com.qiniu.util.StringMap;
import com.qiniu.util.UrlSafeBase64;
import java.io.IOException;
/**
* ClassName: UploadMain
* Description: TODO
* Date Time: 06/11/2017 6:14 PM
* @author Nigel Wu [email protected]
* @version V1.0
* @since V1.0
* @jdk 1.8
* @see
*/
public class UploadMain extends Basic {
public Response simpleUpload(String fileName) throws IOException {
return upload(
getUploadManager(null, false),
config.getFilepath() + fileName
);
}
public Response overwriteUpload1(String fileName) throws IOException {
return uploadWithPolicy(
getUploadManager(null, false),
config.getFilepath() + fileName,
new StringMap().put("insertOnly", 1),
config.getFirstBucketName(),
"overwrite1.jpg"
);
}
public Response overwriteUpload2(String fileName) throws IOException {
return uploadWithPolicy(
getUploadManager(null, false),
config.getFilepath() + fileName,
new StringMap().put("insertOnly", 1),
config.getFirstBucketName(),
"overwrite2.jpg",
"3600"
);
}
public Response callbackUpload(String fileName) throws IOException {
return uploadWithPolicy(
getUploadManager(null, false),
config.getFilepath() + fileName,
new StringMap()
// .put("callbackUrl", "http://oysol03xx.bkt.clouddn.com/a.jpg")
.put("callbackBody", "filename=$(fname)&filesize=$(fsize)"),
config.getFirstBucketName(),
"callback.jpg",
"3600"
);
}
public Response breakpointUpload(String fileName) throws IOException {
return upload(
getUploadManager(null, true),
config.getFilepath() + fileName,
config.getFirstBucketName(),
"breakpoint.avi"
);
}
public Response pfopsUpload(String fileName) throws IOException {
//Set the transcoding operation parameters
String fops = "avthumb/mp4/s/640x360/vb/1.25m";
//Set the transcoding queue
String pipeline = "testqueue";
//The transcoded file can be given a custom name via the saveas parameter; if omitted, a default name is used and the file is saved in the current bucket.
String urlbase64 = UrlSafeBase64.encodeToString(config.getFirstBucketName() + ":" + "pfops.mp4");
String pfops = fops + "|saveas/" + urlbase64;
return uploadWithPolicy(
getUploadManager(null, false),
config.getFilepath() + fileName,
new StringMap()
.putNotEmpty("persistentOps", pfops)
.putNotEmpty("persistentPipeline", pipeline),
config.getFirstBucketName(),
"pfops.avi",
"3600",
"true"
);
}
public static void main(String[] args) {
UploadMain uploadMain = new UploadMain();
try {
Response res = uploadMain.simpleUpload("1.jpg");
System.out.println(res.bodyString());
} catch (IOException e) {
e.printStackTrace();
}
try {
Response res = uploadMain.overwriteUpload1("2.jpg");
System.out.println(res.bodyString());
} catch (IOException e) {
e.printStackTrace();
}
try {
Response res = uploadMain.overwriteUpload2("3.jpg");
System.out.println(res.bodyString());
} catch (IOException e) {
e.printStackTrace();
}
try {
Response res = uploadMain.breakpointUpload("t.avi");
System.out.println(res.bodyString());
} catch (IOException e) {
e.printStackTrace();
}
try {
Response res = uploadMain.callbackUpload("4.jpg");
System.out.println(res.bodyString());
} catch (IOException e) {
e.printStackTrace();
}
try {
Response res = uploadMain.pfopsUpload("t.avi");
System.out.println(res.bodyString());
} catch (IOException e) {
e.printStackTrace();
}
}
}
|
using System;
using System.Collections.Generic;
using System.ComponentModel;
using System.Drawing;
using System.Windows.Forms;
using Blumind.Controls;
using Blumind.Controls.MapViews;
using Blumind.Core;
using Blumind.Core.Documents;
using Blumind.Globalization;
using Blumind.Model;
using Blumind.Model.MindMaps;
namespace Blumind.Dialogs
{
partial class FindDialog : BaseDialog
{
public enum FindDialogMode
{
Find,
Replace,
}
FindDialogMode _Mode = FindDialogMode.Find;
float ControlSpace = 10;
MindMapFinder Finder = new MindMapFinder();
DocumentManageForm MainForm;
string LastFindWhat;
public FindDialog()
{
InitializeComponent();
OnModeChanged();
OpenOptions = false;
CancelButton = BtnClose;
CkbCaseSensitive.Checked = FindOptions.Default.CaseSensitive;
CkbWholeWordOnly.Checked = FindOptions.Default.WholeWordOnly;
CkbRegularExpression.Checked = FindOptions.Default.RegularExpression;
//CkbForwardSearch.Checked = FindOptions.Default.Direction == FindDirection.Forward;
CkbWithHiddenItems.Checked = FindOptions.Default.WithHiddenItems;
AfterInitialize();
}
public FindDialog(DocumentManageForm mainForm)
:this()
{
MainForm = mainForm;
}
[DefaultValue(FindDialogMode.Find)]
public FindDialogMode Mode
{
get { return _Mode; }
set
{
if (_Mode != value)
{
_Mode = value;
OnModeChanged();
}
}
}
[Browsable(false), DefaultValue(null)]
public ChartControl ChartPageView
{
get
{
if (MainForm != null && MainForm.SelectedForm is DocumentForm)
return ((DocumentForm)MainForm.SelectedForm).ActiveChartBox;
else
return null;
}
}
protected override bool ShowButtonArea
{
get
{
return true;
}
}
[DefaultValue(true)]
public bool OpenOptions
{
get { return !FbOptions.Folded; }
set { FbOptions.Folded = !value; }
}
protected override void OnLoad(EventArgs e)
{
base.OnLoad(e);
LayoutControls();
FillHistory(CmbFindWhat, FindTextHistory);
FillHistory(CmbReplaceWith, ReplaceTextHistory);
CmbFindWhat.SelectAll();
ResetFocus();
}
public override void ApplyTheme(UITheme theme)
{
base.ApplyTheme(theme);
if (this.myToolStrip1 != null)
this.myToolStrip1.Renderer = theme.ToolStripRenderer;
this.SetFontNotScale(theme.DefaultFont);
}
void OnModeChanged()
{
Text = Mode == FindDialogMode.Find ? Lang._("Find") : Lang._("Replace");
TsbFind.Checked = Mode == FindDialogMode.Find;
TsbReplace.Checked = Mode == FindDialogMode.Replace;
LabReplaceWith.Visible = Mode == FindDialogMode.Replace;
CmbReplaceWith.Visible = Mode == FindDialogMode.Replace;
BtnReplace.Visible = Mode == FindDialogMode.Replace;
Finder.Reset();
LayoutControls();
//LayoutButtons();
//PerformLayout();
}
void OnMapViewChanged()
{
}
protected override void OnFormClosing(FormClosingEventArgs e)
{
if (e.CloseReason == CloseReason.UserClosing)
{
Hide();
e.Cancel = true;
}
base.OnFormClosing(e);
}
protected override void OnLayout(LayoutEventArgs e)
{
base.OnLayout(e);
LocateButtonsRight(new Button[] { BtnFind, BtnReplace, BtnClose });
//LayoutControls();
}
protected override void OnCurrentLanguageChanged()
{
Text = Mode == FindDialogMode.Find ? Lang._("Find") : Lang._("Replace");
TsbFind.Text = Lang._("Find");
TsbReplace.Text = Lang._("Replace");
LabFindWhat.Text = Lang._("Find What");
LabReplaceWith.Text = Lang._("Replace With");
FbOptions.Text = Lang._("Find Options");
CkbCaseSensitive.Text = Lang._("Case Sensitive");
CkbWholeWordOnly.Text = Lang._("Whole Word Only");
CkbRegularExpression.Text = Lang._("Regular Expression");
//CkbForwardSearch.Text = Lang._("Forward Search");
CkbWithHiddenItems.Text = Lang._("With Hidden Items");
BtnFind.Text = Lang._("Find Next");
BtnReplace.Text = Lang._("Replace");
BtnClose.Text = Lang._("Close");
base.OnCurrentLanguageChanged();
}
protected override void ScaleControl(SizeF factor, BoundsSpecified specified)
{
base.ScaleControl(factor, specified);
if ((specified & BoundsSpecified.Height) == BoundsSpecified.Height)
{
ControlSpace *= factor.Height;
}
}
void FillHistory(ComboBox list, List<string> history)
{
list.Items.Clear();
for (int i = history.Count - 1; i >= 0; i--)
{
list.Items.Add(history[i]);
}
if (list.Items.Count > 0)
{
list.SelectedIndex = 0;
}
}
void AddHistoryItem(ComboBox list, string item)
{
if (list.Items.Count == 0)
{
list.Items.Add(item);
}
else if ((string)list.Items[0] != item)
{
if (list.Items.Contains(item))
list.Items.Remove(item);
list.Items.Insert(0, item);
}
}
void LayoutControls()
{
float y = myToolStrip1.Bottom;
y += ControlSpace;
int h = Math.Max(LabFindWhat.Height, CmbFindWhat.Height);
LabFindWhat.Top = (int)Math.Ceiling(y + (h - LabFindWhat.Height) / 2);
CmbFindWhat.Top = (int)Math.Ceiling(y + (h - CmbFindWhat.Height) / 2);
y += h + ControlSpace;
if (Mode == FindDialogMode.Replace)
{
h = Math.Max(LabReplaceWith.Height, CmbReplaceWith.Height);
LabReplaceWith.Top = (int)Math.Ceiling(y + (h - LabReplaceWith.Height) / 2);
CmbReplaceWith.Top = (int)Math.Ceiling(y + (h - CmbReplaceWith.Height) / 2);
y += h + ControlSpace;
}
FbOptions.Top = (int)Math.Ceiling(y);
y += FbOptions.Height + ControlSpace;
h = (int)Math.Ceiling(y + ControlSpace) + ButtonAreaHeight;
MinimumSize = new Size(200, h + SystemInformation.ToolWindowCaptionHeight + SystemInformation.SizingBorderWidth);
ClientSize = new Size(ClientSize.Width, h);
}
//void LayoutButtons()
//{
// // buttons
// int x = ClientSize.Width - (int)Math.Ceiling(ControlSpace);
// int y = ClientSize.Height - (int)Math.Ceiling(ControlSpace) - BtnClose.Height;
// Button[] buttons = new Button[] { BtnFind, BtnReplace, BtnClose };
// for (int i = buttons.Length - 1; i >= 0; i--)
// {
// if (!buttons[i].Visible)
// continue;
// x -= buttons[i].Width;
// buttons[i].Location = new Point(x, y);
// x -= (int)Math.Ceiling(ControlSpace);
// }
//}
void TsbFind_Click(object sender, System.EventArgs e)
{
Mode = FindDialogMode.Find;
}
void TsbReplace_Click(object sender, System.EventArgs e)
{
Mode = FindDialogMode.Replace;
}
void FbOptions_FoldedChanged(object sender, System.EventArgs e)
{
LayoutControls();
}
void BtnClose_Click(object sender, System.EventArgs e)
{
Close();
}
void BtnFind_Click(object sender, EventArgs e)
{
if (DoFind() == null)
{
string msg = string.Format(Lang._("Can't find \"{0}\""), CmbFindWhat.Text);
this.ShowMessage(msg, MessageBoxIcon.Information);
Finder.Reset();
}
}
void BtnReplace_Click(object sender, EventArgs e)
{
if (!DoReplace())
{
string msg = Lang.Format("Can't find \"{0}\"", CmbFindWhat.Text);
this.ShowMessage(msg, MessageBoxIcon.Information);
}
}
void CkbCaseSensitive_CheckedChanged(object sender, EventArgs e)
{
FindOptions.Default.CaseSensitive = CkbCaseSensitive.Checked;
}
void CkbWholeWordOnly_CheckedChanged(object sender, EventArgs e)
{
FindOptions.Default.WholeWordOnly = CkbWholeWordOnly.Checked;
}
void CkbRegularExpression_CheckedChanged(object sender, EventArgs e)
{
FindOptions.Default.RegularExpression = CkbRegularExpression.Checked;
}
//private void CkbForwardSearch_CheckedChanged(object sender, EventArgs e)
//{
// FindOptions.Default.Direction = CkbForwardSearch.Checked ? FindDirection.Forward : FindDirection.Backward;
//}
void CkbWithHiddenItems_CheckedChanged(object sender, EventArgs e)
{
FindOptions.Default.WithHiddenItems = CkbWithHiddenItems.Checked;
}
void CmbFindWhat_KeyDown(object sender, KeyEventArgs e)
{
if (e.KeyCode == Keys.Enter)
{
if (BtnFind.Enabled)
{
BtnFind_Click(this, EventArgs.Empty);
e.SuppressKeyPress = true;
}
}
}
void CmbReplaceWith_KeyDown(object sender, KeyEventArgs e)
{
if (e.KeyCode == Keys.Enter)
{
if (BtnReplace.Enabled)
{
BtnReplace_Click(this, EventArgs.Empty);
e.SuppressKeyPress = true;
}
}
}
bool DoReplace()
{
string findWhat = CmbFindWhat.Text;
string replaceWith = CmbReplaceWith.Text;
if (string.IsNullOrEmpty(findWhat))
return false;
if (ChartPageView.SelectedObject != null)
{
var topic = ChartPageView.SelectedObject;
string newText = Finder.Replace(topic.Text, findWhat, replaceWith);
if (topic.Text != newText)
{
ChartPageView.ChangeObjectText(topic, newText);
}
}
// find next
return DoFind() != null;
}
ChartObject DoFind()
{
string findWhat = CmbFindWhat.Text;
if (!string.IsNullOrEmpty(findWhat))
{
if (LastFindWhat != findWhat)
Finder.Reset();
LastFindWhat = findWhat;
ChartObject chartObject = ChartPageView.FindNext(Finder, findWhat);
if(chartObject != null)
{
//MapView.SelectTopic(topic, true);
ChartPageView.Select(chartObject);
PopFindTextHistory(findWhat);
AddHistoryItem(CmbFindWhat, findWhat);
return chartObject;
}
}
return null;
}
public void ResetFocus()
{
if (CmbFindWhat.CanFocus)
CmbFindWhat.Focus();
else
ActiveControl = CmbFindWhat;
}
#region history
static List<string> FindTextHistory = new List<string>();
static List<string> ReplaceTextHistory = new List<string>();
static void PopFindTextHistory(string text)
{
if (FindTextHistory.Contains(text))
FindTextHistory.Remove(text);
FindTextHistory.Add(text);
}
static void PopReplaceTextHistory(string text)
{
if (ReplaceTextHistory.Contains(text))
ReplaceTextHistory.Remove(text);
ReplaceTextHistory.Add(text);
}
#endregion
}
}
|
/*
* SPDX-FileCopyrightText: 2020 DB Station&Service AG <[email protected]>
*
* SPDX-License-Identifier: Apache-2.0
*/
package de.deutschebahn.bahnhoflive.backend
import com.android.volley.Request
import com.android.volley.VolleyError
class DetailedVolleyError(
val request: Request<*>,
cause: Throwable?
) : VolleyError(
(cause as? VolleyError)?.let { volleyError ->
"Status code ${volleyError.networkResponse?.statusCode}: ${request.url}"
} ?: "Failed: ${request.url}",
cause) |
-- DQL - Data Query Language - The SELECT statement
-- Create a new database
CREATE DATABASE bdprodutos;
USE bdprodutos;
-- Create a table in the "bdprodutos" database
CREATE TABLE produtos(
codigo INT AUTO_INCREMENT,
tipo VARCHAR(50), modelo VARCHAR(50), marca VARCHAR(50),
qdt INT, preco FLOAT,
PRIMARY KEY(codigo)
);
-- Rename a column
ALTER TABLE produtos
CHANGE COLUMN qdt qtd INT;
DESC produtos;
-- Insert some records into the "produtos" table
INSERT INTO produtos(tipo, modelo, marca, qtd, preco)
VALUES('TV', '29 TP WIDE', 'SONY', '2', '1200.00');
INSERT INTO produtos(tipo, modelo, marca, qtd, preco)
VALUES('DVD', 'D659', 'PHILIPS', 1, 399.00);
INSERT INTO produtos(tipo, modelo, marca, qtd, preco)
VALUES('DVD', '91P', 'SONY', 5, 459.95);
INSERT INTO produtos(tipo, modelo, marca, qtd, preco)
VALUES('CELULAR', '1100 PRETO', 'NOKIA', '10', '199.90');
INSERT INTO produtos(tipo, modelo, marca, qtd, preco)
VALUES('CELULAR', '4911 CAMERA', 'NOKIA', 5, 850.00);
INSERT INTO produtos(tipo, modelo, marca, qtd, preco)
VALUES('TV', '20 ST SAP', 'TOSHIBA', 10, '500.00');
INSERT INTO produtos(tipo, modelo, marca, qtd, preco)
VALUES('DVD', 'D202', 'PHILIPS', '4', 500.00);
INSERT INTO produtos(tipo, modelo, marca, qtd, preco)
VALUES('TV', '32 PLASMA', 'SONY', '1', '4500.00');
-- Example 1: Select all columns and all records.
SELECT * FROM produtos;
-- Example 2: Select some columns and all records.
SELECT modelo, marca, tipo, preco FROM produtos;
-- Example 3: Select some columns and some records.
SELECT modelo, marca, tipo FROM produtos
WHERE marca = 'SONY';
-- Example 4: Select some columns and some records.
SELECT modelo, marca, preco, qtd FROM produtos
WHERE preco <= 850.00;
-- Example 5: Select some columns and some
-- records using more than one condition (filter)
-- with the "AND" operator
-- Select all products whose price is
-- lower than R$ 500.00 and whose brand is SONY
SELECT modelo, marca, tipo, preco FROM produtos
WHERE preco < 500 AND marca = 'SONY';
-- Example 6 - The OR operator
-- Select the products whose price is less than
-- or equal to R$ 400.00 or whose type equals DVD
SELECT * FROM produtos
WHERE preco <= 400 OR tipo = 'dvd';
-- Example 7 - The AND and OR operators
-- Select all TVs of the SONY brand OR all
-- DVDs of the PHILIPS brand
SELECT * FROM produtos
WHERE (marca = 'sony' AND tipo = 'TV')
OR (tipo = 'DVD' AND marca = 'PHILIPS');
-- Example 8: Return the products costing from
-- 100 to 500 reais, including the values 100 and 500 reais.
SELECT * FROM produtos
WHERE preco >= 100 AND preco <= 500;
-- Example 8 using the BETWEEN operator
SELECT * FROM produtos
WHERE preco BETWEEN 100 AND 500;
INSERT INTO produtos(modelo, marca)
VALUES('Moto G9', 'Motorola');
INSERT INTO produtos(modelo, marca, tipo)
VALUES('Galaxy A70', 'Samsung', 'celular');
SELECT * FROM produtos;
-- Example 9: Return the products that have
-- no price defined:
SELECT * FROM produtos WHERE preco IS NULL;
-- Example 10: Return the products that have
-- their quantity defined
SELECT * FROM produtos WHERE qtd IS NOT NULL;
-- LIKE operator - Select all products whose
-- brand starts with the letter "N"
SELECT * FROM produtos
WHERE marca LIKE 'N%';
-- LIKE operator - Select all products whose
-- model contains the term "CAM"
SELECT * FROM produtos
WHERE modelo LIKE '%CAM%';
-- Example 13: Select all products with
-- codes 1, 2, 5, 7
SELECT * FROM produtos
WHERE codigo = 1 OR codigo = 2 OR codigo = 5 OR codigo = 7;
-- Example 13 - The IN() operator - Select all
-- products with codes 1, 2, 5, 7
SELECT * FROM produtos
WHERE codigo IN(1, 2, 5, 7);
-- Example: returns the products whose code is neither 6 nor 8
SELECT * FROM produtos
WHERE codigo NOT IN (6, 8);
-- --------------------------------------
-- SQL - The ORDER BY clause
-- Example 15: Select all products whose
-- price is less than or equal to 500, showing the
-- cheapest products first
SELECT * FROM produtos
WHERE preco <= 500
ORDER BY preco ASC;
-- Example 16: Select all products whose price is greater
-- than or equal to 700, showing the most expensive products first.
SELECT * FROM produtos
WHERE preco >= 700
ORDER BY preco DESC;
-- Example 17: Select all products whose type
-- equals 'DVD', ordering by brand and showing
-- the most expensive products first.
SELECT * FROM produtos
WHERE tipo = 'dvd'
ORDER BY marca ASC, preco DESC;
-- Example 18: Select the most expensive TV on record.
SELECT * FROM produtos
WHERE tipo = 'TV'
ORDER BY preco DESC
LIMIT 1; -- Sets how many records (rows) the query will return
-- Example 19: Select the 'tipo' column from the 'produtos'
-- table, showing each distinct type only once.
SELECT DISTINCT tipo FROM produtos;
|
<?php
use yii\db\Migration;
/**
* Handles the creation of table `grade`.
*/
class m161028_023447_create_grade_table extends Migration
{
/**
* @inheritdoc
*/
public function up()
{
$this->createTable('grade', [
'id' => $this->primaryKey(),
'standard' => $this->string(50)->notNull(),
'section' => $this->string(50)->notNull(),
'code' => $this->string(50)->notNull()->unique(),
'status' => $this->integer(3)->notNull(),
]);
}
/**
* @inheritdoc
*/
public function down()
{
$this->dropTable('grade');
}
}
|
import { Resolver, Mutation, Args, Query, ResolveField, Parent } from '@nestjs/graphql';
import { UserService } from './user.service';
import { UseGuards, Inject, forwardRef } from '@nestjs/common';
import { GqlAuthGuard } from '../auth/graphql.guard';
import { CurrentUser } from '../../decorators/current-user.decorator';
import { UserListResult } from '../../graphql/schemas/user/list.result';
import { UserListArgs } from '../../graphql/schemas/user/list.args';
import { UserEntity } from '../../database/entities/user.entity';
import { UserGroupResolver } from '../user-group/user-group.resolver';
import { UserGroupListResult } from '../../graphql/schemas/user-group/list.result';
import { GroupListResult } from '../../graphql/schemas/group/list.result';
import { GroupResolver } from '../group/group.resolver';
import { Int } from '@nestjs/graphql';
import { GroupListArgs } from '../../graphql/schemas/group/list.args';
@Resolver(() => UserEntity)
export class UserResolver {
@Inject(UserService)
private userService: UserService;
@Inject(forwardRef(() => UserGroupResolver))
private userGroupResolver: UserGroupResolver;
@Inject(GroupResolver)
private groupResolver: GroupResolver;
@UseGuards(GqlAuthGuard)
@Query(returns => UserEntity, { description: 'Get the currently logged-in user' })
getCurrentUser(@CurrentUser()/* @CurrentUser injects the user parsed from the token in the context */ user: any) {
return this.userService.getCurrent(user.id);
}
@UseGuards(GqlAuthGuard)
@Query(returns => UserListResult, { description: 'Get the user list' })
userList(@Args() data: UserListArgs) {
return this.userService.getUserList(data);
}
@ResolveField(() => UserGroupListResult)
userGroups(
@Parent() user: UserEntity,
@Args({ name: 'page', type: () => Int, nullable: true, defaultValue: 1 }) page: number,
@Args({ name: 'pageSize', type: () => Int, nullable: true, defaultValue: 10 }) pageSize: number,
@Args({ name: 'groupId', type: () => Int, nullable: true }) groupId: number
): Promise<UserGroupListResult> {
return this.userGroupResolver.listByUserId(user.id, page, pageSize, groupId);
}
@ResolveField(() => GroupListResult)
groups(
@Parent() user: UserEntity,
@Args() groupListArgs: GroupListArgs
): Promise<GroupListResult> {
return this.groupResolver.listByUserId(user.id, groupListArgs);
}
getById(id: number): Promise<UserEntity> {
return this.userService.getById(id);
}
}
|
require 'spec_helper'
# See https://github.com/geoblacklight/geoblacklight/wiki/Schema
describe GeoWorks::Discovery::DocumentBuilder do
subject { described_class.new(geo_concern_presenter, document_class) }
let(:geo_concern) { FactoryGirl.build(:public_vector_work, attributes) }
let(:geo_concern_presenter) { GeoWorks::VectorWorkShowPresenter.new(SolrDocument.new(geo_concern.to_solr), nil) }
let(:document_class) { GeoWorks::Discovery::GeoblacklightDocument.new }
let(:document) { JSON.parse(subject.to_json(nil)) }
let(:visibility) { Hydra::AccessControls::AccessRight::VISIBILITY_TEXT_VALUE_PUBLIC }
let(:metadata_mime_type) { 'application/xml; schema=iso19139' }
let(:metadata_file) { FileSet.new(id: 'metadatafile', geo_mime_type: metadata_mime_type) }
let(:metadata_presenter) { Hyrax::FileSetPresenter.new(SolrDocument.new(metadata_file.to_solr), nil) }
let(:geo_file_mime_type) { 'application/zip; ogr-format="ESRI Shapefile"' }
let(:geo_file) { FileSet.new(id: 'geofile', geo_mime_type: geo_file_mime_type, geometry_type: 'Line') }
let(:geo_file_presenter) { Hyrax::FileSetPresenter.new(SolrDocument.new(geo_file.to_solr), nil) }
let(:coverage) { GeoWorks::Coverage.new(43.039, -69.856, 42.943, -71.032) }
let(:issued) { '01/02/2013' }
let(:issued_xmlschema) { '2013-02-01T00:00:00Z' }
let(:attributes) { { id: 'geo-work-1',
title: ['Geo Work'],
coverage: coverage.to_s,
description: ['This is a Geo Work'],
creator: ['Yosiwo George'],
publisher: ['National Geographic'],
issued: issued,
spatial: ['Micronesia'],
temporal: ['2011'],
subject: ['Human settlements'],
language: ['Esperanto'],
identifier: ['ark:/99999/fk4'] }
}
before do
allow(geo_concern_presenter.solr_document).to receive(:visibility).and_return(visibility)
allow(geo_concern_presenter.solr_document).to receive(:representative_id).and_return(geo_file_presenter.id)
allow(geo_concern_presenter).to receive(:file_set_presenters).and_return([geo_file_presenter, metadata_presenter])
allow(geo_concern_presenter).to receive(:member_presenters).and_return([geo_file_presenter, metadata_presenter])
allow(geo_file_presenter.request).to receive_messages(host_with_port: 'localhost:3000', protocol: 'http://')
end
describe 'vector work' do
context 'required' do
it 'has all metadata' do
expect(document['dc_identifier_s']).to eq('ark:/99999/fk4')
expect(document['layer_slug_s']).to eq('institution-geo-work-1')
expect(document['dc_title_s']).to eq('Geo Work')
expect(document['solr_geom']).to eq('ENVELOPE(-71.032, -69.856, 43.039, 42.943)')
expect(document['dct_provenance_s']).to eq('Institution')
expect(document['dc_rights_s']).to eq('Public')
expect(document['geoblacklight_version']).to eq('1.0')
end
end
context 'optional' do
it 'has metadata' do
expect(document['dc_description_s']).to eq('This is a Geo Work')
expect(document['dc_creator_sm']).to eq(['Yosiwo George'])
expect(document['dc_subject_sm']).to eq(['Human settlements'])
expect(document['dct_spatial_sm']).to eq(['Micronesia'])
expect(document['dct_temporal_sm']).to eq(['2011'])
expect(document['dc_language_s']).to eq('Esperanto')
expect(document['dc_publisher_s']).to eq('National Geographic')
end
it 'has modified date' do
expect(document).to include('layer_modified_dt')
# TODO: Rails 4 puts +00:00 rather than Z for `xmlschema` format
expect(document['layer_modified_dt']).to match(/\d\d\d\d-\d\d-\d\dT\d\d:\d\d:\d\d(Z|\+00:00)/)
end
context 'issued date' do
it 'works with valid date' do
expect(document).to include('dct_issued_dt')
expect(document['dct_issued_dt']).to eq(issued_xmlschema)
end
end
it 'has date field' do
expect(document['solr_year_i']).to eq(2011)
end
it 'has layer info fields' do
expect(document['layer_geom_type_s']).to eq('Line')
expect(document['dc_format_s']).to eq('Shapefile')
end
it 'has references' do
refs = JSON.parse(document['dct_references_s'])
expect(refs['http://schema.org/url']).to eq('http://localhost:3000/concern/vector_works/geo-work-1')
expect(refs['http://www.isotc211.org/schemas/2005/gmd/']).to eq('http://localhost:3000/downloads/metadatafile')
expect(refs['http://schema.org/downloadUrl']).to eq('http://localhost:3000/downloads/geofile')
expect(refs['http://schema.org/thumbnailUrl']).to eq('http://localhost:3000/downloads/geofile?file=thumbnail')
end
end
end
describe 'raster work' do
let(:geo_concern_presenter) { GeoWorks::RasterWorkShowPresenter.new(SolrDocument.new(geo_concern.to_solr), nil) }
context 'with a GeoTIFF file and a FGDC metadata file' do
let(:geo_file_mime_type) { 'image/tiff; gdal-format=GTiff' }
let(:metadata_mime_type) { 'application/xml; schema=fgdc' }
it 'has layer info fields' do
expect(document['layer_geom_type_s']).to eq('Raster')
expect(document['dc_format_s']).to eq('GeoTIFF')
end
it 'has references' do
refs = JSON.parse(document['dct_references_s'])
expect(refs['http://www.opengis.net/cat/csw/csdgm']).to eq('http://localhost:3000/downloads/metadatafile')
end
end
context 'with an ArcGRID file and a MODS metadata file' do
let(:geo_file_mime_type) { 'application/octet-stream; gdal-format=AIG' }
let(:metadata_mime_type) { 'application/mods+xml' }
it 'has layer info fields' do
expect(document['dc_format_s']).to eq('ArcGRID')
end
it 'has references' do
refs = JSON.parse(document['dct_references_s'])
expect(refs['http://www.loc.gov/mods/v3']).to eq('http://localhost:3000/downloads/metadatafile')
end
end
end
describe 'image work' do
let(:geo_concern_presenter) { GeoWorks::ImageWorkShowPresenter.new(SolrDocument.new(geo_concern.to_solr), nil) }
context 'with a tiff file and no description' do
let(:geo_file_mime_type) { 'image/tiff' }
before do
allow(geo_concern_presenter).to receive(:description).and_return([])
end
it 'uses a default description' do
expect(document['dc_description_s']).to eq('A vector work object.')
end
it 'has layer info fields' do
expect(document['layer_geom_type_s']).to eq('Image')
expect(document['dc_format_s']).to eq('TIFF')
end
end
end
context 'with a missing required metadata field' do
before do
allow(geo_concern_presenter).to receive(:coverage).and_return(nil)
end
it 'returns an error document' do
expect(document['error'][0]).to include('solr_geom')
expect(document['error'].size).to eq(1)
end
end
context 'with a missing non-required metadata field' do
before do
allow(geo_concern_presenter).to receive(:language).and_return([])
end
it 'returns a document without the field but valid' do
expect(document['dc_language_s']).to be_nil
end
end
context 'with a missing temporal field' do
before do
allow(geo_concern_presenter).to receive(:temporal).and_return([])
end
it 'returns a document without the field but valid' do
expect(document['dct_temporal_sm']).to be_nil
end
end
context 'with a missing issued field' do
before do
allow(geo_concern_presenter).to receive(:issued).and_return(nil)
end
it 'returns a document without the field but valid' do
expect(document['dct_issued_dt']).to be_nil
end
end
context 'with an authenticated visibility' do
let(:visibility) { Hydra::AccessControls::AccessRight::VISIBILITY_TEXT_VALUE_AUTHENTICATED }
it 'returns a restricted rights field value' do
expect(document['dc_rights_s']).to eq('Restricted')
end
end
context 'with a private visibility' do
let(:visibility) { Hydra::AccessControls::AccessRight::VISIBILITY_TEXT_VALUE_PRIVATE }
it 'returns an empty document' do
expect(document).to eq({})
end
end
context 'with ssl enabled' do
before do
allow(geo_file_presenter.request).to receive_messages(host_with_port: 'localhost:3000', protocol: 'https://')
end
it 'returns https reference urls' do
refs = JSON.parse(document['dct_references_s'])
expect(refs['http://schema.org/url']).to eq('https://localhost:3000/concern/vector_works/geo-work-1')
end
end
end
|
using CharactersApi_Data;
using Microsoft.EntityFrameworkCore;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace CharactersApi_Test.Mocks
{
public static class CharacterDBMock
{
private static DbContextOptions<P3_NotFightClub_CharactersContext> _opts =
new DbContextOptionsBuilder<P3_NotFightClub_CharactersContext>()
.UseInMemoryDatabase("NotFightClubDB")
.Options;
public static P3_NotFightClub_CharactersContext GetMockDB()
{
return new P3_NotFightClub_CharactersContext(_opts);
}
}
}
|
var util = require('util');
var events = require('events');
var AudioDevice = {
play: function (track) {
console.log(track);
// Stub: Trigger playback through iTunes, mpg123, etc.
},
stop: function () {
console.log('stop')
}
};
function MusicPlayer() {
this.playing = false;
// Calling EventEmitter's constructor here initializes its state on this instance
events.EventEmitter.call(this);
}
// MusicPlayer inherits from events.EventEmitter
util.inherits(MusicPlayer, events.EventEmitter);
var musicPlayer = new MusicPlayer();
// emitter.on(eventName, listener) registers a listener for the event
musicPlayer.on('play', function (track) {
this.playing = true;
AudioDevice.play(track);
});
musicPlayer.on('stop', function () {
this.playing = false;
AudioDevice.stop();
});
// Synchronously calls each listener registered for eventName, in registration order, passing the supplied arguments.
musicPlayer.emit('play', 'The Roots - The Fire');
setTimeout(function () {
musicPlayer.emit('stop');
}, 1000);
|
import API from "@/services"
import { ref } from "vue"
export type userType = {
id: number
identify_card: string
social_insurance: string
fullname: string
birthday: null
gender: number
username: string
address: string
address_full: []
phone: string
village_id: number
role: {
name: string
}
created_at: null
updated_at: null
images: []
}
function useUsers() {
const data = ref()
const loadingListUser = ref(false)
const loadingSearch = ref(false)
const statusSearchUser = ref(false)
const getListUsers = async (page: number) => {
try {
statusSearchUser.value = false
loadingListUser.value = true
const response = await API.get("user?page=" + page)
if (response.data.success) {
data.value = response.data.data
}
} catch (e) {
console.log(e)
loadingListUser.value = false
} finally {
loadingListUser.value = false
}
}
const getListUsersSearch = async (text: string, page: number) => {
try {
statusSearchUser.value = true
loadingListUser.value = true
loadingSearch.value = true
const responseSearch = await API.get(
"user?search=" + text + "&page=" + page
)
if (responseSearch.data.success) {
data.value = responseSearch.data.data
}
} catch (e) {
console.log(e)
loadingListUser.value = false
loadingSearch.value = false
} finally {
loadingListUser.value = false
loadingSearch.value = false
}
}
return {
data,
loadingListUser,
getListUsers,
getListUsersSearch,
loadingSearch,
statusSearchUser
}
}
export default useUsers
|
#include "kernel/dev/tty/tty.hpp"
#include "kernel/fs/vfs/defines.hpp"
#include "kernel/task.hpp"
#include "kernel/trace.hpp"
namespace dev::tty
{
using namespace fs;
bool tty_read_func(u64 data)
{
auto *tty = (tty_pseudo_t *)data;
auto buffer = &tty->buffer;
return tty->eof_count > 0 || (tty->line_count > 0 && !buffer->is_emtpy());
}
/// print to screen
i64 tty_pseudo_t::write(const byte *data, u64 size, flag_t flags)
{
trace::print_inner((const char *)data, size);
return size;
}
u64 tty_pseudo_t::write_to_buffer(const byte *data, u64 size, flag_t flags)
{
trace::print_inner((const char *)data, size);
for (u64 i = 0; i < size; i++)
{
if ((char)data[i] == '\n')
{
line_count++;
}
if (buffer.is_full())
{
byte p = (byte)0;
buffer.last(&p);
if ((char)p == '\n')
line_count--;
}
buffer.write(data[i]);
}
wait_queue.do_wake_up();
return size;
}
void tty_pseudo_t::send_EOF()
{
eof_count++;
wait_queue.do_wake_up();
}
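// Blocking line-oriented read: waits until a full line or an EOF token
// is available, handles backspace editing, and returns bytes up to and
// including the newline.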
i64 tty_pseudo_t::read(byte *data, u64 max_size, flag_t flags)
{
if (line_count <= 0)
{
if (flags & rw_flags::no_block)
{
return -1;
}
wait_queue.do_wait(tty_read_func, (u64)this);
}
for (u64 i = 0; i < max_size;)
{
if (buffer.is_emtpy())
{
if (i > 0)
return i;
if (flags & rw_flags::no_block)
return i;
if (eof_count > 0)
{
int old;
bool ok = false;
do
{
old = eof_count;
if (old == 0)
{
ok = true;
break;
}
} while (!eof_count.compare_exchange_strong(old, old - 1, std::memory_order_acquire));
if (!ok)
return -1;
}
wait_queue.do_wait(tty_read_func, (u64)this);
}
char ch;
buffer.read((byte *)&ch);
if (ch == '\n') // keep \n
{
data[i++] = (byte)ch;
line_count--;
return i;
}
else if (ch == '\b')
{
if (i > 0)
i--;
continue;
}
data[i++] = (byte)ch;
}
return max_size;
}
void tty_pseudo_t::close()
{
wait_queue.remove(task::current_process());
wait_queue.do_wake_up();
};
} // namespace dev::tty
|
#!/bin/bash
# Create a service account that can read from the gcr.io/skia-public container
# registry and add it as a docker-registry secret to the cluster.
set -x -e
source ../../bash/ramdisk.sh
SA_EMAIL=$(../../kube/secrets/add-service-account.sh \
skia-public \
skolo-rack4 \
gcr-io-skia-public-account \
"cluster service account to access gcr.io/skia-public images" \
roles/storage.objectViewer)
cd /tmp/ramdisk
# Download a key for the clusters default service account.
gcloud beta iam service-accounts keys create key.json \
--iam-account="${SA_EMAIL}"
# Use that key as a docker-registry secret.
kubectl create secret docker-registry gcr-io-skia-public \
--docker-username=_json_key \
--docker-password="`cat key.json`" \
--docker-server=https://gcr.io \
[email protected]
##################################################################
#
# Add the ability for the new cluster to pull docker images from
# gcr.io/skia-public container registry.
#
##################################################################
kubectl patch serviceaccount default -p "{\"imagePullSecrets\": [{\"name\": \"gcr-io-skia-public\"}]}"
# Add service account as reader of docker images bucket.
# First remove the account so the add is fresh.
gsutil iam ch -d "serviceAccount:${SA_EMAIL}:objectViewer" gs://artifacts.skia-public.appspot.com
gsutil iam ch "serviceAccount:${SA_EMAIL}:objectViewer" gs://artifacts.skia-public.appspot.com |
using System.Collections.Generic;
using System.Linq;
namespace NomOrderManager.Model
{
public class IndexModel : IModel
{
public IndexModel(IEnumerable<string> locations)
{
Name = "Lieferdienste";
Locations = locations;
}
public string Name { get; }
public IEnumerable<string> Locations { get; }
public bool HasData { get { return Locations.Any(); } }
}
}
|
# (c) Copyright IBM Corp. 2021
# (c) Copyright Instana Inc. 2021
require 'test_helper'
require 'support/apps/active_record/active_record'
class RailsActiveRecordTest < Minitest::Test
def setup
skip unless ENV['DATABASE_URL']
@connection = ActiveRecord::Base.establish_connection(ENV['DATABASE_URL'])
ActiveRecord::Migration.suppress_messages do
ActiveRecord::Migration.run(CreateBlocks, direction: :up)
end
end
def teardown
ActiveRecord::Migration.suppress_messages do
ActiveRecord::Migration.run(CreateBlocks, direction: :down)
end
ActiveRecord::Base.remove_connection(@connection)
end
def test_config_defaults
assert ::Instana.config[:sanitize_sql] == true
assert ::Instana.config[:active_record].is_a?(Hash)
assert ::Instana.config[:active_record].key?(:enabled)
assert_equal true, ::Instana.config[:active_record][:enabled]
end
def test_create
Instana::Tracer.start_or_continue_trace(:ar_test, {}) do
Block.create(name: 'core', color: 'blue')
end
spans = ::Instana.processor.queued_spans
assert_equal 2, spans.length
span = find_first_span_by_name(spans, :activerecord)
data = span[:data][:activerecord]
assert data[:sql].start_with?('INSERT INTO')
end
def test_read
Block.create(name: 'core', color: 'blue')
Instana::Tracer.start_or_continue_trace(:ar_test, {}) do
Block.find_by(name: 'core')
end
spans = ::Instana.processor.queued_spans
assert_equal 2, spans.length
span = find_first_span_by_name(spans, :activerecord)
data = span[:data][:activerecord]
assert data[:sql].start_with?('SELECT')
end
def test_update
Block.create(name: 'core', color: 'blue')
b = Block.find_by(name: 'core')
Instana::Tracer.start_or_continue_trace(:ar_test, {}) do
b.color = 'red'
b.save
end
spans = ::Instana.processor.queued_spans
assert_equal 2, spans.length
span = find_first_span_by_name(spans, :activerecord)
data = span[:data][:activerecord]
assert data[:sql].start_with?('UPDATE')
end
def test_delete
b = Block.create(name: 'core', color: 'blue')
Instana::Tracer.start_or_continue_trace(:ar_test, {}) do
b.delete
end
spans = ::Instana.processor.queued_spans
assert_equal 2, spans.length
span = find_first_span_by_name(spans, :activerecord)
data = span[:data][:activerecord]
assert data[:sql].start_with?('DELETE')
end
def test_raw
Instana::Tracer.start_or_continue_trace(:ar_test, {}) do
ActiveRecord::Base.connection.execute('SELECT 1')
end
spans = ::Instana.processor.queued_spans
assert_equal 2, spans.length
span = find_first_span_by_name(spans, :activerecord)
data = span[:data][:activerecord]
assert_equal 'SELECT 1', data[:sql]
end
def test_raw_error
assert_raises ActiveRecord::StatementInvalid do
Instana::Tracer.start_or_continue_trace(:ar_test, {}) do
ActiveRecord::Base.connection.execute('INVALID')
end
end
spans = ::Instana.processor.queued_spans
assert_equal 2, spans.length
span = find_first_span_by_name(spans, :activerecord)
assert_equal 1, span[:ec]
end
end
|
#include <bits/stdc++.h>
#include "Headers/Reader.h"
using namespace std;
void Reader::ReadMap() {
cin >> numberOfPoints >> numberOfEdges;
for (int i = 0, id; i < numberOfPoints; i++) {
double x, y;
cin >> id >> x >> y;
points.push_back({ id, x, y });
}
for (int i = 0, idSource, idTarget; i < numberOfEdges; i++) {
cin >> idSource >> idTarget;
edgeIds.push_back({ idSource, idTarget });
}
}
void Reader::ReadQueries() {
double tme;
int src, dst;
vector<Query> vec;
cout << "enter number of queries:\n";
/*while (cin >> tme >> src >> dst) {
vec.push_back({ tme, src, dst });
}*/
{
int temp; cin >> temp;
while (temp--) {
cin >> tme >> src >> dst;
vec.push_back({ tme, src, dst });
}
}
sort(vec.begin(), vec.end(), [](Query& left, Query& right) {
return left.tme < right.tme;
});
for (auto& item : vec)
queries.push(item);
} |
resolvers += Resolver.url(
"meetup-sbt-plugins",
new java.net.URL("https://dl.bintray.com/meetup/sbt-plugins/")
)(Resolver.ivyStylePatterns)
addSbtPlugin("com.meetup" % "sbt-plugins" % "0.2.19")
addSbtPlugin("com.eed3si9n" % "sbt-dirty-money" % "0.1.0")
|
import 'package:intl/intl.dart';
import 'package:logging/logging.dart';
import 'package:network_tools/network_tools.dart';
void main() {
Logger.root.level = Level.FINE;
Logger.root.onRecord.listen((record) {
print(
'${DateFormat.Hms().format(record.time)}: ${record.level.name}: ${record.loggerName}: ${record.message}',
);
});
final _log = Logger('host_scan');
const String ip = '192.168.1.1';
// or You can also get ip using network_info_plus package
// final String? ip = await (NetworkInfo().getWifiIP());
final String subnet = ip.substring(0, ip.lastIndexOf('.'));
// You can set [firstSubnet] and scan will start from this host in the network.
// Similarly set [lastSubnet] and scan will end at this host in the network.
final stream = HostScanner.discover(
subnet,
// firstSubnet: 1,
// lastSubnet: 254,
progressCallback: (progress) {
_log.finer('Progress for host discovery : $progress');
},
);
stream.listen(
(ActiveHost host) {
//Same host can be emitted multiple times
//Use Set<ActiveHost> instead of List<ActiveHost>
_log.fine('Found device: $host');
},
onDone: () {
_log.fine('Scan completed');
},
); // Don't forget to cancel the stream when not in use.
}
|
# react-redux-demo server
A Node.js app using [Express 4](http://expressjs.com/) to serve the react-redux demo.
## Running Locally
```sh
$ git clone https://github.com/latamind/reat-redux.git # or clone your own fork
$ cd reat-redux
$ npm install
$ npm start
```
Your app should now be running on [localhost:5000](http://localhost:5000/).
## Deploying to Heroku
```
$ heroku create
$ git push heroku master
$ heroku open
```
or
[](https://heroku.com/deploy)
## Documentation
For more information about using Node.js on Heroku, see these Dev Center articles:
- [Getting Started with Node.js on Heroku](https://devcenter.heroku.com/articles/getting-started-with-nodejs)
- [Heroku Node.js Support](https://devcenter.heroku.com/articles/nodejs-support)
- [Node.js on Heroku](https://devcenter.heroku.com/categories/nodejs)
- [Best Practices for Node.js Development](https://devcenter.heroku.com/articles/node-best-practices)
- [Using WebSockets on Heroku with Node.js](https://devcenter.heroku.com/articles/node-websockets)
|
# Amplify
<p align="center">
<img src="docs/demo.gif" alt="Demo gif">
</p>
## Overview
Amplify allows users to increase the size of images.
Best used in narrow containers, between paragraphs.
It is lightweight: 29 sloc of JS and 23 sloc of SCSS resulting in **1.22kB** combined.
### [Demo](https://charlestati.github.io/amplify/)
## Quickstart
- Add [amplify.min.css](docs/amplify.min.css) and [amplify.min.js](docs/amplify.min.js) to your page
- Add the `js-amplify` CSS class to your images
`<img class="js-amplify" src="photo.jpg" alt="Photo">`
## Browser Support
- Chrome
- Firefox
- Safari
Probably Edge, Internet Explorer 10+ and Opera too.
## Known bugs
- Sometimes the first transition is not smooth
- Adding a wrapper for image centering prevents margin collapsing with the paragraphs
## Todo
- Use `transform: scale()` or JavaScript for smoother transitions
- Find a better naming system as `.js-*` in CSS is not ideal
## License
Apache 2.0
|
package com.kilimandzsaro.proxyclick.helper;
import io.github.cdimascio.dotenv.Dotenv;
public class Settings {
private Dotenv dotenv;
public Settings() {
this.dotenv = Dotenv.load();
}
public String getAdminUser() {
return dotenv.get("ADMIN_USER");
}
public String getAdminPassword() {
return dotenv.get("ADMIN_PASSWORD");
}
public String getBaseURL() {
return dotenv.get("BASE_URL");
}
public String getChromeDriverPath() {
return dotenv.get("CHROMEDRIVER_PATH");
}
}
|
namespace LiveHTS.Core.Model.SmartCard
{
public class EXTERNALPATIENTID
{
public string ID { get; set; }
public string IDENTIFIER_TYPE { get; set; }
public string ASSIGNING_AUTHORITY { get; set; }
public string ASSIGNING_FACILITY { get; set; }
public EXTERNALPATIENTID()
{
ID = IDENTIFIER_TYPE = ASSIGNING_AUTHORITY = ASSIGNING_FACILITY = string.Empty;
}
public static EXTERNALPATIENTID Create()
{
return new EXTERNALPATIENTID();
}
}
} |
(defpackage :jsown
(:use :common-lisp)
;; reading
(:export :parse
:build-key-container
:parse-with-container
:filter)
;; writing
(:export :to-json
:to-json*)
;; editing
(:export :keywords
:val
:empty-object
:do-json-keys
:export
:new-js
:extend-js
:remkey)
(:export :as-js-bool
:as-js-null
:keyp
:json-encoded-content
:*parsed-true-value*
:*parsed-false-value*
:*parsed-null-value*
:*parsed-empty-list-value*
:with-injective-reader))
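;; A quick usage sketch (shapes assumed from jsown's documented :OBJ
;; representation; exact printed output may vary):
;;
;;   (jsown:parse "{\"foo\":1,\"bar\":true}")
;;   ;; => (:OBJ ("foo" . 1) ("bar" . T))
;;
;;   (jsown:to-json (jsown:new-js ("foo" 1)))
;;   ;; => "{\"foo\":1}"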
|
package com.kralofsky.cipherbox.ciphers
import android.widget.SeekBar
import android.widget.TextView
import androidx.appcompat.app.AppCompatActivity
import com.kralofsky.cipherbox.Cipher
import com.kralofsky.cipherbox.R
object RailfenceCipher : Cipher() {
override val name = "Railfence Cipher"
override val imageId = R.drawable.railfence
override val controlLayout = R.layout.cipher_slider
override val link = "https://en.wikipedia.org/wiki/Rail_fence_cipher"
override val youtube = "OeAHJbBzpk4"
var key = 1
override fun encode(cleartext: String): String = RailFenceCipher(key).getEncryptedData(cleartext)
override fun decode(ciphertext: String): String = RailFenceCipher(key).getDecryptedData(ciphertext)
override fun init(context: AppCompatActivity) {
val slider = context.findViewById<SeekBar>(R.id.cipher_slider_seekbar)
slider.max = 10
slider.setOnSeekBarChangeListener(object: SeekBar.OnSeekBarChangeListener{
override fun onProgressChanged(p0: SeekBar?, p1: Int, p2: Boolean) {
key = p1+1
context.findViewById<TextView>(R.id.cipher_slider_currentValue).text = key.toString()
}
override fun onStartTrackingTouch(p0: SeekBar?) {}
override fun onStopTrackingTouch(p0: SeekBar?) {}
})
}
// From https://exercism.io/tracks/kotlin/exercises/rail-fence-cipher/solutions/665bb2dd30364a7d9b268333040de6c5
class RailFenceCipher(private val railCount: Int) {
private val cycleLength = (railCount * 2) - 2
fun getEncryptedData(data: String): String {
if (railCount==1 || railCount > 2*(data.length-1) || data.isEmpty()) return data
return data
.mapIndexed { i, char -> Pair(getRail(i), char) }
.sortedBy { it.first }
.map { it.second }
.joinToString("")
}
fun getDecryptedData(encrypted: String): String {
if (railCount==1 || railCount > 2*(encrypted.length-1) || encrypted.isEmpty()) return encrypted
return placeDataOnRails(encrypted).let { rails ->
encrypted.indices.joinToString("") { index ->
getRail(index).let { r ->
rails[r].take(1).also { rails[r] = rails[r].drop(1) }
}
}
}
}
private fun getRail(index: Int) = (index % cycleLength).let {
if (it >= railCount) cycleLength - it else it
}
private fun placeDataOnRails(encrypted: String) = mutableListOf<String>().apply {
var start = 0
getRailCounts(encrypted).forEach {
add(encrypted.substring(start, start + it))
start += it
}
}
private fun getRailCounts(data: String) = data
.mapIndexed { i, char -> Pair(getRail(i), char) }
.groupingBy { it.first }
.eachCount()
.map { it.value }
}
}
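// A quick sanity check, using the classic example from the Wikipedia article
// linked above (worked out by hand against the 3-rail zigzag):
//
//   RailfenceCipher.RailFenceCipher(3).getEncryptedData("WEAREDISCOVEREDFLEEATONCE")
//   // => "WECRLTEERDSOEEFEAOCAIVDEN"
//   RailfenceCipher.RailFenceCipher(3).getDecryptedData("WECRLTEERDSOEEFEAOCAIVDEN")
//   // => "WEAREDISCOVEREDFLEEATONCE"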
|
#!/bin/bash -e
# Bring every angr repository to the branch under test.
./git_all.sh checkout master
./git_all.sh checkout "$TRAVIS_BRANCH"
if [ "$(basename $TRAVIS_REPO_SLUG)" == "$ANGR_REPO" ]
then
COVERAGE="--with-coverage --cover-package=$ANGR_REPO --cover-erase"
fi
NOSE_OPTIONS="-v --nologcapture --with-timer $COVERAGE --processes=2 --process-timeout=570 --process-restartworker"
cd $ANGR_REPO
# Run whichever test layout this repository uses.
if [ -f "test.py" ]
then
nosetests $NOSE_OPTIONS test.py
elif [ -d "tests" ]
then
nosetests $NOSE_OPTIONS tests/
else
echo "### No tests for repository $ANGR_REPO?"
fi
[ "$(basename $TRAVIS_REPO_SLUG)" == "$ANGR_REPO" ] && ../lint.py
exit 0
|
/**
Computes the (magnetic) scattering from an SLD (n and m) profile
*/
#ifndef SLD2I_CLASS_H
#define SLD2I_CLASS_H
#include <vector>
using namespace std;
/**
* Base class
*/
class GenI {
protected:
vector<double>* I_out;
// vectors
int n_pix;
double* qx;
double* qy;
double* x_val;
double* y_val;
double* z_val;
double* sldn_val;
double* mx_val;
double* my_val;
double* mz_val;
double* vol_pix;
// spin ratios
double inspin;
double outspin;
double stheta;
public:
// Constructor
GenI(int npix, double* x, double* y, double* z, double* sldn,
double* mx, double* my, double* mz, double* voli,
double in_spin, double out_spin,
double s_theta);
// compute function
void genicomXY(int npoints, double* qx, double* qy, double *I_out);
void genicom(int npoints, double* q, double *I_out);
};
#endif
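// Usage sketch (assumes the matching implementation is linked in; the arrays
// and sizes below are placeholders supplied by the caller):
//
//   GenI gen(n_pix, x, y, z, sldn,      // pixel coordinates and nuclear SLD
//            mx, my, mz, vol_pix,       // magnetization components, volumes
//            1.0, 1.0, 0.0);            // in/out spin fractions, s_theta
//   gen.genicomXY(n_q, qx, qy, I_out);  // fills I_out with I(qx, qy)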
|
// Created by Kay Czarnotta on 20.04.2016
//
// Copyright (c) 2016, EagleEye .
//
// All rights reserved.
using System;
using Windows.ApplicationModel.Background;
namespace SensorbergSDKTests.Mocks
{
public class MockBackgroundTaskInstance: IBackgroundTaskInstance
{
public BackgroundTaskDeferral GetDeferral()
{
return null;
}
public Guid InstanceId { get; }
public uint Progress { get; set; }
public uint SuspendedCount { get; }
public BackgroundTaskRegistration Task { get; }
public object TriggerDetails { get; }
public event BackgroundTaskCanceledEventHandler Canceled;
}
} |
using System;
using System.Collections.Generic;
using System.Linq;
using System.Web;
namespace OredersTracker.Web.Areas.Administration.ViewModels
{
public class IncomeViewModel
{
public decimal TotalForMonth { get; set; }
public decimal TotalForKornelia { get; set; }
public decimal TotalForMitko { get; set; }
public decimal TotalForNikola { get; set; }
public decimal TotalForStaiko { get; set; }
}
} |
CREATE TABLE moderation_case (
id INT(11) PRIMARY KEY NOT NULL AUTO_INCREMENT,
guild_id INT(11) NOT NULL,
user_id INT(11) NOT NULL,
moderator INT(11),
message_id INT(21) NOT NULL,
created_at DATETIME NOT NULL,
reason TEXT NOT NULL,
punishment INT(11) NOT NULL,
expires DATETIME,
active INT(11) NOT NULL
);
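-- A minimal sketch of recording a case (values are placeholders; note the
-- unique indexes below allow only one case per (guild, user) and per
-- (guild, message) pair):
INSERT INTO moderation_case
    (guild_id, user_id, moderator, message_id, created_at, reason, punishment, expires, active)
VALUES
    (1, 2, 3, 4, NOW(), 'Spamming', 1, NULL, 1);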
CREATE UNIQUE INDEX moderation_case_guild_id_user_id_pk
ON moderation_case (guild_id, user_id);
CREATE UNIQUE INDEX moderation_case_guild_id_message_id_pk
ON moderation_case (guild_id, message_id);
CREATE UNIQUE INDEX moderation_case_user_id_message_id_pk
ON moderation_case (user_id, message_id); |
using pegabicho.domain.Entities.Core.Users;
using pegabicho.domain.Helpers;
using pegabicho.domain.Interfaces.Arguments.Base;
namespace pegabicho.domain.Arguments.Core.Users {
public class GeneralResponse : IResponse {
public string Id { get; set; }
public string Type { get; set; }
public string Phone { get; set; }
public string CellPhone { get; set; }
public string FirstName { get; set; }
public string LastName { get; set; }
public string BirthDate { get; set; }
public static explicit operator GeneralResponse(General v) {
return v == null ? null : new GeneralResponse {
BirthDate = v.BirthDate.ToString("dd/MM/yyyy HH:mm"),
Type = v.Type.EnumDisplay(),
CellPhone = v.CellPhone,
FirstName = v.FirstName,
            LastName = v.LastName,
Phone = v.Phone,
Id = v.Id,
};
}
}
} |
import 'package:kino_player/generated/l10n.dart';
class PosterSortFieldData {
final String id;
final String title;
PosterSortFieldData(this.id, this.title);
}
class PosterSortFieldsData {
final List<PosterSortFieldData> items;
static const defaultId = "-kinopoisk_rating";
static List<PosterSortFieldData> _init() {
final s = S.current;
List<PosterSortFieldData> items = [];
items.add(PosterSortFieldData(
"kinopoisk_rating", s.posterSortFieldDataTopKinopoisk));
items.add(PosterSortFieldData("imdb_rating", s.posterSortFieldDataTopIMDb));
items.add(PosterSortFieldData("created", s.posterSortFieldDataNewKinopub));
return items;
}
PosterSortFieldsData() : items = _init();
}
|
package com.deflatedpickle.survivalimprovement
object Reference {
const val MOD_ID = "survivalimprovement"
const val NAME = "SurvivalImprovement"
// Versions follow this format: MCVERSION-MAJORMOD.MAJORAPI.MINOR.PATCH.
const val VERSION = "1.12.2-2.0.1.0"
const val ACCEPTED_VERSIONS = "[1.12.1, 1.12.2]"
const val ADAPTER = "net.shadowfacts.forgelin.KotlinAdapter"
} |
# This file contains the Schedule and Route classes, which load data from our
# database and provide methods and variables to help access it
import pandas as pd
import numpy as np
import psycopg2 as pg
from scipy import stats
# Schedule class definition
# (has some extra methods that are not all used in this notebook)
class Schedule:
"""
The Schedule class loads schedule data for a given route and date
Attributes:
route_id (str): the id of the route loaded
date (pd.Timestamp): the date of the schedule loaded
route_data (dict): the raw schedule data
inbound_table (pd.DataFrame): a dataframe of the inbound schedule
outbound_table (pd.DataFrame): a dataframe of the outbound schedule
mean_interval (float): the average time in minutes between each
scheduled stop
common_interval (float): the most common time (mode) in minutes between
each scheduled stop
"""
def __init__(self, route_id, date, connection):
"""
The Schedule class loads the schedule for a particular route and day,
and makes several accessor methods available for it.
Parameters:
route_id (str or int)
- The route id to load
date (str or pandas.Timestamp)
- Which date to load
- Converted with pandas.to_datetime so many formats are acceptable
connection (psycopg2 connection object)
- The connection object to connect to the database with
"""
self.route_id = str(route_id)
self.date = pd.to_datetime(date)
# load the schedule for that date and route
self.route_data = load_schedule(self.route_id, self.date, connection)
# process data into a table
self.inbound_table, self.outbound_table = \
extract_schedule_tables(self.route_data)
# calculate the common interval values
self.mean_interval, self.common_interval = get_common_intervals(
[self.inbound_table, self.outbound_table])
def list_stops(self):
"""
returns the list of all stops used by this schedule
"""
# get stops for both inbound and outbound routes
inbound = list(self.inbound_table.columns)
outbound = list(self.outbound_table.columns)
# convert to set to ensure no duplicates,
# then back to list for the correct output type
return list(set(inbound + outbound))
def get_specific_interval(self, stop, time, inbound=True):
"""
Returns the expected interval, in minutes, for a given stop and
time of day.
Parameters:
stop (str or int)
- the stop tag/id of the bus stop to check
time (str or pandas.Timestamp)
- the time of day to check, uses pandas.to_datetime to convert
- examples that work: "6:00", "3:30pm", "15:30"
inbound (bool, optional)
- whether to check the inbound or outbound schedule
- ignored unless the given stop is in both inbound and outbound
"""
# ensure correct parameter types
stop = str(stop)
time = pd.to_datetime(time)
# check which route to use, and extract the column for the given stop
if (stop in self.inbound_table.columns and
stop in self.outbound_table.columns):
# stop exists in both, use inbound parameter to decide
if inbound:
sched = self.inbound_table[stop]
else:
sched = self.outbound_table[stop]
elif (stop in self.inbound_table.columns):
# stop is in the inbound schedule, use that
sched = self.inbound_table[stop]
elif (stop in self.outbound_table.columns):
# stop is in the outbound schedule, use that
sched = self.outbound_table[stop]
else:
# stop doesn't exist in either, throw an error
            raise ValueError(f"Stop id '{stop}' doesn't exist in either "
                             "inbound or outbound schedules")
# 1: convert schedule to datetime for comparison statements
# 2: drop any NaN values
# 3: convert to list since pd.Series threw errors on i indexing
sched = list(pd.to_datetime(sched).dropna())
# reset the date portion of the time parameter to
# ensure we are checking the schedule correctly
time = time.replace(year=self.date.year, month=self.date.month,
day=self.date.day)
# iterate through that list to find where the time parameter fits
for i in range(1, len(sched)):
# start at 1 and move forward,
# is the time parameter before this schedule entry?
if(time < sched[i]):
# return the difference between this entry and the previous one
return (sched[i] - sched[i-1]).seconds / 60
# can only reach this point if the time parameter is after all entries
# in the schedule, return the last available interval
return (sched[len(sched)-1] - sched[len(sched)-2]).seconds / 60
def load_schedule(route, date, connection):
"""
loads schedule data from the database and returns it
Parameters:
route (str)
- The route id to load
date (str or pd.Timestamp)
- Which date to load
- Converted with pandas.to_datetime so many formats are acceptable
"""
# ensure correct parameter types
route = str(route)
date = pd.to_datetime(date)
# DB connection
cursor = connection.cursor()
# build selection query
query = """
SELECT content
FROM schedules
WHERE rid = %s AND
begin_date <= %s::TIMESTAMP AND
(end_date IS NULL OR end_date >= %s::TIMESTAMP);
"""
# execute query and save the route data to a local variable
cursor.execute(query, (route, str(date), str(date)))
if cursor.rowcount == 0:
        raise Exception(f"No schedule data found for route {route} "
                        f"on {date.date()}")
data = cursor.fetchone()[0]['route']
# pd.Timestamp.dayofweek returns 0 for monday and 6 for Sunday
# the actual serviceClass strings are defined by Nextbus
# these are the only 3 service classes we can currently observe,
# if others are published later then this will need to change
if(date.dayofweek <= 4):
serviceClass = 'wkd'
elif(date.dayofweek == 5):
serviceClass = 'sat'
else:
serviceClass = 'sun'
# the schedule format has two entries for each serviceClass,
# one each for inbound and outbound.
# get each entry in the data list with the correct serviceClass
result = [sched for sched in data if
(sched['serviceClass'] == serviceClass)]
# make sure there's data
# (most commonly reason to be here: some routes don't run on weekends)
if len(result) == 0:
print(f"No schedule data found for route {route} on {date.date()}")
return result
def extract_schedule_tables(route_data):
"""
converts raw schedule data to two pandas dataframes
columns are stops, and rows are individual trips
returns inbound_df, outbound_df
"""
# assuming 2 entries, but not assuming order
if(route_data[0]['direction'] == 'Inbound'):
inbound = 0
else:
inbound = 1
# extract a list of stops to act as columns
inbound_stops = [s['tag'] for s in route_data[inbound]['header']['stop']]
# initialize dataframe
inbound_df = pd.DataFrame(columns=inbound_stops)
# extract each row from the data
if type(route_data[inbound]['tr']) == list:
# if there are multiple trips in a day, structure will be a list
i = 0
for trip in route_data[inbound]['tr']:
for stop in trip['stop']:
# '--' indicates the bus is not going to that stop on this trip
if stop['content'] != '--':
inbound_df.at[i, stop['tag']] = stop['content']
# increment for the next row
i += 1
else:
# if there is only 1 trip in a day, the object is a dict and
# must be handled slightly differently
for stop in route_data[inbound]['tr']['stop']:
if stop['content'] != '--':
inbound_df.at[0, stop['tag']] = stop['content']
# flip between 0 and 1
outbound = int(not inbound)
# repeat steps for the outbound schedule
outbound_stops = [s['tag'] for s in route_data[outbound]['header']['stop']]
outbound_df = pd.DataFrame(columns=outbound_stops)
if type(route_data[outbound]['tr']) == list:
i = 0
for trip in route_data[outbound]['tr']:
for stop in trip['stop']:
if stop['content'] != '--':
outbound_df.at[i, stop['tag']] = stop['content']
i += 1
else:
for stop in route_data[outbound]['tr']['stop']:
if stop['content'] != '--':
outbound_df.at[0, stop['tag']] = stop['content']
# return both dataframes
return inbound_df, outbound_df
def get_common_intervals(df_list):
"""
takes route schedule tables and returns both the average interval (mean)
and the most common interval (mode), measured in number of minutes
takes a list of dataframes and combines them before calculating statistics
intended to combine inbound and outbound schedules for a single route
"""
# ensure we have at least one dataframe
if len(df_list) == 0:
raise ValueError("Function requires at least one dataframe")
# append all dataframes in the array together
df = df_list[0].copy()
for i in range(1, len(df_list)):
        # DataFrame.append returns a new frame, so reassign to accumulate
        df = df.append(df_list[i].copy())
# convert all values to datetime so we can get an interval easily
for col in df.columns:
df[col] = pd.to_datetime(df[col])
# initialize a table to hold each individual interval
intervals = pd.DataFrame(columns=df.columns)
intervals['temp'] = range(len(df))
# take each column and find the intervals in it
for col in df.columns:
prev_time = np.nan
for i in range(len(df)):
# find the first non-null value and save it to prev_time
if pd.isnull(prev_time):
prev_time = df.at[i, col]
# if the current time is not null, save the interval
            # use `not` rather than bitwise ~, which is always truthy on bools
            elif not pd.isnull(df.at[i, col]):
intervals.at[i, col] = (df.at[i, col] - prev_time).seconds / 60
prev_time = df.at[i, col]
# this runs without adding a temp column, but the above loop runs 3x as
# fast if the rows already exist
intervals = intervals.drop('temp', axis=1)
# calculate the mean of the entire table
mean = intervals.mean().mean()
# calculate the mode of the entire table, the [0][0] at the end is
# because scipy.stats returns an entire ModeResult class
mode = stats.mode(intervals.values.flatten())[0][0]
return mean, mode
# Route class definition
# (also has some extra methods that are not all used in this notebook)
class Route:
"""
The Route class loads route definition data for a given route and date,
such as stop location and path coordinates
Attributes:
route_id (str): the id of the route loaded
date (pd.Timestamp): the date of the route definition loaded
route_data (dict): the raw route data
route_type (str): the type of route loaded
route_name (str): the name of the route loaded
stops_table (pd.DataFrame): a table of all stops on this route
inbound (list): a list of stop tags in the order they appear on the
inbound route
outbound (list): a list of stop tags in the order they appear on the
outbound route
Not fully implemented:
path_coords (list): a list of (lat,lon) tuples describing the route
path. These are stored as an unordered collection
of sub-paths in the raw data.
"""
def __init__(self, route_id, date, connection):
"""
The Route class loads the route configuration data for a particular
route, and makes several accessor methods available for it.
Parameters:
route_id (str or int)
- The route id to load
date (str or pandas.Timestamp)
- Which date to load
- Converted with pandas.to_datetime so many formats are acceptable
connection (psycopg2 connection object)
- The connection object to connect to the database with
"""
self.route_id = str(route_id)
self.date = pd.to_datetime(date)
# load the route data
self.route_data, self.route_type, self.route_name = \
load_route(self.route_id, self.date, connection)
# extract stops table
self.stops_table, self.inbound, self.outbound = \
extract_stops(self.route_data)
# The extract_path method is not complete
# extract route path, list of (lat, lon) pairs
# self.path_coords = extract_path(self.route_data)
def load_route(route, date, connection):
"""
loads raw route data from the database
Parameters:
route (str or int)
- The route id to load
date (str or pd.Timestamp)
- Which date to load
- Converted with pandas.to_datetime so many formats are acceptable
Returns route_data (dict), route_type (str), route_name (str)
"""
# ensure correct parameter types
route = str(route)
date = pd.to_datetime(date)
# DB connection
cursor = connection.cursor()
# build selection query
query = """
SELECT route_name, route_type, content
FROM routes
WHERE rid = %s AND
begin_date <= %s::TIMESTAMP AND
(end_date IS NULL OR end_date > %s::TIMESTAMP);
"""
# execute query and return the route data
cursor.execute(query, (route, str(date), str(date)))
if cursor.rowcount == 0:
        raise Exception(f"No route data found for route {route} "
                        f"on {date.date()}")
result = cursor.fetchone()
return result[2]['route'], result[1], result[0]
def extract_path(route_data):
"""
Extracts the list of path coordinates for a route.
The raw data stores this as an unordered list of sub-routes, so this
function deciphers the order they should go in and returns a single list.
"""
# KNOWN BUG
# this approach assumed all routes were either a line or a loop.
# routes that have multiple sub-paths meeting at a point break this,
# route 24 is a current example.
# I'm committing this now to get the rest of the code out there
# this part is also not currently used in the daily report generation
# extract the list of subpaths as just (lat,lon) coordinates
# also converts from string to float (raw data has strings)
path = []
for sub_path in route_data['path']:
path.append([(float(p['lat']), float(p['lon']))
for p in sub_path['point']])
# start with the first element, remove it from path
final = path[0]
path.pop(0)
# loop until the first and last coordinates in final match
counter = len(path)
done = True
while final[0] != final[-1]:
# loop through the sub-paths that we haven't yet moved to final
for i in range(len(path)):
# check if the last coordinate in final matches the first
# coordinate of another sub-path
if final[-1] == path[i][0]:
# match found, move it to final
# leave out the first coordinate to avoid duplicates
final = final + path[i][1:]
path.pop(i)
break # break the for loop
# protection against infinite loops, if the path never closes
counter -= 1
if counter < 0:
done = False
break
if not done:
# route did not connect in a loop, perform same steps backwards
# to get the rest of the line
for _ in range(len(path)):
# loop through the sub-paths that we haven't yet moved to final
for i in range(len(path)):
# check if the first coordinate in final matches the last
# coordinate of another sub-path
if final[0] == path[i][-1]:
# match found, move it to final
# leave out the last coordinate to avoid duplicates
final = path[i][:-1] + final
path.pop(i)
break # break the for loop
# some routes may have un-used sub-paths
# Route 1 for example has two sub-paths that are almost identical, with the
# same start and end points
# if len(path) > 0:
# print(f"WARNING: {len(path)} unused sub-paths")
# return the final result
return final
def extract_stops(route_data):
"""
Extracts a dataframe of stops info
Returns the main stops dataframe, and a list of inbound and outbound stops
in the order they are intended to be on the route
"""
stops = pd.DataFrame(route_data['stop'])
directions = pd.DataFrame(route_data['direction'])
# Change stop arrays to just the list of numbers
for i in range(len(directions)):
directions.at[i, 'stop'] = [s['tag'] for s in directions.at[i, 'stop']]
# Find which stops are inbound or outbound
inbound = []
for stop_list in directions[directions['name'] == "Inbound"]['stop']:
for stop in stop_list:
if stop not in inbound:
inbound.append(stop)
outbound = []
for stop_list in directions[directions['name'] == "Outbound"]['stop']:
for stop in stop_list:
            if stop not in outbound:
outbound.append(stop)
# Label each stop as inbound or outbound
stops['direction'] = ['none'] * len(stops)
for i in range(len(stops)):
if stops.at[i, 'tag'] in inbound:
stops.at[i, 'direction'] = 'inbound'
elif stops.at[i, 'tag'] in outbound:
stops.at[i, 'direction'] = 'outbound'
# Convert from string to float
stops['lat'] = stops['lat'].astype(float)
stops['lon'] = stops['lon'].astype(float)
return stops, inbound, outbound
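# A minimal usage sketch (connection parameters and ids are placeholders;
# assumes the schedules/routes tables described above are populated):
#
#   import psycopg2 as pg
#   conn = pg.connect(host="localhost", dbname="transit",
#                     user="user", password="pass")
#   sched = Schedule(12, "2020-06-01", conn)
#   print(sched.mean_interval, sched.common_interval)
#   print(sched.get_specific_interval(stop=4528, time="8:30"))
#   route = Route(12, "2020-06-01", conn)
#   print(route.stops_table.head())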
|
{% highlight yaml %}
title: callysto
github: ashtonmv/example_site
alert: "I'm here for your cookies!"
url: michael-ashton.com/example_site
baseurl: /
email: [email protected]
formspree: https://formspree.io/[email protected]
copyright: Michael Ashton
{% endhighlight %} |