using System;
using Microsoft.Maui.Graphics;
using Microsoft.UI.Xaml;
using Microsoft.UI.Xaml.Controls;
using UwpApp = Microsoft.UI.Xaml.Application;
using UwpControlTemplate = Microsoft.UI.Xaml.Controls.ControlTemplate;
using UwpScrollBarVisibility = Microsoft.UI.Xaml.Controls.ScrollBarVisibility;
using WVisibility = Microsoft.UI.Xaml.Visibility;
namespace Microsoft.Maui.Controls.Platform
{
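// ListView subclass that hosts an optional empty view inside its control
// template and keeps that view's visibility in sync with the
// EmptyViewVisibility dependency property.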
internal class FormsListView : Microsoft.UI.Xaml.Controls.ListView, IEmptyView
{
ContentControl _emptyViewContentControl;
FrameworkElement _emptyView;
View _formsEmptyView;
public FormsListView()
{
Template = (UwpControlTemplate)UwpApp.Current.Resources["FormsListViewTemplate"];
ScrollViewer.SetHorizontalScrollBarVisibility(this, UwpScrollBarVisibility.Disabled);
ScrollViewer.SetVerticalScrollBarVisibility(this, UwpScrollBarVisibility.Auto);
}
public static readonly DependencyProperty EmptyViewVisibilityProperty =
DependencyProperty.Register(nameof(EmptyViewVisibility), typeof(Visibility),
typeof(FormsListView), new PropertyMetadata(WVisibility.Collapsed, EmptyViewVisibilityChanged));
static void EmptyViewVisibilityChanged(DependencyObject d, DependencyPropertyChangedEventArgs e)
{
if (d is FormsListView listView)
{
// Update this manually; normally we'd just bind this, but TemplateBinding doesn't seem to work
// for WASDK right now.
listView.UpdateEmptyViewVisibility((WVisibility)e.NewValue);
}
}
public WVisibility EmptyViewVisibility
{
get
{
return (WVisibility)GetValue(EmptyViewVisibilityProperty);
}
set
{
SetValue(EmptyViewVisibilityProperty, value);
}
}
public void SetEmptyView(FrameworkElement emptyView, View formsEmptyView)
{
_emptyView = emptyView;
_formsEmptyView = formsEmptyView;
if (_emptyViewContentControl != null)
{
_emptyViewContentControl.Content = emptyView;
UpdateEmptyViewVisibility(EmptyViewVisibility);
}
}
protected override void OnApplyTemplate()
{
base.OnApplyTemplate();
_emptyViewContentControl = GetTemplateChild("EmptyViewContentControl") as ContentControl;
if (_emptyView != null)
{
_emptyViewContentControl.Content = _emptyView;
UpdateEmptyViewVisibility(EmptyViewVisibility);
}
}
protected override global::Windows.Foundation.Size ArrangeOverride(global::Windows.Foundation.Size finalSize)
{
if (_formsEmptyView != null)
{
_formsEmptyView.Layout(new Rectangle(0, 0, finalSize.Width, finalSize.Height));
}
return base.ArrangeOverride(finalSize);
}
protected override void PrepareContainerForItemOverride(DependencyObject element, object item)
{
GroupFooterItemTemplateContext.EnsureSelectionDisabled(element, item);
base.PrepareContainerForItemOverride(element, item);
}
void UpdateEmptyViewVisibility(WVisibility visibility)
{
if (_emptyViewContentControl == null)
{
return;
}
_emptyViewContentControl.Visibility = visibility;
}
}
}
#!/bin/bash
# Use colors, but only if connected to a terminal, and that terminal
# supports them. (From oh-my-zsh/tools/install.sh)
if which tput >/dev/null 2>&1; then
ncolors=$(tput colors)
fi
if [ -t 1 ] && [ -n "$ncolors" ] && [ "$ncolors" -ge 8 ]; then
RED="$(tput setaf 1)"
GREEN="$(tput setaf 2)"
YELLOW="$(tput setaf 3)"
BLUE="$(tput setaf 4)"
BOLD="$(tput bold)"
NORMAL="$(tput sgr0)"
MAGENTA=$(tput setaf 5)
ORANGE=$(tput setaf 4)
PURPLE=$(tput setaf 1)
WHITE=$(tput setaf 7)
else
RED=""
GREEN=""
YELLOW=""
BLUE=""
BOLD=""
NORMAL=""
MAGENTA=""
ORANGE=""
PURPLE=""
WHITE=""
fi
source "$DOTFILES_LIB/verbosity.sh"
export DOTFILES_ECHO_NO_NEWLINE=false
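# colored_echo COLOR BOLD HEADER MESSAGE_COLOR MESSAGE_BOLD MESSAGE
# Prints HEADER and MESSAGE with the given tput attributes, resetting all
# formatting at the end; honors DOTFILES_ECHO_NO_NEWLINE.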
colored_echo()
{
local color=$1
local bold=$2
local header="$3"
local message_color=$4
local message_bold=$5
local message="$6"
if [ -z "$color" ]; then
color=$NORMAL
fi
if [ -z "$bold" ]; then
bold=$NORMAL
fi
if [ -z "$message_color" ]; then
message_color=$NORMAL
fi
if [ -z "$message_bold" ]; then
message_bold=$bold
fi
output="${color}${bold}${header}${message_color}${message_bold}${message}${NORMAL}"
if $DOTFILES_ECHO_NO_NEWLINE; then
echo -n "$output"
else
echo "$output"
fi
}
print_message()
{
local level=$1
local category=$2
local message="$3"
local header=""
local color=$NORMAL
local bold=""
if [[ "$level" == "TARGET" ]]; then
header="==> "
color=$GREEN
bold=$BOLD
elif [[ "$level" == "COMPONENT" ]]; then
header=" -> "
color=$BLUE
bold=$BOLD
elif [[ "$level" == "STEP" ]]; then
header=""
color=$NORMAL
bold=""
else
header=""
color=$NORMAL
bold=""
fi
if [[ "$category" == "ERROR" ]]; then
color=$RED
header="${header}ERROR: "
elif [[ "$category" == "WARNING" ]]; then
color=$YELLOW
header="${header}WARNING: "
elif [[ "$category" == "NOTE" ]]; then
color=$NORMAL
header="${header}NOTE: "
fi
if verbose_policy "$level" || [ "${category}" == "REQUEST" ]; then
colored_echo "$color" "$bold" "$header" "$NORMAL" "$bold" "$message"
fi
}
print_info()
{
print_message "$1" "INFO" "$2"
}
print_request()
{
print_message "$1" "REQUEST" "$2"
}
print_note()
{
print_message "$1" "NOTE" "$2"
}
print_warning()
{
print_message "$1" "WARNING" "$2"
}
print_error()
{
print_message "$1" "ERROR" "$2"
}
confirm () {
# call with a prompt string or use a default
read -r -p "${1:-Are you sure? [y/N]} " response
case $response in
[yY][eE][sS]|[yY])
true
;;
*)
false
;;
esac
}
confirm_colored()
{
export DOTFILES_ECHO_NO_NEWLINE=true
print_request "$1" "$2"
export DOTFILES_ECHO_NO_NEWLINE=false
confirm " [y/N]: "
}
import 'package:cloud_firestore/cloud_firestore.dart';
import 'package:orgonetchatappv2/models/UserModel.dart';
class FirebaseHelper {
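/// Fetches the Firestore document for [uid] from the "users" collection and
/// maps it to a [UserModel]; returns null when the document has no data.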
static Future<UserModel?> getUserModelById(String uid) async {
UserModel? userModel;
DocumentSnapshot docSnap = await FirebaseFirestore.instance.collection("users").doc(uid).get();
if (docSnap.data() != null) {
userModel = UserModel.fromMap(docSnap.data() as Map<String, dynamic>);
}
return userModel;
}
}
using System;
using System.Collections.Generic;
using System.Text;
namespace ElementIoT.Particle.Infrastructure.Model.Handling
{
public interface IEventHandlerRegistry
{
void Register(IEventHandler handler);
}
}
package jaskell.parsec
import org.scalatest.flatspec.AnyFlatSpec
import org.scalatest.matchers.should.Matchers
/**
* TODO
*
* @author mars
* @version 1.0.0
* @since 2020/05/11 10:50
*/
class AheadSpec extends AnyFlatSpec with Matchers {
import Txt.{text, space}
import Combinator.ahead
"Simple" should "Expect status stop after this" in {
val content: String = "this is a string data."
val state = State.apply(content)
val parser = Parsec[Char, String] { s =>
for {
re <- text("this") ? (s)
_ <- ahead (text(" is")) ? (s)
} yield re
}
parser ! state should be("this")
state.status should be(4)
}
"Then" should "Check status get result and stop at is" in {
val content: String = "this is a string data."
val state = State.apply(content)
val parser = Parsec[Char, String] { s =>
for {
_ <- text("this") ? s
_ <- space ? s
re <- ahead (text("is")) ? s
} yield re
}
val re = parser ! state
re should be("is")
state.status should be(5)
}
"Fail" should "throw parsec exception from parser" in {
val content: String = "this is a string data."
val state = State apply content
val parser = Parsec[Char, String] { s =>
for {
_ <- text("this") ? s
_ <- space ? s
re <- ahead(text(" is")) ? s
} yield re
}
a[ParsecException] should be thrownBy {
parser ! state
}
}
}
const colors = require('colors') // eslint-disable-line no-unused-vars
const moment = require('moment')
const { logo, welcomeText } = require('../other/text')
const NetcatServer = require('netcat/server')
const HiveInterface = require('./hive')
const { broadcast } = require('./utilities')
const fs = require('fs')
const readline = require('readline')
const welcomeMsg = `${logo}\n${welcomeText}`.yellow
function start ({ BEE_HOST, BEE_PORT, QUEEN_HOST, QUEEN_PORT, HONEY_SCRIPT }) {
console.log(logo.yellow, `\nAlveare started on port ${QUEEN_PORT}, waiting for bees on port ${BEE_PORT}`.cyan)
// BEE HIVE
const hive = new NetcatServer()
hive.k().address(BEE_HOST).port(BEE_PORT).listen()
hive.on('connection', (bee) => {
const now = moment().format('MMM Do YYYY, HH:mm:ss')
const msg = `[${now}] New bee ${bee.remoteAddress}:${bee.remotePort} (${bee.id})`.yellow + ' connected'.green
console.log(msg)
broadcast(queenLoft.getClients(), msg)
})
// abort if HONEY_SCRIPT is set but the file does not exist
if (HONEY_SCRIPT !== '') {
if (!fs.existsSync(HONEY_SCRIPT)) {
console.log(`Alveare can't find your HONEY_SCRIPT at ${HONEY_SCRIPT}`)
process.exit(1)
}
// send "HONEY_SCRIPT" line by line to client on new bee connection
hive.on('connection', (bee) => {
const rl = readline.createInterface({
input: fs.createReadStream(HONEY_SCRIPT),
crlfDelay: Infinity
})
rl.on('line', (line) => {
bee.write(`${line}\n`)
})
})
}
hive.on('clientClose', (bee) => {
const now = moment().format('MMM Do YYYY, HH:mm:ss')
const msg = `[${now}] Bee ${bee.remoteAddress}:${bee.remotePort} (${bee.id})`.yellow + ' died'.red
console.log(msg)
broadcast(queenLoft.getClients(), msg)
})
// QUEEN BEE
const queenLoft = new NetcatServer()
queenLoft.k().address(QUEEN_HOST).port(QUEEN_PORT).listen().on('connection', (queenBee) => { // admin socket
const now = moment().format('MMM Do YYYY, HH:mm:ss')
console.log(`[${now}] A queen bee just entered the Hive`.yellow)
const cli = new HiveInterface({ welcomeMsg, hive, socket: queenBee })
cli.start()
}).on('clientClose', (queenBee) => {
const now = moment().format('MMM Do YYYY, HH:mm:ss')
console.log(`[${now}] A queen bee`, 'quit'.red)
})
}
module.exports = {
start
}
mapdive.LooseState = function() {
var stateStartTime = 0;
var self = {};
var hasSentCloudMsg=false;
self.setActive = function() {
sendMessage( {mapzoom : "off"} );
stateStartTime = now();
viewState.player.parachute=now();
viewState.player.trails=0;
setCameraMode("end-loose1", 4);
hasSentCloudMsg=false;
}
self.update = function() {
playerRoll.set(0);
playerPitch.set( toRadians(-75) ); // offset from 90 so wires don't go in his head (too much :)
playerPos.y -= 0.01; // fall really slow
var elapsed=now() - stateStartTime;
if(elapsed > 8){
setGameState(STATE_IDLE);
//if (isUserPresent) startNewGame(); else setGameState(STATE_IDLE); // alt to fix cam transition
}
else if (elapsed>7 && hasSentCloudMsg==false) {
sendMessage( { entity: "transition_cloud", state:'fadein' } );
hasSentCloudMsg=true;
}
}
return self;
}
"Create AtomicSolution for SDE."
function Solutions.AtomicSolution(equation::AbstractEquationSDE{DT,TT}) where {DT,TT}
AtomicSolutionSDE(equation.t₀, equation.q₀[begin], zeros(DT,equation.m), zeros(DT,equation.m))
end
"Create AtomicSolution for SDE."
function Solutions.AtomicSolution(solution::SolutionSDE{AT,TT}) where {DT, TT, AT <: AbstractArray{DT}}
AtomicSolutionSDE(get_initial_conditions(solution, 1)..., zeros(DT,solution.nm), zeros(DT,solution.nm))
end
"Create AtomicSolution for partitioned SDE."
function Solutions.AtomicSolution(equation::AbstractEquationPSDE{DT,TT}) where {DT,TT}
AtomicSolutionPSDE(equation.t₀, equation.q₀[begin], equation.p₀[begin], zeros(DT,equation.m), zeros(DT,equation.m))
end
"Create AtomicSolution for partitioned SDE."
function Solutions.AtomicSolution(solution::SolutionPSDE{AT,TT}) where {DT, TT, AT <: AbstractArray{DT}}
AtomicSolutionPSDE(get_initial_conditions(solution, 1)..., zeros(DT,solution.nm), zeros(DT,solution.nm))
end
<?php
namespace Ubermanu\Email\Console\Command;
use Magento\Developer\Model\Config\Source\WorkflowType;
use Magento\Framework\Exception\LocalizedException;
use Magento\Framework\Phrase;
use Magento\Store\Model\Store;
use Symfony\Component\Console\Command\Command;
use Symfony\Component\Console\Input\InputInterface;
use Symfony\Component\Console\Input\InputOption;
use Symfony\Component\Console\Output\OutputInterface;
class DumpCommand extends Command
{
/**
* @var \Magento\Framework\App\State
*/
protected $_state;
/**
* @var \Magento\Framework\Phrase\RendererInterface
*/
protected $_phraseRenderer;
/**
* @var \Magento\Framework\Mail\Template\FactoryInterface
*/
protected $_templateFactory;
/**
* @var \Magento\Framework\App\Config\ScopeConfigInterface
*/
protected $_scopeConfig;
/**
* @var \Magento\Email\Model\Template\Config
*/
protected $_emailConfig;
public function __construct(
\Magento\Framework\App\State $state,
\Magento\Framework\Phrase\RendererInterface $phraseRenderer,
\Magento\Framework\Mail\Template\FactoryInterface $templateFactory,
\Magento\Framework\App\Config\ScopeConfigInterface $scopeConfig,
\Magento\Email\Model\Template\Config $emailConfig,
$name = null
) {
parent::__construct($name);
$this->_state = $state;
$this->_phraseRenderer = $phraseRenderer;
$this->_templateFactory = $templateFactory;
$this->_scopeConfig = $scopeConfig;
$this->_emailConfig = $emailConfig;
}
/**
* @inheritdoc
*/
protected function configure()
{
$this->setName('email:dump');
$this->setDescription('Dump the content of a transactional email template');
$this->addOption(
'template',
't',
InputOption::VALUE_REQUIRED,
'Email template identifier'
);
$this->addOption(
'store',
's',
InputOption::VALUE_OPTIONAL,
'Store ID',
Store::DEFAULT_STORE_ID
);
$this->addOption(
'vars',
'i',
InputOption::VALUE_OPTIONAL,
'File that contains the variables to inject into the template (YAML)'
);
}
/**
* @inheritdoc
* @throws LocalizedException
*/
protected function execute(InputInterface $input, OutputInterface $output)
{
$this->_state->setAreaCode(\Magento\Framework\App\Area::AREA_FRONTEND);
if ($this->_scopeConfig->getValue(WorkflowType::CONFIG_NAME_PATH) === WorkflowType::CLIENT_SIDE_COMPILATION) {
throw new LocalizedException(__('Client side compilation is not supported for this command.'));
}
$templateId = $input->getOption('template');
$storeId = $input->getOption('store');
$variables = [];
if ($input->getOption('vars')) {
try {
$variables = \Symfony\Component\Yaml\Yaml::parse(file_get_contents($input->getOption('vars')));
} catch (\Symfony\Component\Yaml\Exception\ParseException $e) {
throw new LocalizedException(__('Error parsing variables file: %1', [$e->getMessage()]));
}
}
Phrase::setRenderer($this->_phraseRenderer);
$template = $this->_templateFactory
->get($templateId)
->setOptions(
[
'area' => $this->_emailConfig->getTemplateArea($templateId),
'store' => $storeId,
]
)
->setVars($variables);
echo $template->processTemplate();
}
}
package no.nav.syfo.config
import no.nav.syfo.kafka.NAV_CALLID
import org.slf4j.MDC
import org.springframework.core.annotation.Order
import org.springframework.http.HttpRequest
import org.springframework.http.client.ClientHttpRequestExecution
import org.springframework.http.client.ClientHttpRequestInterceptor
import org.springframework.http.client.ClientHttpResponse
import org.springframework.stereotype.Component
import java.io.IOException
import java.util.*
private const val REGISTER_CALL_ID = "Nav-Call-Id"
@Component
@Order
class CallIdInterceptor : ClientHttpRequestInterceptor {
@Throws(IOException::class)
override fun intercept(
httpRequest: HttpRequest,
bytes: ByteArray,
clientHttpRequestExecution: ClientHttpRequestExecution
): ClientHttpResponse {
Optional.ofNullable(MDC.get(NAV_CALLID))
.ifPresent { callid ->
httpRequest.headers.add(NAV_CALLID, callid)
httpRequest.headers.add(REGISTER_CALL_ID, callid)
}
return clientHttpRequestExecution.execute(httpRequest, bytes)
}
}
/*
* MIT License
*
* Copyright (c) 2020-present Cloudogu GmbH and Contributors
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all
* copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
*/
import configureMockStore from "redux-mock-store";
import thunk from "redux-thunk";
import fetchMock from "fetch-mock";
import reducer, {
CREATE_USER,
CREATE_USER_FAILURE,
CREATE_USER_PENDING,
CREATE_USER_SUCCESS,
createUser,
DELETE_USER,
DELETE_USER_FAILURE,
DELETE_USER_PENDING,
DELETE_USER_SUCCESS,
deleteUser,
deleteUserSuccess,
FETCH_USER,
FETCH_USER_FAILURE,
FETCH_USER_PENDING,
FETCH_USER_SUCCESS,
FETCH_USERS,
FETCH_USERS_FAILURE,
FETCH_USERS_PENDING,
FETCH_USERS_SUCCESS,
fetchUserByLink,
fetchUserByName,
fetchUsers,
fetchUsersSuccess,
fetchUserSuccess,
getCreateUserFailure,
getDeleteUserFailure,
getFetchUserFailure,
getFetchUsersFailure,
getModifyUserFailure,
getUserByName,
getUsersFromState,
isCreateUserPending,
isDeleteUserPending,
isFetchUserPending,
isFetchUsersPending,
isModifyUserPending,
isPermittedToCreateUsers,
MODIFY_USER,
MODIFY_USER_FAILURE,
MODIFY_USER_PENDING,
MODIFY_USER_SUCCESS,
modifyUser,
selectListAsCollection
} from "./users";
const userZaphod = {
active: true,
admin: true,
creationDate: "2018-07-11T12:23:49.027Z",
displayName: "Z. Beeblebrox",
mail: "[email protected]",
name: "zaphod",
password: "",
type: "xml",
properties: {},
_links: {
self: {
href: "http://localhost:8081/api/v2/users/zaphod"
},
delete: {
href: "http://localhost:8081/api/v2/users/zaphod"
},
update: {
href: "http://localhost:8081/api/v2/users/zaphod"
}
}
};
const userFord = {
active: true,
admin: false,
creationDate: "2018-07-06T13:21:18.459Z",
displayName: "F. Prefect",
mail: "[email protected]",
name: "ford",
password: "",
type: "xml",
properties: {},
_links: {
self: {
href: "http://localhost:8081/api/v2/users/ford"
},
delete: {
href: "http://localhost:8081/api/v2/users/ford"
},
update: {
href: "http://localhost:8081/api/v2/users/ford"
}
}
};
const responseBody = {
page: 0,
pageTotal: 1,
_links: {
self: {
href: "http://localhost:3000/api/v2/users/?page=0&pageSize=10"
},
first: {
href: "http://localhost:3000/api/v2/users/?page=0&pageSize=10"
},
last: {
href: "http://localhost:3000/api/v2/users/?page=0&pageSize=10"
},
create: {
href: "http://localhost:3000/api/v2/users/"
}
},
_embedded: {
users: [userZaphod, userFord]
}
};
const response = {
headers: {
"content-type": "application/json"
},
responseBody
};
const URL = "users";
const USERS_URL = "/api/v2/users";
const USER_ZAPHOD_URL = "http://localhost:8081/api/v2/users/zaphod";
const error = new Error("KAPUTT");
describe("users fetch()", () => {
const mockStore = configureMockStore([thunk]);
afterEach(() => {
fetchMock.reset();
fetchMock.restore();
});
it("should successfully fetch users", () => {
fetchMock.getOnce(USERS_URL, response);
const expectedActions = [
{
type: FETCH_USERS_PENDING
},
{
type: FETCH_USERS_SUCCESS,
payload: response
}
];
const store = mockStore({});
return store.dispatch(fetchUsers(URL)).then(() => {
expect(store.getActions()).toEqual(expectedActions);
});
});
it("should fail getting users on HTTP 500", () => {
fetchMock.getOnce(USERS_URL, {
status: 500
});
const store = mockStore({});
return store.dispatch(fetchUsers(URL)).then(() => {
const actions = store.getActions();
expect(actions[0].type).toEqual(FETCH_USERS_PENDING);
expect(actions[1].type).toEqual(FETCH_USERS_FAILURE);
expect(actions[1].payload).toBeDefined();
});
});
it("should sucessfully fetch single user by name", () => {
fetchMock.getOnce(USERS_URL + "/zaphod", userZaphod);
const store = mockStore({});
return store.dispatch(fetchUserByName(URL, "zaphod")).then(() => {
const actions = store.getActions();
expect(actions[0].type).toEqual(FETCH_USER_PENDING);
expect(actions[1].type).toEqual(FETCH_USER_SUCCESS);
expect(actions[1].payload).toBeDefined();
});
});
it("should fail fetching single user by name on HTTP 500", () => {
fetchMock.getOnce(USERS_URL + "/zaphod", {
status: 500
});
const store = mockStore({});
return store.dispatch(fetchUserByName(URL, "zaphod")).then(() => {
const actions = store.getActions();
expect(actions[0].type).toEqual(FETCH_USER_PENDING);
expect(actions[1].type).toEqual(FETCH_USER_FAILURE);
expect(actions[1].payload).toBeDefined();
});
});
it("should sucessfully fetch single user", () => {
fetchMock.getOnce(USER_ZAPHOD_URL, userZaphod);
const store = mockStore({});
return store.dispatch(fetchUserByLink(userZaphod)).then(() => {
const actions = store.getActions();
expect(actions[0].type).toEqual(FETCH_USER_PENDING);
expect(actions[1].type).toEqual(FETCH_USER_SUCCESS);
expect(actions[1].payload).toBeDefined();
});
});
it("should fail fetching single user on HTTP 500", () => {
fetchMock.getOnce(USER_ZAPHOD_URL, {
status: 500
});
const store = mockStore({});
return store.dispatch(fetchUserByLink(userZaphod)).then(() => {
const actions = store.getActions();
expect(actions[0].type).toEqual(FETCH_USER_PENDING);
expect(actions[1].type).toEqual(FETCH_USER_FAILURE);
expect(actions[1].payload).toBeDefined();
});
});
it("should add a user successfully", () => {
// unmatched
fetchMock.postOnce(USERS_URL, {
status: 204
});
// after create, the users are fetched again
fetchMock.getOnce(USERS_URL, response);
const store = mockStore({});
return store.dispatch(createUser(URL, userZaphod)).then(() => {
const actions = store.getActions();
expect(actions[0].type).toEqual(CREATE_USER_PENDING);
expect(actions[1].type).toEqual(CREATE_USER_SUCCESS);
});
});
it("should fail adding a user on HTTP 500", () => {
fetchMock.postOnce(USERS_URL, {
status: 500
});
const store = mockStore({});
return store.dispatch(createUser(URL, userZaphod)).then(() => {
const actions = store.getActions();
expect(actions[0].type).toEqual(CREATE_USER_PENDING);
expect(actions[1].type).toEqual(CREATE_USER_FAILURE);
expect(actions[1].payload).toBeDefined();
});
});
it("should call the callback after user successfully created", () => {
// unmatched
fetchMock.postOnce(USERS_URL, {
status: 204
});
let callMe = "not yet";
const callback = () => {
callMe = "yeah";
};
const store = mockStore({});
return store.dispatch(createUser(URL, userZaphod, callback)).then(() => {
expect(callMe).toBe("yeah");
});
});
it("successfully update user", () => {
fetchMock.putOnce(USER_ZAPHOD_URL, {
status: 204
});
fetchMock.getOnce(USER_ZAPHOD_URL, userZaphod);
const store = mockStore({});
return store.dispatch(modifyUser(userZaphod)).then(() => {
const actions = store.getActions();
expect(actions.length).toBe(3);
expect(actions[0].type).toEqual(MODIFY_USER_PENDING);
expect(actions[1].type).toEqual(MODIFY_USER_SUCCESS);
expect(actions[2].type).toEqual(FETCH_USER_PENDING);
});
});
it("should call callback, after successful modified user", () => {
fetchMock.putOnce(USER_ZAPHOD_URL, {
status: 204
});
fetchMock.getOnce(USER_ZAPHOD_URL, userZaphod);
let called = false;
const callMe = () => {
called = true;
};
const store = mockStore({});
return store.dispatch(modifyUser(userZaphod, callMe)).then(() => {
expect(called).toBeTruthy();
});
});
it("should fail updating user on HTTP 500", () => {
fetchMock.putOnce(USER_ZAPHOD_URL, {
status: 500
});
const store = mockStore({});
return store.dispatch(modifyUser(userZaphod)).then(() => {
const actions = store.getActions();
expect(actions[0].type).toEqual(MODIFY_USER_PENDING);
expect(actions[1].type).toEqual(MODIFY_USER_FAILURE);
expect(actions[1].payload).toBeDefined();
});
});
it("should delete successfully user zaphod", () => {
fetchMock.deleteOnce(USER_ZAPHOD_URL, {
status: 204
});
const store = mockStore({});
return store.dispatch(deleteUser(userZaphod)).then(() => {
const actions = store.getActions();
expect(actions.length).toBe(2);
expect(actions[0].type).toEqual(DELETE_USER_PENDING);
expect(actions[0].payload).toBe(userZaphod);
expect(actions[1].type).toEqual(DELETE_USER_SUCCESS);
});
});
it("should call the callback, after successful delete", () => {
fetchMock.deleteOnce(USER_ZAPHOD_URL, {
status: 204
});
let called = false;
const callMe = () => {
called = true;
};
const store = mockStore({});
return store.dispatch(deleteUser(userZaphod, callMe)).then(() => {
expect(called).toBeTruthy();
});
});
it("should fail to delete user zaphod", () => {
fetchMock.deleteOnce(USER_ZAPHOD_URL, {
status: 500
});
const store = mockStore({});
return store.dispatch(deleteUser(userZaphod)).then(() => {
const actions = store.getActions();
expect(actions[0].type).toEqual(DELETE_USER_PENDING);
expect(actions[0].payload).toBe(userZaphod);
expect(actions[1].type).toEqual(DELETE_USER_FAILURE);
expect(actions[1].payload).toBeDefined();
});
});
});
describe("users reducer", () => {
it("should update state correctly according to FETCH_USERS_SUCCESS action", () => {
const newState = reducer({}, fetchUsersSuccess(responseBody));
expect(newState.list).toEqual({
entries: ["zaphod", "ford"],
entry: {
userCreatePermission: true,
page: 0,
pageTotal: 1,
_links: responseBody._links
}
});
expect(newState.byNames).toEqual({
zaphod: userZaphod,
ford: userFord
});
expect(newState.list.entry.userCreatePermission).toBeTruthy();
});
it("should set userCreatePermission to true if update link is present", () => {
const newState = reducer({}, fetchUsersSuccess(responseBody));
expect(newState.list.entry.userCreatePermission).toBeTruthy();
});
it("should not replace whole byNames map when fetching users", () => {
const oldState = {
byNames: {
ford: userFord
}
};
const newState = reducer(oldState, fetchUsersSuccess(responseBody));
expect(newState.byNames["zaphod"]).toBeDefined();
expect(newState.byNames["ford"]).toBeDefined();
});
it("should remove user from state when delete succeeds", () => {
const state = {
list: {
entries: ["ford", "zaphod"]
},
byNames: {
zaphod: userZaphod,
ford: userFord
}
};
const newState = reducer(state, deleteUserSuccess(userFord));
expect(newState.byNames["zaphod"]).toBeDefined();
expect(newState.byNames["ford"]).toBeFalsy();
expect(newState.list.entries).toEqual(["zaphod"]);
});
it("should set userCreatePermission to true if create link is present", () => {
const newState = reducer({}, fetchUsersSuccess(responseBody));
expect(newState.list.entry.userCreatePermission).toBeTruthy();
expect(newState.list.entries).toEqual(["zaphod", "ford"]);
expect(newState.byNames["ford"]).toBeTruthy();
expect(newState.byNames["zaphod"]).toBeTruthy();
});
it("should update state according to FETCH_USER_SUCCESS action", () => {
const newState = reducer({}, fetchUserSuccess(userFord));
expect(newState.byNames["ford"]).toBe(userFord);
});
it("should affect users state nor the state of other users", () => {
const newState = reducer(
{
list: {
entries: ["zaphod"]
}
},
fetchUserSuccess(userFord)
);
expect(newState.byNames["ford"]).toBe(userFord);
expect(newState.list.entries).toEqual(["zaphod"]);
});
});
describe("selector tests", () => {
it("should return an empty object", () => {
expect(selectListAsCollection({})).toEqual({});
expect(
selectListAsCollection({
users: {
a: "a"
}
})
).toEqual({});
});
it("should return a state slice collection", () => {
const collection = {
page: 3,
totalPages: 42
};
const state = {
users: {
list: {
entry: collection
}
}
};
expect(selectListAsCollection(state)).toBe(collection);
});
it("should return false", () => {
expect(isPermittedToCreateUsers({})).toBe(false);
expect(
isPermittedToCreateUsers({
users: {
list: {
entry: {}
}
}
})
).toBe(false);
expect(
isPermittedToCreateUsers({
users: {
list: {
entry: {
userCreatePermission: false
}
}
}
})
).toBe(false);
});
it("should return true", () => {
const state = {
users: {
list: {
entry: {
userCreatePermission: true
}
}
}
};
expect(isPermittedToCreateUsers(state)).toBe(true);
});
it("should get users from state", () => {
const state = {
users: {
list: {
entries: ["a", "b"]
},
byNames: {
a: {
name: "a"
},
b: {
name: "b"
}
}
}
};
expect(getUsersFromState(state)).toEqual([
{
name: "a"
},
{
name: "b"
}
]);
});
it("should return true, when fetch users is pending", () => {
const state = {
pending: {
[FETCH_USERS]: true
}
};
expect(isFetchUsersPending(state)).toEqual(true);
});
it("should return false, when fetch users is not pending", () => {
expect(isFetchUsersPending({})).toEqual(false);
});
it("should return error when fetch users did fail", () => {
const state = {
failure: {
[FETCH_USERS]: error
}
};
expect(getFetchUsersFailure(state)).toEqual(error);
});
it("should return undefined when fetch users did not fail", () => {
expect(getFetchUsersFailure({})).toBe(undefined);
});
it("should return true if create user is pending", () => {
const state = {
pending: {
[CREATE_USER]: true
}
};
expect(isCreateUserPending(state)).toBe(true);
});
it("should return false if create user is not pending", () => {
const state = {
pending: {
[CREATE_USER]: false
}
};
expect(isCreateUserPending(state)).toBe(false);
});
it("should return error when create user did fail", () => {
const state = {
failure: {
[CREATE_USER]: error
}
};
expect(getCreateUserFailure(state)).toEqual(error);
});
it("should return undefined when create user did not fail", () => {
expect(getCreateUserFailure({})).toBe(undefined);
});
it("should return user ford", () => {
const state = {
users: {
byNames: {
ford: userFord
}
}
};
expect(getUserByName(state, "ford")).toEqual(userFord);
});
it("should return true, when fetch user zaphod is pending", () => {
const state = {
pending: {
[FETCH_USER + "/zaphod"]: true
}
};
expect(isFetchUserPending(state, "zaphod")).toEqual(true);
});
it("should return false, when fetch user zaphod is not pending", () => {
expect(isFetchUserPending({}, "zaphod")).toEqual(false);
});
it("should return error when fetch user zaphod did fail", () => {
const state = {
failure: {
[FETCH_USER + "/zaphod"]: error
}
};
expect(getFetchUserFailure(state, "zaphod")).toEqual(error);
});
it("should return undefined when fetch user zaphod did not fail", () => {
expect(getFetchUserFailure({}, "zaphod")).toBe(undefined);
});
it("should return true, when modify user ford is pending", () => {
const state = {
pending: {
[MODIFY_USER + "/ford"]: true
}
};
expect(isModifyUserPending(state, "ford")).toEqual(true);
});
it("should return false, when modify user ford is not pending", () => {
expect(isModifyUserPending({}, "ford")).toEqual(false);
});
it("should return error when modify user ford did fail", () => {
const state = {
failure: {
[MODIFY_USER + "/ford"]: error
}
};
expect(getModifyUserFailure(state, "ford")).toEqual(error);
});
it("should return undefined when modify user ford did not fail", () => {
expect(getModifyUserFailure({}, "ford")).toBe(undefined);
});
it("should return true, when delete user zaphod is pending", () => {
const state = {
pending: {
[DELETE_USER + "/zaphod"]: true
}
};
expect(isDeleteUserPending(state, "zaphod")).toEqual(true);
});
it("should return false, when delete user zaphod is not pending", () => {
expect(isDeleteUserPending({}, "zaphod")).toEqual(false);
});
it("should return error when delete user zaphod did fail", () => {
const state = {
failure: {
[DELETE_USER + "/zaphod"]: error
}
};
expect(getDeleteUserFailure(state, "zaphod")).toEqual(error);
});
it("should return undefined when delete user zaphod did not fail", () => {
expect(getDeleteUserFailure({}, "zaphod")).toBe(undefined);
});
});
// NOTE: This file is not used in production
// It's just here to allow elm-live to work (`yarn memberui`)
import { notifyAuthStatus } from "../auth";
export { notifyAuthStatus } from '../auth';
var app = Elm.Flux.MemberUI.Main.fullscreen();
notifyAuthStatus(app);
//# sourceMappingURL=scripts.js.map
"use strict";
exports.__esModule = true;
exports.StatementExpressionList = void 0;
var statement_expression_list_children_model_1 = require("./statement-expression-list-children.model");
var location_model_1 = require("./location.model");
var StatementExpressionList = /** @class */ (function () {
function StatementExpressionList() {
this.name = '';
this.children = [new statement_expression_list_children_model_1.StatementExpressionListChildren()];
this.location = new location_model_1.Location();
}
return StatementExpressionList;
}());
exports.StatementExpressionList = StatementExpressionList;
// Copyright 2017 Google Inc. All rights reserved.
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
import 'dart:html';
import 'package:pageloader/pageloader.dart';
import 'html_mouse.dart';
import 'html_page_loader_element.dart';
import 'html_pointer.dart';
/// Support for [PageUtils] in HTML context.
class HtmlPageUtils extends PageUtils {
final SyncFn<dynamic> syncFn;
HtmlPageLoaderElement _cachedRoot;
HtmlPageUtils({SyncFn<dynamic> externalSyncFn = noOpExecuteSyncedFn})
: syncFn = externalSyncFn;
/// Gets the body for the current page.
///
/// Caches the [HtmlPageLoaderElement] used between calls to allow listeners
/// to persist.
@override
HtmlPageLoaderElement get root {
_cachedRoot ??= HtmlPageLoaderElement.createFromElement(document.body,
externalSyncFn: syncFn);
return _cachedRoot;
}
/// Gets the element on the DOM that currently has focus.
@override
HtmlPageLoaderElement get focused {
return HtmlPageLoaderElement.createFromElement(document.activeElement,
externalSyncFn: syncFn);
}
/// Gets the current root element for the DOM.
///
/// This is the element you should pass in your tests to create new page objects.
@override
PageLoaderElement byTag(String tag) =>
HtmlPageLoaderElement.createFromElement(document.body,
externalSyncFn: syncFn)
.getElementsByCss(tag)
.single;
/// Gets the mouse.
@override
PageLoaderMouse get mouse => globalMouse(syncFn);
/// Gets the pointer.
@override
PageLoaderPointer get pointer => globalPointer(syncFn);
}
package com.blueberrysolution.pinelib19.sqlite.exposed
import org.jetbrains.exposed.sql.*
import org.jetbrains.exposed.sql.transactions.transaction
import org.jetbrains.exposed.sql.SchemaUtils.create
import org.jetbrains.exposed.sql.SchemaUtils.drop
class ExposedTest {
fun test(){
Database.connect("jdbc:h2:mem:test", driver = "org.h2.Driver")
transaction {
create (Cities, Users)
val saintPetersburgId = Cities.insert {
it[name] = "St. Petersburg"
} get Cities.id
val munichId = Cities.insert {
it[name] = "Munich"
} get Cities.id
Cities.insert {
it[name] = "Prague"
}
Users.insert {
it[id] = "andrey"
it[name] = "Andrey"
it[cityId] = saintPetersburgId
}
Users.insert {
it[id] = "sergey"
it[name] = "Sergey"
it[cityId] = munichId
}
Users.insert {
it[id] = "eugene"
it[name] = "Eugene"
it[cityId] = munichId
}
Users.insert {
it[id] = "alex"
it[name] = "Alex"
it[cityId] = null
}
Users.insert {
it[id] = "smth"
it[name] = "Something"
it[cityId] = null
}
Users.update({Users.id eq "alex"}) {
it[name] = "Alexey"
}
Users.deleteWhere{Users.name like "%thing"}
println("All cities:")
for (city in Cities.selectAll()) {
println("${city[Cities.id]}: ${city[Cities.name]}")
}
println("Manual join:")
(Users innerJoin Cities).slice(Users.name, Cities.name).
select {(Users.id.eq("andrey") or Users.name.eq("Sergey")) and
Users.id.eq("sergey") and Users.cityId.eq(Cities.id)}.forEach {
println("${it[Users.name]} lives in ${it[Cities.name]}")
}
println("Join with foreign key:")
(Users innerJoin Cities).slice(Users.name, Users.cityId, Cities.name).
select {Cities.name.eq("St. Petersburg") or Users.cityId.isNull()}.forEach {
if (it[Users.cityId] != null) {
println("${it[Users.name]} lives in ${it[Cities.name]}")
}
else {
println("${it[Users.name]} lives nowhere")
}
}
println("Functions and group by:")
((Cities innerJoin Users).slice(Cities.name, Users.id.count()).selectAll().groupBy(Cities.name)).forEach {
val cityName = it[Cities.name]
val userCount = it[Users.id.count()]
if (userCount > 0) {
println("$userCount user(s) live(s) in $cityName")
} else {
println("Nobody lives in $cityName")
}
}
drop (Users, Cities)
}
}
}
package ratelimiter
import (
"testing"
"time"
"github.com/axiaoxin-com/goutils"
"github.com/gin-gonic/gin"
"github.com/stretchr/testify/assert"
)
func TestGinMemRatelimiter(t *testing.T) {
gin.SetMode(gin.ReleaseMode)
r := gin.New()
r.Use(GinMemRatelimiter(GinRatelimiterConfig{
TokenBucketConfig: func(c *gin.Context) (time.Duration, int) {
return 1 * time.Second, 1
},
}))
r.GET("/", func(c *gin.Context) {
c.JSON(200, "hi")
})
time.Sleep(1 * time.Second)
recorder, err := goutils.RequestHTTPHandler(r, "GET", "/", nil, nil)
assert.Nil(t, err)
assert.Equal(t, recorder.Code, 200)
recorder, err = goutils.RequestHTTPHandler(r, "GET", "/", nil, nil)
assert.Nil(t, err)
assert.Equal(t, recorder.Code, 429)
time.Sleep(1 * time.Second)
recorder, err = goutils.RequestHTTPHandler(r, "GET", "/", nil, nil)
assert.Nil(t, err)
assert.Equal(t, recorder.Code, 200)
}
// TARGET_BACKEND: JVM_IR
// FILE: A.java
public class A {
@Override
public String toString() {
return "O";
}
}
// FILE: B.java
public class B {
@Override
public String toString() {
return "K";
}
}
// FILE: main.kt
fun test(x: Any): String {
return when (x) {
is A -> x.toString()
is B -> x.toString()
else -> "fail"
}
}
fun box(): String {
return test(A()) + test(B())
}
import { I18nResolver } from '../index';
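// Resolves the request language from the first configured query parameter
// present on the incoming request.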
export class QueryResolver implements I18nResolver {
constructor(private keys: string[]) {}
resolve(req: any) {
let lang: string | undefined;
for (const key of this.keys) {
if (req.query != undefined && req.query[key] !== undefined) {
lang = req.query[key];
break;
}
}
return lang;
}
}
using EdFi.SampleDataGenerator.Core.Config;
namespace EdFi.SampleDataGenerator.Core.Helpers
{
public static class SchoolProfileHelpers
{
public static string GetSchoolEntityId(this ISchoolProfile schoolProfile)
{
return $"SCOL_{schoolProfile.SchoolId}";
}
}
}
package control.free
import cats.free.{Free, Inject}
import cats.{Id, ~>}
import CounterA._
sealed trait CounterA[A]
object CounterA {
final case class Set(n: Int) extends CounterA[Unit]
final case class Add(n: Int) extends CounterA[Unit]
final case class Subtract(n: Int) extends CounterA[Unit]
final case object Get extends CounterA[Int]
final case object Reset extends CounterA[Unit]
}
class Counter[F[_]](implicit I: Inject[CounterA, F]) {
def set(n: Int): Free[F, Unit] = Free.inject[CounterA, F](Set(n))
def add(n: Int): Free[F, Unit] = Free.inject[CounterA, F](Add(n))
def subtract(n: Int): Free[F, Unit] = Free.inject[CounterA, F](Subtract(n))
def get: Free[F, Int] = Free.inject[CounterA, F](Get)
def reset: Free[F, Unit] = Free.inject[CounterA, F](Reset)
}
object Counter {
implicit def counter[F[_]](implicit I: Inject[CounterA, F]): Counter[F] = new Counter[F]
def interpreter: Interpreter = new Interpreter
class Interpreter extends (CounterA ~> Id) {
var curN: Int = _
def apply[A](fa: CounterA[A]): Id[A] = fa match {
case Set(n) =>
curN = n
()
case Add(n) =>
curN = curN + n
()
case Subtract(n) =>
curN = curN - n
()
case Get =>
curN
case Reset =>
curN = 0
()
}
}
object Interpreter {
def copy: Interpreter => Interpreter = i => {
val res = new Interpreter
res.curN = i.curN
res
}
}
}
#!/usr/bin/env bash
echo "build and uploadArchives..."
./gradlew uploadArchives -PRELEASE_REPOSITORY_URL=file:///debug/ -PSNAPSHOT_REPOSITORY_URL=file:///debug/
if [ $? -eq 0 ]
then
echo "deploy successful!"
exit 0
else
echo "deploy failed!"
exit 1
fi
export * from './PostHeader';
export * from './PostBody';
#!/bin/bash
set -eo pipefail
shopt -s nullglob
#
# populates an environment variable from a file useful with docker secrets
#
secretDebug()
{
if [ ! -z "$ENV_SECRETS_DEBUG" ]; then
echo -e "\033[1m$@\033[0m"
echo
fi
}
getSecrets () {
for env_var in $(printenv | cut -f1 -d"=" | grep _FILE)
do
name="$env_var"
eval value=\$$name
if [ -f "$value" ]; then
value=$(cat "${value}")
export "${name%_FILE}"="$value"
unset $name
secretDebug "Expanded Secret! ${name%_FILE}=$value"
else
secretDebug "Secret file does not exist! $value"
fi
done
}
ENV_SECRETS_DEBUG=""
getSecrets
echo "Executing Docker CMD"
exec "$@"
#pragma once
#include "core/utils/RingBuffer.h"
#include "core/midi/MidiMessage.h"
#include <array>
#include <algorithm>
#include <cstdint>
#include <cinttypes>
class RecordHistory {
public:
enum class Type : uint8_t {
NoteOn,
NoteOff,
};
struct Event {
uint32_t tick;
Type type;
int8_t note;
};
RecordHistory() {
clear();
}
void clear() {
_activeNote = -1;
_size = 0;
_write = 0;
}
size_t size() const { return _size; }
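// Records a note event; if a new note starts while another is still active,
// an implicit NoteOff for the previous note is written first (recording is
// monophonic).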
void write(uint32_t tick, Type type, int note) {
if (note < 0 || note > 127) {
return;
}
switch (type) {
case Type::NoteOn:
if (_activeNote >= 0 && _activeNote != note) {
write({ tick, Type::NoteOff, _activeNote });
}
_activeNote = note;
write({ tick, Type::NoteOn, int8_t(note) });
break;
case Type::NoteOff:
if (_activeNote == note) {
_activeNote = -1;
write({ tick, Type::NoteOff, int8_t(note) });
}
break;
}
}
void write(uint32_t tick, const MidiMessage &message) {
if (message.isNoteOn()) {
write(tick, Type::NoteOn, message.note());
} else if (message.isNoteOff()) {
write(tick, Type::NoteOff, message.note());
}
}
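// Index 0 addresses the oldest stored event and size() - 1 the most recent;
// the modulo arithmetic maps logical indices onto ring buffer slots.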
const Event &operator[](int index) const {
return _events[(_write + index - _size + _events.size()) % _events.size()];
}
bool isNoteActive() const {
return _activeNote >= 0;
}
int activeNote() const {
return _activeNote;
}
private:
void write(const Event &event) {
_events[_write] = event;
_write = (_write + 1) % _events.size();
_size = std::min(_events.size(), _size + 1);
}
int8_t _activeNote;
size_t _size;
size_t _write;
std::array<Event, 4> _events;
};
use std::collections::HashMap;
use ryson::{Json,Jerr};
#[test]
fn accepts_null(){
let text = String::from("null");
let json = Json::parse(&text).unwrap();
assert_eq!(json,Json::Null);
}
#[test]
fn accepts_true(){
let text = String::from("true");
let json = Json::parse(&text).unwrap();
assert_eq!(json,Json::Bool(true));
}
#[test]
fn accepts_false(){
let text = String::from("false");
let json = Json::parse(&text).unwrap();
assert_eq!(json,Json::Bool(false));
}
#[test]
fn throws_error_on_unknown_keyword(){
let text = String::from("True");
let jerr = Json::parse(&text).unwrap_err();
assert_eq!(jerr,Jerr::UnexpectedChar(0));
}
#[test]
fn accepts_integers(){
let text = String::from("1024");
let json = Json::parse(&text).unwrap();
assert_eq!(json,Json::Number(String::from("1024")));
}
#[test]
fn error_on_non_digits_after_digits(){
let text = String::from("4534h");
let jerr = Json::parse(&text).unwrap_err();
assert_eq!(jerr,Jerr::ExpectedEnd(4));
}
#[test]
fn error_on_non_zero_starting_with_zero(){
let text = String::from("0916");
let jerr = Json::parse(&text).unwrap_err();
assert_eq!(jerr,Jerr::InvalidToken(String::from("0916")));
}
#[test]
fn accepts_rationals(){
let text = String::from("16.824");
let json = Json::parse(&text).unwrap();
assert_eq!(json,Json::Number(String::from("16.824")));
}
#[test]
fn error_on_ending_dot(){
let text = String::from("1624.");
let jerr = Json::parse(&text).unwrap_err();
assert_eq!(jerr,Jerr::InvalidToken(String::from("1624.")));
}
#[test]
fn error_on_beginning_dot(){
let text = String::from(".234567");
let jerr = Json::parse(&text).unwrap_err();
assert_eq!(jerr,Jerr::UnexpectedChar(0));
}
#[test]
fn error_on_multiple_dots(){
let text = String::from("23.456.7");
let jerr = Json::parse(&text).unwrap_err();
assert_eq!(jerr,Jerr::ExpectedEnd(6));
}
#[test]
fn accepts_strings(){
let text = String::from("\"hello world\"");
let json = Json::parse(&text).unwrap();
assert_eq!(json,Json::String(String::from("hello world")));
}
#[test]
fn unexpected_end_of_string(){
let text = String::from("\"hello world");
let jerr = Json::parse(&text).unwrap_err();
assert_eq!(jerr,Jerr::UnexpectedEnd);
}
#[test]
fn error_on_unterminated_string_with_newline(){
let text = String::from("\"hello \nworld");
let jerr = Json::parse(&text).unwrap_err();
assert_eq!(jerr,Jerr::UnexpectedEnd);
}
#[test]
fn escapes_back_slash_quote(){
let text = String::from("\"a quote is a \\\" sign\"");
let json = Json::parse(&text).unwrap();
assert_eq!(json,Json::String(String::from("a quote is a \" sign")));
}
#[test]
fn escapes_double_back_slash(){
let text = String::from("\"a backslash is a \\\\ sign\"");
let json = Json::parse(&text).unwrap();
assert_eq!(json,Json::String(String::from("a backslash is a \\ sign")));
}
#[test]
fn escapes_carriagereturn_tab_newline_formfeed_backspace(){
let text = String::from("\"escaped:\\n\\thello\\b\\ftext file\\r\"");
let json = Json::parse(&text).unwrap();
assert_eq!(json,Json::String(String::from("escaped:\n\thello\x08\x0Ctext file\r")));
}
#[test]
fn escapes_unicode(){
let text = String::from("\"this is theta : \\u03F4\"");
let json = Json::parse(&text).unwrap();
assert_eq!(json,Json::String(String::from("this is theta : ϴ")));
}
#[test]
fn error_on_invalid_unicode(){
let text = String::from("\"this is invalid : \\u93G4\"");
let jerr = Json::parse(&text).unwrap_err();
assert_eq!(jerr,Jerr::InvalidUnicodeSequence(String::from("93G4")));
}
#[test]
fn error_on_unknown_escape(){
let text = String::from("\"I don't know \\a\"");
let jerr = Json::parse(&text).unwrap_err();
assert_eq!(jerr,Jerr::UnknownEscape('a'));
}
#[test]
fn single_element_array(){
let text = String::from("[false]");
let json = Json::parse(&text).unwrap();
assert_eq!(json,Json::Array(vec![Json::Bool(false)]));
}
#[test]
fn multi_element_array(){
let text = String::from(
"[true,1444,\"third element\"]"
);
let json = Json::parse(&text).unwrap();
assert_eq!(json,Json::Array(vec![
Json::Bool(true),
Json::Number(String::from("1444")),
Json::String(String::from("third element"))
]));
}
#[test]
fn ignore_white_space_newline(){
let text = String::from(
"[true, 1444\n, \"third element\"\n\n ]"
);
let json = Json::parse(&text).unwrap();
assert_eq!(json,Json::Array(vec![
Json::Bool(true),
Json::Number(String::from("1444")),
Json::String(String::from("third element"))
]));
}
#[test]
fn error_on_not_ended_array(){
let text = String::from(
"[true, 1444\n, \"third element\"\n\n "
);
let jerr = Json::parse(&text).unwrap_err();
assert_eq!(jerr,Jerr::UnexpectedEnd);
}
#[test]
fn error_on_multiple_commas(){
let text = String::from(
"[true, 1444\n, , \"third element\"\n\n "
);
let jerr = Json::parse(&text).unwrap_err();
assert_eq!(jerr,Jerr::ExpectedValue(15));
}
#[test]
fn error_on_multiple_value(){
let text = String::from(
"[true, 1444\n \"third element\"\n\n "
);
let jerr = Json::parse(&text).unwrap_err();
assert_eq!(jerr,Jerr::ExpectedCommaOrEnd(14));
}
#[test]
fn accept_nested_arrays(){
let text = String::from("[\n [false]\n]");
let json = Json::parse(&text).unwrap();
assert_eq!(json,Json::Array(vec![Json::Array(vec![Json::Bool(false)])]));
}
#[test]
fn accepts_empty_array(){
let text = String::from("[]");
let json = Json::parse(&text).unwrap();
assert_eq!(json,Json::Array(vec![]));
}
#[test]
fn accepts_single_field_objects(){
let text = String::from("{\"port\":8080}");
let json = Json::parse(&text).unwrap();
let mut map = HashMap::new();
map.insert(String::from("port"), Json::Number(String::from("8080")));
assert_eq!(json,Json::Object(map));
}
#[test]
fn error_on_missing_colon(){
let text = String::from("{\"port\",8080}");
let jerr = Json::parse(&text).unwrap_err();
assert_eq!(jerr,Jerr::ExpectedColon(7));
}
#[test]
fn error_on_invalid_property_identifier(){
let text = String::from("{3,8080}");
let jerr = Json::parse(&text).unwrap_err();
assert_eq!(jerr,Jerr::ExpectedProperty(1));
}
#[test]
fn error_on_missing_property(){
let text = String::from("{\"host\":}");
let jerr = Json::parse(&text).unwrap_err();
assert_eq!(jerr,Jerr::ExpectedValue(8));
}
#[test]
fn accepts_multi_field_objects(){
let text = String::from("{\"port\":80,\n\"host\":\"localhost\"}");
let json = Json::parse(&text).unwrap();
let mut map = HashMap::new();
map.insert(String::from("port"), Json::Number(String::from("80")));
map.insert(String::from("host"), Json::String(String::from("localhost")));
assert_eq!(json,Json::Object(map));
}
#[test]
fn accepts_object_array_property(){
let text = String::from("{\"port\":80,\n\"host\":[\"localhost\",true]}");
let json = Json::parse(&text).unwrap();
let mut map = HashMap::new();
let arr = vec![Json::String(String::from("localhost")),Json::Bool(true)];
map.insert(String::from("port"), Json::Number(String::from("80")));
map.insert(String::from("host"), Json::Array(arr));
assert_eq!(json,Json::Object(map));
}
#[test]
fn accepts_nested_objects(){
let text = String::from("{\"port\":80,\n\"host\":{\"localhost\":true}}");
let json = Json::parse(&text).unwrap();
let mut map = HashMap::new();
let mut inner_map = HashMap::new();
inner_map.insert(String::from("localhost"), Json::Bool(true));
map.insert(String::from("port"), Json::Number(String::from("80")));
map.insert(String::from("host"), Json::Object(inner_map));
assert_eq!(json,Json::Object(map));
}
#[test]
fn accepts_array_with_object_element(){
let text = String::from("[{\"version\":\"1.10.3\"}]");
let json = Json::parse(&text).unwrap();
let mut inner_map = HashMap::new();
inner_map.insert(String::from("version"), Json::String(String::from("1.10.3")));
let arr = Json::Array(vec![Json::Object(inner_map)]);
assert_eq!(json,arr);
}
#[test]
fn accepts_empty_object(){
let text = String::from("{}");
let json = Json::parse(&text).unwrap();
assert_eq!(json,Json::Object(HashMap::new()));
}
#[test]
fn to_string_null(){
let json = Json::Null;
let text = json.to_string();
assert_eq!(text,String::from("null"));
}
#[test]
fn to_string_boolean(){
let json = Json::Bool(false);
let text = json.to_string();
assert_eq!(text,String::from("false"));
}
#[test]
fn to_string_number(){
let num = String::from("2535.99");
let json = Json::Number(num.clone());
let text = json.to_string();
assert_eq!(text,num);
}
#[test]
fn to_string_string(){
let str = String::from("name:foo\nlname:bar");
let json = Json::String(str.clone());
let text = json.to_string();
assert_eq!(text,format!("\"{}\"",str));
}
#[test]
fn to_string_empty_array(){
let json = Json::Array(vec![]);
let text = json.to_string();
assert_eq!(text,"[]");
}
#[test]
fn to_string_non_empty_array(){
let arr = "[2343,true,\"foo\"]";
let json = Json::Array(vec![
Json::Number(String::from("2343")),
Json::Bool(true),
Json::String(String::from("foo")),
]);
let text = json.to_string();
assert_eq!(text,arr);
}
#[test]
fn to_string_empty_object(){
let json = Json::Object(HashMap::new());
let text = json.to_string();
assert_eq!(text,"{}");
}
#[test]
fn to_string_non_empty_object(){
let str1 = "{host:\"http://localhost\",port:80}";
let str2 = "{port:80,host:\"http://localhost\"}";
let mut map: HashMap<String,Json> = HashMap::new();
map.insert(String::from("host"), Json::String(String::from("http://localhost")));
map.insert(String::from("port"), Json::Number(String::from("80")));
let json = Json::Object(map);
let text = json.to_string();
assert!(text == str1 || text == str2);
}
# Real-time face recognition based on Facenet and an SVM
For a detailed write-up, see the article [SVM, Pickle vs HDF5, performance and logging](https://www.imooc.com/article/286128) or the [project wiki](https://github.com/seed-fe/face_recognition_using_opencv_keras_scikit-learn/wiki).
There is also a Facenet+KNN variant on the [master branch](https://github.com/seed-fe/face_recognition_using_opencv_keras_scikit-learn/tree/master) and a simple-CNN variant on the [using-simple-convnet branch](https://github.com/seed-fe/face_recognition_using_opencv_keras_scikit-learn/tree/using-simple-convnet).
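As a rough sketch of the pipeline (illustrative only: the embedding files, the 128-dimensional embedding size and the helper below are assumptions, not this repo's actual API), Facenet maps each aligned face to an embedding vector, and an SVM is trained on those embeddings to predict identities:

```python
# Minimal sketch: train an SVM on precomputed Facenet embeddings.
# "embeddings.npy" (n_samples, 128) and "names.npy" (n_samples,) are
# hypothetical files standing in for this repo's actual data format.
import numpy as np
from sklearn.preprocessing import LabelEncoder
from sklearn.svm import SVC

embeddings = np.load("embeddings.npy")
names = np.load("names.npy")

encoder = LabelEncoder()
labels = encoder.fit_transform(names)

# A linear SVM with probability estimates, so low-confidence (unknown)
# faces can be rejected at recognition time.
clf = SVC(kernel="linear", probability=True)
clf.fit(embeddings, labels)

def identify(embedding, threshold=0.5):
    """Return the predicted name, or None if confidence is below threshold."""
    probs = clf.predict_proba([embedding])[0]
    best = probs.argmax()
    if probs[best] < threshold:
        return None
    return encoder.inverse_transform([best])[0]
```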
Requirements:
-------------
* Python >= 2.7.3 (NOT Python 3)
* virtualenv
* pip
Steps:
------
* Setup the virtual environment.
```virtualenv chowk_env```
* Activate it
```source chowk_env/bin/activate```
* Install all required libraries inside it
```pip install -r requirements.txt```
* Put your Kannel & RapidPro server details inside settings.py.example
and rename it
```
vim settings.py.example
mv settings.py.example settings.py
```
* Start celery (Preferably in a screen or a separate terminal so that this process doesn't die when you logout)
```celery worker -A tasks.celery --loglevel=DEBUG```
* Run the main app! (Preferably in a screen or a separate terminal so that this process doesn't die when you logout)
```python chowk.py```
Troubleshooting:
----------------
chowk produces 2 log files.
error.log -- for messages of ERROR and CRITICAL levels
debug.log -- for messages of DEBUG, INFO and WARNING levels
You can change this from inside chowk.py
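A minimal sketch of how such a level-split, two-file setup is typically wired with the standard `logging` module (the handler layout below is an assumption that mirrors the two log files above; the actual code in chowk.py may differ):

```python
import logging

class MaxLevelFilter(logging.Filter):
    """Pass only records strictly below `level` (Python 2.7 compatible)."""
    def __init__(self, level):
        logging.Filter.__init__(self)
        self.level = level

    def filter(self, record):
        return record.levelno < self.level

logger = logging.getLogger("chowk")
logger.setLevel(logging.DEBUG)

# DEBUG, INFO and WARNING records go to debug.log ...
debug_handler = logging.FileHandler("debug.log")
debug_handler.setLevel(logging.DEBUG)
debug_handler.addFilter(MaxLevelFilter(logging.ERROR))

# ... while ERROR and CRITICAL records go to error.log.
error_handler = logging.FileHandler("error.log")
error_handler.setLevel(logging.ERROR)

logger.addHandler(debug_handler)
logger.addHandler(error_handler)
```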
Still facing issues?
---------------------
File an issue here and I will try my best to help :)
import React, { useState, useEffect } from 'react'
import { useMutation } from '@apollo/react-hooks'
import { makeStyles } from '@material-ui/styles'
import PropTypes from 'prop-types'
import Box from '@material-ui/core/Box'
import Paper from '@material-ui/core/Paper'
import clsx from 'clsx'
import Typography from '@material-ui/core/Typography'
import TextField from '@material-ui/core/TextField'
import CircularProgress from '@material-ui/core/CircularProgress'
import Button from '@material-ui/core/Button'
import Alert from '@material-ui/lab/Alert'
import IconButton from '@material-ui/core/IconButton'
import CloseIcon from '@material-ui/icons/Close'
import Modal from '@material-ui/core/Modal'
import Backdrop from '@material-ui/core/Backdrop'
import Fade from '@material-ui/core/Fade'
import LockIcon from '@material-ui/icons/Lock'
import { useTranslation } from 'react-i18next'
import { CREDENTIALS_RECOVERY } from '../../gql'
const useStyles = makeStyles((theme) => ({
modal: {
display: 'flex',
alignItems: 'center',
justifyContent: 'center'
},
paper: {
backgroundColor: theme.palette.background.paper,
boxShadow: theme.shadows[5],
height: '80%',
width: 350,
outlineWidth: 0
},
alert: {
marginTop: theme.spacing(2),
marginBottom: theme.spacing(2)
},
textFieldWrapper: {
padding: theme.spacing(2, 0),
display: 'flex',
flexDirection: 'column',
height: 200,
justifyContent: 'space-evenly'
},
closeIcon: {
display: 'flex',
justifyContent: 'flex-end',
'& svg': {
fontSize: 25,
color: theme.palette.secondary.main
}
},
btnWrapper: {
display: 'flex'
},
loginBtn: {
display: 'flex',
alignItems: 'center',
marginTop: "16px"
},
labelOption: {
color: theme.palette.primary.main,
marginLeft: theme.spacing(3),
fontSize: 14,
cursor: "pointer"
},
bodyWrapper: {
height: '90%',
padding: theme.spacing(0, 2)
},
iconOption: {
color: 'rgba(0, 0, 0, 0.54)',
fontSize: 20
}
}))
const CredentialsRecovery = ({ overrideBoxClass, overrideLabelClass }) => {
const { t } = useTranslation('translations')
const [user, setUser] = useState({})
const [errorMessage, setErrorMessage] = useState(null)
const [success, setSuccess] = useState(false)
const classes = useStyles()
const [
credentialsRecovery,
{ loading, error, data: { credentials_recovery: response } = {} }
] = useMutation(CREDENTIALS_RECOVERY)
const [open, setOpen] = useState(false)
const handleOpen = () => {
setOpen(!open)
}
const handleSetField = (field, value) => {
setUser({ ...user, [field]: value })
}
const handleSubmit = () => {
setErrorMessage(null)
credentialsRecovery({
variables: {
...user
}
})
}
useEffect(() => {
if (error) {
setErrorMessage(error.message.replace('GraphQL error: ', ''))
}
}, [error])
useEffect(() => {
if (response) {
setUser({})
setSuccess(response.success)
}
}, [response])
return (
<>
<Box
className={clsx(classes.loginBtn, overrideBoxClass)}
onClick={handleOpen}
>
<LockIcon className={classes.iconOption} />
<Typography
variant="body1"
className={clsx(classes.labelOption, overrideLabelClass)}
>
{t('credentialsRecovery.credentialsRecovery')}
</Typography>
</Box>
<Modal
aria-labelledby="transition-modal-title"
aria-describedby="transition-modal-description"
className={classes.modal}
open={open}
onClose={handleOpen}
closeAfterTransition
BackdropComponent={Backdrop}
BackdropProps={{
timeout: 500
}}
>
<Fade in={open}>
<Paper className={classes.paper}>
<Box className={classes.closeIcon}>
<IconButton
aria-label="close"
color="inherit"
size="small"
onClick={handleOpen}
>
<CloseIcon fontSize="inherit" />
</IconButton>
</Box>
<Box className={classes.bodyWrapper}>
<Typography variant="h3">
{t('credentialsRecovery.credentialsRecovery')}
</Typography>
{errorMessage && (
<Alert
className={classes.alert}
severity="error"
action={
<IconButton
aria-label="close"
color="inherit"
size="small"
onClick={() => setErrorMessage(null)}
>
<CloseIcon fontSize="inherit" />
</IconButton>
}
>
{errorMessage}
</Alert>
)}
{success && (
<Alert
className={classes.alert}
severity="success"
action={
<IconButton
aria-label="close"
color="inherit"
size="small"
onClick={() => setSuccess(false)}
>
<CloseIcon fontSize="inherit" />
</IconButton>
}
>
{t('credentialsRecovery.checkYourEmail')}
</Alert>
)}
<form autoComplete="off">
<Box className={classes.textFieldWrapper}>
<TextField
id="account"
label={t('common.email')}
variant="outlined"
InputLabelProps={{
shrink: true
}}
value={user.email || ''}
onChange={(event) =>
handleSetField('email', event.target.value)
}
/>
</Box>
<Box className={classes.btnWrapper}>
<Button
disabled={!user.email || loading}
variant="contained"
color="primary"
onClick={handleSubmit}
>
{t('credentialsRecovery.recovery')}
</Button>
{loading && <CircularProgress />}
</Box>
</form>
</Box>
</Paper>
</Fade>
</Modal>
</>
)
}
CredentialsRecovery.propTypes = {
overrideBoxClass: PropTypes.any,
overrideLabelClass: PropTypes.any
}
export default CredentialsRecovery
|
!SLIDE subsection
# Docker
!SLIDE center

!SLIDE
# in 5 minutes
!SLIDE small
# containers for your programs
!SLIDE small
# to isolate from other programs
!SLIDE small
# to escape "dependency hell"
!SLIDE small
# *one* container for *one* program
!SLIDE
# **for one Unix process**
!SLIDE
# How it works
!SLIDE
# Paravirtualisation
## Share the kernel
!SLIDE
# Isolate everything
## Filesystem, users, procs, etc.
!SLIDE
# chroot on steroids
!SLIDE
# Not a VM
!SLIDE
# Lighter
!SLIDE
# Small images
!SLIDE
# Start in 2 sec
!SLIDE
# But *lighter*
!SLIDE
# Warning!
!SLIDE
# Ubuntu only
!SLIDE small
# So you may need...
!SLIDE
# **VM**
!SLIDE
# Vagrant + VirtualBox
## See docker install guide
!SLIDE
# Concepts
!SLIDE bullets incremental
# docker container
* running or stopped
* based on an image
* has an entry point: command to start
* exposes TCP ports
!SLIDE bullets incremental
# docker image
* like a filesystem
* immutable data
* inspired by git
!SLIDE bullets incremental
# docker repo
* to store images
* inspired by GitHub
!SLIDE center

!SLIDE center

!SLIDE
# Don't worry!
!SLIDE center

!SLIDE
# EOF
|
# Services
Use this folder to store your services, such as HTTP, GraphQL and WebSocket.
|
package walkmc.graphical.engines
import org.bukkit.inventory.*
import walkmc.*
import walkmc.graphical.*
/**
 * A toggle engine that toggles the filter state of a filter graphical.
*
* If the graphical owner of this engine is not a [FilterGraphical], nothing will be done.
*/
open class ToggleFilterEngine : ToggleEngine {
constructor(type: Materials, amount: Int = 1) : super(type, amount)
constructor(stack: ItemStack) : super(stack)
val graph get() = graphical as FilterGraphical<*>
override fun cycle(forward: Boolean) {
super.cycle(forward)
graph.filterEngine.notifyChange()
}
override fun handleToggled() {
graph.disableFilter()
}
override fun handleUntoggled() {
graph.enableFilter()
}
}
|
<?php
namespace Spydemon\CatalogProductImportCategoryByID\Exception;
use Exception;
use Magento\Framework\Exception\LocalizedException;
/**
* Class CategoryNotFoundException
*/
class CategoryNotFoundException extends LocalizedException
{
/**
* CategoryNotFoundException constructor.
*
* @param Exception|null $cause
* @param int $code
*/
public function __construct(Exception $cause = null, $code = 0)
{
parent::__construct(__('Category not found.'), $cause, $code);
}
}
|
#!/bin/sh
#
# CIP Core tiny profile
# A helper script to easily run images on QEMU targets
#
# Copyright (c) 2019 TOSHIBA Corp.
#
# SPDX-License-Identifier: MIT
#
usage() {
echo "Usage: ${0} <MACHINE> [QEMU_OPTS]"
exit 1
}
MACHINE="${1}"
if [ -z "${MACHINE}" ]; then
usage
fi
DEPLOY_DIR=build/tmp/deploy/images/${MACHINE}
case "${MACHINE}" in
"qemux86-64")
QEMU="qemu-system-x86_64"
KERNEL=${DEPLOY_DIR}/bzImage
INITRD=${DEPLOY_DIR}/core-image-minimal-${MACHINE}.cpio.gz
APPEND="console=ttyS0"
QEMU_OPTS="-M q35"
;;
*)
echo "Invalid MACHINE"
exit 1
;;
esac
if [ -z "${DISPLAY}" ]; then
QEMU_OPTS="${QEMU_OPTS} -nographic"
fi
shift
${QEMU} \
-m 1G \
-kernel ${KERNEL} \
-initrd ${INITRD} \
-append "${APPEND}" \
  ${QEMU_OPTS} "$@"
|
class Task {
Task(String content) {
this.content = content;
isDone = false;
}
bool isDone;
String content;
}
|
#!/bin/bash
set -e
BASEDIR=$PWD
REBUILD=false
UPLOAD=false
while [[ $# -gt 0 ]]
do
key="$1"
case $key in
-r|--rebuild)
REBUILD=true
;;
-u|--upload)
UPLOAD=true
;;
*)
echo "Unknown command line argument $1" # unknown option
exit 1
;;
esac
shift # past argument or value
done
if $REBUILD; then
cd papers/ITP2019
./make.sh
cd talk
make
cd $BASEDIR
cd papers/IJCAR2020
make
cd $BASEDIR
cd papers/2019_Rennes_Talk
make
cd $BASEDIR
cd papers/2020_Enschede_Talk
make
cd $BASEDIR
cd papers/2021_Enschede_Talk
make
cd $BASEDIR
cd thys
isabelle build -v -D .
# isabelle build -v -d '$AFP' -D .
cd $BASEDIR
./mkdist.sh
fi
rm -rf html
mkdir -p html
ISABELLE_BROWSER_INFO=$(isabelle getenv ISABELLE_BROWSER_INFO | sed -re 's/.*=//')
cp -a $ISABELLE_BROWSER_INFO/Unsorted/Isabelle_LLVM html/
cp index.md html/
cp dist.tgz html/
cp dist-2020.tgz html/
cp dist-v1.1.tgz html/
cp LICENSE html/
cp etc/logo/logo_200.png html/
cp papers/IJCAR2020/main.pdf html/paper_IJCAR2020.pdf
cp papers/IJCAR2020/talk/pres.pdf html/slides_IJCAR2020.pdf
cp papers/ITP2019/main.pdf html/paper_ITP2019.pdf
cp papers/ITP2019/talk/pres.pdf html/slides_ITP2019.pdf
cp papers/2019_Rennes_Talk/pres.pdf html/rennes2019.pdf
cp papers/2020_Enschede_Talk/pres.pdf html/enschede2020.pdf
cp papers/2021_Enschede_Talk/pres.pdf html/enschede2021.pdf
cp papers/2021_RF_Pres/pres.pdf html/RF_pres.pdf
pandoc -V pagetitle="Isabelle LLVM" -s index.md > html/index.html
if $UPLOAD; then
LOCAL_DEST=~/devel/www21-lammich/isabelle_llvm
rm -rf $LOCAL_DEST
cp -a html $LOCAL_DEST
cd $LOCAL_DEST
echo ADD
hg add .
echo COMMIT
hg commit -m "Automatic update of Isabelle-LLVM" .
echo PUSH
hg push
echo DONE
cd $BASEDIR
fi
|
const baseURL = process.env.REACT_APP_API_URL;
export const urlChallenges = `${baseURL}/challenges`; |
using System.Collections.Generic;
using System.Linq.Expressions;
using PrtgAPI.Parameters;
namespace PrtgAPI.Linq.Expressions
{
abstract class QueryHelper<TObject>
{
public abstract Expression FlagKeep(Expression expr);
public abstract List<List<SearchFilter>> AdjustFilters(List<SearchFilter> filters);
public abstract void FixupParameters(IParameters parameters);
public abstract IEqualityComparer<TObject> GetComparer();
public abstract bool CanLimitProperties(List<List<SearchFilter>> filterSets);
}
}
|
import { STORAGE_PREFIX } from "@/constants";
import { DataState } from "@/types/enums";
import {
loadFromStorage,
removeFromStorage,
saveToStorage,
} from "@/utils/storage";
import Subscribable from "./Subscribable";
const DEFAULT_VERSION = 1;
interface IStoreEntry<T> {
state: DataState;
version?: number;
fetchedAt?: Date;
data?: T;
}
class DataStore<T> extends Subscribable<IStoreEntry<T>> {
private _storageKey: string;
private _maxStorageAge: number;
private _version: number;
private _entries: Map<string, IStoreEntry<T>> = new Map();
private _localStorageLoaded = false;
constructor(
storageKey: string,
maxCacheAge: number,
version = DEFAULT_VERSION
) {
super();
this._storageKey = storageKey;
this._maxStorageAge = maxCacheAge;
this._version = version;
}
private loadLocalStorage() {
this._localStorageLoaded = true;
const existingData = loadFromStorage<Map<string, IStoreEntry<T>>>(
STORAGE_PREFIX + this._storageKey,
Map
);
if (existingData === null) return;
const now = new Date();
existingData.forEach((entry: IStoreEntry<T>, key: string) => {
if (
!entry.fetchedAt ||
this._version > (entry.version ?? DEFAULT_VERSION)
)
return;
const age = (+now - +new Date(entry.fetchedAt)) / 1000 / 60;
entry.state =
age > this._maxStorageAge ? DataState.Waiting : DataState.Fetched;
this._entries.set(key, entry);
});
}
private saveLocalStorage() {
    const storage: Record<string, IStoreEntry<T>> = {};
this._entries.forEach((value: IStoreEntry<T>, key: string) => {
if (value.data && Object.keys(value.data).length !== 0) {
storage[key] = { ...value, version: this._version };
}
});
if (Object.keys(storage).length !== 0) {
saveToStorage(STORAGE_PREFIX + this._storageKey, storage);
} else {
removeFromStorage(STORAGE_PREFIX + this._storageKey);
}
}
public setState(key: string, state: DataState): IStoreEntry<T> {
const entry = this.get(key) || {};
entry.state = state;
return this.set(key, entry);
}
public setData(key: string, data: T): IStoreEntry<T> {
const entry = this.get(key) || {};
entry.state = DataState.Fetched;
entry.fetchedAt = new Date();
entry.data = data;
return this.set(key, entry);
}
private set(key: string, entry: IStoreEntry<T>): IStoreEntry<T> {
this._entries.set(key, entry);
this.invoke(key, entry);
this.saveLocalStorage();
return entry;
}
public get(key: string): IStoreEntry<T> {
if (!this._localStorageLoaded) {
this.loadLocalStorage();
}
if (this._entries.has(key)) {
return this._entries.get(key) as IStoreEntry<T>;
}
const entry = {
state: DataState.Waiting,
};
this._entries.set(key, entry);
return entry;
}
public isWaiting(key: string): boolean {
const entry = this.get(key);
if (entry.state === DataState.Waiting) {
entry.state = DataState.Fetching;
return true;
}
return false;
}
}
export default DataStore;
|
import { CollectionViewer, DataSource } from "@angular/cdk/collections";
import { Team } from "../models/team.model";
import { catchError, finalize } from "rxjs/operators";
import { of, Observable, BehaviorSubject } from "rxjs";
import { TeamService } from "./team.service";
import OrderByDirection = firebase.firestore.OrderByDirection;
export class TeamsDataSource implements DataSource<Team> {
private teamsSubject = new BehaviorSubject<Team[]>([]);
private loadingSubject = new BehaviorSubject<boolean>(false);
public loading$ = this.loadingSubject.asObservable();
constructor(private teamService: TeamService) {}
loadTeams(
filter: string,
sortField: string,
sortOrder: OrderByDirection,
pageSize: number
) {
this.loadingSubject.next(true);
this.teamService
.findTeams(filter, sortField, sortOrder, pageSize)
.pipe(
catchError(() => of([])),
finalize(() => this.loadingSubject.next(false))
)
.subscribe(teams => {
this.teamsSubject.next(teams);
this.loadingSubject.next(false);
});
}
connect(collectionViewer: CollectionViewer): Observable<Team[]> {
// console.log("Connecting team data source");
return this.teamsSubject.asObservable();
}
disconnect(collectionViewer: CollectionViewer): void {
this.teamsSubject.complete();
this.loadingSubject.complete();
}
}
|
package com.z80h3x.kezd_kov.ui.add_char
import com.z80h3x.kezd_kov.data.generic.BaseCharacter
sealed class AddCharViewState
object Form : AddCharViewState()
object Loading : AddCharViewState()
object CharacterFailed : AddCharViewState()
data class AddCharReady(val characterId: Long) : AddCharViewState()
data class MonsterNamesReady(val monsterNames: MutableList<String>) : AddCharViewState()
data class MonsterReady(val character: BaseCharacter) : AddCharViewState()
data class CloudNamesReady(val cloudNames: MutableList<String>) : AddCharViewState()
data class CloudCharReady(val character: BaseCharacter) : AddCharViewState()
|
require 'logger'
module Advansible
# class Logger
# def initialize(logfile = $stdout)
# @instance = ::Logger.new(logfile)
# @instance.progname = 'advansible'
# # :nocov:
# @instance.formatter = proc do |severity, datetime, progname, msg|
# "#{severity} [#{datetime}]: #{msg}\n"
# end
# # :nocov:
# @instance
# end
# end
# def self.logger
# @logger ||= Logger.new($stdout)
# end
class << self
def logger
return @logger if @logger
@logger = Logger.new $stdout
@logger.level = Logger::WARN
@logger.progname = 'advansible'
# :nocov:
@logger.formatter = proc do |severity, datetime, progname, msg|
"#{severity} [#{datetime}]: #{msg}\n"
end
# :nocov:
@logger
end
end
end |
A Simple Solution is to keep an array of size k. The idea is to keep the array sorted so that the k’th largest element can be found in O(1) time (we just need to return the first element of the array if it is sorted in increasing order)
How to process a new element of the stream?
For every new element in the stream, check if it is smaller than the current k’th largest element. If yes, ignore it. If no, remove the smallest element from the array and insert the new element in sorted order. Time complexity of processing a new element is O(k).
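As a concrete illustration, here is a minimal Python sketch of this array-based approach (the helper name and the use of the standard bisect module are our choices, not part of the original text):

import bisect

def make_kth_largest_tracker(k):
    window = []  # kept sorted ascending; window[0] is the k'th largest once full
    def process(x):
        if len(window) < k:
            bisect.insort(window, x)   # O(k) insert while the window fills up
        elif x > window[0]:            # beats the current k'th largest
            window.pop(0)              # drop the smallest of the k: O(k)
            bisect.insort(window, x)
        return window[0] if len(window) == k else None
    return process

process = make_kth_largest_tracker(3)
for v in [10, 20, 11, 70, 50, 40]:
    print(process(v))  # None, None, 10, 11, 20, 40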
A Better Solution is to use a Self-Balancing Binary Search Tree of size k. The k’th largest element can be found in O(Log k) time.
How to process a new element of the stream?
For every new element in the stream, check if it is smaller than the current k’th largest element. If yes, ignore it. If no, remove the smallest element from the tree and insert the new element. Time complexity of processing a new element is O(Log k).
An Efficient Solution is to use a Min Heap of size k to store the k largest elements of the stream. The k’th largest element is always at the root and can be found in O(1) time.
How to process a new element of the stream?
Compare the new element with the root of the heap. If the new element is smaller, ignore it. Otherwise replace the root with the new element and call heapify on the root of the modified heap. Time complexity of processing a new element is O(Log k); reading the k’th largest off the root remains O(1).
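A corresponding sketch of the min-heap approach, using Python's standard heapq module (the generator wrapper is ours, added for illustration):

import heapq

def kth_largest_stream(k, stream):
    heap = []  # min-heap holding the k largest elements seen so far
    for x in stream:
        if len(heap) < k:
            heapq.heappush(heap, x)        # O(log k)
        elif x > heap[0]:                  # beats the current k'th largest
            heapq.heapreplace(heap, x)     # pop root, push new element: O(log k)
        if len(heap) == k:
            yield heap[0]                  # the root is the k'th largest: O(1)

for kth in kth_largest_stream(3, [10, 20, 11, 70, 50, 40]):
    print(kth)  # 10, 11, 20, 40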
|
package prometheus
type kialiMetric struct {
name string
istioName string
isHisto bool
useErrorLabels bool
}
var (
kialiMetrics = []kialiMetric{
kialiMetric{
name: "request_count",
istioName: "istio_request_count",
isHisto: false},
kialiMetric{
name: "request_error_count",
istioName: "istio_request_count",
isHisto: false,
useErrorLabels: true},
kialiMetric{
name: "request_size",
istioName: "istio_request_size",
isHisto: true},
kialiMetric{
name: "request_duration",
istioName: "istio_request_duration",
isHisto: true},
kialiMetric{
name: "response_size",
istioName: "istio_response_size",
isHisto: true}}
)
func (in *kialiMetric) labelsToUse(labelsIn, labelsOut, labelsErrorIn, labelsErrorOut string) (string, string) {
if in.useErrorLabels {
return labelsErrorIn, labelsErrorOut
}
return labelsIn, labelsOut
}
|
package botkop.numsca
import botkop.{numsca => ns}
import org.scalatest.{FlatSpec, Matchers}
class NumscaSpec extends FlatSpec with Matchers {
val ta: Tensor = ns.arange(10)
val tb: Tensor = ns.reshape(ns.arange(9), 3, 3)
val tc: Tensor = ns.reshape(ns.arange(2 * 3 * 4), 2, 3, 4)
"A Tensor" should "transpose over multiple dimensions" in {
val x = ns.arange(6).reshape(1, 2, 3)
val y = ns.transpose(x, 1, 0, 2)
val z = ns.reshape(x, 2, 1, 3)
assert(ns.arrayEqual(y, z))
}
it should "retrieve the correct elements" in {
// todo: implicitly convert tensor to double when only 1 element?
assert(ta(1).squeeze() == 1)
assert(tb(1, 0).squeeze() == 3)
assert(tc(1, 0, 2).squeeze() == 14)
val i = List(1, 0, 1)
assert(tc(i: _*).squeeze() == 13)
}
it should "change array values in place" in {
val t = ta.copy()
t(3) := -5
assert(t.data sameElements Array(0, 1, 2, -5, 4, 5, 6, 7, 8, 9))
t(0) += 7
assert(t.data sameElements Array(7, 1, 2, -5, 4, 5, 6, 7, 8, 9))
val t2 = tb.copy()
t2(2, 1) := -7
t2(1, 2) := -3
assert(
arrayEqual(t2,
Tensor(0.00f, 1.00f, 2.00f, 3.00f, 4.00f, -3.00f, 6.00f, -7.00f,
8.00f).reshape(3, 3)))
}
}
|
package com.developi.wink.template.api;
import javax.ws.rs.GET;
import javax.ws.rs.Path;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response;
import com.ibm.commons.util.io.json.JsonJavaObject;
import com.ibm.domino.osgi.core.context.ContextInfo;
import lotus.domino.NotesException;
import lotus.domino.Session;
@Path("/test")
public class TestResource {
@GET
public Response test(){
JsonJavaObject resp = new JsonJavaObject();
try {
resp.put("message", "Hello " + getUserSession().getEffectiveUserName());
return Response.ok().type(MediaType.APPLICATION_JSON).entity(resp.toString()).build();
} catch (NotesException e) {
e.printStackTrace();
return Response.serverError().build();
}
}
private Session getUserSession() {
return ContextInfo.getUserSession();
}
}
|
alias antlr4='java -Xmx500M -cp "/usr/local/lib/antlr-4.7.1-complete.jar:$CLASSPATH" org.antlr.v4.Tool'
antlr4 -Dlanguage=JavaScript src/Hplsql.g4 -visitor
|
#!/usr/bin/env ruby
require_relative "../challenge_utils"
include ChallengeUtils
## SOLUTION BEGINS
def solve(input)
duplicate_counts = Hash.new(0)
  input.each do |id|
id.chars.sort_by(&:ord).group_by {|c| c}.values.map(&:size).uniq.select {|l| l.between?(2,3)}.each do |l|
duplicate_counts[l] += 1
end
end
duplicate_counts.values.reduce(&:*)
end
## SOLUTION ENDS
# test scenarios
test(["abcdef", "bababc", "abbcde", "abcccd", "aabcdd", "abcdee", "ababab"], 12)
puts "-"*50, ""
# solve for reals
puts "Solution:", solve(read_input("input"))
|
require 'spec_helper'
describe ScopedSerializer::Scope do
let(:scope) { ScopedSerializer::Scope.new(:default) }
describe '.from_hash' do
it 'should initialize a scope from hash' do
scope = ScopedSerializer::Scope.from_hash({
:attributes => [:title, :created_at],
:associations => [:user, :account]
})
scope.attributes.should == [:title, :created_at]
scope.associations.should == { :user => {}, :account => {}}
end
end
describe '#initialize' do
it 'should initialize with defaults' do
scope.root :test
scope.attributes :id, :name
scope.association :blog_posts
new_scope = ScopedSerializer::Scope.new(:name, scope)
new_scope.options.should == { :root => :test }
new_scope.attributes.should == [:id, :name]
new_scope.associations.should == { :blog_posts => {} }
end
it 'should initialize with block' do
scope = ScopedSerializer::Scope.new(:default) do
attributes :id, :name
end
scope.attributes.should == [:id, :name]
end
end
describe 'options' do
it 'should set root' do
scope.root :test
scope.options[:root].should == :test
end
end
describe '#attributes' do
it 'should store attributes' do
scope.attributes :id, :name
scope.attributes.should == [:id, :name]
end
it 'should keep attributes uniq' do
scope.attributes :id, :name, :name
scope.attributes :name
scope.attributes.should == [:id, :name]
end
end
describe '#association' do
it 'should store association by hash' do
scope.association :blog_posts => :user, :serializer => 'test'
scope.associations.should == {
:blog_posts => {
:include => :user,
:serializer => 'test',
:preload => true,
}
}
end
it 'should store association by single argument' do
scope.association :blog_posts
scope.associations.should == {
:blog_posts => {}
}
end
it 'should store association by single argument and separate options' do
scope.association :blog_posts, :serializer => 'test'
scope.associations.should == {
:blog_posts => {
:serializer => 'test'
}
}
end
it 'should support association types as methods' do
scope.should respond_to :belongs_to
scope.should respond_to :has_one
scope.should respond_to :has_many
end
end
end
|
using System;
using System.Collections.Generic;
using System.Linq;
using System.Web;
using TanMiniToolSet.Common;
///<summary>
///Project name: Diana lightweight development framework
///Version: 0.0.1
///Development team members: 胡凯雨,张梦丽,艾美珍,易传佳,陈祚松,康文洋,张婷婷,王露,周庆,夏萍萍,陈兰兰
///Module / code page description: third-party login factory class
///Last modified: 2018/1/22
/// </summary>
namespace common.ThreeLogin
{
public class ThreeLoginFactory
{
private string appKey { get; set; }
private string appSecret { get; set; }
private BlakTypeEnum Type { get; set; }
public ThreeLoginFactory(BlakTypeEnum Type)
{
this.Type = Type;
            this.appKey = ConfigHelper.GetConfigString(Type + "_appkey");
            this.appSecret = ConfigHelper.GetConfigString(Type + "_appSecret");
}
public IThreeLoginHelp GetLoginHelp()
{
switch (Type)
{
case BlakTypeEnum.BAIDU: return new HelpBaidu(appKey, appSecret);
case BlakTypeEnum.WEIBO: return new HelpWeibo(appKey, appSecret);
                case BlakTypeEnum.QQ: return new HelpBaidu(appKey, appSecret); // to be completed
                case BlakTypeEnum.WEIXIN: return new HelpBaidu(appKey, appSecret); // to be completed
default: return new HelpBaidu(appKey, appSecret);
}
}
}
} |
package com.genymobile.scrcpy;
public final class DisplayInfo {
private final Size size;
private final int rotation;
private final int type;
private String name;
private final int ownerUid;
private String ownerPackageName;
public DisplayInfo(Size size, int rotation, int type, String name, int ownerUid, String ownerPackageName) {
this.size = size;
this.rotation = rotation;
this.type = type;
this.name = name;
this.ownerUid = ownerUid;
this.ownerPackageName = ownerPackageName;
}
public Size getSize() {
return size;
}
public int getRotation() {
return rotation;
}
public int getType() {
return type;
}
public String getName() {
return name;
}
public String getOwnerPackageName() {
return ownerPackageName;
}
public int getOwnerUid() {
return ownerUid;
}
}
|
from alento_bot.storage_module.formats.save_format import SaveLoadConfig
from alento_bot.storage_module.formats.config_format import ConfigData
import logging
logger = logging.getLogger("main_bot")
class BaseCache(SaveLoadConfig, path="you_shouldnt_see_this_cache.yaml"):
def __init__(self, config: ConfigData):
super().__init__()
self._data_path = f"{config.data_folder_path}/cache/{self._name}.yaml"
self._from_disk: bool = False
@classmethod
def __init_subclass__(cls, name: str = "default_cache_name", save_on_exit: bool = True, **kwargs):
super().__init_subclass__(path=name)
cls._name = name
cls._save_on_exit: bool = save_on_exit
def save(self, exiting: bool = False):
if not exiting or (exiting and self._save_on_exit):
logger.debug(f"Saving cache data for {self._data_path}...")
super().save()
else:
logger.debug(f"Cache {self._name} disabled saving on exit, ignoring.")
def cache_transformer(name: str = "default_cache_name", save_on_exit: bool = True):
def decorator(cls):
class CacheWrapperClass(cls, BaseCache, name=name, save_on_exit=save_on_exit):
def __init__(self, config: ConfigData, **kwargs):
BaseCache.__init__(self, config)
cls.__init__(self, **kwargs)
return CacheWrapperClass
return decorator
|
/*
Navicat MySQL Data Transfer
Source Server : mysql
Source Server Version : 50720
Source Host : localhost:3306
Source Database : school
Target Server Type : MYSQL
Target Server Version : 50720
File Encoding : 65001
Date: 2019-06-13 18:45:13
*/
-- Create the database and switch to it
CREATE DATABASE `school` CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci;
USE school; -- use the database
SET FOREIGN_KEY_CHECKS=0;
-- Create tables and insert data
-- ----------------------------
-- Table structure for `class`
-- ----------------------------
DROP TABLE IF EXISTS `class`;
CREATE TABLE `class` (
`cid` int(11) NOT NULL AUTO_INCREMENT,
`caption` varchar(32) NOT NULL,
PRIMARY KEY (`cid`)
) ENGINE=InnoDB AUTO_INCREMENT=5 DEFAULT CHARSET=utf8;
-- ----------------------------
-- Records of class
-- ----------------------------
INSERT INTO `class` VALUES ('1', '三年二班');
INSERT INTO `class` VALUES ('2', '三年三班');
INSERT INTO `class` VALUES ('3', '一年二班');
INSERT INTO `class` VALUES ('4', '二年九班');
-- ----------------------------
-- Table structure for `course`
-- ----------------------------
DROP TABLE IF EXISTS `course`;
CREATE TABLE `course` (
`cid` int(11) NOT NULL AUTO_INCREMENT,
`cname` varchar(32) NOT NULL,
`teacher_id` int(11) NOT NULL,
PRIMARY KEY (`cid`),
KEY `fk_course_teacher` (`teacher_id`),
CONSTRAINT `fk_course_teacher` FOREIGN KEY (`teacher_id`) REFERENCES `teacher` (`tid`)
) ENGINE=InnoDB AUTO_INCREMENT=5 DEFAULT CHARSET=utf8;
-- ----------------------------
-- Records of course
-- ----------------------------
INSERT INTO `course` VALUES ('1', '生物', '1');
INSERT INTO `course` VALUES ('2', '物理', '2');
INSERT INTO `course` VALUES ('3', '体育', '3');
INSERT INTO `course` VALUES ('4', '美术', '2');
-- ----------------------------
-- Table structure for `score`
-- ----------------------------
DROP TABLE IF EXISTS `score`;
CREATE TABLE `score` (
`sid` int(11) NOT NULL AUTO_INCREMENT,
`student_id` int(11) NOT NULL,
`course_id` int(11) NOT NULL,
`num` int(11) NOT NULL,
PRIMARY KEY (`sid`),
KEY `fk_score_student` (`student_id`),
KEY `fk_score_course` (`course_id`),
CONSTRAINT `fk_score_course` FOREIGN KEY (`course_id`) REFERENCES `course` (`cid`),
CONSTRAINT `fk_score_student` FOREIGN KEY (`student_id`) REFERENCES `student` (`sid`)
) ENGINE=InnoDB AUTO_INCREMENT=53 DEFAULT CHARSET=utf8;
-- ----------------------------
-- Records of score
-- ----------------------------
INSERT INTO `score` VALUES ('1', '1', '1', '10');
INSERT INTO `score` VALUES ('2', '1', '2', '9');
INSERT INTO `score` VALUES ('5', '1', '4', '66');
INSERT INTO `score` VALUES ('6', '2', '1', '8');
INSERT INTO `score` VALUES ('8', '2', '3', '68');
INSERT INTO `score` VALUES ('9', '2', '4', '99');
INSERT INTO `score` VALUES ('10', '3', '1', '77');
INSERT INTO `score` VALUES ('11', '3', '2', '66');
INSERT INTO `score` VALUES ('12', '3', '3', '87');
INSERT INTO `score` VALUES ('13', '3', '4', '99');
INSERT INTO `score` VALUES ('14', '4', '1', '79');
INSERT INTO `score` VALUES ('15', '4', '2', '11');
INSERT INTO `score` VALUES ('16', '4', '3', '67');
INSERT INTO `score` VALUES ('17', '4', '4', '100');
INSERT INTO `score` VALUES ('18', '5', '1', '79');
INSERT INTO `score` VALUES ('19', '5', '2', '11');
INSERT INTO `score` VALUES ('20', '5', '3', '67');
INSERT INTO `score` VALUES ('21', '5', '4', '100');
INSERT INTO `score` VALUES ('22', '6', '1', '9');
INSERT INTO `score` VALUES ('23', '6', '2', '100');
INSERT INTO `score` VALUES ('24', '6', '3', '67');
INSERT INTO `score` VALUES ('25', '6', '4', '100');
INSERT INTO `score` VALUES ('26', '7', '1', '9');
INSERT INTO `score` VALUES ('27', '7', '2', '100');
INSERT INTO `score` VALUES ('28', '7', '3', '67');
INSERT INTO `score` VALUES ('29', '7', '4', '88');
INSERT INTO `score` VALUES ('30', '8', '1', '9');
INSERT INTO `score` VALUES ('31', '8', '2', '100');
INSERT INTO `score` VALUES ('32', '8', '3', '67');
INSERT INTO `score` VALUES ('33', '8', '4', '88');
INSERT INTO `score` VALUES ('34', '9', '1', '91');
INSERT INTO `score` VALUES ('35', '9', '2', '88');
INSERT INTO `score` VALUES ('36', '9', '3', '67');
INSERT INTO `score` VALUES ('37', '9', '4', '22');
INSERT INTO `score` VALUES ('38', '10', '1', '90');
INSERT INTO `score` VALUES ('39', '10', '2', '77');
INSERT INTO `score` VALUES ('40', '10', '3', '43');
INSERT INTO `score` VALUES ('41', '10', '4', '87');
INSERT INTO `score` VALUES ('42', '11', '1', '90');
INSERT INTO `score` VALUES ('43', '11', '2', '77');
INSERT INTO `score` VALUES ('44', '11', '3', '43');
INSERT INTO `score` VALUES ('45', '11', '4', '87');
INSERT INTO `score` VALUES ('46', '12', '1', '90');
INSERT INTO `score` VALUES ('47', '12', '2', '77');
INSERT INTO `score` VALUES ('48', '12', '3', '43');
INSERT INTO `score` VALUES ('49', '12', '4', '87');
INSERT INTO `score` VALUES ('52', '13', '3', '87');
-- ----------------------------
-- Table structure for `student`
-- ----------------------------
DROP TABLE IF EXISTS `student`;
CREATE TABLE `student` (
`sid` int(11) NOT NULL AUTO_INCREMENT,
`gender` char(1) NOT NULL,
`class_id` int(11) NOT NULL,
`sname` varchar(32) NOT NULL,
PRIMARY KEY (`sid`),
KEY `fk_class` (`class_id`),
CONSTRAINT `fk_class` FOREIGN KEY (`class_id`) REFERENCES `class` (`cid`)
) ENGINE=InnoDB AUTO_INCREMENT=17 DEFAULT CHARSET=utf8;
-- ----------------------------
-- Records of student
-- ----------------------------
INSERT INTO `student` VALUES ('1', '男', '1', '理解');
INSERT INTO `student` VALUES ('2', '女', '1', '钢蛋');
INSERT INTO `student` VALUES ('3', '男', '1', '张三');
INSERT INTO `student` VALUES ('4', '男', '1', '张一');
INSERT INTO `student` VALUES ('5', '女', '1', '张二');
INSERT INTO `student` VALUES ('6', '男', '1', '张四');
INSERT INTO `student` VALUES ('7', '女', '2', '铁锤');
INSERT INTO `student` VALUES ('8', '男', '2', '李三');
INSERT INTO `student` VALUES ('9', '男', '2', '李一');
INSERT INTO `student` VALUES ('10', '女', '2', '李二');
INSERT INTO `student` VALUES ('11', '男', '2', '李四');
INSERT INTO `student` VALUES ('12', '女', '3', '如花');
INSERT INTO `student` VALUES ('13', '男', '3', '刘三');
INSERT INTO `student` VALUES ('14', '男', '3', '刘一');
INSERT INTO `student` VALUES ('15', '女', '3', '刘二');
INSERT INTO `student` VALUES ('16', '男', '3', '刘四');
-- ----------------------------
-- Table structure for `teacher`
-- ----------------------------
DROP TABLE IF EXISTS `teacher`;
CREATE TABLE `teacher` (
`tid` int(11) NOT NULL AUTO_INCREMENT,
`tname` varchar(32) NOT NULL,
PRIMARY KEY (`tid`)
) ENGINE=InnoDB AUTO_INCREMENT=6 DEFAULT CHARSET=utf8;
-- ----------------------------
-- Records of teacher
-- ----------------------------
INSERT INTO `teacher` VALUES ('1', '张磊老师');
INSERT INTO `teacher` VALUES ('2', '李平老师');
INSERT INTO `teacher` VALUES ('3', '刘海燕老师');
INSERT INTO `teacher` VALUES ('4', '朱云海老师');
INSERT INTO `teacher` VALUES ('5', '李杰老师');
|
import { axios } from "../helpers/auth";
import { BASE_URL } from "./../constants/Config";
const config = {
headers: { "Content-Type": "multipart/form-data" },
};
// CREATE AN INSTANCE OF AXIOS
const axiosInstance = axios.create({
baseURL: BASE_URL,
timeout: 100000,
});
axiosInstance.defaults.headers.common = axios.defaults.headers.common;
const postDataWithParams = async (url, data, params) => {
try {
const result = await axiosInstance.post(url, data, { params: params });
return result;
} catch (e) {
throw e;
}
};
const getDataByID = async (url, id) => {
try {
const result = await axiosInstance.get(`${url}/${id}`);
return result;
} catch (e) {
throw e;
}
};
const getDataByParams = async (url, params) => {
try {
const result = await axiosInstance.get(url, { params: params });
return result;
} catch (e) {
throw e;
}
};
const getTakenData = async (url) => {
try {
const result = await axiosInstance.get(url);
return result;
} catch (e) {
throw e;
}
};
const postDataMultipart = async (url, data) => {
try {
const result = await axiosInstance.post(url, data, config);
return result;
} catch (e) {
throw e;
}
};
const postData = async (url, data) => {
try {
const result = await axiosInstance.post(url, data);
return result;
} catch (e) {
throw e;
}
};
const deleteById = async (url, id) => {
try {
const result = await axiosInstance.delete(`${url}/${id}`);
return result;
} catch (e) {
throw e;
}
};
const deleteByUrl = async (url) => {
try {
const result = await axiosInstance.delete(url);
return result;
} catch (e) {
throw e;
}
};
const putData = async (url, id, data) => {
try {
const result = await axiosInstance.put(`${url}/${id}`, data);
return result;
} catch (e) {
throw e;
}
};
const putDataWithUrl = async (url, data) => {
try {
const result = await axiosInstance.post(url, data);
return result;
} catch (e) {
throw e;
}
};
const putDataUrl = async (url, data) => {
try {
const result = await axiosInstance.put(url, data);
return result;
} catch (e) {
throw e;
}
};
const putDataWithParams = async (url, id, data, params) => {
try {
const result = await axiosInstance.put(`${url}/${id}`, data, {
params: params,
});
return result;
} catch (e) {
throw e;
}
};
export {
axiosInstance,
postDataWithParams,
getDataByID,
getTakenData,
postDataMultipart,
postData,
deleteById,
putData,
putDataWithUrl,
putDataUrl,
deleteByUrl,
putDataWithParams,
getDataByParams,
};
|
Ext.define('VIV.view.admin.roles.RolesGrid', {
extend: 'Ext.grid.Panel',
xtype: 'roles-grid',
width: '100%',
border: false,
autoScroll: true,
features: [{
        groupHeaderTpl: 'Module: {name}',
ftype: 'groupingsummary',
collapsible: false
}],
initComponent: function() {
        var me = this; // Component scope.
// Store
me.store = Ext.create('VIV.store.admin.RolesStore');
        // Column model
me.columns = [{
xtype : 'rownumberer',
text : 'No',
width : 40,
align : 'center'
}, {
text: 'Id',
dataIndex: 'id',
width: 35,
hidden: true
}, {
text: 'Roles',
dataIndex: 'role',
flex: 1
}];
        // Load our configuration and pass it to the parent component.
me.callParent(arguments);
}
}); |
import React from 'react';
interface ListviewProps {
items: JSX.Element[];
}
interface ListviewState {
//
}
export class Listview extends React.Component<ListviewProps, ListviewState> {
public render() {
if (this.props.items.length < 1) {
            return <div>There are no items!</div>;
} else {
return <ul className="list-view">
{this.props.items.map(function (item, itemIndex) {
return <li key={itemIndex}>{item}</li>;
})}
</ul>;
}
}
}
|
#include <stdio.h>
#include "esp_system.h"
#include "esp_wifi.h"
#include "freertos/FreeRTOS.h"
#include "freertos/event_groups.h"
#include "freertos/task.h"
#include "freertos/timers.h"
#include "mdf_common.h"
#include "unity.h"
#define WIFI_SSID CONFIG_WIFI_SSID
#define WIFI_PASSWORD CONFIG_WIFI_PSWD
static const char TAG[] = "UNIT_TEST";
static EventGroupHandle_t g_user_event;
#define NETWORK_CONNECTED BIT0
static void event_handler(void *arg, esp_event_base_t event_base, int32_t event_id, void *event_data)
{
if (event_base == WIFI_EVENT) {
switch (event_id) {
case WIFI_EVENT_STA_START:
MDF_LOGI("wifi sta mode is running");
MDF_ERROR_ASSERT(esp_wifi_connect());
break;
case WIFI_EVENT_STA_STOP:
MDF_LOGI("wifi sta mode is stoped");
break;
default:
MDF_LOGD("WIFI_EVENT (%d)", event_id);
break;
}
} else if (event_base == IP_EVENT) {
switch (event_id) {
case IP_EVENT_STA_GOT_IP: {
ip_event_got_ip_t *event = (ip_event_got_ip_t *)event_data;
MDF_LOGI("wifi sta got ip");
MDF_LOGI("net address: " IPSTR, IP2STR(&(event->ip_info.ip)));
MDF_LOGI("net mask: " IPSTR, IP2STR(&(event->ip_info.netmask)));
MDF_LOGI("net gateway: " IPSTR, IP2STR(&(event->ip_info.gw)));
xEventGroupSetBits(g_user_event, NETWORK_CONNECTED);
break;
}
case IP_EVENT_STA_LOST_IP:
MDF_LOGI("wifi sta lost ip");
xEventGroupClearBits(g_user_event, NETWORK_CONNECTED);
break;
default:
MDF_LOGD("IP_EVENT (%d)", event_id);
break;
}
} else {
MDF_LOGE("Unsupportted event base(%s)", event_base);
}
}
static mdf_err_t wifi_init()
{
mdf_err_t ret = nvs_flash_init();
if (ret == ESP_ERR_NVS_NO_FREE_PAGES || ret == ESP_ERR_NVS_NEW_VERSION_FOUND) {
MDF_ERROR_ASSERT(nvs_flash_erase());
ret = nvs_flash_init();
}
MDF_ERROR_ASSERT(ret);
MDF_ERROR_ASSERT(esp_netif_init());
MDF_ERROR_ASSERT(esp_event_loop_create_default());
MDF_ERROR_ASSERT(esp_event_handler_register(WIFI_EVENT, ESP_EVENT_ANY_ID, &event_handler, NULL));
MDF_ERROR_ASSERT(esp_event_handler_register(IP_EVENT, ESP_EVENT_ANY_ID, &event_handler, NULL));
if (!esp_netif_create_default_wifi_sta()) {
MDF_LOGE("Create default wifi sta netif failed");
return MDF_FAIL;
}
wifi_init_config_t init_cfg = WIFI_INIT_CONFIG_DEFAULT();
MDF_ERROR_ASSERT(esp_wifi_init(&init_cfg));
wifi_config_t cfg = {
.sta = {
.ssid = WIFI_SSID,
.password = WIFI_PASSWORD,
}
};
MDF_ERROR_ASSERT(esp_wifi_set_mode(WIFI_MODE_STA));
MDF_ERROR_ASSERT(esp_wifi_set_config(ESP_IF_WIFI_STA, &cfg));
MDF_ERROR_ASSERT(esp_wifi_start());
MDF_LOGI("connect to wifi(%s,%s)", cfg.sta.ssid, cfg.sta.password);
return MDF_OK;
}
void app_main()
{
g_user_event = xEventGroupCreate();
ESP_ERROR_CHECK(wifi_init());
EventBits_t bits = xEventGroupWaitBits(g_user_event, NETWORK_CONNECTED, pdFALSE, pdFALSE, pdMS_TO_TICKS(10 * 1000));
if ((bits & NETWORK_CONNECTED) != NETWORK_CONNECTED) {
MDF_LOGE("Can not connected Wi-Fi");
assert(false);
}
printf("Running aliyun sdk unit test\n");
UNITY_BEGIN();
unity_run_all_tests();
UNITY_END();
} |
use crate::api::use_context;
use crate::codegen_traits::LuaApiTable;
use crate::component::{
Camera, GlyphRenderer, GlyphRendererConfig, LuaComponentCamera, LuaComponentGlyphRenderer,
LuaComponentNinePatchRenderer, LuaComponentSpriteRenderer, LuaComponentTilemapRenderer,
LuaComponentUIScaler, NinePatchRenderer, Size, SpriteRenderer, TilemapRenderer, Transform,
UIElement, UIScaleMode, UIScaler,
};
use crate::render::{
Color, LuaRcFont, LuaRcShader, LuaRcSprite, LuaRcSpriteNinePatch, LuaRcTilemap,
};
use crate::structure::Vec2;
use crate::ui::{UIAnchor, UIMargin};
use codegen::{LuaComponentNoWrapper, LuaStruct};
use legion::world::Entry;
use mlua::prelude::*;
use std::marker::PhantomData;
#[derive(LuaComponentNoWrapper, Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub struct Entity {
#[lua_hidden]
entity: legion::Entity,
#[lua_readonly]
#[lua_userfunc(get=lua_get_transform)]
transform: PhantomData<Transform>,
#[lua_readonly]
#[lua_userfunc(get=lua_get_size)]
size: PhantomData<Size>,
#[lua_readonly]
#[lua_userfunc(get=lua_get_ui_element)]
ui_element: PhantomData<UIElement>,
#[lua_readonly]
#[lua_userfunc(get=lua_get_ui_scaler)]
ui_scaler: PhantomData<UIScaler>,
#[lua_readonly]
#[lua_userfunc(get=lua_get_camera)]
camera: PhantomData<LuaComponentCamera>,
#[lua_readonly]
#[lua_userfunc(get=lua_get_glyph_renderer)]
glyph_renderer: PhantomData<LuaComponentGlyphRenderer>,
#[lua_readonly]
#[lua_userfunc(get=lua_get_sprite_renderer)]
sprite_renderer: PhantomData<LuaComponentSpriteRenderer>,
#[lua_readonly]
#[lua_userfunc(get=lua_get_nine_patch_renderer)]
nine_patch_renderer: PhantomData<LuaComponentNinePatchRenderer>,
#[lua_readonly]
#[lua_userfunc(get=lua_get_tilemap_renderer)]
tilemap_renderer: PhantomData<LuaComponentTilemapRenderer>,
#[lua_method]
listen: PhantomData<()>,
#[lua_method]
unlisten: PhantomData<()>,
}
impl Entity {
pub fn new(entity: legion::Entity) -> Self {
Self {
entity,
transform: PhantomData,
size: PhantomData,
ui_element: PhantomData,
ui_scaler: PhantomData,
camera: PhantomData,
glyph_renderer: PhantomData,
sprite_renderer: PhantomData,
nine_patch_renderer: PhantomData,
tilemap_renderer: PhantomData,
listen: PhantomData,
unlisten: PhantomData,
}
}
pub fn entity(&self) -> legion::Entity {
self.entity
}
pub fn with_entry<T>(&self, f: impl FnOnce(&Entry) -> T) -> Option<T> {
let mut world = use_context().world_mut();
let entry = match world.entry(self.entity) {
Some(entry) => entry,
None => return None,
};
Some(f(&entry))
}
pub fn with_entry_mut<T>(&self, f: impl FnOnce(&mut Entry) -> T) -> Option<T> {
let mut world = use_context().world_mut();
let mut entry = match world.entry(self.entity) {
Some(entry) => entry,
None => return None,
};
Some(f(&mut entry))
}
fn lua_get_transform<'lua>(&self, lua: &'lua Lua) -> LuaResult<LuaValue<'lua>> {
self.with_entry(|e| e.get_component::<Transform>().ok().cloned())
.to_lua(lua)
}
fn lua_get_size<'lua>(&self, lua: &'lua Lua) -> LuaResult<LuaValue<'lua>> {
self.with_entry(|e| e.get_component::<Size>().ok().cloned())
.to_lua(lua)
}
fn lua_get_ui_element<'lua>(&self, lua: &'lua Lua) -> LuaResult<LuaValue<'lua>> {
self.with_entry(|e| e.get_component::<UIElement>().ok().cloned())
.to_lua(lua)
}
fn lua_get_ui_scaler<'lua>(&self, lua: &'lua Lua) -> LuaResult<LuaValue<'lua>> {
self.with_entry(|e| {
e.get_component::<UIScaler>()
.ok()
.map(|_| LuaComponentUIScaler::from(self.entity))
})
.to_lua(lua)
}
fn lua_get_camera<'lua>(&self, lua: &'lua Lua) -> LuaResult<LuaValue<'lua>> {
self.with_entry(|e| {
e.get_component::<Camera>()
.ok()
.map(|_| LuaComponentCamera::from(self.entity))
})
.to_lua(lua)
}
fn lua_get_glyph_renderer<'lua>(&self, lua: &'lua Lua) -> LuaResult<LuaValue<'lua>> {
self.with_entry(|e| {
e.get_component::<GlyphRenderer>()
.ok()
.map(|_| LuaComponentGlyphRenderer::from(self.entity))
})
.to_lua(lua)
}
fn lua_get_sprite_renderer<'lua>(&self, lua: &'lua Lua) -> LuaResult<LuaValue<'lua>> {
self.with_entry(|e| {
e.get_component::<SpriteRenderer>()
.ok()
.map(|_| LuaComponentSpriteRenderer::from(self.entity))
})
.to_lua(lua)
}
fn lua_get_nine_patch_renderer<'lua>(&self, lua: &'lua Lua) -> LuaResult<LuaValue<'lua>> {
self.with_entry(|e| {
e.get_component::<NinePatchRenderer>()
.ok()
.map(|_| LuaComponentNinePatchRenderer::from(self.entity))
})
.to_lua(lua)
}
fn lua_get_tilemap_renderer<'lua>(&self, lua: &'lua Lua) -> LuaResult<LuaValue<'lua>> {
self.with_entry(|e| {
e.get_component::<TilemapRenderer>()
.ok()
.map(|_| LuaComponentTilemapRenderer::from(self.entity))
})
.to_lua(lua)
}
fn listen(&self, lua: &Lua, (event, function): (String, LuaFunction)) -> LuaResult<usize> {
use_context()
.entity_event_mgr_mut()
.add_entity_listener(lua, function, event, self.entity)
}
fn unlisten(&self, _lua: &Lua, handler: usize) -> LuaResult<()> {
use_context()
.entity_event_mgr_mut()
.remove_entity_listener(self.entity, handler);
Ok(())
}
}
impl LuaApiTable for Entity {
fn api_name() -> &'static str {
"Entity"
}
#[allow(unused_variables)]
fn fill_api_table(lua: &Lua, table: &LuaTable) -> LuaResult<()> {
table.set(
"build",
lua.create_function(|lua, param: EntityBuildParam| {
let context = use_context();
let mut world = context.world_mut();
let entity = world.push(());
let mut entry = world.entry(entity).unwrap();
let mut transform_mgr = context.transform_mgr_mut();
let transform = transform_mgr.alloc(entity);
entry.add_component(Transform::new(transform));
transform_mgr.set_name(transform, param.name);
if let Some(param) = param.transform {
transform_mgr
.set_parent(transform, param.parent.map(|transform| transform.index()));
let transform = transform_mgr.transform_mut(transform);
if let Some(position) = param.position {
transform.position = position;
}
if let Some(scale) = param.scale {
transform.scale = scale;
}
if let Some(angle) = param.angle {
transform.angle = angle;
}
}
let mut size = Size::new(transform);
if let Some(param) = param.size {
size.width = param.width;
size.height = param.height;
}
                entry.add_component(size);
if let Some(param) = param.ui_element {
let mut ui_mgr = context.ui_mgr_mut();
let element = ui_mgr.alloc(entity);
let e = ui_mgr.element_mut(element);
if let Some(anchor) = param.anchor {
e.anchor = anchor;
}
if let Some(margin) = param.margin {
e.margin = margin;
}
if let (Some(position), Some(size)) = (param.position, param.size) {
e.anchor = UIAnchor::new(Vec2::new(0f32, 0f32), Vec2::new(0f32, 0f32));
e.margin = UIMargin::new(0f32, -size.width, 0f32, -size.height);
}
if let Some(is_interactible) = param.is_interactible {
e.set_interactible(is_interactible);
}
ui_mgr
.element_mut(element)
.set_order_index(param.order_index);
entry.add_component(UIElement::new(element));
}
if let Some(param) = param.ui_scaler {
entry.add_component(UIScaler::new(param.mode, param.reference_size));
}
if let Some(param) = param.camera {
entry.add_component(Camera {
layer: param.layer.unwrap_or_default(),
order: param.order.unwrap_or_default(),
});
}
if let Some(param) = param.glyph_renderer {
let mut glyph_renderer = GlyphRenderer::new(
<_>::from(param.shader),
<_>::from(param.font),
param.font_size,
param.thickness,
param.smoothness,
);
if let Some(layer) = param.layer {
glyph_renderer.layer = layer;
}
if let Some(order) = param.order {
glyph_renderer.order = order;
}
if let Some(color) = param.color {
glyph_renderer.color = color;
}
if let Some(config) = param.config {
glyph_renderer.set_config(config);
}
if let Some(text) = param.text {
glyph_renderer.set_text(text);
}
entry.add_component(glyph_renderer);
}
if let Some(param) = param.sprite_renderer {
let mut sprite_renderer =
SpriteRenderer::new(<_>::from(param.shader), <_>::from(param.sprite));
if let Some(layer) = param.layer {
sprite_renderer.layer = layer;
}
if let Some(order) = param.order {
sprite_renderer.order = order;
}
if let Some(color) = param.color {
sprite_renderer.color = color;
}
entry.add_component(sprite_renderer);
}
if let Some(param) = param.nine_patch_renderer {
let mut nine_patch_renderer = NinePatchRenderer::new(
<_>::from(param.shader),
<_>::from(param.nine_patch),
);
if let Some(layer) = param.layer {
nine_patch_renderer.layer = layer;
}
if let Some(order) = param.order {
nine_patch_renderer.order = order;
}
if let Some(color) = param.color {
nine_patch_renderer.color = color;
}
entry.add_component(nine_patch_renderer);
}
if let Some(param) = param.tilemap_renderer {
let mut tilemap_renderer =
TilemapRenderer::new(<_>::from(param.shader), <_>::from(param.tilemap));
if let Some(layer) = param.layer {
tilemap_renderer.layer = layer;
}
if let Some(order) = param.order {
tilemap_renderer.order = order;
}
if let Some(color) = param.color {
tilemap_renderer.color = color;
}
entry.add_component(tilemap_renderer);
}
Ok(Entity::new(entity))
})?,
)?;
table.set(
"get_by_name",
lua.create_function(|lua, name: String| {
let transform_mgr = use_context().transform_mgr();
Ok(transform_mgr.find_by_name(name).map(|indices| {
indices
.iter()
.map(|index| Entity::new(transform_mgr.entity(*index)))
.collect::<Vec<_>>()
}))
})?,
)?;
Ok(())
}
}
#[derive(LuaStruct)]
struct TransformBuildParam {
pub parent: Option<Transform>,
pub position: Option<Vec2>,
pub scale: Option<Vec2>,
pub angle: Option<f32>,
}
#[derive(LuaStruct)]
struct SizeBuildParam {
pub width: f32,
pub height: f32,
}
#[derive(LuaStruct)]
struct UIElementBuildParam {
pub anchor: Option<UIAnchor>,
pub margin: Option<UIMargin>,
pub position: Option<Vec2>,
pub size: Option<crate::structure::Size>,
pub is_interactible: Option<bool>,
pub order_index: u32,
}
#[derive(LuaStruct)]
struct UIScalerBuildParam {
pub mode: UIScaleMode,
pub reference_size: crate::structure::Size,
}
#[derive(LuaStruct)]
struct CameraBuildParam {
pub layer: Option<crate::render::Layer>,
pub order: Option<isize>,
}
#[derive(LuaStruct)]
struct GlyphRendererBuildParam {
pub layer: Option<crate::render::Layer>,
pub order: Option<isize>,
pub color: Option<Color>,
pub shader: LuaRcShader,
pub font: LuaRcFont,
pub font_size: f32,
pub thickness: f32,
pub smoothness: f32,
pub config: Option<GlyphRendererConfig>,
pub text: Option<String>,
}
#[derive(LuaStruct)]
struct SpriteRendererBuildParam {
pub layer: Option<crate::render::Layer>,
pub order: Option<isize>,
pub color: Option<Color>,
pub shader: LuaRcShader,
pub sprite: LuaRcSprite,
}
#[derive(LuaStruct)]
struct NinePatchRendererBuildParam {
pub layer: Option<crate::render::Layer>,
pub order: Option<isize>,
pub color: Option<Color>,
pub shader: LuaRcShader,
pub nine_patch: LuaRcSpriteNinePatch,
}
#[derive(LuaStruct)]
struct TilemapRendererBuildParam {
pub layer: Option<crate::render::Layer>,
pub order: Option<isize>,
pub color: Option<Color>,
pub shader: LuaRcShader,
pub tilemap: LuaRcTilemap,
}
#[derive(LuaStruct)]
struct EntityBuildParam {
name: Option<String>,
transform: Option<TransformBuildParam>,
size: Option<SizeBuildParam>,
ui_element: Option<UIElementBuildParam>,
ui_scaler: Option<UIScalerBuildParam>,
camera: Option<CameraBuildParam>,
glyph_renderer: Option<GlyphRendererBuildParam>,
sprite_renderer: Option<SpriteRendererBuildParam>,
nine_patch_renderer: Option<NinePatchRendererBuildParam>,
tilemap_renderer: Option<TilemapRendererBuildParam>,
}
|
#!/bin/bash
set -e
IFS='|'
profileName=${AWS_PROFILE:-default}
FLUTTERCONFIG="{\
\"ResDir\":\"./lib/\",\
}"
AMPLIFY="{\
\"projectName\":\"amplifyDataStoreInteg\",\
\"envName\":\"test\",\
\"defaultEditor\":\"code\"\
}"
FRONTEND="{\
\"frontend\":\"flutter\",\
\"config\":$FLUTTERCONFIG\
}"
AWSCLOUDFORMATIONCONFIG="{\
\"configLevel\":\"project\",\
\"useProfile\":\"true\",\
\"profileName\":\"$profileName\",\
\"region\":\"us-west-2\"\
}"
PROVIDERS="{\
\"awscloudformation\":$AWSCLOUDFORMATIONCONFIG\
}"
# read the request template and the schema
requestTemplate=`cat tool/add_api_request.json`
schema=`cat tool/schema.graphql`
# escape quotes and remove new lines from schema
schema=${schema//$'"'/'\"'}
schema=${schema//$'\n'/}
# create the request with the actual schema
request="${requestTemplate/<SCHEMA_PLACEHOLDER>/$schema}"
amplify init \
--amplify $AMPLIFY \
--frontend $FRONTEND \
--providers $PROVIDERS \
--yes
echo "$request" | jq -c | amplify add api --headless
amplify push --yes
|
#!/usr/bin/env python
__author__ = "Bharat Medasani"
__copyright__ = "Copyright 2014, The Materials Project"
__version__ = "1.0"
__maintainer__ = "Bharat Medasani"
__email__ = "[email protected]"
__status__ = "Development"
__date__ = "Jul 24, 2016"
import os
import logging
import logging.config
from monty.serialization import loadfn
MODULE_DIR = os.path.dirname(os.path.abspath(__file__))
def initialize_logging(filename=None, level=None):
config_dict = loadfn(os.path.join(MODULE_DIR, 'logging.yaml'))
if filename:
config_dict['handlers']['file_handler'].update({'filename': filename})
if level:
config_dict['handlers']['file_handler'].update({'level': level})
logging.config.dictConfig(config_dict)
|
import sbt._
import com.twitter.sbt._
class NodeRegistry(info: ProjectInfo) extends StandardProject(info) {
val specs = "org.scala-tools.testing" % "specs" % "1.6.2.1"
val vscaladoc = "org.scala-tools" % "vscaladoc" % "1.1-md-3"
val configgy = "net.lag" % "configgy" % "1.6.1"
val xrayspecs = "com.twitter" % "xrayspecs" % "1.0.7" //--auto--
val twitter = "com.twitter" % "json" % "1.1"
val zookeeper = "com.twitter" % "zookeeper-client" % "1.5.1"
}
|
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using System.Windows.Input;
using RealmiusAdvancedExample.Model;
using Xamarin.Forms;
namespace RealmiusAdvancedExample.ViewModel
{
public class AuthorisationPageViewModel : RootViewModel
{
public bool UserAuthorised
{
get { return App.UserAuthorised; }
set { App.UserAuthorised = value; }
}
public ICommand LoginCommand { get; set; }
public ICommand SkipCommand { get; set; }
public string UserName { get; set; }
public string UserPassword { get; set; }
public User CurrentUser
{
get { return App.CurrentUser; }
set { App.CurrentUser = value; }
}
public string ErrorMsg { get; set; }
public bool AccessGranted { get; set; }
public AuthorisationPageViewModel()
{
LoginCommand = new Command(Login);
SkipCommand = new Command(Skip);
}
private void Login()
{
if (String.IsNullOrWhiteSpace(UserName) || String.IsNullOrWhiteSpace(UserPassword))
{
ErrorMsg = "Fill all fields!";
OnPropertyChanged(nameof(ErrorMsg));
return;
}
if (UsersCredentialsDict.CheckUser(UserName, UserPassword))
{
ErrorMsg = null;
CurrentUser = new User(UserName, UserPassword);
OnPropertyChanged(nameof(CurrentUser));
UserAuthorised = true;
OnPropertyChanged(nameof(UserAuthorised));
GrantAccess(4000);
}
else
{
ErrorMsg = "Wrong name or password!";
}
OnPropertyChanged(nameof(ErrorMsg));
}
public void Skip()
{
var testUser = UsersCredentialsDict.GetDefaultUserCreds();
CurrentUser = new User(testUser.Key, testUser.Value);
OnPropertyChanged(nameof(CurrentUser));
GrantAccess(0);
}
public async void GrantAccess(int delayTime)
{
AccessGranted = true;
OnPropertyChanged(nameof(AccessGranted));
await Task.Delay(delayTime);
OnAuthorisePageClosed?.Invoke(new object(), EventArgs.Empty);
}
public EventHandler OnAuthorisePageClosed;
}
}
|
#!/usr/bin/env node
import path from 'path';
import React from 'react';
import ReactDOMServer from 'react-dom/server';
import DirectoryTree from './components/DirectoryTree';
import { initConfig, getConfig } from './config';
import * as FSUtils from './fsUtils';
const mdFileTree = async (dirPath, linkPrefix, depth = 0) => {
if (depth > getConfig('maxDepth')) {
return null;
}
const tree = {
rootName: path.basename(dirPath),
rootPath: path.resolve(dirPath),
hasRootMd: false,
childDirs: [],
childFiles: [],
};
const children = await FSUtils.readdirPromise(dirPath);
await Promise.all(children.map(async child => {
const childPath = path.resolve(dirPath, child);
if (getConfig('exclude').some(exclude => exclude.test(childPath))) {
return;
}
const childStats = await FSUtils.statPromise(childPath);
if (childStats.isDirectory()) {
const childLinkPrefix = `${linkPrefix}`;
const childDir = await mdFileTree(childPath, childLinkPrefix, depth + 1);
if (childDir !== null) {
tree.childDirs.push(childDir);
}
} else if (
depth === 0 && getConfig('rootFileName') === child ||
tree.rootName === FSUtils.trimExtension(child)
) {
tree.hasRootMd = true;
} else if (getConfig('include').some(include => include.test(child))) {
tree.childFiles.push(child);
}
return;
}));
return tree;
};
const run = async configPath => {
try {
await initConfig(configPath || '.mftrc.json');
const dirPath = path.resolve(FSUtils.resolveHome(getConfig('source')));
const tree = await mdFileTree(dirPath, getConfig('linkPrefix'));
const treeHtml = ReactDOMServer.renderToStaticMarkup(<DirectoryTree tree={tree} />);
const outputPath = path.resolve(FSUtils.resolveHome(getConfig('output')));
await FSUtils.writeFilePromise(outputPath, treeHtml);
} catch (error) {
console.log(error);
}
};
run(process.argv[2]);
|
package com.vlad1m1r.bltaxi.about
import android.app.Application
import android.os.Build
import androidx.fragment.app.testing.launchFragmentInContainer
import androidx.test.platform.app.InstrumentationRegistry
import com.nhaarman.mockitokotlin2.mock
import com.nhaarman.mockitokotlin2.verify
import com.nhaarman.mockitokotlin2.whenever
import org.junit.Before
import org.junit.Test
import org.junit.runner.RunWith
import org.koin.android.ext.koin.androidContext
import org.koin.android.ext.koin.androidLogger
import org.koin.core.context.loadKoinModules
import org.koin.core.context.startKoin
import org.koin.core.context.stopKoin
import org.koin.core.logger.Level
import org.koin.core.module.Module
import org.koin.dsl.module
import org.robolectric.RobolectricTestRunner
import org.robolectric.annotation.Config
class TestApplication : Application() {
override fun onCreate() {
super.onCreate()
setTheme(R.style.Theme_App)
stopKoin()
startKoin {
androidLogger()
androidContext(this@TestApplication)
// TODO Await fix for Koin and replace the explicit invocations
// of loadModules() and createRootScope() with a single call to modules()
// (https://github.com/InsertKoinIO/koin/issues/847)
koin.loadModules(emptyList())
koin.createRootScope()
}
}
internal fun injectModule(module: Module) {
loadKoinModules(module)
}
}
@RunWith(RobolectricTestRunner::class)
@Config(sdk = [Build.VERSION_CODES.P], application = TestApplication::class)
class AboutFragmentShould {
val fragmentViewModel = mock<AboutViewModel>()
@Before
fun before() {
val applicationContext =
InstrumentationRegistry.getInstrumentation().targetContext.applicationContext as TestApplication
applicationContext.injectModule(
module {
single {
fragmentViewModel
}
}
)
}
@Test
fun sendEmail_whenButtonSendEmailIsClicked() {
launchFragmentInContainer<AboutFragment>()
aboutScreen { clickButtonSendEmail() }
verify(fragmentViewModel).writeEmail()
}
@Test
fun rateApp_whenButtonRateAppIsClicked() {
launchFragmentInContainer<AboutFragment>()
aboutScreen { clickButtonRateApp() }
verify(fragmentViewModel).rateApp()
}
@Test
fun shareApp_whenButtonShareAppIsClicked() {
launchFragmentInContainer<AboutFragment>()
aboutScreen { clickButtonShareApp() }
verify(fragmentViewModel).shareApp()
}
@Test
fun openPrivacyPolicy_whenButtonPrivacyPolicyIsClicked() {
launchFragmentInContainer<AboutFragment>()
aboutScreen { clickButtonPrivacyPolicy() }
verify(fragmentViewModel).openPrivacyPolicy()
}
@Test
fun openTermsAndConditions_whenButtonTermsAndConditionsIsClicked() {
launchFragmentInContainer<AboutFragment>()
aboutScreen { clickButtonTermsAndConditions() }
verify(fragmentViewModel).openTermsAndConditions()
}
@Test
fun showVersionName() {
whenever(fragmentViewModel.getAppVersionName()).thenReturn("version_name")
launchFragmentInContainer<AboutFragment>()
aboutScreen { textAppVersionIsEqualTo("Version: version_name") }
}
} |
import { randBetween } from "./SensorDataUtil";
class KeyEvent {
time: number;
idCharCodeSum: number;
longerThanBefore: boolean;
constructor(time: number, idCharCodeSum: number, longerThanBefore: boolean) {
this.time = time;
this.idCharCodeSum = idCharCodeSum;
this.longerThanBefore = longerThanBefore;
}
public toString(): string {
return `2,${this.time},${this.idCharCodeSum}${this.longerThanBefore ? ",1" : ""};`;
}
}
export default class KeyEventList {
keyEvents: KeyEvent[]
constructor() {
this.keyEvents = [];
}
public randomize(sensorCollectionStartTimestamp: number): void {
this.keyEvents = [];
if (Math.random() > 0.05) return;
let timeSinceSensorCollectionStart = Date.now() - sensorCollectionStartTimestamp;
if (timeSinceSensorCollectionStart < 10000) return;
let eventCount = randBetween(2, 5);
let idCharCodeSum = randBetween(517, 519);
for (let i = 0; i < eventCount; i++) {
let time = (i === 0 ? randBetween(5000, 8000) : randBetween(10, 50));
this.keyEvents.push(new KeyEvent(time, idCharCodeSum, (Math.random() > 0.5)));
}
}
/*
examples:
2,29555,517;2,152,517,1;2,13,518,1;
2,20016,517;2,366,518,1;2,27,517,1;
2,13840,517,1;2,19,518,1;
structure:
1. seems to always be 2
2. time between this event and last event
3. sumCharCodes of ID of text edit field
4. (only present if text field length is longer than before the event) 1
*/
public toString(): string {
return this.keyEvents.map(event => event.toString()).join("");
}
public getSum(): number {
let sum = 0;
for (let keyEvent of this.keyEvents) {
sum += keyEvent.idCharCodeSum;
sum += keyEvent.time;
sum += 2;
}
return sum;
}
}
|
#!/bin/sh
set -eu
: "${SOURCECRED_REMOTE:[email protected]:sourcecred/sourcecred.git}"
: "${SOURCECRED_REF:=origin/master}"
: "${DEPLOY_REMOTE:[email protected]:sourcecred/sourcecred.github.io.git}"
: "${DEPLOY_BRANCH:=master}"
: "${DEPLOY_CNAME_URL:=sourcecred.io}"
toplevel="$(git -C "$(dirname "$0")" rev-parse --show-toplevel)"
. $toplevel/scripts/monorepo_vars.sh
export GIT_CONFIG_NOSYSTEM=1
export GIT_ATTR_NOSYSTEM=1
main() {
parse_args "$@"
cd "${CORE_PATH}"
sourcecred_repo=
static_site=
sourcecred_site=
preview_dir=
trap cleanup EXIT
ensure_clean_working_tree
build_and_deploy
}
parse_args() {
while [ $# -gt 0 ]; do
case "$1" in
-n|--dry-run)
printf 'Setting DRY_RUN=1.\n'
DRY_RUN=1
;;
*)
printf >&2 'unknown argument: %s\n' "$1"
exit 1
;;
esac
shift
done
}
# Adapted from:
# https://github.com/git/git/blob/8d530c4d64ffcc853889f7b385f554d53db375ed/git-sh-setup.sh#L207-L222
ensure_clean_working_tree() {
err=0
if ! git diff-files --quiet --ignore-submodules; then
printf >&2 'Cannot deploy: You have unstaged changes.\n'
err=1
fi
if ! git diff-index --cached --quiet --ignore-submodules HEAD -- ; then
if [ "${err}" -eq 0 ]; then
printf >&2 'Cannot deploy: Your index contains uncommitted changes.\n'
else
printf >&2 'Additionally, your index contains uncommitted changes.\n'
fi
err=1
fi
if [ "${err}" -ne 0 ]; then
exit "${err}"
fi
}
build_and_deploy() {
sourcecred_repo="$(mktemp -d --suffix ".sourcecred-repo")"
git clone "${SOURCECRED_REMOTE}" "${sourcecred_repo}"
sourcecred_hash="$(
git -C "${sourcecred_repo}" rev-parse --verify "${SOURCECRED_REF}" --
)"
git -C "${sourcecred_repo}" checkout --detach "${sourcecred_hash}"
static_site="$(mktemp -d --suffix ".static-site")"
"${sourcecred_repo}/scripts/build_static_site.sh" \
--target "${static_site}" \
${DEPLOY_CNAME_URL:+--cname "${DEPLOY_CNAME_URL}"} \
--project @sourcecred \
--project @filecoin-project \
--project @ipld \
--project @libp2p \
;
sourcecred_site="$(mktemp -d --suffix ".sourcecred-site")"
git clone "${DEPLOY_REMOTE}" "${sourcecred_site}"
if ! base_commit="$(
git -C "${sourcecred_site}" rev-parse --verify \
"refs/remotes/origin/${DEPLOY_BRANCH}" --
)"; then
printf >&2 'No deploy branch %s.\n' "${DEPLOY_BRANCH}"
exit 1
fi
git -C "${sourcecred_site}" checkout --detach "${base_commit}"
rm "${sourcecred_site}/.git/index"
git -C "${sourcecred_site}" clean -qfdx
# Explode the static site contents into the current directory.
find "${static_site}" -mindepth 1 -maxdepth 1 \
\( -name .git -prune \) -o \
-exec cp -r -t "${sourcecred_site}" -- {} +
git -C "${sourcecred_site}" add --all .
git -C "${sourcecred_site}" commit -m "deploy-v1: ${sourcecred_hash}"
deploy_commit="$(git -C "${sourcecred_site}" rev-parse HEAD)"
preview_dir="$(mktemp -d --suffix ".sourcecred-prvw")"
git clone -q --no-local --no-checkout "${sourcecred_site}" "${preview_dir}"
git -C "${preview_dir}" checkout -q --detach "${deploy_commit}"
printf '\n'
printf 'Please review the build output now---run:\n'
printf ' cd "%s" && python -m SimpleHTTPServer\n' "${preview_dir}"
line=
while [ "${line}" != yes ] && [ "${line}" != no ]; do
printf 'Do you want to deploy? yes/no> '
read -r line
done
if [ "${line}" = yes ]; then
(
set -x;
git -C "${sourcecred_site}" push ${DRY_RUN:+--dry-run} \
origin \
"${deploy_commit}:${DEPLOY_BRANCH}" \
;
)
else
printf 'Aborting.\n'
fi
printf 'Done.\n'
}
cleanup() {
if [ -d "${sourcecred_repo}" ]; then
rm -rf "${sourcecred_repo}"
fi
if [ -d "${static_site}" ]; then
rm -rf "${static_site}"
fi
if [ -d "${sourcecred_site}" ]; then
rm -rf "${sourcecred_site}"
fi
if [ -d "${preview_dir}" ]; then
rm -rf "${preview_dir}"
fi
}
main "$@"
|
return {
zh_CN: "Simplified Chinese",
en_US: "English",
ja_JP: "Japanese"
};
|
package dance_strategy
import "fmt"
func Waltz() {
fmt.Println("I'm dancing waltz")
}
|
from .helpers import update_mapping, proc_mapping
from .ScopeObjs import ScopeTransformer
from sklearn.preprocessing import OneHotEncoder
import pandas as pd
class BPtTransformer(ScopeTransformer):
def fit(self, X, y=None, mapping=None,
fit_index=None, **fit_params):
# Need the output from a transform to full fit,
# so when fit is called, call fit_transform instead
self.fit_transform(X=X, y=y, mapping=mapping,
fit_index=fit_index,
**fit_params)
return self
def _update_transformer_mapping(self, mapping):
# Need to update the mapping before returning
# Update inds / rest inds by current out mapping
inds = proc_mapping(self.inds_, self.out_mapping_)
rest_inds = proc_mapping(self.rest_inds_, self.out_mapping_)
# Many to many case for transformer,
# override existing out_mapping_
new_out_mapping_ = {}
X_trans_inds = list(range(self.n_trans_feats_))
# Many to many case, each ind is mapped
# to all outputted X_trans_inds
for i in inds:
new_out_mapping_[i] = X_trans_inds
# Fill the remaining spots sequentially,
# for each of the rest inds.
for c in range(len(rest_inds)):
ind = rest_inds[c]
new_out_mapping_[ind] = self.n_trans_feats_ + c
# Override
self.out_mapping_ = new_out_mapping_
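        # Illustrative example (hypothetical values): with inds_ = [0, 2],
        # rest_inds_ = [1] and n_trans_feats_ = 3, new_out_mapping_ becomes
        # {0: [0, 1, 2], 2: [0, 1, 2], 1: 3}.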
# Update the original mapping, this is the mapping which
# will be passed to the next piece of the pipeline
update_mapping(mapping, self.out_mapping_)
# Set final out mapping
self.out_mapping_ = mapping.copy()
return self
def _all_case_update_transformer_mapping(self, X, mapping):
# TODO Should this be changed at all???
# Get as list of
X_trans_inds = list(range(self.n_trans_feats_))
# All case out mapping
self.out_mapping_ = {i: X_trans_inds for i in range(X.shape[1])}
# Since no rest inds, update mapping
update_mapping(mapping, self.out_mapping_)
return self
def fit_transform(self, X, y=None, mapping=None,
fit_index=None,
transform_index=None, **fit_params):
if mapping is None:
mapping = {}
# Call parent fit
super().fit(X, y=y, mapping=mapping,
fit_index=fit_index,
**fit_params)
# If skip
if self.estimator_ is None:
return X
# Transform X - since fit_transform, index is fit index
X_trans = self.transform(X, transform_index=fit_index)
# Update mapping and set out_mapping_
# special all case
if self.inds_ is Ellipsis:
self._all_case_update_transformer_mapping(X, mapping)
else:
self._update_transformer_mapping(mapping)
# Now return X_trans
return X_trans
def transform_df(self, df, base_name='transformer', encoders=None):
return super().transform_df(df, base_name=base_name, encoders=encoders)
def _proc_new_names(self, feat_names, base_name, encoders=None):
# If skip, return passed names as is
if self.estimator_ is None:
return feat_names
# Check for one hot encoder
if isinstance(self.estimator_, OneHotEncoder):
new_names =\
self._proc_one_hot_new_names(feat_names, encoders=encoders)
# Get new names
else:
if self.inds_ is Ellipsis:
alt_name = base_name
elif len(self.inds_) == 1:
alt_name = feat_names[self.inds_[0]]
else:
alt_name = base_name
try:
new_names = [alt_name + '_' + str(i)
for i in range(self.n_trans_feats_)]
except IndexError:
new_names = [base_name + '_' + str(i)
for i in range(self.n_trans_feats_)]
# Remove old names - using parent method
feat_names = self._remove_old_names(feat_names)
# New names come first, then rest of names
all_names = new_names + feat_names
return all_names
def _proc_one_hot_new_names(self, feat_names, encoders=None):
def get_display_cat(name, cat):
# If encoders passed, and name in encoder
# use de-coded name as the cat
if encoders is not None and name in encoders:
try:
cat = encoders[name][cat]
# If error, keep as is
except KeyError:
                    pass
# If float, turn to int before cast to str
if isinstance(cat, float):
if pd.isnull(cat):
cat = 'NaN'
else:
cat = int(cat)
return str(name) + '=' + repr(cat)
# Scope all case, set inds as identity
# over all passed feat names
if self.inds_ is Ellipsis:
inds = list(range(len(feat_names)))
# Otherwise use self.inds_
else:
inds = self.inds_
# Save new names in new_names
new_names = []
# If no drop
if self.estimator_.drop_idx_ is None:
for name_ind, category in zip(inds,
self.estimator_.categories_):
name = feat_names[name_ind]
for cat in category:
new_names.append(get_display_cat(name, cat))
# Otherwise if drop index
else:
for name_ind, category, to_drop in zip(inds,
self.estimator_.categories_,
self.estimator_.drop_idx_):
name = feat_names[name_ind]
for i, cat in enumerate(category):
if i != to_drop:
new_names.append(get_display_cat(name, cat))
return new_names
|
#|
This file is a part of Courier
 (c) 2019 Shirakumo http://tymoon.eu (shirakumo@tymoon.eu)
 Author: Nicolas Hafner <shinmera@tymoon.eu>
|#
(in-package #:courier)
(defun make-tag (campaign &key title description (save T))
(let ((campaign (ensure-campaign campaign)))
(check-title-exists 'tag title (db:query (:and (:= 'campaign (dm:id campaign))
(:= 'title title))))
(dm:with-model tag ('tag NIL)
(setf-dm-fields tag campaign title description)
(when save (dm:insert tag))
tag)))
(defun edit-tag (tag-ish &key title description (save T))
(let ((tag (ensure-tag tag-ish)))
(setf-dm-fields tag title description)
(when save (dm:save tag))
tag))
(defun ensure-tag (tag-ish)
(or
(etypecase tag-ish
(dm:data-model tag-ish)
(db:id (dm:get-one 'tag (db:query (:= '_id tag-ish))))
(string (ensure-tag (db:ensure-id tag-ish))))
(error 'request-not-found :message "No such tag.")))
(defun delete-tag (tag)
(db:with-transaction ()
(db:remove 'tag-table (db:query (:= 'tag (dm:id tag))))
(db:remove 'mail-tag-table (db:query (:= 'tag (dm:id tag))))
(delete-triggers-for tag)
(dm:delete tag)))
(defun list-tags (thing &key amount (skip 0) query)
(with-query (query title description)
(ecase (dm:collection thing)
(campaign
(dm:get 'tag (query (:= 'campaign (dm:id thing)))
:sort '((title :asc)) :amount amount :skip skip))
(subscriber
(fixup-ids (dm:get (rdb:join (tag _id) (tag-table tag)) (db:query (:= 'subscriber (dm:id thing)))
:sort '((title :asc)) :amount amount :skip skip :hull 'tag)
"tag"))
(mail
(fixup-ids (dm:get (rdb:join (tag _id) (mail-tag-table tag)) (db:query (:= 'mail (dm:id thing)))
:sort '((title :asc)) :amount amount :skip skip :hull 'tag)
"tag")))))
(defun list-tagged (tag type &key amount (skip 0))
(ecase type
(subscriber
(fixup-ids
(dm:get (rdb:join (subscriber _id) (tag-table subscriber)) (db:query (:= 'tag (dm:id tag)))
:sort '((signup-time :DESC)) :amount amount :skip skip :hull 'subscriber)
"subscriber"))
(mail
(fixup-ids
(dm:get (rdb:join (mail _id) (mail-tag-table mail)) (db:query (:= 'tag (dm:id tag)))
:sort '((time :DESC)) :amount amount :skip skip :hull 'mail)
"mail"))))
(defun tagged-p (tag thing)
(< 0 (ecase (dm:collection thing)
(subscriber
(db:count 'tag-table (db:query (:and (:= 'subscriber (ensure-id thing))
(:= 'tag (ensure-id tag))))))
(mail
(db:count 'mail-tag-table (db:query (:and (:= 'mail (ensure-id thing))
(:= 'tag (ensure-id tag)))))))))
(defun tag (thing tag)
(db:with-transaction ()
(let ((tag (ensure-tag tag)))
(unless (tagged-p tag thing)
(ecase (dm:collection thing)
(subscriber
(db:insert 'tag-table `(("tag" . ,(ensure-id tag))
("subscriber" . ,(ensure-id thing))))
(process-triggers thing tag))
(mail
(db:insert 'mail-tag-table `(("tag" . ,(ensure-id tag))
("mail" . ,(ensure-id thing))))))))))
(defun untag (thing tag)
(db:with-transaction ()
(let ((tag (ensure-tag tag)))
(when (tagged-p tag thing)
(ecase (dm:collection thing)
(subscriber
(db:remove 'tag-table (db:query (:and (:= 'tag (ensure-id tag))
(:= 'subscriber (ensure-id thing)))))
(process-triggers thing (list-source-triggers tag :type 20)))
(mail
(db:remove 'mail-tag-table (db:query (:and (:= 'tag (ensure-id tag))
(:= 'mail (ensure-id thing)))))))))))
|
<?php
namespace DeltaCli\Extension\WordPress\Script;
use DeltaCli\Project;
use DeltaCli\Script;
use Symfony\Component\Console\Input\InputOption;
class Install extends Script
{
private $force = false;
public function __construct(Project $project)
{
parent::__construct(
$project,
'wp:install',
'Install WordPress with stock Delta themes and plugins.'
);
}
protected function configure()
{
$this->addSetterOption(
'force',
null,
InputOption::VALUE_NONE,
'Force install even if remote folder is not empty.'
);
}
public function setForce($force)
{
$this->force = $force;
return $this;
}
protected function addSteps()
{
// @todo Detect non-empty httpdocs and stop unless --force option is set
// @todo Download core
// @todo Create and edit wp-config
// @todo Download public plugins
// @todo Download themes and plugins from Delta repos
}
} |
{-# LANGUAGE CPP #-}
{-# LANGUAGE DataKinds, TypeOperators, TypeFamilies #-}
{-# LANGUAGE FlexibleContexts, FlexibleInstances, MultiParamTypeClasses #-}
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE RankNTypes #-}
{-# LANGUAGE ScopedTypeVariables #-}
-- | The multi-valued version of mtl's RWS / RWST
module Control.Monad.Trans.MultiRWS.Lazy
(
-- * MultiRWST
MultiRWST(..)
, MultiRWSTNull
, MultiRWS
-- * MonadMulti classes
, MonadMultiReader(..)
, MonadMultiWriter(..)
, MonadMultiGet(..)
, MonadMultiState(..)
-- * run-functions (extracting from RWST)
, runMultiRWST
, runMultiRWSTASW
, runMultiRWSTW
, runMultiRWSTAW
, runMultiRWSTSW
, runMultiRWSTNil
, runMultiRWSTNil_
-- * with-functions (extending an RWST)
, withMultiReader
, withMultiReader_
, withMultiReaders
, withMultiReaders_
, withMultiWriter
, withMultiWriterAW
, withMultiWriterWA
, withMultiWriterW
, withMultiWriters
, withMultiWritersAW
, withMultiWritersWA
, withMultiWritersW
, withMultiState
, withMultiStateAS
, withMultiStateSA
, withMultiStateA
, withMultiStateS
, withMultiState_
, withMultiStates
, withMultiStatesAS
, withMultiStatesSA
, withMultiStatesA
, withMultiStatesS
, withMultiStates_
-- * without-functions (reducing an RWST; inverse of with)
, withoutMultiReader
, withoutMultiState
-- * inflate-functions (run simple transformer in MultiRWST)
, inflateReader
, inflateMultiReader
, inflateWriter
, inflateMultiWriter
, inflateState
, inflateMultiState
-- * other functions
, mapMultiRWST
, mGetRawR
, mGetRawW
, mGetRawS
, mPutRawR
, mPutRawW
, mPutRawS
)
where
import Data.HList.HList
import Data.HList.ContainsType
import Control.Monad.Trans.MultiReader.Class ( MonadMultiReader(..) )
import Control.Monad.Trans.MultiWriter.Class ( MonadMultiWriter(..) )
import Control.Monad.Trans.MultiState.Class
import Control.Monad.Trans.MultiReader.Lazy ( MultiReaderT(..)
, runMultiReaderT )
import Control.Monad.Trans.MultiWriter.Lazy ( MultiWriterT(..)
, runMultiWriterT )
import Control.Monad.Trans.MultiState.Lazy ( MultiStateT(..)
, runMultiStateT )
import Control.Monad.State.Lazy ( StateT(..)
, MonadState(..)
, execStateT
, evalStateT
, mapStateT )
import Control.Monad.Reader ( ReaderT(..) )
import Control.Monad.Writer.Lazy ( WriterT(..) )
import Control.Monad.Trans.Class ( MonadTrans
, lift )
import Data.Functor.Identity ( Identity )
import Control.Applicative ( Applicative(..)
, Alternative(..)
)
import Control.Monad ( MonadPlus(..)
, liftM
, ap
, void )
import Control.Monad.Base ( MonadBase(..)
, liftBaseDefault
)
import Control.Monad.Trans.Control ( MonadTransControl(..)
, MonadBaseControl(..)
, ComposeSt
, defaultLiftBaseWith
, defaultRestoreM
)
import Control.Monad.Fix ( MonadFix(..) )
import Control.Monad.IO.Class ( MonadIO(..) )
import Data.Monoid
newtype MultiRWST r w s m a = MultiRWST {
runMultiRWSTRaw :: StateT (HList r, HList w, HList s) m a
}
type MultiRWSTNull = MultiRWST '[] '[] '[]
type MultiRWS r w s = MultiRWST r w s Identity
instance (Functor f) => Functor (MultiRWST r w s f) where
fmap f = MultiRWST . fmap f . runMultiRWSTRaw
instance (Applicative m, Monad m) => Applicative (MultiRWST r w s m) where
pure = MultiRWST . pure
(<*>) = ap
instance (Monad m) => Monad (MultiRWST r w s m) where
return = MultiRWST . return
k >>= f = MultiRWST $ runMultiRWSTRaw k >>= runMultiRWSTRaw . f
instance MonadTrans (MultiRWST r w s) where
lift = MultiRWST . lift
instance
#if MIN_VERSION_base(4,8,0)
{-# OVERLAPPING #-}
#endif
(Monad m, ContainsType a r)
=> MonadMultiReader a (MultiRWST r w s m) where
mAsk = MultiRWST $ liftM (\(r,_,_) -> getHListElem r) get
instance
#if MIN_VERSION_base(4,8,0)
{-# OVERLAPPING #-}
#endif
(Monad m, ContainsType a w, Monoid a)
=> MonadMultiWriter a (MultiRWST r w s m) where
mTell v = MultiRWST $ do
~(r,w,s) <- get
put $ (r, setHListElem (getHListElem w `mappend` v) w, s)
instance
#if MIN_VERSION_base(4,8,0)
{-# OVERLAPPING #-}
#endif
(Monad m, ContainsType a s)
=> MonadMultiGet a (MultiRWST r w s m) where
mGet = MultiRWST $ do
~(_,_,s) <- get
return $ getHListElem s
instance
#if MIN_VERSION_base(4,8,0)
{-# OVERLAPPING #-}
#endif
(Monad m, ContainsType a s)
=> MonadMultiState a (MultiRWST r w s m) where
mSet v = MultiRWST $ do
~(r,w,s) <- get
put (r, w, setHListElem v s)
instance MonadFix m => MonadFix (MultiRWST r w s m) where
mfix f = MultiRWST $ mfix (runMultiRWSTRaw . f)
-- methods
runMultiRWST :: ( Monad m
, Monoid (HList w)
)
=> HList r
-> HList s
-> MultiRWST r w s m a
-> m (a, HList s, HList w)
runMultiRWSTASW :: ( Monad m
, Monoid (HList w)
)
=> HList r
-> HList s
-> MultiRWST r w s m a
-> m (a, HList s, HList w)
runMultiRWSTW :: ( Monad m
, Monoid (HList w)
)
=> HList r
-> HList s
-> MultiRWST r w s m a
-> m (HList w)
runMultiRWSTAW :: ( Monad m
, Monoid (HList w)
)
=> HList r
-> HList s
-> MultiRWST r w s m a
-> m (a, HList w)
runMultiRWSTSW :: ( Monad m
, Monoid (HList w)
)
=> HList r
-> HList s
-> MultiRWST r w s m a
-> m (HList s, HList w)
runMultiRWSTNil :: ( Monad m )
=> MultiRWST '[] '[] '[] m a
-> m a
runMultiRWSTNil_ :: ( Monad m, Functor m )
=> MultiRWST '[] '[] '[] m a
-> m ()
runMultiRWST = runMultiRWSTASW
runMultiRWSTASW r s k = do
~(x, ~(_, w, s')) <- runStateT (runMultiRWSTRaw k) (r, mempty, s)
return $ (x, s', w)
runMultiRWSTW r s k = do
~(_, w, _) <- execStateT (runMultiRWSTRaw k) (r, mempty, s)
return $ w
runMultiRWSTAW r s k = do
~(x, ~(_, w, _)) <- runStateT (runMultiRWSTRaw k) (r, mempty, s)
return $ (x, w)
runMultiRWSTSW r s k = do
~(_, w, s') <- execStateT (runMultiRWSTRaw k) (r, mempty, s)
return $ (s', w)
runMultiRWSTNil k = evalStateT (runMultiRWSTRaw k) (HNil, HNil, HNil)
runMultiRWSTNil_ k = void $ runStateT (runMultiRWSTRaw k) (HNil, HNil, HNil)
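-- Illustrative usage (a sketch, not part of the original API docs; names and
-- types below are invented for the example):
--
-- > demo :: IO (String, HList '[Int], HList '[[String]])
-- > demo = runMultiRWSTASW ("config" :+: HNil) ((0 :: Int) :+: HNil) $ do
-- >   cfg <- mAsk                -- read the String from the reader slot
-- >   mSet (1 :: Int)            -- replace the Int state
-- >   mTell ["visited"]          -- append to the [String] writer
-- >   return cfg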
withMultiReader :: Monad m => r -> MultiRWST (r ': rs) w s m a -> MultiRWST rs w s m a
withMultiReader_ :: (Functor m, Monad m) => r -> MultiRWST (r ': rs) w s m a -> MultiRWST rs w s m ()
withMultiReaders :: Monad m => HList r1 -> MultiRWST (Append r1 r2) w s m a -> MultiRWST r2 w s m a
withMultiReaders_ :: (Functor m, Monad m) => HList r1 -> MultiRWST (Append r1 r2) w s m a -> MultiRWST r2 w s m ()
withMultiReader x k = MultiRWST $ do
(r, w, s) <- get
~(a, ~(_, w', s')) <- lift $ runStateT (runMultiRWSTRaw k) (x :+: r, w, s)
put (r, w', s')
return a
withMultiReader_ x k = MultiRWST $ do
(r, w, s) <- get
~(_, w', s') <- lift $ execStateT (runMultiRWSTRaw k) (x :+: r, w, s)
put (r, w', s')
withMultiReaders HNil = id
withMultiReaders (x :+: xs) = withMultiReaders xs . withMultiReader x
withMultiReaders_ HNil = void
withMultiReaders_ (x :+: xs) = withMultiReaders_ xs . withMultiReader x
withMultiWriter :: (Monoid w, Monad m) => MultiRWST r (w ': ws) s m a -> MultiRWST r ws s m (a, w)
withMultiWriterAW :: (Monoid w, Monad m) => MultiRWST r (w ': ws) s m a -> MultiRWST r ws s m (a, w)
withMultiWriterWA :: (Monoid w, Monad m) => MultiRWST r (w ': ws) s m a -> MultiRWST r ws s m (w, a)
withMultiWriterW :: (Monoid w, Monad m) => MultiRWST r (w ': ws) s m a -> MultiRWST r ws s m w
withMultiWriters :: forall r w1 w2 s m a
. (Monoid (HList w1), Monad m, HInit w1)
=> MultiRWST r (Append w1 w2) s m a
-> MultiRWST r w2 s m (a, HList w1)
withMultiWritersAW :: forall r w1 w2 s m a
. (Monoid (HList w1), Monad m, HInit w1)
=> MultiRWST r (Append w1 w2) s m a
-> MultiRWST r w2 s m (a, HList w1)
withMultiWritersWA :: forall r w1 w2 s m a
. (Monoid (HList w1), Monad m, HInit w1)
=> MultiRWST r (Append w1 w2) s m a
-> MultiRWST r w2 s m (HList w1, a)
withMultiWritersW :: forall r w1 w2 s m a
. (Monoid (HList w1), Monad m, HInit w1)
=> MultiRWST r (Append w1 w2) s m a
-> MultiRWST r w2 s m (HList w1)
withMultiWriter = withMultiWriterAW
withMultiWriterAW k = MultiRWST $ do
(r, w, s) <- get
~(a, ~(_, w', s')) <- lift $ runStateT (runMultiRWSTRaw k) (r, mempty :+: w, s)
case w' of
x' :+: wr' -> do
put (r, wr', s')
return (a, x')
withMultiWriterWA k = MultiRWST $ do
(r, w, s) <- get
~(a, ~(_, w', s')) <- lift $ runStateT (runMultiRWSTRaw k) (r, mempty :+: w, s)
case w' of
x' :+: wr' -> do
put (r, wr', s')
return (x', a)
withMultiWriterW k = MultiRWST $ do
(r, w, s) <- get
~(_, w', s') <- lift $ execStateT (runMultiRWSTRaw k) (r, mempty :+: w, s)
case w' of
x' :+: wr' -> do
put (r, wr', s')
return x'
withMultiWriters = withMultiWritersAW
withMultiWritersAW k = MultiRWST $ do
(r, w, s) <- get
~(a, ~(_, w', s')) <- lift $ runStateT (runMultiRWSTRaw k) (r, hAppend (mempty :: HList w1) w, s)
let (o, wr') = hSplit w'
put (r, wr', s')
return (a, o)
withMultiWritersWA k = MultiRWST $ do
(r, w, s) <- get
~(a, ~(_, w', s')) <- lift $ runStateT (runMultiRWSTRaw k) (r, hAppend (mempty :: HList w1) w, s)
let (o, wr') = hSplit w'
put (r, wr', s')
return (o, a)
withMultiWritersW k = MultiRWST $ do
(r, w, s) <- get
~(_, w', s') <- lift $ execStateT (runMultiRWSTRaw k) (r, hAppend (mempty :: HList w1) w, s)
let (o, wr') = hSplit w'
put (r, wr', s')
return o
withMultiState :: Monad m => s -> MultiRWST r w (s ': ss) m a -> MultiRWST r w ss m (a, s)
withMultiStateAS :: Monad m => s -> MultiRWST r w (s ': ss) m a -> MultiRWST r w ss m (a, s)
withMultiStateSA :: Monad m => s -> MultiRWST r w (s ': ss) m a -> MultiRWST r w ss m (s, a)
withMultiStateA :: Monad m => s -> MultiRWST r w (s ': ss) m a -> MultiRWST r w ss m a
withMultiStateS :: Monad m => s -> MultiRWST r w (s ': ss) m a -> MultiRWST r w ss m s
withMultiState_ :: (Functor m, Monad m) => s -> MultiRWST r w (s ': ss) m a -> MultiRWST r w ss m ()
withMultiStates :: Monad m => HList s1 -> MultiRWST r w (Append s1 s2) m a -> MultiRWST r w s2 m (a, HList s1)
withMultiStatesAS :: Monad m => HList s1 -> MultiRWST r w (Append s1 s2) m a -> MultiRWST r w s2 m (a, HList s1)
withMultiStatesSA :: Monad m => HList s1 -> MultiRWST r w (Append s1 s2) m a -> MultiRWST r w s2 m (HList s1, a)
withMultiStatesA :: Monad m => HList s1 -> MultiRWST r w (Append s1 s2) m a -> MultiRWST r w s2 m a
withMultiStatesS :: Monad m => HList s1 -> MultiRWST r w (Append s1 s2) m a -> MultiRWST r w s2 m (HList s1)
withMultiStates_ :: (Functor m, Monad m) => HList s1 -> MultiRWST r w (Append s1 s2) m a -> MultiRWST r w s2 m ()
withMultiState = withMultiStateAS
withMultiStateAS x k = MultiRWST $ do
~(r, w, s) <- get
~(a, ~(_, w', s')) <- lift $ runStateT (runMultiRWSTRaw k) (r, w, (x :+: s))
case s' of
x' :+: sr' -> do
put (r, w', sr')
return (a, x')
withMultiStateSA x k = MultiRWST $ do
~(r, w, s) <- get
~(a, ~(_, w', s')) <- lift $ runStateT (runMultiRWSTRaw k) (r, w, (x :+: s))
case s' of
x' :+: sr' -> do
put (r, w', sr')
return (x', a)
withMultiStateA x k = MultiRWST $ do
~(r, w, s) <- get
~(a, ~(_, w', s')) <- lift $ runStateT (runMultiRWSTRaw k) (r, w, (x :+: s))
case s' of
_ :+: sr' -> do
put (r, w', sr')
return a
withMultiStateS x k = MultiRWST $ do
~(r, w, s) <- get
~(_, w', s') <- lift $ execStateT (runMultiRWSTRaw k) (r, w, (x :+: s))
case s' of
x' :+: sr' -> do
put (r, w', sr')
return x'
withMultiState_ x k = MultiRWST $ do
~(r, w, s) <- get
~(_, w', s') <- lift $ execStateT (runMultiRWSTRaw k) (r, w, (x :+: s))
case s' of _ :+: sr' -> put (r, w', sr')
withMultiStates = withMultiStatesAS
withMultiStatesAS HNil k = do a <- k; return (a, HNil)
withMultiStatesAS (x :+: xs) k = do
~(~(a, x'), xs') <- withMultiStates xs $ withMultiState x k
return (a, x' :+: xs')
withMultiStatesSA HNil k = do a <- k; return (HNil, a)
withMultiStatesSA (x :+: xs) k = do
~(~(a, x'), xs') <- withMultiStates xs $ withMultiState x k
return (x' :+: xs', a)
withMultiStatesA HNil = id
withMultiStatesA (x :+: xs) = withMultiStatesA xs . withMultiStateA x
withMultiStatesS HNil k = k >> return HNil
withMultiStatesS (x :+: xs) k = do
~(x', xs') <- withMultiStates xs $ withMultiStateS x k
return (x' :+: xs')
withMultiStates_ HNil = void
withMultiStates_ (x :+: xs) = withMultiStates_ xs . withMultiState_ x
withoutMultiReader :: Monad m => MultiRWST rs w s m a -> MultiRWST (r ': rs) w s m a
withoutMultiReader k = MultiRWST $ get >>= \case
(rs@(_ :+: rr), w, s) -> do
~(a, ~(_, w', s')) <- lift $ runStateT (runMultiRWSTRaw k) (rr, w, s)
put (rs, w', s')
return a
withoutMultiState :: Monad m => MultiRWST r w ss m a -> MultiRWST r w (s ': ss) m a
withoutMultiState k = MultiRWST $ get >>= \case
(r, w, s :+: sr) -> do
~(a, ~(_, w', s')) <- lift $ runStateT (runMultiRWSTRaw k) (r, w, sr)
put (r, w', s :+: s')
return a
inflateReader :: (Monad m, ContainsType r rs)
=> ReaderT r m a
-> MultiRWST rs w s m a
inflateReader k = mAsk >>= lift . runReaderT k
inflateMultiReader :: Monad m => MultiReaderT r m a -> MultiRWST r w s m a
inflateMultiReader k = do
r <- mGetRawR
lift $ runMultiReaderT r k
inflateWriter :: (Monad m, ContainsType w ws, Monoid w)
=> WriterT w m a
-> MultiRWST r ws s m a
inflateWriter k = do
~(x, w) <- lift $ runWriterT k
mTell w
return x
inflateMultiWriter :: (Functor m, Monad m, Monoid (HList w))
=> MultiWriterT w m a
-> MultiRWST r w s m a
inflateMultiWriter k = do
~(x, w) <- lift $ runMultiWriterT k
mPutRawW w
return x
inflateState :: (Monad m, MonadMultiState s (t m), MonadTrans t)
=> StateT s m a
-> t m a
inflateState k = do
s <- mGet
~(x, s') <- lift $ runStateT k s
mSet s'
return x
inflateMultiState :: (Functor m, Monad m)
=> MultiStateT s m a
-> MultiRWST r w s m a
inflateMultiState k = do
s <- mGetRawS
~(x, s') <- lift $ runMultiStateT s k
mPutRawS s'
return x
mGetRawR :: Monad m => MultiRWST r w s m (HList r)
mPutRawR :: Monad m => HList r -> MultiRWST r w s m ()
mGetRawW :: Monad m => MultiRWST r w s m (HList w)
mPutRawW :: Monad m => HList w -> MultiRWST r w s m ()
mGetRawS :: Monad m => MultiRWST r w s m (HList s)
mPutRawS :: Monad m => HList s -> MultiRWST r w s m ()
mGetRawR = (\(r, _, _) -> r) `liftM` MultiRWST get
mPutRawR r = MultiRWST $ do
~(_, w, s) <- get
put (r, w, s)
mGetRawW = (\(_, w, _) -> w) `liftM` MultiRWST get
mPutRawW w = MultiRWST $ do
~(r, _, s) <- get
put (r, w, s)
mGetRawS = (\(_, _, s) -> s) `liftM` MultiRWST get
mPutRawS s = MultiRWST $ do
~(r, w, _) <- get
put (r, w, s)
mapMultiRWST :: (ss ~ (HList r, HList w, HList s))
=> (m (a, ss) -> m' (a', ss))
-> MultiRWST r w s m a
-> MultiRWST r w s m' a'
mapMultiRWST f = MultiRWST . mapStateT f . runMultiRWSTRaw
-- foreign lifting instances
instance MonadIO m => MonadIO (MultiRWST r w s m) where
liftIO = lift . liftIO
instance (Functor m, Applicative m, MonadPlus m) => Alternative (MultiRWST r w s m) where
empty = lift mzero
MultiRWST m <|> MultiRWST n = MultiRWST $ m <|> n
instance MonadPlus m => MonadPlus (MultiRWST r w s m) where
mzero = MultiRWST $ mzero
MultiRWST m `mplus` MultiRWST n = MultiRWST $ m `mplus` n
instance MonadBase b m => MonadBase b (MultiRWST r w s m) where
liftBase = liftBaseDefault
instance MonadTransControl (MultiRWST r w s) where
type StT (MultiRWST r w s) a = (a, (HList r, HList w, HList s))
liftWith f = MultiRWST $ liftWith $ \s -> f $ \r -> s $ runMultiRWSTRaw r
restoreT = MultiRWST . restoreT
instance MonadBaseControl b m => MonadBaseControl b (MultiRWST r w s m) where
type StM (MultiRWST r w s m) a = ComposeSt (MultiRWST r w s) m a
liftBaseWith = defaultLiftBaseWith
restoreM = defaultRestoreM
|
<?php
/**
* This prints all modules and their providers.
* The list can be copied by gdo6 authors to Core/ModuleProviders.php
*/
use GDO\File\Filewalker;
use GDO\Util\Common;
use GDO\Util\Strings;
# Use gdo6 core
include "GDO6.php";
include "protected/config.php";
global $mode;
/** @var string[] $argv **/
$mode = @$argv[1];
if ($mode)
{
echo "'Captcha' => ['gdo6-captcha', 'gdo6-recaptcha2'],\n";
echo "'Session' => ['gdo6-session-db', 'gdo6-session-cookie'],\n";
}
Filewalker::traverse("GDO", null, false,
function ($entry, $fullpath)
{
if (is_dir('GDO/' . $entry . "/.git"))
{
global $mode;
$c = file_get_contents('GDO/' . $entry . "/.git/config");
$c = Common::regex('#/gizmore/([-_a-z0-9]+)#m', $c);
if (str_starts_with($entry, 'gdo6-'))
{
return;
}
if (!$mode)
{
echo "$entry - < https://github.com/gizmore/$c >\n";
}
else
{
echo "'" . $entry . "' => '$c',\n";
}
}
}, 0);
|
---
title: Slicing volume data
---
We can slice our 3D Fermi map data in order to get a particular plane using the
`plane_slice` function. Say we need a constant-energy cut.
```python showLineNumbers
import arpespythontools as arp
data, energy, theta, phi = arp.load_ses_map('sample_map_data.zip')
# We want iso-energy surface integrated between energy values 15.6 and 15.8 eV
iso_energy_surf = arp.plane_slice(data, energy, 15.6, 15.8)
# Plot image
import matplotlib.pyplot as plt
%matplotlib inline
# Above line is specific to Jupyter Notebook
plt.figure(figsize = (8, 6))
plt.imshow(iso_energy_surf, origin = 'lower', aspect = 'auto', \
           extent = (phi[0], phi[-1], theta[0], theta[-1]))
plt.xlabel('$\\phi$ (deg)')
plt.ylabel("$\\theta$ (deg)")
plt.show()
```
This should give you an iso-energy surface like this:

How about if we want the slice along another axis? All we need to do is
transpose the data and provide the correct axis order.
```python showLineNumbers
# integrating phi values between (-0.5, 0.5) degrees
phi_slice = arp.plane_slice(data.transpose([2, 0, 1]), phi, -0.5, 0.5)
# Plot image
import matplotlib.pyplot as plt
%matplotlib inline
# Above line is specific to Jupyter Notebook
plt.figure(figsize = (8, 6))
plt.imshow(phi_slice, origin = 'lower', aspect = 'auto', \
           extent = (theta[0], theta[-1], energy[0], energy[-1]))
plt.xlabel("$\\theta$ (deg)")
plt.ylabel('$E_{kin}$ (eV)')
plt.show()
```

|
using Acr.UserDialogs;
using Xamarin.Forms;
// ReSharper disable ExplicitCallerInfoArgument
namespace SchinkZeShips.Core.Infrastructure
{
public abstract class ViewModelBase : NotifyPropertyChangedBase
{
/// <summary>
/// Constant for the PushView request
/// </summary>
		public const string NavigationPushView = "Navigation.PushView";
protected static readonly IUserDialogs Dialogs = UserDialogs.Instance;
protected readonly GameLogicService Service = new GameLogicService();
protected static void ShowLoading(string title)
{
Dialogs.ShowLoading(title);
}
protected static void HideLoading()
{
Dialogs.HideLoading();
}
/// <summary>
/// Tells the current view to navigate to the provided page
/// </summary>
/// <typeparam name="TViewModel">The type of the viewmodel</typeparam>
/// <param name="instance">The instance of the viewmodel</param>
/// <param name="page">The page to navigate to</param>
protected static void PushView<TViewModel>(TViewModel instance, ContentPage page) where TViewModel : class
{
Dialogs.HideLoading();
MessagingCenter.Send(instance, NavigationPushView, page);
}
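		// Illustrative usage (a sketch): a page can observe this request, e.g. in
		// its constructor ("MyViewModel" is a placeholder for a concrete type):
		//   MessagingCenter.Subscribe<MyViewModel, ContentPage>(this, NavigationPushView,
		//       (sender, page) => Navigation.PushAsync(page));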
/// <summary>
/// Changes the main page of the application, disabling navigation
/// Wraps the provided page inside a navigation page
/// </summary>
/// <param name="page">The page to display</param>
protected static void PushViewModal(ContentPage page)
{
Device.BeginInvokeOnMainThread(() => { Application.Current.MainPage = new NavigationPage(page); });
}
public virtual void OnAppearing()
{
}
public virtual void OnDisappearing()
{
}
}
} |
import 'package:flutter/material.dart';
import 'package:rnr/style/styles.dart';
import 'gender.dart';
import 'about.dart';
import 'package:rnr/model/config.dart';
class SettingPageWidget extends StatelessWidget {
  @override
  Widget build(BuildContext context) {
return Scaffold(
appBar: AppBar(
        title: Text('Settings'),
),
body: Container(
child: ListView(
children: <Widget>[
ListTile(
leading: Icon(Icons.tag_faces,color: kAppIconColor),
            title: Text('Gender'),
onTap: (){
_pushToGenderPage(context);
},
),
ListTile(
leading: Icon(Icons.info_outline,color: kAppIconColor),
            title: Text('About'),
onTap: (){
_pushToAboutPage(context);
},
),
],
),
)
);
}
void _pushToGenderPage(BuildContext context) {
GenderPageWidget page = GenderPageWidget(boyCallback: (){
Configs.shared.setBoy(true);
Navigator.pop(context);
},girlCallback: (){
Configs.shared.setBoy(false);
Navigator.pop(context);
});
Navigator.push(context, MaterialPageRoute(builder: (context) => page));
}
void _pushToAboutPage(BuildContext context) {
AboutPageWidget page = AboutPageWidget();
Navigator.push(context, MaterialPageRoute(builder: (context) => page));
}
} |
/*
 * Copyright © 2021 Michael Smith <mikesmiffy128@gmail.com>
*
* Permission to use, copy, modify, and/or distribute this software for any
* purpose with or without fee is hereby granted, provided that the above
* copyright notice and this permission notice appear in all copies.
*
* THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH
* REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY
* AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT,
* INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM
* LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR
* OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR
* PERFORMANCE OF THIS SOFTWARE.
*/
#include <stdbool.h>
#include <errmsg.h>
#include <opt.h>
#include "../include/build.h"
USAGE("[-n] [-C workdir] [command...] [-w]");
int main(int argc, char *argv[]) {
const char *workdir = "."; bool hasworkdir = false;
bool nonblock = false;
bool waitall = false;
FOR_OPTS(argc, argv, {
case 'C': workdir = OPTARG(argc, argv); hasworkdir = true; break;
// doesn't really make any sense to have both these flags set...
case 'n': nonblock = true; waitall = false; break;
case 'w': waitall = true; nonblock = false;
});
if (waitall) {
if (argc) usage();
return build_dep_wait();
}
if (!argc) {
if (!hasworkdir) {
errmsg_warnx("expected a workdir and/or command");
usage();
}
*--argv = "./Buildfile"; // just cram that in...
}
build_dep((const char *const *)argv, workdir);
if (!nonblock) return build_dep_wait();
return 0;
}
// vi: sw=4 ts=4 noet tw=80 cc=80
|
class BurritoController < ApplicationController
# this controller is/should only be accessed by admin
before do
# Check if User exists & is logged in
if !current_user
redirect "/"
end
end
get "/burritos/new" do
# should have proper admin check here
if current_user.username == "sam_the_owner"
# go to new burrito form
erb :"/burritos/new"
else
      # not the admin: log out
redirect "/logout"
end
end
get "/burritos/index" do
@burritos = Burrito.all
# go to list of burritos
erb :"/burritos/index"
end
get "/burritos/:id" do
# show single order
@burrito = Burrito.find(params[:id].to_i)
erb :'/burritos/show'
end
get "/burritos/:id/edit" do
@burrito = Burrito.find_by_id(params[:id].to_i)
# edit the burrito
erb :"/burritos/edit"
end
post "/burritos/new" do
# receive new burrito form and save burrito to burrito table
@burrito = Burrito.new(params)
@burrito.save
redirect "/burritos/new"
end
patch "/burritos/edit" do
@burrito = Burrito.find_by_id(params[:id].to_i)
@burrito.update({name: params[:name], description: params[:description], price: params[:price]})
@burrito.save
redirect "/burritos/index"
end
delete '/burritos/:id' do
# delete requested order and return to user index
@burrito = Burrito.delete(params[:id])
redirect "/burritos/index"
end
end
|
import { getRelativePathTo, getRoot } from '../src';
function setup(path: string): { path: string; spy: jest.SpyInstance } {
const spy = jest.spyOn(process, 'cwd');
spy.mockReturnValue(path);
return { path, spy };
}
describe('getRoot', () => {
it('returns the root path', () => {
const { path, spy } = setup('/some/path');
expect(getRoot()).toEqual(path);
spy.mockRestore();
});
});
describe('getRelativePathTo', () => {
it('returns the relative path from the package root to the given file', () => {
const { spy } = setup('/some/path/');
const pathA = '/some/path/.esmblyrc.js';
const pathB = '/some/path/dir/.esmblyrc.js';
expect(getRelativePathTo(pathA)).toEqual('.esmblyrc.js');
expect(getRelativePathTo(pathB)).toEqual('dir/.esmblyrc.js');
spy.mockRestore();
});
});
|
package com.wavesplatform.dex.api.http.routes.v0
import akka.http.scaladsl.marshalling.ToResponseMarshallable
import akka.http.scaladsl.server._
import akka.stream.Materializer
import com.wavesplatform.dex.api.http.directives.HttpKamonDirectives._
import com.wavesplatform.dex.api.http.directives.ProtectDirective
import com.wavesplatform.dex.api.http.{HasStatusBarrier, _}
import com.wavesplatform.dex.api.routes.PathMatchers.OrderPM
import com.wavesplatform.dex.api.routes.{ApiRoute, AuthRoute}
import com.wavesplatform.dex.app.MatcherStatus
import com.wavesplatform.dex.db.OrderDb
import com.wavesplatform.dex.domain.transaction.ExchangeTransactionV2
import com.wavesplatform.dex.domain.utils.ScorexLogging
import io.swagger.annotations._
import play.api.libs.json._
import javax.ws.rs.Path
import scala.concurrent.{ExecutionContext, Future}
@Path("/matcher")
@Api()
final class TransactionsRoute(
override val matcherStatus: () => MatcherStatus,
orderDb: OrderDb[Future],
override val apiKeyHashes: List[Array[Byte]]
)(implicit mat: Materializer)
extends ApiRoute
with ProtectDirective
with HasStatusBarrier
with AuthRoute
with ScorexLogging {
implicit private val executionContext: ExecutionContext = mat.executionContext
override lazy val route: Route = pathPrefix("matcher" / "transactions")(getTransactionsByOrderId)
@Path("/transactions/{orderId}#getTransactionsByOrderId")
@ApiOperation(
value = "Get Exchange Transactions by Order",
notes = "Get all exchange transactions created by DEX on execution of the given order",
httpMethod = "GET",
tags = Array("transactions"),
response = classOf[Array[ExchangeTransactionV2]]
)
@ApiImplicitParams(
Array(
new ApiImplicitParam(name = "orderId", value = "Order ID", dataType = "string", paramType = "path")
)
)
def getTransactionsByOrderId: Route =
(path(OrderPM) & get) { orderIdOrError =>
(withMetricsAndTraces("getTransactionsByOrderId") & protect) {
withOrderId(orderIdOrError) { orderId =>
complete {
orderDb.transactionsByOrder(orderId).map(x => ToResponseMarshallable(Json.toJson(x))).recover {
case th =>
log.error("error while retrieving order transactions", th)
ToResponseMarshallable(entities.InternalError)
}
}
}
}
}
}
|
module SearchForLettersSpec (spec) where
import SearchForLetters (search)
import Test.Hspec
spec :: Spec
spec = do
it "example tests" $ do
search "a **& bZ" `shouldBe` "11000000000000000000000001"
search "" `shouldBe` "00000000000000000000000000"
search "\144748" `shouldBe` "00000000000000000000000000"
|
#!/usr/bin/env bats
load test_helper
@test "Verify that AppArmor is enabled on the kernel command line" {
run bash -c "grep 'apparmor=1' /proc/cmdline"
[ "$status" -eq 0 ]
}
@test "Verify that AppArmor is enabled" {
run bash -c "apparmor_status | grep 'apparmor module is loaded'"
[ "$status" -eq 0 ]
}
@test "Verify pam_apparmor" {
run bash -c "grep 'session.*pam_apparmor.so order=user,group,default' /etc/pam.d/*"
[ "$status" -eq 0 ]
}
|
require 'qtrix/logging'
require 'qtrix/persistence'
require 'qtrix/queue_store'
require 'qtrix/override_store'
require 'qtrix/queue'
require 'qtrix/override'
require 'qtrix/matrix'
require 'qtrix/host_manager'
require 'qtrix/locking'
##
# Facade into a dynamically adjusting global worker pool that auto
# balances workers according to a desired distribution of resources
# for each queue.
#
# The desired distribution can be modified in real time, and the
# workers throughout our global pool across all servers should morph
# to reflect the new desired distribution. Further details on how
# desired distribution is achieved can be found in the
# lib/qtrix/matrix.rb comments.
#
# Overrides should be able to be specified, so that we can say
# out of all of our workers, N should specifically service this list
# of queues. This is for flood event handling -- a queue gets flooded
# and we need to direct resources to it to help process the jobs faster.
#
# This is the primary entry point to the system, a GUI, CLI or script
# meant to interact with the system should probably work through this
# module
module Qtrix
class Client
include Logging
attr_reader :redis
def initialize(redis)
@redis = redis
end
##
# Returns a list of objects that define the desired distribution
# of workers. Each element will contain the queue name, weight, and
# relative_weight (weight / total weight of all queues).
def desired_distribution
queue_store.all_queues
end
##
# Specifies the queue/weight mapping table.
# This will be used to generate the queue list for workers and thus the
# desired distribution of resources to queues. Args can be:
#
# map: the queue-to-weight mappings as a hash of queue names to
# float values.
def map_queue_weights(map)
with_lock do
matrix_store.clear!
queue_store.map_queue_weights(map)
end
rescue Exception => e
error(e)
raise
end
##
# Add a list of queue names to use as an override for a number
# of worker processes. The number of worker processes will be removed from
# the desired distribution and start working the list of queues in the
    # override. Args should be:
#
# queues: Array of queue names.
# processes: Integer specifying the number of workers
# to override queues for.
def add_override(queues, processes)
with_lock do
override_store.add(queues, processes)
matrix_store.clear!
true
end
rescue Exception => e
error(e)
raise
end
##
# Removes an override.
# That number of worker processes will quit servicing the queues in the
# override and be brought back into servicing the desired distribution.
# Args can be:
#
# queues: Array of queues in the override.
# processes: Number of processes to remove from overriding.
def remove_override(queues, processes)
with_lock do
override_store.remove(queues, processes)
matrix_store.clear!
true
end
rescue Exception => e
error(e)
raise
end
def clear_overrides
with_lock do
override_store.clear!
matrix_store.clear!
true
end
rescue Exception => e
error(e)
raise
end
##
# Retrieves all currently defined overrides.
def overrides
override_store.all
end
##
# Retrieves lists of queues as appropriate to the overall system balance
# for the number of workers specified for the given +hostname+.
def fetch_queues(hostname, workers, opts={})
host_manager.ping(hostname)
clear_matrix_if_any_hosts_offline
with_lock timeout: opts.fetch(:timeout, 5), on_timeout: last_result do
debug("fetching #{workers} queue lists for #{hostname}")
overrides_queues = override_store.overrides_for(hostname, workers)
debug("overrides for #{hostname}: #{overrides_queues}")
delta = workers - overrides_queues.size
matrix_queues = delta > 0 ? matrix_store.update_matrix_to_satisfy_request!(hostname, delta) : []
debug("matrix queue lists: #{matrix_queues}")
orchestrated_flag = [:__orchestrated__]
new_result = overrides_queues + matrix_queues.map{|q| q + orchestrated_flag}
info("queue lists changed") if new_result != @last_result
debug("list details: #{new_result}")
@last_result = new_result
end
rescue Exception => e
error(e)
raise
end
##
# Clears redis of all information related to the orchestration system
def clear!
with_lock do
info "clearing data"
override_store.clear_claims!
host_manager.clear!
matrix_store.clear!
end
end
def known_hosts
host_manager.all
end
private
def host_manager
@host_manager ||= HostManager.new(redis)
end
def queue_store
@queue_store ||= QueueStore.new(redis)
end
def locker
@locker ||= Qtrix::Locking.new(redis)
end
def matrix_store
@matrix_store ||= Qtrix::Matrix.new(redis)
end
def override_store
@override_store ||= Qtrix::OverrideStore.new(redis)
end
def with_lock(*args, &block)
locker.with_lock(*args, &block)
end
def last_result
lambda do
if @last_result
@last_result
else
raise "no previous result (unable to obtain lock on first attempt)"
end
end
end
def clear_matrix_if_any_hosts_offline
if host_manager.any_offline?
info "hosts detected offline: #{host_manager.offline.join(', ')}"
clear!
end
end
end
end
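# Illustrative usage (a sketch; assumes a configured Redis connection):
#
#   client = Qtrix::Client.new(Redis.new)
#   client.map_queue_weights("default" => 3.0, "mailers" => 1.0)
#   client.add_override(["flooded_queue"], 2)
#   queue_lists = client.fetch_queues("worker-host-1", 4)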
|
/**
 Community onboarding
**/
(function (vc) {
var DEFAULT_PAGE = 1;
var DEFAULT_ROWS = 10;
vc.extends({
data: {
groupBuyManageInfo: {
products: [],
total: 0,
records: 1,
moreCondition: false,
productId: '',
conditions: {
groupProdName: '',
keyword: '',
barCode: '',
}
}
},
_initMethod: function () {
let batchId = vc.getParam('batchId');
if(batchId){
$that.groupBuyManageInfo.conditions.batchId = batchId;
}
vc.component._listProducts(DEFAULT_PAGE, DEFAULT_ROWS);
},
_initEvent: function () {
vc.on('groupBuyManage', 'listProduct', function (_param) {
$that.groupBuyManageInfo.componentShow = 'groupBuyManage';
vc.component._listProducts(DEFAULT_PAGE, DEFAULT_ROWS);
});
vc.on('pagination', 'page_event', function (_currentPage) {
vc.component._listProducts(_currentPage, DEFAULT_ROWS);
});
},
methods: {
_listProducts: function (_page, _rows) {
vc.component.groupBuyManageInfo.conditions.page = _page;
vc.component.groupBuyManageInfo.conditions.row = _rows;
var param = {
params: vc.component.groupBuyManageInfo.conditions
};
                //send a GET request
vc.http.apiGet('/groupBuy/queryGroupBuy',
param,
function (json, res) {
var _groupBuyManageInfo = JSON.parse(json);
vc.component.groupBuyManageInfo.total = _groupBuyManageInfo.total;
vc.component.groupBuyManageInfo.records = _groupBuyManageInfo.records;
vc.component.groupBuyManageInfo.products = _groupBuyManageInfo.data;
vc.emit('pagination', 'init', {
total: vc.component.groupBuyManageInfo.records,
currentPage: _page
});
}, function (errInfo, error) {
                        console.log('request failed');
}
);
},
_queryProductMethod: function () {
vc.component._listProducts(DEFAULT_PAGE, DEFAULT_ROWS);
},
_moreCondition: function () {
                vc.component.groupBuyManageInfo.moreCondition = !vc.component.groupBuyManageInfo.moreCondition;
}
}
});
})(window.vc);
|
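-- Each block below is an upsert keyed on system_settings.key. As an
-- illustrative alternative (assuming PostgreSQL 9.5+ and a unique constraint
-- on system_settings.key), the same effect per key could be written as:
--   insert into system_settings (key, value) values ('shopwood', '1')
--   on conflict (key) do update set value = excluded.value;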
do $$
begin
if exists (select * from system_settings where key = 'shopwood') then
    update system_settings set value = '1' where key = 'shopwood';
else
insert into system_settings (key, value) values('shopwood', '1');
end if;
if exists (select * from system_settings where key = 'shopbronze') then
update system_settings set value = '2' where key = 'shopbronze';
else
insert into system_settings (key, value) values('shopbronze', '2');
end if;
if exists (select * from system_settings where key = 'shopsilver') then
update system_settings set value = '3' where key = 'shopsilver';
else
insert into system_settings (key, value) values('shopsilver', '3');
end if;
if exists (select * from system_settings where key = 'shopgold') then
update system_settings set value = '4' where key = 'shopgold';
else
insert into system_settings (key, value) values('shopgold', '4');
end if;
if exists (select * from system_settings where key = 'shopplatinum') then
update system_settings set value = '5' where key = 'shopplatinum';
else
insert into system_settings (key, value) values('shopplatinum', '5');
end if;
if exists (select * from system_settings where key = 'shopiridium') then
update system_settings set value = '6' where key = 'shopiridium';
else
insert into system_settings (key, value) values('shopiridium', '6');
end if;
end
$$ |
"use strict";
var parse5 = require('parse5');
var serializer = new parse5.TreeSerializer(require('./documentAdapter'));
exports.domToHtml = function(dom) {
if (dom._toArray) {
// node list
dom = dom._toArray();
}
if (typeof dom.length !== "undefined") {
var ret = "";
for (var i = 0, len = dom.length; i < len; i++) {
ret += dom[i]._tagName === "#document" ?
serializer.serialize(dom[i]) :
serializer.serialize({ childNodes: [dom[i]] });
}
return ret;
} else {
return dom._tagName === "#document" ?
serializer.serialize(dom) :
serializer.serialize({ childNodes: [dom] });
}
};
|
require 'test_helper'
require 'e_courier/services/fetch_emails'
require 'e_courier/models/email'
require 'net/imap'
require 'time'
module ECourier
class FetchEmailsTest < MiniTest::Test
@@service = FetchEmails.new
@@service.execute
def test_fetches_emails
assert_kind_of Email, @@service.emails.first
end
def test_email_has_proper_data
assert_kind_of Time, @@service.emails.first.sent_at
end
end
end
|
package edu.uoc.elc.spring.lti.tool;
import com.fasterxml.jackson.databind.ObjectMapper;
import edu.uoc.lti.namesrole.ContentTypes;
import org.springframework.http.MediaType;
import org.springframework.http.converter.json.MappingJackson2HttpMessageConverter;
import java.nio.charset.StandardCharsets;
import java.util.Collections;
/**
* @author [email protected]
*/
public class NamesRoleServiceResponseMessageConverter extends MappingJackson2HttpMessageConverter {
public NamesRoleServiceResponseMessageConverter() {
super();
setContentType();
}
public NamesRoleServiceResponseMessageConverter(ObjectMapper objectMapper) {
super(objectMapper);
setContentType();
}
private void setContentType() {
setSupportedMediaTypes(Collections.singletonList(mediaType(ContentTypes.RESPONSE)));
}
private MediaType mediaType(ContentTypes contentTypes) {
return new MediaType(contentTypes.getType(), contentTypes.getSubtype(), StandardCharsets.UTF_8);
}
}
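// Illustrative usage (a sketch; assumes Spring's RestTemplate):
//   RestTemplate restTemplate = new RestTemplate();
//   restTemplate.getMessageConverters().add(new NamesRoleServiceResponseMessageConverter());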
|
namespace MobileDevApp.RemoteProviders.Models
{
public class MessageInput
{
public int ReceiverID { get; set; }
public int? ChatID { get; set; }
public string Text { get; set; }
}
}
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.catalyst.catalog
import java.net.URI
import java.time.ZoneOffset
import java.util.Date
import scala.collection.mutable
import scala.util.control.NonFatal
import org.apache.commons.lang3.StringUtils
import org.json4s.JsonAST.{JArray, JString}
import org.json4s.jackson.JsonMethods._
import org.apache.spark.internal.Logging
import org.apache.spark.sql.catalyst.{FunctionIdentifier, InternalRow, SQLConfHelper, TableIdentifier}
import org.apache.spark.sql.catalyst.analysis.MultiInstanceRelation
import org.apache.spark.sql.catalyst.expressions.{Attribute, AttributeMap, AttributeReference, Cast, ExprId, Literal}
import org.apache.spark.sql.catalyst.plans.logical._
import org.apache.spark.sql.catalyst.plans.logical.statsEstimation.EstimationUtils
import org.apache.spark.sql.catalyst.util._
import org.apache.spark.sql.connector.catalog.CatalogManager
import org.apache.spark.sql.errors.{QueryCompilationErrors, QueryExecutionErrors}
import org.apache.spark.sql.internal.SQLConf
import org.apache.spark.sql.types._
import org.apache.spark.sql.util.CaseInsensitiveStringMap
/**
* A function defined in the catalog.
*
* @param identifier name of the function
* @param className fully qualified class name, e.g. "org.apache.spark.util.MyFunc"
* @param resources resource types and Uris used by the function
*/
case class CatalogFunction(
identifier: FunctionIdentifier,
className: String,
resources: Seq[FunctionResource])
/**
* Storage format, used to describe how a partition or a table is stored.
*/
case class CatalogStorageFormat(
locationUri: Option[URI],
inputFormat: Option[String],
outputFormat: Option[String],
serde: Option[String],
compressed: Boolean,
properties: Map[String, String]) {
override def toString: String = {
toLinkedHashMap.map { case ((key, value)) =>
if (value.isEmpty) key else s"$key: $value"
}.mkString("Storage(", ", ", ")")
}
def toLinkedHashMap: mutable.LinkedHashMap[String, String] = {
val map = new mutable.LinkedHashMap[String, String]()
locationUri.foreach(l => map.put("Location", l.toString))
serde.foreach(map.put("Serde Library", _))
inputFormat.foreach(map.put("InputFormat", _))
outputFormat.foreach(map.put("OutputFormat", _))
if (compressed) map.put("Compressed", "")
SQLConf.get.redactOptions(properties) match {
case props if props.isEmpty => // No-op
case props =>
map.put("Storage Properties", props.map(p => p._1 + "=" + p._2).mkString("[", ", ", "]"))
}
map
}
}
object CatalogStorageFormat {
/** Empty storage format for default values and copies. */
val empty = CatalogStorageFormat(locationUri = None, inputFormat = None,
outputFormat = None, serde = None, compressed = false, properties = Map.empty)
}
/**
* A partition (Hive style) defined in the catalog.
*
* @param spec partition spec values indexed by column name
* @param storage storage format of the partition
* @param parameters some parameters for the partition
* @param createTime creation time of the partition, in milliseconds
* @param lastAccessTime last access time, in milliseconds
* @param stats optional statistics (number of rows, total size, etc.)
*/
case class CatalogTablePartition(
spec: CatalogTypes.TablePartitionSpec,
storage: CatalogStorageFormat,
parameters: Map[String, String] = Map.empty,
createTime: Long = System.currentTimeMillis,
lastAccessTime: Long = -1,
stats: Option[CatalogStatistics] = None) {
def toLinkedHashMap: mutable.LinkedHashMap[String, String] = {
val map = new mutable.LinkedHashMap[String, String]()
val specString = spec.map { case (k, v) => s"$k=$v" }.mkString(", ")
map.put("Partition Values", s"[$specString]")
map ++= storage.toLinkedHashMap
if (parameters.nonEmpty) {
map.put("Partition Parameters", s"{${parameters.map(p => p._1 + "=" + p._2).mkString(", ")}}")
}
map.put("Created Time", new Date(createTime).toString)
val lastAccess = {
if (lastAccessTime <= 0) "UNKNOWN" else new Date(lastAccessTime).toString
}
map.put("Last Access", lastAccess)
stats.foreach(s => map.put("Partition Statistics", s.simpleString))
map
}
override def toString: String = {
toLinkedHashMap.map { case ((key, value)) =>
if (value.isEmpty) key else s"$key: $value"
}.mkString("CatalogPartition(\n\t", "\n\t", ")")
}
/** Readable string representation for the CatalogTablePartition. */
def simpleString: String = {
toLinkedHashMap.map { case ((key, value)) =>
if (value.isEmpty) key else s"$key: $value"
}.mkString("", "\n", "")
}
/** Return the partition location, assuming it is specified. */
def location: URI = storage.locationUri.getOrElse {
val specString = spec.map { case (k, v) => s"$k=$v" }.mkString(", ")
throw QueryCompilationErrors.partitionNotSpecifyLocationUriError(specString)
}
/**
* Given the partition schema, returns a row with that schema holding the partition values.
*/
  def toRow(partitionSchema: StructType, defaultTimeZoneId: String): InternalRow = {
val caseInsensitiveProperties = CaseInsensitiveMap(storage.properties)
val timeZoneId = caseInsensitiveProperties.getOrElse(
      DateTimeUtils.TIMEZONE_OPTION, defaultTimeZoneId)
InternalRow.fromSeq(partitionSchema.map { field =>
val partValue = if (spec(field.name) == ExternalCatalogUtils.DEFAULT_PARTITION_NAME) {
null
} else {
spec(field.name)
}
Cast(Literal(partValue), field.dataType, Option(timeZoneId)).eval()
})
}
}
/**
* A container for bucketing information.
 * Bucketing is a technique for decomposing data sets into more manageable parts, and the number
* of buckets is fixed so it does not fluctuate with data.
*
* @param numBuckets number of buckets.
* @param bucketColumnNames the names of the columns that used to generate the bucket id.
* @param sortColumnNames the names of the columns that used to sort data in each bucket.
*/
case class BucketSpec(
numBuckets: Int,
bucketColumnNames: Seq[String],
sortColumnNames: Seq[String]) extends SQLConfHelper {
if (numBuckets <= 0 || numBuckets > conf.bucketingMaxBuckets) {
throw QueryCompilationErrors.invalidBucketNumberError(
conf.bucketingMaxBuckets, numBuckets)
}
override def toString: String = {
val bucketString = s"bucket columns: [${bucketColumnNames.mkString(", ")}]"
val sortString = if (sortColumnNames.nonEmpty) {
s", sort columns: [${sortColumnNames.mkString(", ")}]"
} else {
""
}
s"$numBuckets buckets, $bucketString$sortString"
}
def toLinkedHashMap: mutable.LinkedHashMap[String, String] = {
mutable.LinkedHashMap[String, String](
"Num Buckets" -> numBuckets.toString,
"Bucket Columns" -> bucketColumnNames.map(quoteIdentifier).mkString("[", ", ", "]"),
"Sort Columns" -> sortColumnNames.map(quoteIdentifier).mkString("[", ", ", "]")
)
}
}
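// Illustrative example (hypothetical column names): a table decomposed into
// 8 buckets by user_id and sorted within each bucket by event_time is
//   BucketSpec(8, Seq("user_id"), Seq("event_time"))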
/**
* A table defined in the catalog.
*
* Note that Hive's metastore also tracks skewed columns. We should consider adding that in the
* future once we have a better understanding of how we want to handle skewed columns.
*
* @param provider the name of the data source provider for this table, e.g. parquet, json, etc.
* Can be None if this table is a View, should be "hive" for hive serde tables.
* @param unsupportedFeatures is a list of string descriptions of features that are used by the
* underlying table but not supported by Spark SQL yet.
* @param tracksPartitionsInCatalog whether this table's partition metadata is stored in the
* catalog. If false, it is inferred automatically based on file
* structure.
* @param schemaPreservesCase Whether or not the schema resolved for this table is case-sensitive.
* When using a Hive Metastore, this flag is set to false if a case-
* sensitive schema was unable to be read from the table properties.
* Used to trigger case-sensitive schema inference at query time, when
* configured.
* @param ignoredProperties is a list of table properties that are used by the underlying table
* but ignored by Spark SQL yet.
* @param createVersion records the version of Spark that created this table metadata. The default
* is an empty string. We expect it will be read from the catalog or filled by
* ExternalCatalog.createTable. For temporary views, the value will be empty.
*/
case class CatalogTable(
identifier: TableIdentifier,
tableType: CatalogTableType,
storage: CatalogStorageFormat,
schema: StructType,
provider: Option[String] = None,
partitionColumnNames: Seq[String] = Seq.empty,
bucketSpec: Option[BucketSpec] = None,
owner: String = "",
createTime: Long = System.currentTimeMillis,
lastAccessTime: Long = -1,
createVersion: String = "",
properties: Map[String, String] = Map.empty,
stats: Option[CatalogStatistics] = None,
viewText: Option[String] = None,
comment: Option[String] = None,
unsupportedFeatures: Seq[String] = Seq.empty,
tracksPartitionsInCatalog: Boolean = false,
schemaPreservesCase: Boolean = true,
ignoredProperties: Map[String, String] = Map.empty,
viewOriginalText: Option[String] = None) {
import CatalogTable._
/**
* schema of this table's partition columns
*/
def partitionSchema: StructType = {
val partitionFields = schema.takeRight(partitionColumnNames.length)
assert(partitionFields.map(_.name) == partitionColumnNames)
StructType(partitionFields)
}
/**
* schema of this table's data columns
*/
def dataSchema: StructType = {
val dataFields = schema.dropRight(partitionColumnNames.length)
StructType(dataFields)
}
/** Return the database this table was specified to belong to, assuming it exists. */
def database: String = identifier.database.getOrElse {
throw QueryCompilationErrors.tableNotSpecifyDatabaseError(identifier)
}
/** Return the table location, assuming it is specified. */
def location: URI = storage.locationUri.getOrElse {
throw QueryCompilationErrors.tableNotSpecifyLocationUriError(identifier)
}
/** Return the fully qualified name of this table, assuming the database was specified. */
def qualifiedName: String = identifier.unquotedString
/**
* Return the catalog and namespace (concatenated as a Seq[String]) that were current when the
* view was created.
*/
def viewCatalogAndNamespace: Seq[String] = {
if (properties.contains(VIEW_CATALOG_AND_NAMESPACE)) {
val numParts = properties(VIEW_CATALOG_AND_NAMESPACE).toInt
(0 until numParts).map { index =>
properties.getOrElse(
s"$VIEW_CATALOG_AND_NAMESPACE_PART_PREFIX$index",
throw QueryCompilationErrors.corruptedTableNameContextInCatalogError(numParts, index)
)
}
} else if (properties.contains(VIEW_DEFAULT_DATABASE)) {
// Views created before Spark 3.0 can only access tables in the session catalog.
Seq(CatalogManager.SESSION_CATALOG_NAME, properties(VIEW_DEFAULT_DATABASE))
} else {
Nil
}
}
/**
* Return the SQL configs captured when the view was created. These configs are applied when
* parsing and analyzing the view. Should be empty if this CatalogTable is not a view or was
* created by an older version of Spark (before 3.1.0).
*/
def viewSQLConfigs: Map[String, String] = {
try {
for ((key, value) <- properties if key.startsWith(CatalogTable.VIEW_SQL_CONFIG_PREFIX))
yield (key.substring(CatalogTable.VIEW_SQL_CONFIG_PREFIX.length), value)
} catch {
case e: Exception =>
throw QueryCompilationErrors.corruptedViewSQLConfigsInCatalogError(e)
}
}
/**
* Return the output column names of the query that created this view. The column names are
* used to resolve the view. Should be empty if this CatalogTable is not a view or was created
* by an older version of Spark (before 2.2.0).
*/
def viewQueryColumnNames: Seq[String] = {
for {
numCols <- properties.get(VIEW_QUERY_OUTPUT_NUM_COLUMNS).toSeq
index <- 0 until numCols.toInt
} yield properties.getOrElse(
s"$VIEW_QUERY_OUTPUT_COLUMN_NAME_PREFIX$index",
throw QueryCompilationErrors.corruptedViewQueryOutputColumnsInCatalogError(numCols, index)
)
}
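// For example (a sketch, using hypothetical column names), a view selecting two columns
// would carry properties like:
//   view.query.out.numCols -> "2"
//   view.query.out.col.0   -> "a"
//   view.query.out.col.1   -> "b"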
/**
* Return the names of the temporary views that this view refers to. Should be empty if this
* CatalogTable is not a temporary view or was created by an older version of Spark (before 3.1.0).
*/
def viewReferredTempViewNames: Seq[Seq[String]] = {
try {
properties.get(VIEW_REFERRED_TEMP_VIEW_NAMES).map { json =>
parse(json).asInstanceOf[JArray].arr.map { namePartsJson =>
namePartsJson.asInstanceOf[JArray].arr.map(_.asInstanceOf[JString].s)
}
}.getOrElse(Seq.empty)
} catch {
case e: Exception =>
throw QueryCompilationErrors.corruptedViewReferredTempViewInCatalogError(e)
}
}
/**
* Return the names of the temporary functions that this view refers to. Should be empty if this
* CatalogTable is not a temporary view or was created by an older version of Spark (before 3.1.0).
*/
def viewReferredTempFunctionNames: Seq[String] = {
try {
properties.get(VIEW_REFERRED_TEMP_FUNCTION_NAMES).map { json =>
parse(json).asInstanceOf[JArray].arr.map(_.asInstanceOf[JString].s)
}.getOrElse(Seq.empty)
} catch {
case e: Exception =>
throw QueryCompilationErrors.corruptedViewReferredTempFunctionsInCatalogError(e)
}
}
/** Syntactic sugar to update a field in `storage`. */
def withNewStorage(
locationUri: Option[URI] = storage.locationUri,
inputFormat: Option[String] = storage.inputFormat,
outputFormat: Option[String] = storage.outputFormat,
compressed: Boolean = false,
serde: Option[String] = storage.serde,
properties: Map[String, String] = storage.properties): CatalogTable = {
copy(storage = CatalogStorageFormat(
locationUri, inputFormat, outputFormat, serde, compressed, properties))
}
def toLinkedHashMap: mutable.LinkedHashMap[String, String] = {
val map = new mutable.LinkedHashMap[String, String]()
val tableProperties = properties.toSeq.sortBy(_._1)
.map(p => p._1 + "=" + p._2).mkString("[", ", ", "]")
val partitionColumns = partitionColumnNames.map(quoteIdentifier).mkString("[", ", ", "]")
val lastAccess = {
if (lastAccessTime <= 0) "UNKNOWN" else new Date(lastAccessTime).toString
}
identifier.database.foreach(map.put("Database", _))
map.put("Table", identifier.table)
if (owner != null && owner.nonEmpty) map.put("Owner", owner)
map.put("Created Time", new Date(createTime).toString)
map.put("Last Access", lastAccess)
map.put("Created By", "Spark " + createVersion)
map.put("Type", tableType.name)
provider.foreach(map.put("Provider", _))
bucketSpec.foreach(map ++= _.toLinkedHashMap)
comment.foreach(map.put("Comment", _))
if (tableType == CatalogTableType.VIEW) {
viewText.foreach(map.put("View Text", _))
viewOriginalText.foreach(map.put("View Original Text", _))
if (viewCatalogAndNamespace.nonEmpty) {
import org.apache.spark.sql.connector.catalog.CatalogV2Implicits._
map.put("View Catalog and Namespace", viewCatalogAndNamespace.quoted)
}
if (viewQueryColumnNames.nonEmpty) {
map.put("View Query Output Columns", viewQueryColumnNames.mkString("[", ", ", "]"))
}
}
if (properties.nonEmpty) map.put("Table Properties", tableProperties)
stats.foreach(s => map.put("Statistics", s.simpleString))
map ++= storage.toLinkedHashMap
if (tracksPartitionsInCatalog) map.put("Partition Provider", "Catalog")
if (partitionColumnNames.nonEmpty) map.put("Partition Columns", partitionColumns)
if (schema.nonEmpty) map.put("Schema", schema.treeString)
map
}
override def toString: String = {
toLinkedHashMap.map { case ((key, value)) =>
if (value.isEmpty) key else s"$key: $value"
}.mkString("CatalogTable(\n", "\n", ")")
}
/** Readable string representation for the CatalogTable. */
def simpleString: String = {
toLinkedHashMap.map { case ((key, value)) =>
if (value.isEmpty) key else s"$key: $value"
}.mkString("", "\n", "")
}
}
object CatalogTable {
val VIEW_PREFIX = "view."
// Starting from Spark 3.0, we don't use this property any more. `VIEW_CATALOG_AND_NAMESPACE` is
// used instead.
val VIEW_DEFAULT_DATABASE = VIEW_PREFIX + "default.database"
val VIEW_CATALOG_AND_NAMESPACE = VIEW_PREFIX + "catalogAndNamespace.numParts"
val VIEW_CATALOG_AND_NAMESPACE_PART_PREFIX = VIEW_PREFIX + "catalogAndNamespace.part."
// Convert the current catalog and namespace to properties.
def catalogAndNamespaceToProps(
currentCatalog: String,
currentNamespace: Seq[String]): Map[String, String] = {
val props = new mutable.HashMap[String, String]
val parts = currentCatalog +: currentNamespace
if (parts.nonEmpty) {
props.put(VIEW_CATALOG_AND_NAMESPACE, parts.length.toString)
parts.zipWithIndex.foreach { case (name, index) =>
props.put(s"$VIEW_CATALOG_AND_NAMESPACE_PART_PREFIX$index", name)
}
}
props.toMap
}
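// For example (a sketch), catalogAndNamespaceToProps("spark_catalog", Seq("default")) produces:
//   view.catalogAndNamespace.numParts -> "2"
//   view.catalogAndNamespace.part.0   -> "spark_catalog"
//   view.catalogAndNamespace.part.1   -> "default"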
val VIEW_SQL_CONFIG_PREFIX = VIEW_PREFIX + "sqlConfig."
val VIEW_QUERY_OUTPUT_PREFIX = VIEW_PREFIX + "query.out."
val VIEW_QUERY_OUTPUT_NUM_COLUMNS = VIEW_QUERY_OUTPUT_PREFIX + "numCols"
val VIEW_QUERY_OUTPUT_COLUMN_NAME_PREFIX = VIEW_QUERY_OUTPUT_PREFIX + "col."
val VIEW_REFERRED_TEMP_VIEW_NAMES = VIEW_PREFIX + "referredTempViewNames"
val VIEW_REFERRED_TEMP_FUNCTION_NAMES = VIEW_PREFIX + "referredTempFunctionsNames"
def splitLargeTableProp(
key: String,
value: String,
addProp: (String, String) => Unit,
defaultThreshold: Int): Unit = {
val threshold = SQLConf.get.getConf(SQLConf.HIVE_TABLE_PROPERTY_LENGTH_THRESHOLD)
.getOrElse(defaultThreshold)
if (value.length <= threshold) {
addProp(key, value)
} else {
val parts = value.grouped(threshold).toSeq
addProp(s"$key.numParts", parts.length.toString)
parts.zipWithIndex.foreach { case (part, index) =>
addProp(s"$key.part.$index", part)
}
}
}
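// For example (a sketch), with an effective threshold of 4, splitting ("k", "abcdefghij")
// emits a part counter plus three chunk properties:
//   k.numParts -> "3", k.part.0 -> "abcd", k.part.1 -> "efgh", k.part.2 -> "ij"
// readLargeTableProp(props, "k") below reassembles the original value.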
def readLargeTableProp(props: Map[String, String], key: String): Option[String] = {
props.get(key).orElse {
if (props.filterKeys(_.startsWith(key)).isEmpty) {
None
} else {
val numParts = props.get(s"$key.numParts")
if (numParts.isEmpty) {
throw QueryCompilationErrors.cannotReadCorruptedTablePropertyError(key)
} else {
val parts = (0 until numParts.get.toInt).map { index =>
props.getOrElse(s"$key.part.$index", {
throw QueryCompilationErrors.cannotReadCorruptedTablePropertyError(
key, s"Missing part $index, $numParts parts are expected.")
})
}
Some(parts.mkString)
}
}
}
}
def isLargeTableProp(originalKey: String, propKey: String): Boolean = {
propKey == originalKey || propKey == s"$originalKey.numParts" ||
propKey.startsWith(s"$originalKey.part.")
}
def normalize(table: CatalogTable): CatalogTable = {
val nondeterministicProps = Set(
"CreateTime",
"transient_lastDdlTime",
"grantTime",
"lastUpdateTime",
"last_modified_by",
"last_modified_time",
"Owner:",
// The following are hive specific schema parameters which we do not need to match exactly.
"totalNumberFiles",
"maxFileSize",
"minFileSize"
)
table.copy(
createTime = 0L,
lastAccessTime = 0L,
properties = table.properties.filterKeys(!nondeterministicProps.contains(_)).toMap,
stats = None,
ignoredProperties = Map.empty
)
}
}
/**
* This class of statistics is used in [[CatalogTable]] to interact with metastore.
* We define this new class instead of directly using [[Statistics]] here because there is no
* concept of attributes in the catalog.
*/
case class CatalogStatistics(
sizeInBytes: BigInt,
rowCount: Option[BigInt] = None,
colStats: Map[String, CatalogColumnStat] = Map.empty) {
/**
* Convert [[CatalogStatistics]] to [[Statistics]], and match column stats to attributes based
* on column names.
*/
def toPlanStats(planOutput: Seq[Attribute], planStatsEnabled: Boolean): Statistics = {
if (planStatsEnabled && rowCount.isDefined) {
val attrStats = AttributeMap(planOutput
.flatMap(a => colStats.get(a.name).map(a -> _.toPlanStat(a.name, a.dataType))))
// Estimate size as number of rows * row size.
val size = EstimationUtils.getOutputSize(planOutput, rowCount.get, attrStats)
Statistics(sizeInBytes = size, rowCount = rowCount, attributeStats = attrStats)
} else {
// When plan statistics are disabled or the table doesn't have other statistics,
// we apply the size-only estimation strategy and only propagate sizeInBytes in statistics.
Statistics(sizeInBytes = sizeInBytes)
}
}
/** Readable string representation for the CatalogStatistics. */
def simpleString: String = {
val rowCountString = if (rowCount.isDefined) s", ${rowCount.get} rows" else ""
s"$sizeInBytes bytes$rowCountString"
}
}
/**
* This class of statistics for a column is used in [[CatalogTable]] to interact with metastore.
*/
case class CatalogColumnStat(
distinctCount: Option[BigInt] = None,
min: Option[String] = None,
max: Option[String] = None,
nullCount: Option[BigInt] = None,
avgLen: Option[Long] = None,
maxLen: Option[Long] = None,
histogram: Option[Histogram] = None,
version: Int = CatalogColumnStat.VERSION) {
/**
* Returns a map from string to string that can be used to serialize the column stats.
* The key is the name of the column and name of the field (e.g. "colName.distinctCount"),
* and the value is the string representation for the value.
* min/max values are stored as Strings. They can be deserialized using
* [[CatalogColumnStat.fromExternalString]].
*
* As part of the protocol, the returned map always contains a key called "version".
* Any of the fields that are null (None) won't appear in the map.
*/
def toMap(colName: String): Map[String, String] = {
val map = new scala.collection.mutable.HashMap[String, String]
map.put(s"${colName}.${CatalogColumnStat.KEY_VERSION}", CatalogColumnStat.VERSION.toString)
distinctCount.foreach { v =>
map.put(s"${colName}.${CatalogColumnStat.KEY_DISTINCT_COUNT}", v.toString)
}
nullCount.foreach { v =>
map.put(s"${colName}.${CatalogColumnStat.KEY_NULL_COUNT}", v.toString)
}
avgLen.foreach { v => map.put(s"${colName}.${CatalogColumnStat.KEY_AVG_LEN}", v.toString) }
maxLen.foreach { v => map.put(s"${colName}.${CatalogColumnStat.KEY_MAX_LEN}", v.toString) }
min.foreach { v => map.put(s"${colName}.${CatalogColumnStat.KEY_MIN_VALUE}", v) }
max.foreach { v => map.put(s"${colName}.${CatalogColumnStat.KEY_MAX_VALUE}", v) }
histogram.foreach { h =>
CatalogTable.splitLargeTableProp(
s"$colName.${CatalogColumnStat.KEY_HISTOGRAM}",
HistogramSerializer.serialize(h),
map.put,
4000)
}
map.toMap
}
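// For a column "age" (a sketch; only the stats that are defined appear), toMap("age")
// yields entries such as:
//   age.version -> "2", age.distinctCount -> "100", age.min -> "0", age.max -> "90"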
/** Convert [[CatalogColumnStat]] to [[ColumnStat]]. */
def toPlanStat(
colName: String,
dataType: DataType): ColumnStat =
ColumnStat(
distinctCount = distinctCount,
min = min.map(CatalogColumnStat.fromExternalString(_, colName, dataType, version)),
max = max.map(CatalogColumnStat.fromExternalString(_, colName, dataType, version)),
nullCount = nullCount,
avgLen = avgLen,
maxLen = maxLen,
histogram = histogram,
version = version)
}
object CatalogColumnStat extends Logging {
// List of string keys used to serialize CatalogColumnStat
val KEY_VERSION = "version"
private val KEY_DISTINCT_COUNT = "distinctCount"
private val KEY_MIN_VALUE = "min"
private val KEY_MAX_VALUE = "max"
private val KEY_NULL_COUNT = "nullCount"
private val KEY_AVG_LEN = "avgLen"
private val KEY_MAX_LEN = "maxLen"
private val KEY_HISTOGRAM = "histogram"
val VERSION = 2
private def getTimestampFormatter(isParsing: Boolean): TimestampFormatter = {
TimestampFormatter(
format = "yyyy-MM-dd HH:mm:ss.SSSSSS",
zoneId = ZoneOffset.UTC,
isParsing = isParsing)
}
/**
* Converts the external string representation of a value of the given data type to the
* corresponding Catalyst (internal) value.
*/
def fromExternalString(s: String, name: String, dataType: DataType, version: Int): Any = {
dataType match {
case BooleanType => s.toBoolean
case DateType if version == 1 => DateTimeUtils.fromJavaDate(java.sql.Date.valueOf(s))
case DateType => DateFormatter(ZoneOffset.UTC).parse(s)
case TimestampType if version == 1 =>
DateTimeUtils.fromJavaTimestamp(java.sql.Timestamp.valueOf(s))
case TimestampType => getTimestampFormatter(isParsing = true).parse(s)
case ByteType => s.toByte
case ShortType => s.toShort
case IntegerType => s.toInt
case LongType => s.toLong
case FloatType => s.toFloat
case DoubleType => s.toDouble
case _: DecimalType => Decimal(s)
// This version of Spark does not use min/max for binary/string types so we ignore it.
case BinaryType | StringType => null
case _ =>
throw QueryCompilationErrors.columnStatisticsDeserializationNotSupportedError(
name, dataType)
}
}
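// For example (a sketch): fromExternalString("2021-01-01", "d", DateType, 2) parses the
// string with the UTC DateFormatter and returns the internal Int (days since the epoch).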
/**
* Converts the given Catalyst (internal) value to the string representation of the
* external data type.
*/
def toExternalString(v: Any, colName: String, dataType: DataType): String = {
val externalValue = dataType match {
case DateType => DateFormatter(ZoneOffset.UTC).format(v.asInstanceOf[Int])
case TimestampType => getTimestampFormatter(isParsing = false).format(v.asInstanceOf[Long])
case BooleanType | _: IntegralType | FloatType | DoubleType => v
case _: DecimalType => v.asInstanceOf[Decimal].toJavaBigDecimal
// This version of Spark does not use min/max for binary/string types so we ignore it.
case _ =>
throw QueryCompilationErrors.columnStatisticsSerializationNotSupportedError(
colName, dataType)
}
externalValue.toString
}
/**
* Creates a [[CatalogColumnStat]] object from the given map.
* This is used to deserialize column stats from some external storage.
* The serialization side is defined in [[CatalogColumnStat.toMap]].
*/
def fromMap(
table: String,
colName: String,
map: Map[String, String]): Option[CatalogColumnStat] = {
try {
Some(CatalogColumnStat(
distinctCount = map.get(s"${colName}.${KEY_DISTINCT_COUNT}").map(v => BigInt(v.toLong)),
min = map.get(s"${colName}.${KEY_MIN_VALUE}"),
max = map.get(s"${colName}.${KEY_MAX_VALUE}"),
nullCount = map.get(s"${colName}.${KEY_NULL_COUNT}").map(v => BigInt(v.toLong)),
avgLen = map.get(s"${colName}.${KEY_AVG_LEN}").map(_.toLong),
maxLen = map.get(s"${colName}.${KEY_MAX_LEN}").map(_.toLong),
histogram = CatalogTable.readLargeTableProp(map, s"$colName.$KEY_HISTOGRAM")
.map(HistogramSerializer.deserialize),
version = map(s"${colName}.${KEY_VERSION}").toInt
))
} catch {
case NonFatal(e) =>
logWarning(s"Failed to parse column statistics for column ${colName} in table $table", e)
None
}
}
}
case class CatalogTableType private(name: String)
object CatalogTableType {
val EXTERNAL = new CatalogTableType("EXTERNAL")
val MANAGED = new CatalogTableType("MANAGED")
val VIEW = new CatalogTableType("VIEW")
val tableTypes = Seq(EXTERNAL, MANAGED, VIEW)
}
/**
* A database defined in the catalog.
*/
case class CatalogDatabase(
name: String,
description: String,
locationUri: URI,
properties: Map[String, String])
object CatalogTypes {
/**
* Specifications of a table partition. Mapping column name to column value.
*/
type TablePartitionSpec = Map[String, String]
/**
* Initialize an empty spec.
*/
lazy val emptyTablePartitionSpec: TablePartitionSpec = Map.empty[String, String]
}
/**
* A placeholder for a table relation, which will be replaced by concrete relation like
* `LogicalRelation` or `HiveTableRelation`, during analysis.
*/
case class UnresolvedCatalogRelation(
tableMeta: CatalogTable,
options: CaseInsensitiveStringMap = CaseInsensitiveStringMap.empty(),
override val isStreaming: Boolean = false) extends LeafNode {
assert(tableMeta.identifier.database.isDefined)
override lazy val resolved: Boolean = false
override def output: Seq[Attribute] = Nil
}
/**
* A wrapper to store the temporary view info, will be kept in `SessionCatalog`
* and will be transformed to `View` during analysis
*/
case class TemporaryViewRelation(tableMeta: CatalogTable) extends LeafNode {
override lazy val resolved: Boolean = false
override def output: Seq[Attribute] = Nil
}
/**
* A `LogicalPlan` that represents a hive table.
*
* TODO: remove this after we completely make hive as a data source.
*/
case class HiveTableRelation(
tableMeta: CatalogTable,
dataCols: Seq[AttributeReference],
partitionCols: Seq[AttributeReference],
tableStats: Option[Statistics] = None,
@transient prunedPartitions: Option[Seq[CatalogTablePartition]] = None)
extends LeafNode with MultiInstanceRelation {
assert(tableMeta.identifier.database.isDefined)
assert(tableMeta.partitionSchema.sameType(partitionCols.toStructType))
assert(tableMeta.dataSchema.sameType(dataCols.toStructType))
// Partition columns should always appear after data columns.
override def output: Seq[AttributeReference] = dataCols ++ partitionCols
def isPartitioned: Boolean = partitionCols.nonEmpty
override def doCanonicalize(): HiveTableRelation = copy(
tableMeta = CatalogTable.normalize(tableMeta),
dataCols = dataCols.zipWithIndex.map {
case (attr, index) => attr.withExprId(ExprId(index))
},
partitionCols = partitionCols.zipWithIndex.map {
case (attr, index) => attr.withExprId(ExprId(index + dataCols.length))
},
tableStats = None
)
override def computeStats(): Statistics = {
tableMeta.stats.map(_.toPlanStats(output, conf.cboEnabled || conf.planStatsEnabled))
.orElse(tableStats)
.getOrElse {
throw QueryExecutionErrors.tableStatsNotSpecifiedError
}
}
override def newInstance(): HiveTableRelation = copy(
dataCols = dataCols.map(_.newInstance()),
partitionCols = partitionCols.map(_.newInstance()))
override def simpleString(maxFields: Int): String = {
val catalogTable = tableMeta.storage.serde match {
case Some(serde) => tableMeta.identifier :: serde :: Nil
case _ => tableMeta.identifier :: Nil
}
var metadata = Map(
"CatalogTable" -> catalogTable.mkString(", "),
"Data Cols" -> truncatedString(dataCols, "[", ", ", "]", maxFields),
"Partition Cols" -> truncatedString(partitionCols, "[", ", ", "]", maxFields)
)
if (prunedPartitions.nonEmpty) {
metadata += ("Pruned Partitions" -> {
val parts = prunedPartitions.get.map { part =>
val spec = part.spec.map { case (k, v) => s"$k=$v" }.mkString(", ")
if (part.storage.serde.nonEmpty && part.storage.serde != tableMeta.storage.serde) {
s"($spec, ${part.storage.serde.get})"
} else {
s"($spec)"
}
}
truncatedString(parts, "[", ", ", "]", maxFields)
})
}
val metadataEntries = metadata.toSeq.map {
case (key, value) if key == "CatalogTable" => value
case (key, value) =>
key + ": " + StringUtils.abbreviate(value, SQLConf.get.maxMetadataStringLength)
}
val metadataStr = truncatedString(metadataEntries, "[", ", ", "]", maxFields)
s"$nodeName $metadataStr"
}
}
|
C
C $Id: pj23dp.f,v 1.5 2008-07-27 00:17:12 haley Exp $
C
C Copyright (C) 2000
C University Corporation for Atmospheric Research
C All Rights Reserved
C
C The use of this Software is governed by a License Agreement.
C
SUBROUTINE PJ23DP (COORD,CRDIO,INDIC)
C
C -- M O D I F I E D S T E R E O G R A P H I C - A L A S K A
C
IMPLICIT DOUBLE PRECISION (A-Z)
INTEGER N,J,NN
INTEGER INDIC
DIMENSION GEOG(2),PROJ(2),COORD(2),CRDIO(2),
. ACOEF(6),BCOEF(6)
C **** PARAMETERS **** A,E,ES,LON0,LAT0,X0,Y0,SINPH0,COSPH0 ************
COMMON /ERRMZ0/ IERR
INTEGER IERR
SAVE /ERRMZ0/
COMMON /PRINZ0/ IPEMSG,IPELUN,IPPARM,IPPLUN
INTEGER IPEMSG,IPELUN,IPPARM,IPPLUN
SAVE /PRINZ0/
COMMON /PC23DP/ A,LON0,X0,Y0,ACOEF,BCOEF,EC,LAT0,CCHIO,SCHIO,N
DATA HALFPI /1.5707963267948966D0/
DATA EPSLN /1.0D-10/
DATA ZERO,ONE,TWO /0.0D0,1.0D0,2.0D0/
C
C -- F O R W A R D . . .
C
IF (INDIC .EQ. 0) THEN
C
GEOG(1) = COORD(1)
GEOG(2) = COORD(2)
IERR = 0
IF (MDDADP(GEOG).GT.30.D0) THEN
IF (IPEMSG.EQ.0) WRITE (IPELUN,2020)
2020 FORMAT (/' ERROR PJ23DP'/
. ' POINT IS TOO FAR FROM CENTER OF PROJECTION')
IERR = 233
RETURN
END IF
LON = ADJLDP (GEOG(1) - LON0)
C
C CALCULATE X-PRIME AND Y-PRIME FOR OBLIQUE STEREOGRAPHIC PROJ.
C FROM LAT/LONG.
C
SINLON = SIN (LON)
COSLON = COS (LON)
ESPHI = EC *SIN(GEOG(2))
CHI=TWO*ATAN(TAN((HALFPI+GEOG(2))/TWO)*((ONE-ESPHI)/(ONE
. +ESPHI))**(EC/TWO)) - HALFPI
SCHI=SIN(CHI)
CCHI=COS(CHI)
G=SCHIO*SCHI+CCHIO*CCHI*COSLON
S=TWO/(ONE+G)
XP=S*CCHI*SINLON
YP=S*(CCHIO*SCHI-SCHIO*CCHI*COSLON)
C
C USE KNUTH ALGORITHM FOR SUMMING COMPLEX TERMS, TO CONVERT
C OBLIQUE STEREOGRAPHIC TO MODIFIED-STEREOGRAPHIC COORD.
C
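C     (CONCRETELY, THE LOOP BELOW EVALUATES THE COMPLEX POLYNOMIAL
C     SUM OVER J = 1..N OF (ACOEF(J)+I*BCOEF(J))*(XP+I*YP)**J USING
C     ONLY REAL ARITHMETIC, WITH R = 2*XP AND S = XP**2+YP**2.)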
R=XP+XP
S=XP*XP+YP*YP
AR=ACOEF(N)
AI=BCOEF(N)
BR=ACOEF(N-1)
BI=BCOEF(N-1)
DO 140 J=2,N
ARN=BR+R*AR
AIN=BI+R*AI
IF (J.EQ.N) GO TO 140
BR=ACOEF(N-J)-S*AR
BI=BCOEF(N-J)-S*AI
AR=ARN
AI=AIN
140 CONTINUE
BR=-S*AR
BI=-S*AI
AR=ARN
AI=AIN
X=XP*AR-YP*AI+BR
Y=YP*AR+XP*AI+BI
PROJ(1)=X*A+X0
PROJ(2)=Y*A+Y0
CRDIO(1) = PROJ(1)
CRDIO(2) = PROJ(2)
RETURN
END IF
C
C -- I N V E R S E . . .
C
IF (INDIC .EQ. 1) THEN
C
PROJ(1) = COORD(1)
PROJ(2) = COORD(2)
IERR = 0
X = (PROJ(1) - X0)/A
Y = (PROJ(2) - Y0)/A
XP=X
YP=Y
NN=0
C
C USE KNUTH ALGORITHM FOR SUMMING COMPLEX TERMS, TO CONVERT
C MODIFIED-STEREOGRAPHIC CONFORMAL TO OBLIQUE STEREOGRAPHIC
C COORDINATES (XP,YP).
C
225 R=XP+XP
S=XP*XP+YP*YP
AR=ACOEF(N)
AI=BCOEF(N)
BR=ACOEF(N-1)
BI=BCOEF(N-1)
CR=N*AR
CI=N*AI
DR=(N-1)*BR
DI=(N-1)*BI
DO 230 J=2,N
ARN=BR+R*AR
AIN=BI+R*AI
IF (J.EQ.N) GO TO 230
BR=ACOEF(N-J)-S*AR
BI=BCOEF(N-J)-S*AI
AR=ARN
AI=AIN
CRN=DR+R*CR
CIN=DI+R*CI
DR=(N-J)*ACOEF(N-J)-S*CR
DI=(N-J)*BCOEF(N-J)-S*CI
CR=CRN
CI=CIN
230 CONTINUE
BR=-S*AR
BI=-S*AI
AR=ARN
AI=AIN
FXYR=XP*AR-YP*AI+BR-X
FXYI=YP*AR+XP*AI+BI-Y
FPXYR=XP*CR-YP*CI+DR
FPXYI=YP*CR+XP*CI+DI
DEN=FPXYR*FPXYR+FPXYI*FPXYI
DXP=-(FXYR*FPXYR+FXYI*FPXYI)/DEN
DYP=-(FXYI*FPXYR-FXYR*FPXYI)/DEN
XP=XP+DXP
YP=YP+DYP
DS=ABS(DXP)+ABS(DYP)
NN=NN+1
IF (NN.LE.20) GO TO 237
IF (IPEMSG .EQ. 0) WRITE (IPELUN,235)
235 FORMAT (/' ERROR PJ23DP'/
. ' TOO MANY ITERATIONS IN ITERATING INVERSE')
IERR = 235
GO TO 238
237 IF (DS.GT.EPSLN) GO TO 225
C
C CONVERT OBLIQUE STEREOGRAPHIC COORDINATES TO LAT/LONG.
C
238 RH = SQRT (XP * XP + YP * YP)
Z = TWO * ATAN (RH / TWO)
SINZ = SIN (Z)
COSZ = COS (Z)
GEOG(1) = LON0
IF (ABS(RH) .GT. EPSLN) GO TO 240
GEOG(2) = LAT0
CRDIO(1) = GEOG(1)
CRDIO(2) = GEOG(2)
RETURN
240 CHI = ASINDP (COSZ * SCHIO + YP *SINZ * CCHIO / RH)
NN=0
PHI=CHI
250 ESPHI=EC*SIN(PHI)
DPHI=TWO*ATAN(TAN((HALFPI+CHI)/TWO)*((ONE+ESPHI)/(ONE-ESPHI))
. **(EC/TWO)) - HALFPI - PHI
PHI = PHI + DPHI
NN = NN + 1
IF (NN.LE.20) GO TO 257
IF (IPEMSG .EQ. 0) WRITE (IPELUN,255)
255 FORMAT (/' ERROR PJ23DP'/
. ' TOO MANY ITERATIONS IN CALCULATING PHI FROM CHI')
IERR = 236
GO TO 260
257 IF (ABS(DPHI).GT.EPSLN) GO TO 250
260 GEOG(2)=PHI
GEOG(1) = ADJLDP (LON0 + ATAN2(XP*SINZ, RH*CCHIO*COSZ-YP*SCHIO
. *SINZ))
CRDIO(1) = GEOG(1)
CRDIO(2) = GEOG(2)
RETURN
END IF
C
END
|
require 'rails_helper'
RSpec.describe User, type: :model do
context 'ActiveRecord associations' do
it 'has many articles' do
expect(User.reflect_on_association(:articles).macro).to be(:has_many)
end
it 'does not have just one article' do
expect(User.reflect_on_association(:articles).macro).not_to be(:has_one)
end
it 'has many votes' do
expect(User.reflect_on_association(:votes).macro).to be(:has_many)
end
it 'does not have a has_one association for votes' do
expect(User.reflect_on_association(:votes).macro).not_to be(:has_one)
end
end
end
|
import '../../styles/project/experiment-detail.scss'
import {
Q_FUNC_TYPE_OPTIONS,
SCALER_OPTIONS
} from '../../constants'
import React, { useContext, useState } from 'react'
import { Button } from '../forms'
import { ConfirmationDialog } from '../ConfirmationDialog'
import { DownloadPolicyDialog } from './DownloadPolicyDialog'
import { GlobalContext } from '../../context'
import { Progress } from 'react-sweet-progress'
const ProgressCircle = (props) => {
const { isActive, progress } = props
let progressStatus = 'success'
let progressColor = '#2ecc71'
if (isActive) {
progressStatus = 'active'
progressColor = '#3498db'
} else if (progress !== 1.0) {
progressStatus = 'error'
progressColor = '#e74c3c'
}
const percentage = Math.round(100.0 * (isActive ? progress : 1.0))
return (
<Progress
type='circle'
percent={percentage}
strokeWidth='10'
width='35'
status={progressStatus}
theme={{
error: {
color: progressColor
},
success: {
color: progressColor
},
active: {
symbol: `${percentage.toString()}%`,
color: progressColor
}
}}
/>
)
}
export const ExperimentDetail = (props) => {
const { cancelExperiment, deleteExperiment } = useContext(GlobalContext)
const [isDownloadDialogOpen, setIsDownloadDialogOpen] = useState(false)
const [isDeleting, setIsDeleting] = useState(false)
const { experiment } = props
const { metrics, isActive } = experiment
const totalEpoch = experiment.config.n_epochs
const currentEpoch = metrics.td_error ? metrics.td_error.length : 0
const progress = currentEpoch / totalEpoch
let status = 'success'
if (isActive) {
status = 'running'
} else if (progress !== 1.0) {
status = 'failed'
}
return (
<div className='experiment-detail'>
<div className='top-line'>
<ProgressCircle
isActive={isActive}
progress={progress}
/>
<span className='experiment-name'>
{experiment.name}
</span>
<span className={status}>
{status}
</span>
</div>
<div className='middle-line'>
<table>
<tr>
<th>EPOCH</th>
<td>{currentEpoch}/{totalEpoch}</td>
</tr>
<tr>
<th>Q FUNCTION</th>
<td>{Q_FUNC_TYPE_OPTIONS[experiment.config.q_func_factory]}</td>
</tr>
<tr>
<th>SCALER</th>
<td>{SCALER_OPTIONS[experiment.config.scaler]}</td>
</tr>
</table>
</div>
<div className='bottom-line'>
<Button
text='DOWNLOAD'
onClick={() => setIsDownloadDialogOpen(true)}
/>
{isActive &&
<Button
text='CANCEL'
onClick={() => cancelExperiment(experiment)}
/>}
{!isActive &&
<Button
text='DELETE'
onClick={() => setIsDeleting(true)}
/>}
</div>
<DownloadPolicyDialog
isOpen={isDownloadDialogOpen}
totalEpoch={currentEpoch}
experiment={experiment}
onClose={() => setIsDownloadDialogOpen(false)}
/>
<ConfirmationDialog
title={`Deleting ${experiment.name}.`}
message='Are you sure you want to delete this experiment?'
isOpen={isDeleting}
onClose={() => setIsDeleting(false)}
onConfirm={() => deleteExperiment(experiment)}
confirmText='DELETE'
cancelText='CANCEL'
/>
</div>
)
}
|
package mutex
import (
"testing"
"time"
"github.com/go-redis/redis/v8"
"github.com/go-redsync/redsync/v4"
"github.com/go-redsync/redsync/v4/redis/goredis/v8"
"github.com/stretchr/testify/require"
)
func TestRedisMutex(t *testing.T) {
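// Note: this is an integration test; it assumes a Redis server reachable via
// the zero-value redis.Options (i.e. localhost:6379) and takes ~10s to run.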
r := NewRedis(redsync.New(goredis.NewPool(redis.NewClient(&redis.Options{}))))
mu := r.NewMutex("lock:foo")
err := mu.Lock(100 * time.Second)
require.Nil(t, err)
time.Sleep(10 * time.Second)
err = mu.Unlock()
require.Nil(t, err)
}
|
/* @flow */
import test from 'tape';
import { times } from '../../src/utils';
test('utils/index', (t) => {
t.test('times', (q) => {
const result = times(5, (i) => `${i}t`);
q.deepEqual(result, ['1t', '2t', '3t', '4t', '5t']);
q.end();
});
t.end();
});
|
@model DancingGoat.Models.CoffeesFilterViewModel
<h4>@Resources.DancingGoat.Coffees_CoffeeProcessing</h4>
@for (var i = 0; i < Model.AvailableProcessings.Count; ++i)
{
<span class="checkbox js-postback">
@Html.HiddenFor(m => m.AvailableProcessings[i].Value)
@Html.CheckBoxFor(m => m.AvailableProcessings[i].Selected)
@Html.LabelFor(m => m.AvailableProcessings[i].Selected, Model.AvailableProcessings[i].Text)
</span>
}
<h4>@Resources.DancingGoat.Brewers_PublicStatus</h4>
@for (var i = 0; i < Model.AvailableProductStatuses.Count; ++i)
{
<span class="checkbox js-postback">
@Html.HiddenFor(m => m.AvailableProductStatuses[i].Value)
@Html.CheckBoxFor(m => m.AvailableProductStatuses[i].Selected)
@Html.LabelFor(m => m.AvailableProductStatuses[i].Selected, Model.AvailableProductStatuses[i].Text)
</span>
} |
package com.d3.commons.notary
import com.d3.commons.model.IrohaCredential
import com.d3.commons.sidechain.SideChainEvent
import com.d3.commons.sidechain.iroha.consumer.IrohaConsumerImpl
import com.d3.commons.sidechain.iroha.consumer.IrohaConverter
import com.d3.commons.util.createPrettySingleThreadPool
import com.github.kittinunf.result.Result
import io.reactivex.Observable
import io.reactivex.schedulers.Schedulers
import jp.co.soramitsu.iroha.java.IrohaAPI
import mu.KLogging
import java.math.BigInteger
/**
* Implementation of [Notary] business logic
*/
class NotaryImpl(
private val notaryCredential: IrohaCredential,
val irohaAPI: IrohaAPI,
private val primaryChainEvents: Observable<SideChainEvent.PrimaryBlockChainEvent>
) : Notary {
/** Notary account in Iroha */
private val creator = notaryCredential.accountId
private val notaryIrohaConsumer = IrohaConsumerImpl(notaryCredential, irohaAPI)
/**
* Handles a primary chain deposit event. The notary creates an ordered batch of
* transactions: {tx1: setAccountDetail, tx2: addAssetQuantity + transferAsset}.
* setAccountDetail records the transaction hash in the notary account for rollback.
*/
private fun onPrimaryChainDeposit(
hash: String,
time: BigInteger,
account: String,
asset: String,
amount: String,
from: String
): IrohaOrderedBatch {
logger.info { "Transfer $asset event: hash($hash) time($time) user($account) asset($asset) value ($amount)" }
val quorum = notaryIrohaConsumer.getConsumerQuorum().get()
return IrohaOrderedBatch(
arrayListOf(
IrohaTransaction(
creator,
time,
quorum,
arrayListOf(
// insert into Iroha account information for rollback
IrohaCommand.CommandSetAccountDetail(
creator,
"last_tx",
hash
)
)
),
IrohaTransaction(
creator,
time,
quorum,
arrayListOf(
IrohaCommand.CommandAddAssetQuantity(
asset,
amount
),
IrohaCommand.CommandTransferAsset(
creator,
account,
asset,
from,
amount
)
)
)
)
)
}
/**
* Handle primary chain event
*/
override fun onPrimaryChainEvent(chainInputEvent: SideChainEvent.PrimaryBlockChainEvent): IrohaOrderedBatch {
logger.info { "Notary performs primary chain event $chainInputEvent" }
return when (chainInputEvent) {
is SideChainEvent.PrimaryBlockChainEvent.OnPrimaryChainDeposit -> onPrimaryChainDeposit(
chainInputEvent.hash,
chainInputEvent.time,
chainInputEvent.user,
chainInputEvent.asset,
chainInputEvent.amount,
chainInputEvent.from
)
}
}
/**
* Relay side chain [SideChainEvent] to Iroha output
*/
override fun irohaOutput(): Observable<IrohaOrderedBatch> {
return primaryChainEvents.map { event ->
onPrimaryChainEvent(event)
}
}
/**
* Init Iroha consumer
*/
override fun initIrohaConsumer(): Result<Unit, Exception> {
logger.info { "Init Iroha consumer" }
return Result.of {
// Init Iroha Consumer pipeline
irohaOutput()
// convert from Notary model to Iroha model
.subscribeOn(Schedulers.from(createPrettySingleThreadPool("notary", "iroha-consumer")))
.subscribe(
// send to Iroha network layer
{ batch ->
val lst = IrohaConverter.convert(batch, notaryCredential.keyPair)
notaryIrohaConsumer.send(lst)
.fold(
{ logger.info { "Send to Iroha success" } },
{ ex -> logger.error("Send failure", ex) }
)
},
// on error
{ ex -> logger.error("OnError called", ex) },
// should be never called
{ logger.error { "OnComplete called" } }
)
Unit
}
}
/**
* Logger
*/
companion object : KLogging()
}
|
App: Wedding
---
Make sure to split up Wedding & Giftery clearly.
=== General ===
UserAuth via User (old table)
=== Wedding ===
WeddingInformation:
- id: PrimaryKey
- userId: ForeignKey
- markdownInfo: String
- date: String
Timeline:
- id: PrimaryKey
- wedding: ForeignKey
- time: String (datestamp?)
- markdownText: String
=== Giftery ===
GifteryList:
- id: PrimaryKey
- userId: ForeignKey
- title: String
- description: String (MD)
Gift:
- id: PrimaryKey
- title: String
- description: String
- checkable: Boolean
- checked: Boolean
GiftCheck:
- giftId: ForeignKey
- userId: ForeignKey
- id: PrimaryKey |
Data for 2020-12-08, 01:00
Status: 200
1. Zheng Shuang responds to her livestream going off the rails
Weibo heat: 846920
2. Refrigerator and door handle in Chengdu confirmed case's home test positive
Weibo heat: 533550
3. Li Xian: "I just like looking at pretty ladies"
Weibo heat: 387751
4. Neighbors say the infant girl had already fallen from the building once before
Weibo heat: 384302
5. Pan Chengran dropped by his agency
Weibo heat: 309180
6. Mao Geping unveils another no-makeup makeup technique
Weibo heat: 291956
7. Over 300 people in India infected by unexplained mystery illness
Weibo heat: 243837
8. Chengdu reports one more confirmed case
Weibo heat: 237403
9. Hua Chunying thanks countries worldwide for congratulating the Chang'e-5 moon mission
Weibo heat: 221005
10. Mei Ting's 7-year-old daughter appears on a magazine cover
Weibo heat: 220244
11. Deyun She's copywriting is brilliant
Weibo heat: 179944
12. Officials respond to the Shijiazhuang infant-fall incident
Weibo heat: 175770
13. Father of the infant who fell responds to refusing treatment
Weibo heat: 174453
14. Documentary "China"
Weibo heat: 174379
15. Yan Xujia's mom forwarded his WeChat to Xiaoheyu
Weibo heat: 173553
16. Kindergarten teacher detained for sitting on a girl while playing on her phone
Weibo heat: 172602
17. Xiao Zhan's one-tap costume change
Weibo heat: 172137
18. iOS 14.2 may shorten battery life
Weibo heat: 171603
19. Dongguan public toilet suspends facial-recognition paper dispenser
Weibo heat: 170824
20. The new year arrived a bit suddenly
Weibo heat: 170206
21. Even Chen Zhuoxuan's name placard stands tall
Weibo heat: 169585
22. Mei Xiang
Weibo heat: 169254
23. A father's wedding gift to his daughter
Weibo heat: 142971
24. Reverse "Versailles literature" imitation showcase
Weibo heat: 127678
25. Wang Han's wild bob haircut
Weibo heat: 123482
26. CCTV exposes safety risks of children's balance bikes
Weibo heat: 119567
27. School responds after team ruled to forfeit for too many players with dyed hair
Weibo heat: 110860
28. Xiao Si (exam prediction papers)
Weibo heat: 109917
29. Foreign Ministry responds to US visa restrictions on United Front Work Department officials
Weibo heat: 109709
30. Beijing: residents with no income for three months due to the epidemic may claim relief funds
Weibo heat: 109571
31. Australian bushfire has burned for seven weeks
Weibo heat: 109424
32. Attack on Titan
Weibo heat: 104920
33. Netizen orders over 20,000 yuan of hotpot on a first date
Weibo heat: 104052
34. Foreign Ministry responds to US sanctions on 12 Chinese officials
Weibo heat: 99911
35. Guo Degang and Yu Qian's 20th-anniversary special
Weibo heat: 97042
36. The Penthouse
Weibo heat: 96426
37. Ding Feijun
Weibo heat: 95461
38. CEO of Europe's largest fashion site resigns for his wife
Weibo heat: 94562
39. Little Storm (Xiao Fengbao)
Weibo heat: 93501
40. Da Qin Fu
Weibo heat: 88046
41. Fancy ways to say "I'm cracking up" (lie kai)
Weibo heat: 87539
42. The Remarkable Pediatricians
Weibo heat: 83061
43. Li Si just keeps running his mouth
Weibo heat: 75446
44. Infant girl injured in fall, father refuses treatment
Weibo heat: 73617
45. Chengdu Pidu District
Weibo heat: 68751
46. Yang Kun says his flushed face is from sunburn
Weibo heat: 67281
47. "Shine! Super Brothers": performers' taboo topics
Weibo heat: 65199
48. Sweet-and-sour pork with tomato sauce
Weibo heat: 61729
49. Henan police expose free beauty-treatment scam
Weibo heat: 61161
50. Jin Xi He Xi
Weibo heat: 61092
|
# This mixin provides shared behavior for experiments. Includers must implement
# `enabled?` and `publish(result)`.
#
# Override Scientist::Experiment.new to set your own class which includes and
# implements Scientist::Experiment's interface.
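#
# A minimal usage sketch (the class and method names below are illustrative,
# not part of the library):
#
#   class MyExperiment
#     include Scientist::Experiment
#
#     def initialize(name)
#       @name = name
#     end
#
#     def enabled?
#       true
#     end
#
#     def publish(result)
#       # e.g. log or record the result
#     end
#   end
#
#   experiment = MyExperiment.new("widget-permissions")
#   experiment.use { legacy_check }  # control behavior
#   experiment.try { new_check }     # candidate behavior
#   experiment.run                   # runs both, publishes, returns the control's value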
module Scientist::Experiment
# Whether to raise when the control and candidate mismatch.
# If this is nil, the raise_on_mismatches class attribute is used instead.
attr_accessor :raise_on_mismatches
def self.included(base)
set_default(base) if base.instance_of?(Class)
base.extend RaiseOnMismatch
end
# Set this class as default scientist experiment when included.
def self.set_as_default_scientist_experiment(set_default_class)
set_default(Scientist::Default) unless set_default_class
end
# Instantiate a new experiment (using the class given to the .set_default method).
def self.new(name)
(@experiment_klass || Scientist::Default).new(name)
end
# Configure Scientist to use the given class for all future experiments
# (must implement the Scientist::Experiment interface).
#
# Called automatically when new experiments are defined.
def self.set_default(klass)
@experiment_klass = klass
end
# A mismatch, raised when raise_on_mismatches is enabled.
class MismatchError < Exception
attr_reader :name, :result
def initialize(name, result)
@name = name
@result = result
super "experiment '#{name}' observations mismatched"
end
# The default formatting is nearly unreadable, so make it useful.
#
# The assumption here is that errors raised in a test environment are
# printed out as strings, rather than using #inspect.
def to_s
super + ":\n" +
format_observation(result.control) + "\n" +
result.candidates.map { |candidate| format_observation(candidate) }.join("\n") +
"\n"
end
def format_observation(observation)
observation.name + ":\n" +
if observation.raised?
lines = observation.exception.backtrace.map { |line| " #{line}" }.join("\n")
" #{observation.exception.inspect}" + "\n" + lines
else
" #{observation.cleaned_value.inspect}"
end
end
end
module RaiseOnMismatch
# Set this flag to raise on experiment mismatches.
#
# This causes all science mismatches to raise a MismatchError. This is
# intended for test environments and should not be enabled in a production
# environment.
#
# bool - true/false - whether to raise when the control and candidate mismatch.
def raise_on_mismatches=(bool)
@raise_on_mismatches = bool
end
# Whether or not to raise a mismatch error when a mismatch occurs.
def raise_on_mismatches?
@raise_on_mismatches
end
end
# Define a block of code to run before an experiment begins, if the experiment
# is enabled.
#
# The block takes no arguments.
#
# Returns the configured block.
def before_run(&block)
@_scientist_before_run = block
end
# A Hash of behavior blocks, keyed by String name. Register behavior blocks
# with the `try` and `use` methods.
def behaviors
@_scientist_behaviors ||= {}
end
# A block to clean an observed value for publishing or storing.
#
# The block takes one argument, the observed value which will be cleaned.
#
# Returns the configured block.
def clean(&block)
@_scientist_cleaner = block
end
# Accessor for the clean block, if one is available.
#
# Returns the configured block, or nil.
def cleaner
@_scientist_cleaner
end
# Internal: Clean a value with the configured clean block, or return the value
# if no clean block is configured.
#
# Rescues and reports exceptions in the clean block if they occur.
def clean_value(value)
if @_scientist_cleaner
@_scientist_cleaner.call value
else
value
end
rescue StandardError => ex
raised :clean, ex
value
end
# A block which compares two experimental values.
#
# The block must take two arguments, the control value and a candidate value,
# and return true or false.
#
# Returns the block.
def compare(*args, &block)
@_scientist_comparator = block
end
# A block which compares two experimental errors.
#
# The block must take two arguments, the control Error and a candidate Error,
# and return true or false.
#
# Returns the block.
def compare_errors(*args, &block)
@_scientist_error_comparator = block
end
# A Symbol-keyed Hash of extra experiment data.
def context(context = nil)
@_scientist_context ||= {}
@_scientist_context.merge!(context) unless context.nil?
@_scientist_context
end
# Configure this experiment to ignore an observation with the given block.
#
# The block takes two arguments, the control observation and the candidate
# observation which didn't match the control. If the block returns true, the
# mismatch is disregarded.
#
# This can be called more than once with different blocks to use.
def ignore(&block)
@_scientist_ignores ||= []
@_scientist_ignores << block
end
# Internal: ignore a mismatched observation?
#
# Iterates through the configured ignore blocks and calls each of them with
# the given control and mismatched candidate observations.
#
# Returns true or false.
def ignore_mismatched_observation?(control, candidate)
return false unless @_scientist_ignores
@_scientist_ignores.any? do |ignore|
begin
ignore.call control.value, candidate.value
rescue StandardError => ex
raised :ignore, ex
false
end
end
end
# The String name of this experiment. Default is "experiment". See
# Scientist::Default for an example of how to override this default.
def name
"experiment"
end
# Internal: compare two observations, using the configured compare and compare_errors lambdas if present.
def observations_are_equivalent?(a, b)
a.equivalent_to? b, @_scientist_comparator, @_scientist_error_comparator
rescue StandardError => ex
raised :compare, ex
false
end
def raise_with(exception)
@_scientist_custom_mismatch_error = exception
end
# Called when an exception is raised while running an internal operation,
# like :publish. Override this method to track these exceptions. The
# default implementation re-raises the exception.
def raised(operation, error)
raise error
end
# Internal: Run all the behaviors for this experiment, observing each and
# publishing the results. Return the result of the named behavior, default
# "control".
def run(name = nil)
behaviors.freeze
context.freeze
name = (name || "control").to_s
block = behaviors[name]
if block.nil?
raise Scientist::BehaviorMissing.new(self, name)
end
unless should_experiment_run?
return block.call
end
if @_scientist_before_run
@_scientist_before_run.call
end
result = generate_result(name)
begin
publish(result)
rescue StandardError => ex
raised :publish, ex
end
if raise_on_mismatches? && result.mismatched?
if @_scientist_custom_mismatch_error
raise @_scientist_custom_mismatch_error.new(self.name, result)
else
raise MismatchError.new(self.name, result)
end
end
control = result.control
raise control.exception if control.raised?
control.value
end
# Define a block that determines whether or not the experiment should run.
def run_if(&block)
@_scientist_run_if_block = block
end
# Internal: does a run_if block allow the experiment to run?
#
# Rescues and reports exceptions in a run_if block if they occur.
def run_if_block_allows?
(@_scientist_run_if_block ? @_scientist_run_if_block.call : true)
rescue StandardError => ex
raised :run_if, ex
return false
end
# Internal: determine whether or not an experiment should run.
#
# Rescues and reports exceptions in the enabled method if they occur.
def should_experiment_run?
behaviors.size > 1 && enabled? && run_if_block_allows?
rescue StandardError => ex
raised :enabled, ex
return false
end
# Register a named behavior for this experiment, default "candidate".
def try(name = nil, &block)
name = (name || "candidate").to_s
if behaviors.include?(name)
raise Scientist::BehaviorNotUnique.new(self, name)
end
behaviors[name] = block
end
# Register the control behavior for this experiment.
def use(&block)
try "control", &block
end
# Whether or not to raise a mismatch error when a mismatch occurs.
def raise_on_mismatches?
if raise_on_mismatches.nil?
self.class.raise_on_mismatches?
else
!!raise_on_mismatches
end
end
# Provide predefined durations to use instead of actual timing data.
# This is here solely as a convenience for developers of libraries that extend Scientist.
def fabricate_durations_for_testing_purposes(fabricated_durations = {})
@_scientist_fabricated_durations = fabricated_durations
end
# Internal: Generate the observations and create the result from those and the control.
def generate_result(name)
observations = []
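# Run the behaviors in random order so that neither the control nor any
# candidate systematically benefits from going first (cache warmth, shared state).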
behaviors.keys.shuffle.each do |key|
block = behaviors[key]
fabricated_duration = @_scientist_fabricated_durations && @_scientist_fabricated_durations[key]
observations << Scientist::Observation.new(key, self, fabricated_duration: fabricated_duration, &block)
end
control = observations.detect { |o| o.name == name }
Scientist::Result.new(self, observations, control)
end
private
# In order to support marshaling, we have to make the procs marshalable. Some
# CI providers attempt to marshal Scientist mismatch errors so that they can
# be sent out to different places (logs, etc.) The mismatch errors contain
# code from the experiment. This code contains procs. These procs prevent the
# error from being marshaled. To fix this, we simply exclude the procs from
# the data that we marshal.
def marshal_dump
[@name, @result, @raise_on_mismatches]
end
def marshal_load(array)
@name, @result, @raise_on_mismatches = array
end
end
|
<?php
namespace Symfony\Cmf\Bundle\FileEditorBundle\Controller;
use Symfony\Component\Templating\EngineInterface;
use Symfony\Component\HttpFoundation\Request;
use Symfony\Component\HttpFoundation\Response;
use Symfony\Cmf\Component\Resource\RepositoryRegistryInterface;
use Puli\Repository\Api\EditableRepository;
use Puli\Repository\Api\Resource\BodyResource;
use Symfony\Component\HttpFoundation\RedirectResponse;
class EditorController
{
private $templating;
private $registry;
public function __construct(
RepositoryRegistryInterface $registry,
EngineInterface $templating
)
{
$this->templating = $templating;
$this->registry = $registry;
}
public function editorAction(Request $request)
{
$repositoryName = $request->get('repository', null);
$repository = $this->registry->get($repositoryName);
$path = $request->query->get('path', null);
$template = $request->get('template', 'CmfFileEditorBundle::index.html.twig');
$resource = null;
if ($repository->contains($path)) {
$resource = $repository->get($path);
}
if (!$resource) {
throw new \InvalidArgumentException(sprintf(
'Resource at "%s" does not exist',
$path
));
}
if (!$resource instanceof BodyResource) {
throw new \InvalidArgumentException(sprintf(
'Resource "%s" is not an instance of BodyResource',
get_class($resource)
));
}
$editable = $repository instanceof EditableRepository;
if ($editable && $request->getMethod() === 'POST') {
file_put_contents($resource->getFilesystemPath(), $request->request->get('body'));
return new RedirectResponse('#');
}
return $this->templating->renderResponse(
$template,
[
'editable' => $editable,
'resource' => $resource,
],
new Response()
);
}
}
|