text
stringlengths 27
775k
|
---|
import pinchSimulator from './utils/Gesture/pinchSimulator';
import sleep from './utils/sleep';
import AnyTouch from '../src/main'

// Fixture: an AnyTouch instance bound to a detached element.
const el = document.createElement('div');
const at = new AnyTouch(el);

test('pinch缩放计算是否正确?', (done) => {
    // Index into expectScales of the next expected pinchin/pinchout value.
    let index = 0;
    // Scales the simulator will emit, in order: zoom in, then zoom out.
    let expectScales = [2, 4, 6, 3, 1, 0.5, 0.2];
    at.on('pinch', ({ type, scale }) => {
        // scale === 1 fires neither pinchin nor pinchout, so advance the
        // expectation index here instead.
        if (1 === scale) {
            index++;
        }
        expect(type).toBe('pinch');
    });
    // Zoom in (scale > 1).
    at.on('pinchout', ({ scale }) => {
        expect(scale).toBe(expectScales[index]);
        index++;
    });
    // Zoom out (scale < 1).
    at.on('pinchin', ({ scale }) => {
        expect(scale).toBe(expectScales[index]);
        index++;
    });
    at.on('pinchstart', ({ scale }) => {
        expect(scale).toBe(expectScales[0]);
    });
    at.on('pinchmove', ({ scale }) => {
        // Every move after the start reports a scale, never the first one.
        expect(scale).not.toBe(expectScales[0]);
        expect(scale).not.toBeUndefined();
    });
    at.on('pinchend', ({ scale }) => {
        expect(scale).toBe(expectScales[expectScales.length-1]);
    });
    // Simulate the pinch gesture.
    // NOTE(review): done() is called immediately afterwards — this only
    // works if pinchSimulator dispatches its touch events synchronously;
    // confirm, otherwise the assertions above may run after the test ends.
    pinchSimulator(el, { scales: expectScales });
    done();
});
package net.selenate.common.comms.req;
import java.net.URL;
import net.selenate.common.exceptions.SeInvalidArgumentException;
import net.selenate.common.exceptions.SeNullArgumentException;
public final class SeReqSessionDownload implements SeCommsReq {
private static final long serialVersionUID = 45749879L;
private final String url;
public SeReqSessionDownload(final String url) {
this.url = url;
validate();
}
public String getUrl() {
return url;
}
public SeReqSessionDownload withUrl(final String newUrl) {
return new SeReqSessionDownload(newUrl);
}
private void validate() {
if (url == null) {
throw new SeNullArgumentException("Url");
}
try {
new URL(url);
} catch (final Exception e) {
throw new SeInvalidArgumentException("An error occured while interpreting url as java.net.URL!", e);
}
}
@Override
public String toString() {
return String.format("SeReqSessionDownload(%s)", url);
}
@Override
public int hashCode() {
final int prime = 31;
int result = 1;
result = prime * result + ((url == null) ? 0 : url.hashCode());
return result;
}
@Override
public boolean equals(Object obj) {
if (this == obj)
return true;
if (obj == null)
return false;
if (getClass() != obj.getClass())
return false;
SeReqSessionDownload other = (SeReqSessionDownload) obj;
if (url == null) {
if (other.url != null)
return false;
} else if (!url.equals(other.url))
return false;
return true;
}
}
|
from utils import (
create_dir,
open_source,
create_source,
create_sources,
open_sources
)
from contest import (
fetch_sources,
fetch_all_tests
)
from problem import (
fetch_tests,
check_problem
)
import argparse
import json
# Judge identifiers accepted on the command line ('cf' is an alias
# for 'codeforces').
JUDGES = ['codeforces', 'cf']


def main():
    """Command-line entry point.

    Reads editor/compiler settings from config.json, then either fetches a
    single problem, fetches a whole contest, or runs the stored testcases
    for a problem, depending on which mutually exclusive flag was given.
    """
    # Local configuration: editor, default code template path, URL
    # prefixes per judge, compiler name/flags.
    with open('config.json') as json_data_file:
        data = json.load(json_data_file)
    parser = argparse.ArgumentParser(
        description='Automatic testcase checker for competitive programming.')
    parser.add_argument('-j', '--judge',
                        choices=JUDGES, required=True)
    group = parser.add_mutually_exclusive_group(required=True)
    # Each mode stores into a different dest, so exactly one of
    # args.contest_id / contest_id_full / contest_id_tc is non-None.
    group.add_argument('-fp', '--fetch-problem', dest='contest_id',
                       metavar='problem',
                       help='fetch a problem and create source files')
    group.add_argument('-fc', '--fetch-contest', dest='contest_id_full',
                       metavar='contest',
                       help='fetch a contest and create source files')
    group.add_argument('-rt', '--run-testcases', dest='contest_id_tc',
                       metavar='problem',
                       help='run fetched testcases')
    args = parser.parse_args()
    judge = args.judge
    if judge == 'cf':
        judge = 'codeforces'
    editor = data['editor']
    if args.contest_id is not None:
        # Problem ids look like '123A': the trailing letter is the problem
        # name, everything before it is the contest id.
        contest_id = args.contest_id[0:-1]
        name = args.contest_id[-1]
        create_dir(judge, contest_id)
        create_source(judge, contest_id, name, '.cpp',
                      data['default_code']['path'])
        open_source(judge, contest_id, name, '.cpp', editor)
        fetch_tests(judge, data['url']['problem_prefix'][judge],
                    contest_id, name)
    elif args.contest_id_full is not None:
        contest_id = args.contest_id_full
        create_dir(judge, contest_id)
        # NOTE(review): fetch_sources is not passed contest_id here —
        # confirm it derives the contest from the URL prefix alone.
        sources = fetch_sources(judge, data['url']['contest_prefix'][judge])
        create_sources(judge, contest_id, sources, '.cpp',
                       data['default_code']['path'])
        # NOTE(review): open_sources is not given the editor, unlike
        # open_source above — confirm this is intentional.
        open_sources(judge, contest_id, sources, '.cpp')
        fetch_all_tests(judge, data['url']['contest_prefix'][judge],
                        contest_id, sources)
    elif args.contest_id_tc is not None:
        contest_id = args.contest_id_tc[0:-1]
        name = args.contest_id_tc[-1]
        check_problem(judge, contest_id, name,
                      data['compiler']['name'], data['compiler']['flags'])


if __name__ == '__main__':
    main()
|
<?php
namespace EddieJaoude\Zf2Logger\Tests\Zf2LoggerTest\Log;
use EddieJaoude\Zf2Logger\Log\Logger;
/**
 * Class LoggerTest
 *
 * Tests for the Zf2Logger Logger wrapper: accessor round-trips for the
 * authentication service and HTTP request, and verification that log()
 * records priority, priority name, message, and the "extra" context
 * (default Zf2Logger block plus per-call and preconfigured additions)
 * into the attached mock writer.
 *
 * @package EddieJaoude\Zf2Logger\Tests\Zf2LoggerTest\Log
 */
class LoggerTest extends \PHPUnit_Framework_TestCase
{
    /**
     * Logger under test; wired to a mock writer in setUp().
     *
     * @var \EddieJaoude\Zf2Logger\Log\Logger
     */
    private $logger;

    /**
     * Build a fresh Logger with a mock writer and a mocked
     * authentication service before every test.
     */
    public function setUp()
    {
        $writer = new \Zend\Log\Writer\Mock;
        $this->logger = new Logger();
        $this->logger->addWriter($writer);
        $authenticationService = \Mockery::mock('Zend\Authentication\AuthenticationService');
        $this->logger->setAuthenticationService(
            $authenticationService
        );
    }

    /**
     * setAuthenticationService() is fluent and the getter returns the
     * exact service instance that was set.
     */
    public function testSetGetAuthenticationService()
    {
        $authenticationService = \Mockery::mock('Zend\Authentication\AuthenticationService');
        $this->assertInstanceOf(
            'EddieJaoude\Zf2Logger\Log\Logger',
            $this->logger->setAuthenticationService($authenticationService)
        );
        $this->assertEquals(
            $authenticationService,
            $this->logger->getAuthenticationService()
        );
    }

    /**
     * setRequest() is fluent and the getter returns the same request.
     */
    public function testSetGetRequest()
    {
        $request = \Mockery::mock('Zend\Http\PhpEnvironment\Request');
        $this->assertInstanceOf(
            'EddieJaoude\Zf2Logger\Log\Logger',
            $this->logger->setRequest($request)
        );
        $this->assertEquals(
            $request,
            $this->logger->getRequest()
        );
    }

    /**
     * Logging with no per-call extra still records the default
     * Zf2Logger context (session id, host, ip) in the event's extra.
     */
    public function testLogAndDefaultExtra()
    {
        $message = 'test message';
        $this->assertInstanceOf(
            'EddieJaoude\Zf2Logger\Log\Logger',
            $this->logger->log(Logger::EMERG, $message)
        );
        // EMERG maps to priority 0.
        $this->assertEquals(0, $this->logger->getWriters()->current()->events[0]['priority']);
        $this->assertEquals('EMERG', $this->logger->getWriters()->current()->events[0]['priorityName']);
        $this->assertEquals($message, $this->logger->getWriters()->current()->events[0]['message']);
        $this->assertEquals(
            array(
                'Zf2Logger' => array(
                    'sessionId' => '',
                    'host' => 'CLI',
                    'ip' => 'unavailable'
                )
            ),
            $this->logger->getWriters()->current()->events[0]['extra']
        );
    }

    /**
     * Extras configured via setCustomExtra() are appended after the
     * per-call extra in the logged event.
     */
    public function testInitialiseExtraAddition()
    {
        $message = 'test message';
        $extra = 'extra additional information for the logger';
        $initialiseExtra = 'initialise extra additional information for the logger';
        $this->logger->setCustomExtra(array($initialiseExtra));
        $this->assertInstanceOf(
            'EddieJaoude\Zf2Logger\Log\Logger',
            $this->logger->log(Logger::DEBUG, $message, array($extra))
        );
        // DEBUG maps to priority 7.
        $this->assertEquals(7, $this->logger->getWriters()->current()->events[0]['priority']);
        $this->assertEquals('DEBUG', $this->logger->getWriters()->current()->events[0]['priorityName']);
        $this->assertEquals($message, $this->logger->getWriters()->current()->events[0]['message']);
        $this->assertEquals(
            array(
                'Zf2Logger' => array(
                    'sessionId' => '',
                    'host' => 'CLI',
                    'ip' => 'unavailable'
                ),
                $extra,
                $initialiseExtra
            ),
            $this->logger->getWriters()->current()->events[0]['extra']
        );
    }

    /**
     * A per-call extra is appended after the default Zf2Logger context.
     */
    public function testExtraInfo()
    {
        $message = 'test message';
        $extra = 'extra additional information for the logger';
        $this->assertInstanceOf(
            'EddieJaoude\Zf2Logger\Log\Logger',
            $this->logger->log(Logger::DEBUG, $message, array($extra))
        );
        $this->assertEquals(7, $this->logger->getWriters()->current()->events[0]['priority']);
        $this->assertEquals('DEBUG', $this->logger->getWriters()->current()->events[0]['priorityName']);
        $this->assertEquals($message, $this->logger->getWriters()->current()->events[0]['message']);
        $this->assertEquals(
            array(
                'Zf2Logger' => array(
                    'sessionId' => '',
                    'host' => 'CLI',
                    'ip' => 'unavailable'
                ),
                $extra
            ),
            $this->logger->getWriters()->current()->events[0]['extra']
        );
    }

    /**
     * Extras added via addCustomExtra() are appended after the per-call
     * extra without overwriting earlier entries.
     */
    public function testExtraAdditionalInfo()
    {
        $message = 'test message';
        $extra = 'extra additional information for the logger';
        $this->logger->addCustomExtra(
            array(
                'defaultInfo1' => 'additional info, sould not overwrite anything previous'
            )
        );
        $this->assertInstanceOf(
            'EddieJaoude\Zf2Logger\Log\Logger',
            $this->logger->log(Logger::DEBUG, $message, array($extra))
        );
        $this->assertEquals(7, $this->logger->getWriters()->current()->events[0]['priority']);
        $this->assertEquals('DEBUG', $this->logger->getWriters()->current()->events[0]['priorityName']);
        $this->assertEquals($message, $this->logger->getWriters()->current()->events[0]['message']);
        $this->assertEquals(
            array(
                'Zf2Logger' => array(
                    'sessionId' => '',
                    'host' => 'CLI',
                    'ip' => 'unavailable'
                ),
                $extra,
                array(
                    'defaultInfo1' => 'additional info, sould not overwrite anything previous'
                )
            ),
            $this->logger->getWriters()->current()->events[0]['extra']
        );
    }
}
|
# PSW-ISA
# How to run
## Linux
cd scripts
./runDB.sh
./setupDB.sh
./runBackend.sh ../backend
./runFrontend.sh ../frontend
## Windows
cd scripts/windows
./runDB.ps1
Log in to your PostgreSQL database (localhost, port 6666; user `postgres`, password `admin`)
create server with port 6666, localhost;
create database "ISA";
grant ALL on database "ISA" to "postgres";
In Powershell then type:
./runBackend.ps1 -BackendPath ../../backend
./runFrontend.ps1 -FrontendPath ../../frontend
./runBackendTest.ps1 -BackendPath ../../backend
|
/*
 * Minimal Redux-like store. Holds a single value together with a reducer;
 * read it with get(), overwrite it with set(), or derive the next value
 * from an action with dispatch().
 */
class State {
  #value
  #reduce

  /**
   * @param initialState - value the store starts with
   * @param reducer - pure function (state, action) => nextState
   */
  constructor(initialState, reducer) {
    this.#value = initialState
    this.#reduce = reducer
  }

  /** Current stored value. */
  get() {
    return this.#value
  }

  /** Replace the stored value outright, bypassing the reducer. */
  set(state) {
    this.#value = state
  }

  /**
   * Run the reducer on the current value and the given action, store the
   * result (via set(), so subclass overrides still apply), and return
   * this store for chaining.
   */
  dispatch(action) {
    const next = this.#reduce(this.#value, action)
    this.set(next)
    return this
  }
}
module.exports = { State }
|
#30 * * * * ntpdate time.nist.gov
#59 * * * * sleep 50; /usr/local/openresty/nginx/sbin/split.sh
# Rotate the openresty access log: move the current ma.log into a
# per-hour archive, then signal nginx to reopen its log files.
_prefix="/usr/local/openresty/nginx"
# $(...) instead of backticks; timestamp granularity is one hour.
time=$(date +%Y%m%d%H)
# Ensure the archive directory exists so the mv cannot fail on first run.
mkdir -p "${_prefix}/logs/ma"
mv "${_prefix}/logs/ma.log" "${_prefix}/logs/ma/ma-${time}.log"
# USR1 tells the nginx master process to reopen log files.
kill -USR1 "$(cat "${_prefix}/logs/nginx.pid")"
|
import React from "react"
import {
SectionWrapper,
Div,
FooterSection,
SocialMedia,
RawMedia,
FooterJoinLink,
CopyRight,
} from "../design/Styles"
import rotaractVideo from "../../src/images/rotaractVideo.mp4"
import facebook from "../images/facebook.png"
import instagram from "../images/instagram.png"
import linkedin from "../images/linkedin.png"
import twitter from "../images/twitter.png"
export const Footer = () => {
return (
<SectionWrapper display="flex" flexDirection="column">
<FooterSection
display="flex"
flexWrap="wrap"
justifyContent="space-between"
>
<video loop={true} autoPlay="autoplay" controls muted width="300rem">
<source src={rotaractVideo} type="video/mp4" />
</video>
<SocialMedia display="flex" flexDirection="column">
<h4>Find us on</h4>
<Div display="flex" flexWrap="wrap" justifyContent="space-between">
<a href="https://www.facebook.com/RACHYD3150">
<img src={facebook} alt="facebook"></img>
</a>
<a href="https://www.instagram.com/rotaract.hyd/">
<img src={instagram} alt="instagram"></img>
</a>
<a href="https://www.linkedin.com/company/rotaractclubofhyderabad">
<img src={linkedin} alt="linkedin"></img>
</a>
<a href="/">
<img src={twitter} alt="twitter"></img>
</a>
</Div>
</SocialMedia>
<RawMedia>
<p>[email protected]</p>
<FooterJoinLink to="/JoinUs">Join Us!</FooterJoinLink>
</RawMedia>
</FooterSection>
<CopyRight
display="flex"
flexWrap="wrap"
bgcolor={props => props.theme.colors.lightColor}
>
© {new Date().getFullYear()} Rotaract Club of Hyderabad, built with ❤️
by{" "}
<a href="https://www.linkedin.com/in/abhinav-reddy-6397b9156/">
{" "}
Abhinav Reddy
</a>
{` `}
</CopyRight>
</SectionWrapper>
)
}
|
#!/bin/bash
# The easiest method to demo is to have individual `az storage cors add` commands
# as shown here. If you wish to read in JSON and then execute az commands, you may
# be better off using a different tool than bash since it's complex
# to transform array elements into command arguments like below.

# Rule 1: dev server on :5000 may GET blobs with the x-api-key header;
# request-id and lease-status headers are exposed; preflight cached 10 s.
az storage cors add --services b \
    --origins "http://localhost:5000" \
    --methods "GET" \
    --allowed-headers "x-api-key" \
    --exposed-headers "x-ms-request-id" "x-ms-lease-status" \
    --max-age 10
# Rule 2: :5001 may GET/HEAD with content-type; every x-* response header
# is exposed via a wildcard prefix.
az storage cors add --services b \
    --origins "http://localhost:5001" \
    --methods "GET" "HEAD" \
    --allowed-headers "content-type" \
    --exposed-headers "x-*" \
    --max-age 5
# Rule 3: both dev origins may POST with x-api-key plus any x-app-*
# request header; all response headers exposed.
az storage cors add --services b \
    --origins "http://localhost:5000" "http://localhost:5001" \
    --methods "POST" \
    --allowed-headers "x-api-key" "x-app-*" \
    --exposed-headers "*" \
    --max-age 5
# Rule 4: any origin may GET with any headers (demo-only; too permissive
# for production), preflight cached 30 s.
az storage cors add --services b \
    --origins "*" \
    --methods "GET" \
    --allowed-headers "*" \
    --exposed-headers "*" \
    --max-age 30
# frozen_string_literal: true
require 'rubygems/command'
require 'rubygems/query_utils'
##
# Searches for gems starting with the supplied argument.
#
# Thin subclass of Gem::Command: the shared query machinery comes from
# Gem::QueryUtils; this class only supplies the defaults (local domain,
# all versions, no details) and the help strings.
class Gem::Commands::ListCommand < Gem::Command
  include Gem::QueryUtils

  # Configure the command name, summary and default options, then mix in
  # the common query flags (--local/--remote, --details, etc.).
  def initialize
    super 'list', 'Display local gems whose name matches REGEXP',
          :name => //, :domain => :local, :details => false, :versions => true,
          :installed => nil, :version => Gem::Requirement.default
    add_query_options
  end

  def arguments # :nodoc:
    "REGEXP regexp to look for in gem name"
  end

  def defaults_str # :nodoc:
    "--local --no-details"
  end

  def description # :nodoc:
    <<-EOF
The list command is used to view the gems you have installed locally.
The --details option displays additional details including the summary, the
homepage, the author, the locations of different versions of the gem.
To search for remote gems use the search command.
    EOF
  end

  def usage # :nodoc:
    "#{program_name} [REGEXP ...]"
  end
end
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
namespace Pirate.Ldap
{
    /// <summary>
    /// Type of membership used by Person objects.
    /// NOTE(review): the numeric values look like they are persisted in
    /// LDAP — confirm before renumbering or reusing values.
    /// </summary>
    public enum EmployeeType
    {
        /// <summary>
        /// Person is not a member. Used for forum accounts.
        /// </summary>
        LandLubber = 0,

        /// <summary>
        /// Person is a member but has not paid the membership fee.
        /// </summary>
        Sympathizer = 1,

        /// <summary>
        /// Person is a pirate and has paid the membership fee.
        /// </summary>
        Pirate = 2,

        /// <summary>
        /// Person was once a member.
        /// </summary>
        Veteran = 3,

        /// <summary>
        /// Person was debarred and is no member any more.
        /// </summary>
        WalkedThePlank = 8,

        /// <summary>
        /// Artificial (legal-entity) person member.
        /// </summary>
        Fleet = 9
    }
}
|
#pragma once
#include <memory>
#include <string>
#include <libadb/cfg/secrets.hpp>
namespace adb::api
{
    /**
     * Shared API context: holds the REST base URL plus the secrets used
     * to authenticate requests. Inherits enable_shared_from_this, so
     * instances are intended to be owned via std::shared_ptr.
     */
    class Context : public std::enable_shared_from_this<Context>
    {
    public:
        /// @param baseUrl root URL that API routes are appended to
        Context(const std::string &baseUrl);
        /// Base URL passed at construction.
        /// NOTE(review): returns `const std::string` by value — the
        /// top-level const has no effect; confirm whether a
        /// `const std::string &` was intended.
        const std::string getBaseUrl() const;
        /// Secrets currently in effect.
        const adb::cfg::SecretsData &getSecrets() const;
        /// Replace the stored secrets wholesale.
        void overrideSecrets(const adb::cfg::SecretsData &data);
    private:
        const std::string baseUrl_;
        adb::cfg::SecretsData secrets_;
    };
}
<?php
namespace App\Http\Controllers\Atek;
use App\Http\Controllers\Controller;
use App\Http\Controllers\Metro\ApiController;
use Carbon\Carbon;
use Illuminate\Http\Request;
use Illuminate\Support\Facades\DB;
use Illuminate\Support\Facades\Validator;
class TicketController extends Controller
{
    /**
     * Generate a ticket for an existing sale order.
     *
     * Looks the order up by its sale order number and dispatches to the
     * product-specific generator (SJT/RJT/SV/TP, matched against env ids).
     */
    function genTicket(Request $request)
    {
        // The sale order number is the only required input.
        $validator = Validator::make($request->all(), [
            'sale_or_no' => 'required'
        ]);
        if ($validator->fails()) {
            return response([
                'status' => false,
                'message' => 'Failed to authenticate !',
                'error' => $validator->errors()
            ]);
        }
        $order = DB::table('sale_order')
            ->where('sale_or_no', '=', $request->input('sale_or_no'))
            ->first();
        if (is_null($order)) {
            return response([
                'status' => false,
                'message' => 'Failed to get Order',
                'error' => 'Order does not exist !'
            ]);
        }
        // NOTE(review): env() returns null once config is cached in
        // production — confirm these product ids are also in config.
        $productId = $order->product_id;
        if ($productId == env('PRODUCT_SJT')) return $this->genSJTTicket($order);
        else if ($productId == env('PRODUCT_RJT')) return $this->genRJTTicket($order);
        else if ($productId == env('PRODUCT_SV')) return $this->genSVTicket($order);
        else if ($productId == env('PRODUCT_TP')) return $this->genTPTicket($order);
        // Unknown product id: nothing matched above.
        return response([
            'status' => false,
            'message' => 'unknown error!',
            'error' => 'Please contact admin'
        ]);
    }

    // SJT
    /**
     * Generate a Single Journey Ticket through the MMOPL API and persist
     * one master-booking row plus one row per trip QR code.
     *
     * @param object $order row from the sale_order table
     */
    function genSJTTicket($order)
    {
        $api = new ApiController();
        $response = $api->genSjtRjtTicket($order, "");
        if ($response == null) {
            return response([
                'status' => false,
                'message' => 'Failed to connect with mmopl',
                'error' => 'Please check your internet connection !'
            ]);
        }
        $Response = json_decode($response, false);
        // "BSE" is the upstream API's error status.
        if ($Response->status == "BSE") {
            return response([
                'status' => false,
                'message' => 'Failed to generate ticket',
                'error' => $Response->error
            ]);
        }
        // Master booking record for the whole ticket.
        DB::table('sjt_ms_booking')->insert([
            'txn_date' => Carbon::createFromTimestamp($Response->data->travelDate)->toDateTimeString(),
            'mm_ms_acc_id' => $Response->data->transactionId,
            'sale_or_no' => $order->sale_or_no,
            'ms_qr_no' => $Response->data->masterTxnId,
            'ms_qr_exp' => Carbon::createFromTimestamp($Response->data->masterExpiry)->toDateTimeString(),
            'op_type_id' => $Response->data->operatorId,
            'src_stn_id' => $order->src_stn_id,
            'des_stn_id' => $order->des_stn_id,
            'unit' => $order->unit,
            'unit_price' => ($order->sale_amt) / $order->unit,
            'total_price' => $order->sale_amt,
            'media_type_id' => $order->media_type_id,
            'product_id' => $order->product_id,
            'pass_id' => $order->pass_id,
            'travel_date' => Carbon::createFromTimestamp($Response->data->travelDate)->toDateTimeString(),
        ]);
        // One slave row per trip QR code returned by the API.
        foreach ($Response->data->trips as $trip) {
            DB::table('sjt_sl_booking')->insert([
                'txn_date' => Carbon::createFromTimestamp($Response->data->travelDate)->toDateTimeString(),
                'mm_sl_acc_id' => $trip->transactionId,
                'mm_ms_acc_id' => $Response->data->transactionId,
                'sl_qr_no' => $trip->qrCodeId,
                'sl_qr_exp' => Carbon::createFromTimestamp($trip->expiryTime)->toDateTimeString(),
                'qr_dir' => $Response->data->qrType,
                'qr_data' => $trip->qrCodeData
            ]);
        }
        return response([
            'status' => true,
            'message' => 'Ticket generated successfully',
            'data' => $Response->data->masterTxnId
        ]);
    }

    // RJT
    /**
     * Generate a Return Journey Ticket. Mirrors genSJTTicket() but writes
     * to the rjt_* tables.
     * NOTE(review): the body duplicates genSJTTicket() except for the
     * table names — a shared private helper taking the table prefix
     * would remove the duplication; confirm before refactoring.
     *
     * @param object $order row from the sale_order table
     */
    function genRJTTicket($order)
    {
        $api = new ApiController();
        $response = $api->genSjtRjtTicket($order, "");
        if ($response == null) {
            return response([
                'status' => false,
                'message' => 'Failed to connect with mmopl',
                'error' => 'Please check your internet connection !'
            ]);
        }
        $Response = json_decode($response, false);
        // "BSE" is the upstream API's error status.
        if ($Response->status == "BSE") {
            return response([
                'status' => false,
                'message' => 'Failed to generate ticket',
                'error' => $Response->error
            ]);
        }
        // Master booking record for the whole ticket.
        DB::table('rjt_ms_booking')->insert([
            'txn_date' => Carbon::createFromTimestamp($Response->data->travelDate)->toDateTimeString(),
            'mm_ms_acc_id' => $Response->data->transactionId,
            'sale_or_no' => $order->sale_or_no,
            'ms_qr_no' => $Response->data->masterTxnId,
            'ms_qr_exp' => Carbon::createFromTimestamp($Response->data->masterExpiry)->toDateTimeString(),
            'op_type_id' => $Response->data->operatorId,
            'src_stn_id' => $order->src_stn_id,
            'des_stn_id' => $order->des_stn_id,
            'unit' => $order->unit,
            'unit_price' => ($order->sale_amt) / $order->unit,
            'total_price' => $order->sale_amt,
            'media_type_id' => $order->media_type_id,
            'product_id' => $order->product_id,
            'pass_id' => $order->pass_id,
            'travel_date' => Carbon::createFromTimestamp($Response->data->travelDate)->toDateTimeString(),
        ]);
        // One slave row per trip QR code returned by the API.
        foreach ($Response->data->trips as $trip) {
            DB::table('rjt_sl_booking')->insert([
                'txn_date' => Carbon::createFromTimestamp($Response->data->travelDate)->toDateTimeString(),
                'mm_sl_acc_id' => $trip->transactionId,
                'mm_ms_acc_id' => $Response->data->transactionId,
                'sl_qr_no' => $trip->qrCodeId,
                'sl_qr_exp' => Carbon::createFromTimestamp($trip->expiryTime)->toDateTimeString(),
                'qr_dir' => $Response->data->qrType,
                'qr_data' => $trip->qrCodeData
            ]);
        }
        return response([
            'status' => true,
            'message' => 'Ticket generated successfully',
            'data' => $Response->data->masterTxnId
        ]);
    }
}
|
import csv
if __name__ == "__main__":
    """
    Parse the csv resulting from a call to kenken.gather
    and print the algorithms sorted by various criteria in markdown format
    """
    path = input("filepath: ")
    with open(path, "r") as csvfile:
        data = list(csvfile)
    # metrics maps (algorithm, size) -> (col2, checks, assignments, time).
    # NOTE(review): values[2] stays a string, yet the first priority below
    # sorts on it — if that column is numeric, the sort is lexicographic;
    # confirm kenken.gather's column layout.
    metrics = {}
    for values in data[1:]:  # skip the csv header row
        values = values.replace('\n', '').replace(' ', '').split(',')
        element = (values[0], int(values[1]))
        metrics[element] = tuple([values[2]] + list(map(float, values[3:])))
    # Sort keys: each pulls one metric column out of the value tuple.
    priorities = {
        "constraint check count": lambda value: value[1][0],
        "assignment count": lambda value: value[1][1],
        "completion time": lambda value: value[1][2]
    }
    # Wrap a word in a colored span for the markdown output.
    colored = lambda word: "<span style=\"color: #f45c42\">" + word + "</span>"
    for size in range(3, 10):
        print("### **Kenken puzzles of size", size, ":**")
        # All measurements taken on puzzles of this size.
        entries = [entry for entry in metrics.items() if entry[0][1] == size]
        for name, priority in priorities.items():
            algorithms = [algorithm for (algorithm, _), _ in sorted(entries, key=priority)]
            print("The algorithms sorted by", colored(name), "are", algorithms)
        print()
import { stripHtml } from '@matters/matters-html-formatter'
import * as cheerio from 'cheerio'
import cloneDeep from 'lodash/cloneDeep'
import flow from 'lodash/flow'
/**
 * Count words in an HTML string: each CJK character counts as one word,
 * and each run of word characters counts as one word.
 */
export const countWords = (html: string) => {
  const tokens = stripHtml(html).match(/[\u4e00-\u9fcc]|\w+/g)
  return tokens === null ? 0 : tokens.length
}
/**
 * Remove a specific class name from every element carrying it, leaving
 * the rest of the markup untouched.
 */
export const stripClass = (html: string, name: string) => {
  const doc = cheerio.load(html, { decodeEntities: false, xmlMode: true })
  doc(`.${name}`).removeClass(name)
  return doc.html()
}
/**
 * Correct self-closing tags: rewrites `<name ... />` into the expanded
 * `<name ...></name>` form, trimming whitespace before the `/>`.
 *
 * Bug fix: inside a template literal `\s` collapses to the plain letter
 * "s", so the original pattern never matched whitespace; the space
 * before `/>` leaked into the captured attributes. Backslashes are now
 * doubled so the regex really contains `\s*` and `\/`.
 */
export const correctSelfClosingHtmlTag = (name: string) => (html: string) => {
  const pattern = new RegExp(`<${name}(.*?)\\s*\\/>`, 'g')
  const replacement = `<${name}$1></${name}>`
  return (html || '').replace(pattern, replacement)
}
/**
 * Correct specific nested br tags produced by a third-party lib.
 *
 * Finds chains of `<br class="smart">` elements nested inside each other
 * and flattens each chain into a sequence of sibling fragments, each
 * prefixed by a single `<br class="smart">`. cheerio is only used to
 * locate the chains; the actual rewrite is plain string replacement on
 * the original input.
 */
export const correctNestedBrTag = () => (html: string) => {
  // forcely transform html string to make sure input's formats are inconsistent
  const options = { decodeEntities: false, xmlMode: true }
  const $pre = cheerio.load(html, options)
  // process transformed html string (reparse of the serialized form)
  const $ = cheerio.load($pre.html(), options)
  const base = '<br class="smart">'
  const selector = 'br.smart'
  let output = html
  // Outermost br.smart nodes: those without a br.smart parent.
  const outers = $('br.smart').filter((i, dom) => {
    if (!dom) {
      return false
    }
    const node = $(dom).parent('br.smart')
    if (!node) {
      return false
    }
    return node.length === 0
  })
  outers.each((i, dom) => {
    const node = $(dom)
    if (!dom || !node) {
      return
    }
    let skip = false
    let curr: any = node
    const nodes: any[] = []
    // gather sub nodes: walk the single-child chain of nested br.smart,
    // recording each level's content with deeper br.smart removed
    while (curr) {
      const temp = cloneDeep(curr)
      temp.find('br.smart').remove()
      nodes.push({ content: temp.html() })
      const sub = curr.children(selector).toArray()
      if (sub && sub.length > 1) {
        // More than one nested br at this level: not a simple chain —
        // bail out and leave this paragraph untouched.
        skip = true
        break
      }
      curr = sub && sub.length ? $(sub[0]) : undefined
    }
    if (skip || nodes.length === 0) {
      return
    }
    // replace entire paragraph; normalize the self-closing form first so
    // the string match below lines up with the original input
    const content = (node.html() || '').replace(
      '<br class="smart"/>',
      '<br class="smart" />'
    )
    // NOTE(review): the "</br>" closing tag here presumably matches how
    // the third-party editor serializes the nested br — confirm against
    // real input; String.replace also only rewrites the first occurrence.
    const match = `${base}${content}</br>`
    const replacement = nodes.map((sub) => `${base}${sub.content}`).join('')
    output = output.replace(match, replacement)
  })
  return output
}
/**
 * Pre-process an HTML string: expand self-closing iframe tags, then
 * flatten nested smart-br chains.
 */
export const correctHtml = (html: string) =>
  flow(correctSelfClosingHtmlTag('iframe'), correctNestedBrTag())(html)
|
package main
/*
Copyright (C) 2020 Manetu Inc.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
import (
"io/ioutil"
"strings"
"github.com/confluentinc/confluent-kafka-go/kafka"
"github.com/pkg/errors"
"gopkg.in/yaml.v2"
)
// SaslConfig holds the SASL authentication properties of the kafka
// section (mechanism plus credentials).
type SaslConfig struct {
	Mechanism string `yaml:"mechanism"`
	Username  string `yaml:"username"`
	Password  string `yaml:"password"`
}

// CfgInner mirrors the `kafka:` section of the yaml config file.
type CfgInner struct {
	BootstrapServers []string   `yaml:"bootstrapServers"`
	SecurityProtocol string     `yaml:"securityProtocol"`
	Sasl             SaslConfig `yaml:"sasl"`
}

// Config contains unmarshalled kafka yaml properties (the document root).
type Config struct {
	Kafka CfgInner `yaml:"kafka"`
}
// ReadConfig loads and validates a kafka yaml config file of this shape:
//
//	kafka:
//	  bootstrapServers:
//	    - kafka:9092
//	  securityProtocol: SASL_SSL
//	  sasl:
//	    mechanism: PLAIN
//	    username: USERNAME
//	    password: PASSWORD
//
// At least one bootstrap server must be present.
func ReadConfig(cfgFile string) (*Config, error) {
	raw, err := ioutil.ReadFile(cfgFile)
	if err != nil {
		return nil, errors.WithStack(err)
	}
	var cfg Config
	if err = yaml.Unmarshal(raw, &cfg); err != nil {
		return nil, errors.WithStack(err)
	}
	// should at least have bootstrap servers
	if len(cfg.Kafka.BootstrapServers) == 0 {
		return nil, errors.New("kafka: config: bad config")
	}
	return &cfg, nil
}
// securityConfig copies connection and auth settings from cfg into
// configMap, overriding any values already present. Only SASL_SSL
// installs SASL credentials; other protocols just get the server list.
func securityConfig(cfg *Config, configMap *kafka.ConfigMap) {
	servers := strings.Join(cfg.Kafka.BootstrapServers, ",")
	configMap.SetKey("bootstrap.servers", servers)
	if cfg.Kafka.SecurityProtocol == "SASL_SSL" {
		configMap.SetKey("security.protocol", cfg.Kafka.SecurityProtocol)
		configMap.SetKey("sasl.mechanisms", cfg.Kafka.Sasl.Mechanism)
		configMap.SetKey("sasl.username", cfg.Kafka.Sasl.Username)
		configMap.SetKey("sasl.password", cfg.Kafka.Sasl.Password)
	}
}
|
#!/usr/bin/perl
# Convert the raw FPGA bitstream file `fpga_bitmap` into a C header by
# piping it through `xxd -i`, rewriting the generated declarations so the
# array lives in AVR program memory (PROGMEM) and the length variable is
# widened to unsigned long.
open IN, "xxd -i fpga_bitmap|";
open OUT, ">fpga_bitmap.h";
while(<IN>) {
    # Qualify the byte array for flash storage on AVR targets.
    s/unsigned char/const PROGMEM unsigned char/;
    # Widen the _len variable emitted by xxd.
    s/unsigned int/unsigned long/;
    print OUT;
}
#!/bin/bash
# Build and publish the scheduler-client Docker image; the macOS login
# keychain is unlocked first so signing tools can run unattended.

# unlockKeychain <password>
# Makes ~/Library/Keychains/login.keychain the default keychain and
# unlocks it with the given password; aborts if no password was passed.
unlockKeychain() {
KCP=$1
if [ "" = "$KCP" ]; then
echo "No password specified for keychain unlock - aborting"
exit 1
fi
KC="$HOME/Library/Keychains/login.keychain"
/usr/bin/security -v list-keychains -s "$KC"
# NOTE(review): `failed` is not defined anywhere in this script — on
# error the shell will report "failed: command not found" instead of a
# clean abort; confirm it is provided by the CI environment.
/usr/bin/security -v default-keychain -d user -s "$KC" || failed "Cant make login keychain $KC the default"
/usr/bin/security -v unlock-keychain -p "$KCP" "$KC" || failed "Cant unlock keychain"
}
# NOTE(review): $USER is the login *name*, passed here as the keychain
# password — confirm this is intentional (e.g. a CI account convention).
unlockKeychain $USER

docker build -t scornflake/scheduler-client:latest -f Dockerfile .
docker tag scornflake/scheduler-client:latest registry.shinywhitebox.com/scheduler-client:latest
docker push registry.shinywhitebox.com/scheduler-client:latest
-- migrate:up
-- Widen links.url so long URLs (up to the practical 2048-character
-- browser limit) fit.
ALTER TABLE public.links
ALTER COLUMN url TYPE VARCHAR ( 2048 );

-- migrate:down
-- Restore the original 255-character limit.
-- NOTE(review): rolling back truncation-unsafe — fails if rows longer
-- than 255 characters exist.
ALTER TABLE public.links
ALTER COLUMN url TYPE VARCHAR ( 255 );
#!/usr/local/bin/bash
# set -x
# Emit a markdown bullet list linking to this year's "Day ..." posts in
# _posts/: the file name (sans extension) becomes the URL slug, and the
# quoted value of the post's `title:` front-matter line becomes the text.
YEAR=`date +"%Y"`
# Slugs: path component after _posts/, extension stripped.
POSTS=($(grep title: _posts/$YEAR* | cut -d "/" -f 2 | cut -d "." -f 1))
# Split array elements on newlines only, so titles keep their spaces.
IFS=$'\n';
# Titles: text between the quotes on the title: line, "Day" posts only.
TITLES=($(grep title: _posts/$YEAR* | cut -d "/" -f 2 | cut -d "." -f 2 | cut -d "\"" -f2| grep Day))
# seq is 1-based; arrays are 0-based, hence the $t-1 offsets.
for t in $(seq ${#POSTS[*]}); do
echo "- [${TITLES[$t-1]}](http://www.dockeradvent.com/${POSTS[$t-1]})"
done
<?php

namespace Laminas\Db\Sql\Predicate;

use Laminas\Db\Sql\Literal as BaseLiteral;

/**
 * Raw SQL literal usable as a predicate (e.g. inside a WHERE clause).
 *
 * All behaviour is inherited from the base Literal; this subclass only
 * adds the PredicateInterface marker so literals are accepted wherever a
 * predicate is expected.
 */
class Literal extends BaseLiteral implements PredicateInterface
{
}
This sample demonstrates conversion of an MNIST network in ONNX format to
a TensorRT network. The network used in this sample can be found at https://github.com/onnx/models/tree/master/mnist
(model.onnx)
|
package deploy
import (
"github.com/YTF0/xiaomei_bb/release"
"github.com/lovego/config/config"
)
func GetCommonArgs(svcName, env, tag string) []string {
args := []string{`-e`, config.EnvVar + `=` + env}
service := release.GetService(env, svcName)
args = append(args, service.Options...)
args = append(args, service.ImageName(tag))
args = append(args, service.Command...)
return args
}
|
# BEGIN-SCRIPT-BLOCK
#
# Script-Filter:
# true
#
# Script-Variables:
# $command word "show version"
# END-SCRIPT-BLOCK
import requests, json, re
from infoblox_netmri.easy import NetMRIEasy

# This values will be provided by NetMRI before execution
# (api_url, http_username, device_id, ... exist as globals at runtime).
defaults = {
    "api_url": api_url,
    "http_username": http_username,
    "http_password": http_password,
    "job_id": job_id,
    "device_id": device_id,
    "batch_id": batch_id
}

# Create NetMRI context manager. It will close session after execution
with NetMRIEasy(**defaults) as easy:
    # Query the device's VTP status and look for the operating-mode line.
    vtpstatus = easy.send_command('show vtp status')
    regexp = re.compile(r"VTP Operating Mode\s*: (.*)")
    if regexp.search(vtpstatus):
        # print ('matched')
        # NOTE(review): this lookbehind pattern is a plain (non-raw)
        # string; '\s' still works but triggers an invalid-escape warning
        # on newer Pythons — consider an r'' literal.
        status = re.search('(?<=VTP Operating Mode\s.)(.*)', vtpstatus, re.MULTILINE).group()
        if re.search(r'Server', status):
            # Device operates as a VTP server: raise an informational
            # issue with identifying details (device_devicename and
            # device_deviceipdotted are NetMRI-injected globals).
            issue_id = easy.generate_issue("info", "siftest",**{
                "Host":device_devicename,
                "IPAddress":device_deviceipdotted,
                "noclue1":'test1',
                "noclue2":'test2',
                "device_id": device_id,
                "batch_id": batch_id
            })
    else:
        print ('no match')
-- Votes cast on scheduled polls: one row per (schedule, document) pair,
-- enforced by uc_schedule_document so each member votes at most once
-- per schedule.
CREATE TABLE `votes` (
`id` BIGINT NOT NULL AUTO_INCREMENT COMMENT 'unique identifier',
`document` VARCHAR(11) NOT NULL COMMENT 'cooperate document',
`vote` INT(1) NOT NULL COMMENT '1 - YES, 2 - NO',
`register_date` TIMESTAMP NULL COMMENT 'date register on poll',
`id_schedule` BIGINT NOT NULL COMMENT 'identifier a shedule',
PRIMARY KEY (`id`),
-- Every vote must reference an existing schedule row.
CONSTRAINT `fk_schedule` FOREIGN KEY (`id_schedule`) REFERENCES `schedule` (`id`),
CONSTRAINT `uc_schedule_document` UNIQUE (`id_schedule`, `document` )
)
COMMENT='this table maintains the votes of a schedule';
|
# -*- coding: utf-8 -*-
"""
TODO: Please check readme.txt file first!
--
This Python2.7 program is to reproduce Figure-1 and Figure-4. In this test,
we compare GraphStoIHT with three baseline methods including IHT, StoIHT, and
GraphIHT. IHT is proposed in [3]. StoIHT is proposed in [1]. GraphIHT is
proposed [4] with head/tail projections in [2].
References:
[1] Nguyen, Nam, Deanna Needell, and Tina Woolf. "Linear convergence of
stochastic iterative greedy algorithms with sparse constraints."
IEEE Transactions on Information Theory 63.11 (2017): 6869-6895.
[2] Hegde, Chinmay, Piotr Indyk, and Ludwig Schmidt. "A nearly-linear time
framework for graph-structured sparsity." International Conference on
Machine Learning. 2015.
[3] Blumensath, Thomas, and Mike E. Davies. "Iterative hard thresholding
for compressed sensing." Applied and computational harmonic analysis
27.3 (2009): 265-274.
[4] Hegde, Chinmay, Piotr Indyk, and Ludwig Schmidt. "Fast recovery from
a union of subspaces." Advances in Neural Information Processing
Systems. 2016.
[5] Lovász, László. "Random walks on graphs: A survey." Combinatorics,
Paul erdos is eighty 2.1 (1993): 1-46.
[6] Needell, Deanna, and Joel A. Tropp. "CoSaMP: Iterative signal recovery
from incomplete and inaccurate samples."
Applied and computational harmonic analysis 26.3 (2009): 301-321.
"""
import os
import time
import random
import pickle
import multiprocessing
from itertools import product
import numpy as np
# Import the compiled extension that provides the head/tail projection.
# NOTE(review): exit(0) below reports success to the shell even though
# the required symbol is missing — confirm a nonzero code wasn't intended.
try:
    import sparse_module
    try:
        from sparse_module import wrap_head_tail_bisearch
    except ImportError:
        print('cannot find wrap_head_tail_bisearch method in sparse_module')
        sparse_module = None
        exit(0)
except ImportError:
    # Extension not built at all: tell the user how to build it in place.
    print('\n'.join([
        'cannot find the module: sparse_module',
        'try run: \'python setup.py build_ext --inplace\' first! ']))
def algo_head_tail_bisearch(
        edges, x, costs, g, root, s_low, s_high, max_num_iter, verbose):
    """Wrapper around the compiled head/tail projection of [2].

    Projects ``x`` onto the graph-structured sparsity model with ``g``
    connected components and support size between ``s_low`` and
    ``s_high``.

    :param edges: edges in the graph.
    :param x: vector to be projected.
    :param costs: edge costs in the graph.
    :param g: number of connected components.
    :param root: root of subgraph; usually -1 for no root.
    :param s_low: lower bound of the sparsity.
    :param s_high: upper bound of the sparsity.
    :param max_num_iter: iteration cap of the binary-search procedure.
    :param verbose: print out some information.
    :return: 1. support of the projected vector
             2. the projected vector
    """
    # Node prizes are the squared entries of x.
    prizes = x * x
    # Clamp the upper sparsity bound to avoid a too-large value.
    s_high = min(s_high, len(prizes) - 1)
    re_nodes = wrap_head_tail_bisearch(
        edges, prizes, costs, g, root, s_low, s_high, max_num_iter, verbose)
    support = re_nodes[0]
    projection = np.zeros_like(x)
    projection[support] = x[support]
    return support, projection
def simu_grid_graph(width, height, rand_weight=False):
    """ Generate a grid graph with size, width x height. Totally there will be
        width x height number of nodes in this generated graph.
    :param width: the width of the grid graph.
    :param height: the height of the grid graph.
    :param rand_weight: if True, draw each edge cost uniformly from [1., 2.];
                        otherwise all edge costs are 1.
    :return: 1. array of edges (each a (u, v) pair of node indices)
             2. array of edge costs
             On invalid input, returns two empty lists.
    """
    np.random.seed()
    # bug fix: reject the input when EITHER dimension is negative
    # (the original `and` only caught the case where both were negative).
    if width < 0 or height < 0:
        print('Error: width and height should be positive.')
        return [], []
    width, height = int(width), int(height)
    edges, weights = [], []
    index = 0
    for i in range(height):
        for j in range(width):
            # not in the last column: connect to the right neighbor.
            if (index % width) != (width - 1):
                edges.append((index, index + 1))
                # not in the last row: connect to the neighbor below.
                if index + width < int(width * height):
                    edges.append((index, index + width))
            else:
                # last column: only the downward edge remains.
                if index + width < int(width * height):
                    edges.append((index, index + width))
            index += 1
    edges = np.asarray(edges, dtype=int)
    # random generate costs of the graph
    if rand_weight:
        weights = []
        while len(weights) < len(edges):
            weights.append(random.uniform(1., 2.0))
        weights = np.asarray(weights, dtype=np.float64)
    else:  # set unit weights for edge costs.
        weights = np.ones(len(edges), dtype=np.float64)
    return edges, weights
def sensing_matrix(n, x, norm_noise=0.0):
    """ Generate a Gaussian sensing (design) matrix, with each entry drawn
    from N(0, 1)/sqrt(n). See equation (1.2) in reference [6].
    :param n: the number of measurements required.
    :param x: the input signal.
    :param norm_noise: norm of the noise added to the measurements.
    :return: 1. the design matrix (n x len(x))
             2. the clean measurement vector
             3. the noisy measurement vector.
    """
    dim = len(x)
    # draw n*dim i.i.d. standard normals, scaled by 1/sqrt(n),
    # and lay them out row-major as an (n, dim) matrix.
    design = np.reshape(
        np.random.normal(loc=0.0, scale=1.0, size=(n * dim)) / np.sqrt(n),
        (n, dim))
    clean = design.dot(x)
    # noise direction is random; its magnitude is rescaled to norm_noise.
    raw_noise = np.random.normal(loc=0.0, scale=1.0, size=len(clean))
    noisy = clean + raw_noise * (norm_noise / np.linalg.norm(raw_noise))
    return design, clean, noisy
def random_walk(edges, s, init_node=None, restart=0.0):
    """ The random walk on graphs. Please see details in reference [5].
    :param edges: the edge list of the graph.
    :param s: the sparsity (number of nodes) in the true subgraph.
    :param init_node: initial point of the random walk; random when None.
    :param restart: restart probability, must be in [0.0, 1.0).
    :return: 1. list of nodes walked.
             2. list of edges walked.
             On invalid input, returns two empty lists.
    """
    np.random.seed()  # kept for parity with the other generators in this file
    adj, nodes = dict(), set()
    for edge in edges:  # construct the adjacency list.
        uu, vv = int(edge[0]), int(edge[1])
        nodes.add(uu)
        nodes.add(vv)
        if uu not in adj:
            adj[uu] = set()
        adj[uu].add(vv)
        if vv not in adj:
            adj[vv] = set()
        adj[vv].add(uu)
    if init_node is None:
        # bug fix: pick a random node directly. The original used a node id
        # as an index into adj's key list, which could raise IndexError and
        # biased the choice.
        init_node = random.choice(list(nodes))
    if init_node not in nodes:
        print('Error: the initial_node is not in the graph!')
        return [], []
    if not (0.0 <= restart < 1.0):
        # message fixed to match the actual accepted range.
        print('Error: the restart probability not in [0.0,1.0)')
        return [], []
    if not (0 <= s <= len(nodes)):
        print('Error: the number of nodes not in [0,%d]' % len(nodes))
        return [], []
    subgraph_nodes, subgraph_edges = set(), set()
    next_node = init_node
    subgraph_nodes.add(init_node)
    if s <= 1:
        # bug fix: return lists (not sets) for a consistent return type.
        return list(subgraph_nodes), list(subgraph_edges)
    # walk until the subgraph reaches s nodes; always connected by design.
    while len(subgraph_nodes) < s:
        next_neighbors = list(adj[next_node])
        rand_nei = random.choice(next_neighbors)
        subgraph_nodes.add(rand_nei)
        # record both orientations so membership tests work either way.
        subgraph_edges.add((next_node, rand_nei))
        subgraph_edges.add((rand_nei, next_node))
        next_node = rand_nei  # go to next node.
        if random.random() < restart:
            next_node = init_node
    return list(subgraph_nodes), list(subgraph_edges)
def algo_iht(x_mat, y_tr, max_epochs, lr, s, x_star, x0, tol_algo):
    """ Iterative Hard Thresholding Method proposed in reference [3]: the
    standard IHT method for compressive sensing.
    :param x_mat: the design matrix.
    :param y_tr: the array of measurements.
    :param max_epochs: the maximum epochs (iterations) allowed.
    :param lr: the learning rate (should be 1.0).
    :param s: the sparsity parameter.
    :param x_star: the true signal.
    :param x0: the initial point.
    :param tol_algo: tolerance parameter for early stopping.
    :return: 1. the final estimation error,
             2. number of epochs (iterations) used,
             3. and the run time.
    """
    t_start = time.time()
    w = x0
    _, dim = x_mat.shape
    # precompute the Gram matrix and correlation vector once.
    design_t = np.transpose(x_mat)
    gram = np.dot(design_t, x_mat)
    corr = np.dot(design_t, y_tr)
    epochs = 0
    for _ in range(max_epochs):
        epochs += 1
        # gradient step followed by hard thresholding to the top-s entries.
        step = w - lr * (np.dot(gram, w) - corr)
        step[np.argsort(np.abs(step))[:dim - s]] = 0.
        w = step
        # early stopping for diverge cases due to the large learning rate
        if np.linalg.norm(w) >= 1e3:  # diverge cases.
            break
        if np.linalg.norm(y_tr - np.dot(x_mat, w)) <= tol_algo:
            break
    return np.linalg.norm(w - x_star), epochs, time.time() - t_start
def algo_sto_iht(x_mat, y_tr, max_epochs, lr, s, x_star, x0, tol_algo, b):
    """ Stochastic Iterative Hard Thresholding Method proposed in [1].
    :param x_mat: the design matrix.
    :param y_tr: the array of measurements.
    :param max_epochs: the maximum epochs (iterations) allowed.
    :param lr: the learning rate (should be 1.0).
    :param s: the sparsity parameter.
    :param x_star: the true signal.
    :param x0: x0 is the initial point.
    :param tol_algo: tolerance parameter for early stopping.
    :param b: block size
    :return: 1. the final estimation error,
             2. number of epochs(iterations) used,
             3. and the run time.
    """
    np.random.seed()
    start_time = time.time()
    x_hat = x0
    (n, p) = x_mat.shape
    x_tr_t = np.transpose(x_mat)
    # if the block size exceeds n, treat the data as a single block.
    b = n if n < b else b
    # bug fix: use integer (floor) division -- under Python 3, `/` yields a
    # float and breaks range(num_blocks) and the list multiplication below.
    num_blocks = int(n) // int(b)
    prob = [1. / num_blocks] * num_blocks
    num_epochs = 0
    for epoch_i in range(max_epochs):
        num_epochs += 1
        for _ in range(num_blocks):
            # sample one block uniformly at random.
            ii = np.random.randint(0, num_blocks)
            block = range(b * ii, b * (ii + 1))
            xtx = np.dot(x_tr_t[:, block], x_mat[block])
            xty = np.dot(x_tr_t[:, block], y_tr[block])
            # gradient of ||y_B - X_B x||^2 restricted to the block.
            gradient = - 2. * (xty - np.dot(xtx, x_hat))
            # importance-weighted step followed by hard thresholding.
            bt = x_hat - (lr / (prob[ii] * num_blocks)) * gradient
            bt[np.argsort(np.abs(bt))[0:p - s]] = 0.
            x_hat = bt
        # early stopping for diverge cases due to the large learning rate
        if np.linalg.norm(x_hat) >= 1e3:  # diverge cases.
            break
        if np.linalg.norm(y_tr - np.dot(x_mat, x_hat)) <= tol_algo:
            break
    x_err = np.linalg.norm(x_hat - x_star)
    run_time = time.time() - start_time
    return x_err, num_epochs, run_time
def algo_graph_iht(
        x_mat, y_tr, max_epochs, lr, x_star, x0, tol_algo, edges, costs, s,
        g=1, root=-1, gamma=0.1, proj_max_num_iter=50, verbose=0):
    """ Graph Iterative Hard Thresholding proposed in [4]; the projection
    operator is the head/tail bisection search proposed in [2].
    :param x_mat: the design matrix.
    :param y_tr: the array of measurements.
    :param max_epochs: the maximum epochs (iterations) allowed.
    :param lr: the learning rate (should be 1.0).
    :param x_star: the true signal.
    :param x0: the initial point.
    :param tol_algo: tolerance parameter for early stopping.
    :param edges: edges in the graph.
    :param costs: edge costs.
    :param s: sparsity.
    :param g: number of connected components in the true signal.
    :param root: the root included in the result (default -1: no root).
    :param gamma: to control the upper bound of sparsity.
    :param proj_max_num_iter: maximum number of projection iterations.
    :param verbose: print out some information.
    :return: 1. the final estimation error,
             2. number of epochs (iterations) used,
             3. and the run time.
    """
    t_start = time.time()
    w = np.copy(x0)
    gram = np.dot(np.transpose(x_mat), x_mat)
    corr = np.dot(np.transpose(x_mat), y_tr)
    # sparsity windows for the head (gradient) and tail (iterate) projections.
    head_low = int(len(x0) / 2)
    head_high = int(head_low * (1. + gamma))
    tail_low = int(s)
    tail_high = int(s * (1. + gamma))
    epochs = 0
    for _ in range(max_epochs):
        epochs += 1
        # gradient of 0.5*||y - Xw||^2; identical to -(X'y - X'X w).
        grad = np.dot(gram, w) - corr
        _, grad_proj = algo_head_tail_bisearch(
            edges, grad, costs, g, root, head_low, head_high,
            proj_max_num_iter, verbose)
        candidate = w - lr * grad_proj
        _, w = algo_head_tail_bisearch(
            edges, candidate, costs, g, root, tail_low, tail_high,
            proj_max_num_iter, verbose)
        # early stopping for diverge cases due to the large learning rate
        if np.linalg.norm(w) >= 1e3:  # diverge cases.
            break
        if np.linalg.norm(y_tr - np.dot(x_mat, w)) <= tol_algo:
            break
    return np.linalg.norm(w - x_star), epochs, time.time() - t_start
def algo_graph_sto_iht(
        x_mat, y_tr, max_epochs, lr, x_star, x0, tol_algo, edges, costs, s, b,
        g=1, root=-1, gamma=0.1, proj_max_num_iter=50, verbose=0):
    """ Graph Stochastic Iterative Hard Thresholding.
    :param x_mat: the design matrix.
    :param y_tr: the array of measurements.
    :param max_epochs: the maximum epochs (iterations) allowed.
    :param lr: the learning rate (should be 1.0).
    :param x_star: the true signal.
    :param x0: x0 is the initial point.
    :param tol_algo: tolerance parameter for early stopping.
    :param edges: edges in the graph.
    :param costs: edge costs
    :param s: sparsity
    :param b: the block size
    :param g: number of connected component in the true signal.
    :param root: the root included in the result (default -1: no root).
    :param gamma: to control the upper bound of sparsity.
    :param proj_max_num_iter: maximum number of iterations of projection.
    :param verbose: print out some information.
    :return: 1. the final estimation error,
             2. number of epochs(iterations) used,
             3. and the run time.
    """
    np.random.seed()
    start_time = time.time()
    x_hat = np.copy(x0)
    x_tr_t = np.transpose(x_mat)
    # graph projection parameters: head window for the gradient, tail window
    # for the iterate.
    h_low = int(len(x0) / 2)
    h_high = int(h_low * (1. + gamma))
    t_low = int(s)
    t_high = int(s * (1. + gamma))
    (n, p) = x_mat.shape
    # if block size is larger than n,
    # just treat it as a single block (batch)
    b = n if n < b else b
    # bug fix: integer (floor) division -- under Python 3, `/` yields a float
    # and breaks range(num_blocks) and the list multiplication below.
    num_blocks = int(n) // int(b)
    prob = [1. / num_blocks] * num_blocks
    num_epochs = 0
    for epoch_i in range(max_epochs):
        num_epochs += 1
        for _ in range(num_blocks):
            # sample one block uniformly at random.
            ii = np.random.randint(0, num_blocks)
            block = range(b * ii, b * (ii + 1))
            xtx = np.dot(x_tr_t[:, block], x_mat[block])
            xty = np.dot(x_tr_t[:, block], y_tr[block])
            gradient = -2. * (xty - np.dot(xtx, x_hat))
            # head projection of the gradient onto the graph model.
            head_nodes, proj_grad = algo_head_tail_bisearch(
                edges, gradient, costs, g, root, h_low, h_high,
                proj_max_num_iter, verbose)
            bt = x_hat - (lr / (prob[ii] * num_blocks)) * proj_grad
            # tail projection of the iterate onto the graph model.
            tail_nodes, proj_bt = algo_head_tail_bisearch(
                edges, bt, costs, g, root,
                t_low, t_high, proj_max_num_iter, verbose)
            x_hat = proj_bt
        # early stopping for diverge cases due to the large learning rate
        if np.linalg.norm(x_hat) >= 1e3:  # diverge cases.
            break
        if np.linalg.norm(y_tr - np.dot(x_mat, x_hat)) <= tol_algo:
            break
    x_err = np.linalg.norm(x_hat - x_star)
    run_time = time.time() - start_time
    return x_err, num_epochs, run_time
def print_helper(method, trial_i, s, n, num_epochs, err, run_time):
    """Print a one-line progress summary for a finished trial."""
    line = ('%13s trial_%03d s: %02d n: %03d epochs: %03d '
            'rec_error: %.4e run_time: %.4e'
            % (method, trial_i, s, n, num_epochs, err, run_time))
    print(line)
def run_single_test(data):
    """ Run the four compared solvers (IHT, StoIHT, GraphIHT, GraphStoIHT)
    on a single (sparsity, measurements, trial) problem instance.
    :param data: dict built by run_test() carrying the signal, solver
        parameters and the graph used by the projections.
    :return: (trial index, number of measurements n, sparsity s,
              list of (method-name, final recovery error) pairs).
    """
    # re-seed: this function runs inside a multiprocessing worker, so each
    # process needs its own random state.
    np.random.seed()
    s, n, p, b = data['s'], data['n'], data['p'], data['b']
    lr = data['lr']
    x0 = data['x0']
    x_star = data['x_star']
    trial_i = data['trial_i']
    tol_algo = data['tol_algo']
    max_epochs = data['max_epochs']
    # a fresh Gaussian design matrix is drawn per trial; noiseless setting.
    x_mat, y_tr, _ = sensing_matrix(n=n, x=data['x_star'])
    edges = data['proj_para']['edges']
    costs = data['proj_para']['costs']
    rec_error = []
    # ------------- IHT ----------------
    err, num_epochs, run_time = algo_iht(
        x_mat=x_mat, y_tr=y_tr, max_epochs=max_epochs, lr=lr, s=s,
        x_star=x_star, x0=x0, tol_algo=tol_algo)
    rec_error.append(('iht', err))
    print_helper('iht', trial_i, s, n, num_epochs, err, run_time)
    # ------------- StoIHT -------------
    err, num_epochs, run_time = algo_sto_iht(
        x_mat=x_mat, y_tr=y_tr, max_epochs=max_epochs, lr=lr, s=s,
        x_star=x_star, x0=x0, tol_algo=tol_algo, b=b)
    rec_error.append(('sto-iht', err))
    print_helper('sto-iht', trial_i, s, n, num_epochs, err, run_time)
    # ------------- GraphIHT -----------
    err, num_epochs, run_time = algo_graph_iht(
        x_mat=x_mat, y_tr=y_tr, max_epochs=max_epochs, lr=lr, x_star=x_star,
        x0=x0, tol_algo=tol_algo, edges=edges, costs=costs, s=s)
    rec_error.append(('graph-iht', err))
    print_helper('graph-iht', trial_i, s, n, num_epochs, err, run_time)
    # ------------- GraphStoIHT --------
    err, num_epochs, run_time = algo_graph_sto_iht(
        x_mat=x_mat, y_tr=y_tr, max_epochs=max_epochs, lr=lr, x_star=x_star,
        x0=x0, tol_algo=tol_algo, edges=edges, costs=costs, s=s, b=b)
    rec_error.append(('graph-sto-iht', err))
    print_helper('graph-sto-iht', trial_i, s, n, num_epochs, err, run_time)
    return trial_i, n, s, rec_error
def run_test(p, lr, height, max_epochs, width, tol_algo, tol_rec, s_list,
             n_list, trim_ratio, num_cpus, num_trials, method_list,
             save_data_path):
    """ Build every (sparsity, measurements, trial) problem instance, solve
    them all in parallel via run_single_test, and save trimmed + raw results.
    :param p: signal dimension (width * height of the grid graph).
    :param lr: learning rate shared by all solvers.
    :param height: height of the grid graph.
    :param max_epochs: maximum epochs per solver.
    :param width: width of the grid graph.
    :param tol_algo: early-stopping tolerance of the solvers.
    :param tol_rec: error threshold that counts as successful recovery.
    :param s_list: sparsity levels to test.
    :param n_list: numbers of measurements to test.
    :param trim_ratio: fraction of best and of worst trials to discard.
    :param num_cpus: number of worker processes.
    :param num_trials: number of random trials per (s, n) pair.
    :param method_list: names of the compared methods.
    :param save_data_path: output pickle path.
    """
    np.random.seed()
    start_time = time.time()
    input_data_list = []
    saved_data = dict()
    for (s, n, trial_i) in product(s_list, n_list, range(num_trials)):
        print('data pair: (trial_%03d, s: %02d, n: %03d)' % (trial_i, s, n))
        b = int(np.fmin(s, n))
        edges, costs = simu_grid_graph(height=height, width=width)
        # initial node is located in the center of the grid graph.
        # bug fix: integer division keeps the node index an int under
        # Python 3 (float indices break random_walk's membership test).
        init_node = (height // 2) * width + height // 2
        sub_graphs = {s: random_walk(edges, s, init_node, 0.) for s in s_list}
        x_star = np.zeros(p)  # using standard Gaussian signal.
        x_star[sub_graphs[s][0]] = np.random.normal(loc=0.0, scale=1.0, size=s)
        data = {'lr': lr,
                'max_epochs': max_epochs,
                'trial_i': trial_i,
                's': s,
                'n': n,
                'n_list': n_list,
                's_list': s_list,
                'p': p,
                'b': b,
                'x_star': x_star,
                'x0': np.zeros(p),
                'subgraph': sub_graphs[s][0],
                'tol_algo': tol_algo,
                'height': height,
                'width': width,
                'tol_rec': tol_rec,
                'subgraph_edges': sub_graphs[s][1],
                'verbose': 0,
                # parameters used in head and tail projection.
                'proj_para': {'edges': edges, 'costs': costs}}
        if s not in saved_data:
            saved_data[s] = data
        input_data_list.append(data)
    pool = multiprocessing.Pool(processes=num_cpus)
    results_pool = pool.map(run_single_test, input_data_list)
    pool.close()
    pool.join()
    # accumulate raw recovery errors: method -> s -> (trial x n) matrix.
    sum_results = {
        method: {s: np.zeros((num_trials, len(n_list))) for s in s_list}
        for method in method_list}
    for trial_i, n, s, re in results_pool:
        n_ind = list(n_list).index(n)
        for method, val in re:
            sum_results[method][s][trial_i][n_ind] = val
    # try to trim 5% of the results (rounding when necessary).
    num_trim = int(round(trim_ratio * num_trials))
    trim_results = {
        method: {s: np.zeros(shape=(num_trials - 2 * num_trim, len(n_list)))
                 for s in s_list} for method in method_list}
    for method, s in product(method_list, s_list):
        re = sum_results[method][s]
        # remove 5% best and 5% worst.
        trimmed_re = np.sort(re, axis=0)[num_trim:num_trials - num_trim, :]
        trim_results[method][s] = trimmed_re
    for method in method_list:
        for s in s_list:
            re = trim_results[method][s]
            # binarize: 1.0 = successful recovery, 0.0 = failure.
            re[re > tol_rec] = 0.
            # cases that successfully recovered.
            re[re != 0.0] = 1.0
            trim_results[method][s] = re
    print('save results to file: %s' % save_data_path)
    # bug fix: use a context manager so the file handle is always closed.
    with open(save_data_path, 'wb') as f:
        pickle.dump({'trim_results': trim_results,
                     'sum_results': sum_results,
                     'saved_data': saved_data}, f)
    print('total run time of %02d trials: %.2f seconds.' %
          (num_trials, time.time() - start_time))
def show_test(s_list, n_list, method_list, label_list, save_data_path):
    """ Load the trimmed results saved by run_test() and plot probability of
    recovery vs. number of measurements, one subplot per sparsity level.
    :param s_list: the four sparsity levels (one 2x2 subplot each).
    :param n_list: numbers of measurements (x-axis).
    :param method_list: result keys of the compared methods.
    :param label_list: legend labels, parallel to method_list.
    :param save_data_path: pickle file written by run_test(); the figure is
        saved next to it with a .png extension.
    """
    import matplotlib.pyplot as plt
    from matplotlib import rc
    from pylab import rcParams
    plt.rcParams["font.family"] = "Times New Roman"
    plt.rcParams["font.size"] = 16
    rc('text', usetex=True)
    rcParams['figure.figsize'] = 8, 6
    color_list = ['b', 'g', 'm', 'r']
    marker_list = ['X', 'o', 'P', 's']
    # bug fix: pickle files must be opened in binary mode under Python 3.
    results = pickle.load(open(save_data_path, 'rb'))['trim_results']
    fig, ax = plt.subplots(2, 2, sharex='all', sharey='all')
    for ii, jj in product(range(2), range(2)):
        ax[ii, jj].grid(b=True, which='both', color='gray',
                        linestyle='dotted', axis='both')
        ax[ii, jj].spines['right'].set_visible(False)
        ax[ii, jj].spines['top'].set_visible(False)
    ax[1, 0].set_xticks(np.arange(0, max(n_list) + 1, 50))
    ax[1, 1].set_xticks(np.arange(0, max(n_list) + 1, 50))
    ax[0, 0].set_yticks([0.0, 0.2, 0.4, 0.6, 0.8, 1.0])
    ax[1, 0].set_yticks([0.0, 0.2, 0.4, 0.6, 0.8, 1.0])
    # dump a text table of mean recovery probabilities per sparsity level.
    for s in s_list:
        print(' '.join(method_list))
        # generalized: one column per method instead of the hard-coded 4.
        re_mat = np.zeros(shape=(len(n_list), len(method_list)))
        for method_ind, method in enumerate(method_list):
            for ind, _ in enumerate(
                    np.mean(results[method_list[method_ind]][s], axis=0)):
                re_mat[ind][method_ind] = _
        for ind, _ in enumerate(n_list):
            row = [str(_)]
            row.extend([str('%.3f' % _) for _ in re_mat[ind]])
            print(', '.join(row))
    caption_list = ['(a) ', '(b) ', '(c) ', '(d) ']
    for m_ind, s in enumerate(s_list):
        # bug fix: integer division -- float subplot indices fail in Python 3.
        ii, jj = m_ind // 2, m_ind % 2
        for method_ind, method in enumerate(method_list):
            re = np.mean(results[method_list[method_ind]][s], axis=0)
            ax[ii, jj].plot(n_list, re, c=color_list[method_ind],
                            markerfacecolor='none',
                            linestyle='-', marker=marker_list[method_ind],
                            markersize=6., markeredgewidth=1,
                            linewidth=1, label=label_list[method_ind])
        ax[ii, jj].set_title(r"%s $\displaystyle s=%d$" %
                             (caption_list[m_ind], s), fontsize=16)
        ttl = ax[ii, jj].title
        ttl.set_position([.5, 0.97])
    for i in range(2):
        ax[1, i].set_xlabel(r"$\displaystyle m$", labelpad=-0.5)
        ax[i, 0].set_ylabel(r"Probability of Recovery")
    ax[0, 0].legend(loc='lower right', framealpha=1.,
                    bbox_to_anchor=(1.0, 0.0),
                    fontsize=14., frameon=True, borderpad=0.1,
                    labelspacing=0.1, handletextpad=0.1, markerfirst=True)
    plt.subplots_adjust(wspace=0.05, hspace=0.2)
    save_data_path = save_data_path.replace('pkl', 'png')
    print('save fig to: %s' % save_data_path)
    plt.savefig(save_data_path, dpi=600, bbox_inches='tight', pad_inches=0,
                format='png')
    plt.close()
def generate_figures(root_p, save_data_path):
    """ Draw the grid graph for each sparsity level with the true subgraph
    highlighted in red, and save one PNG per sparsity level under root_p.
    :param root_p: output directory (with trailing separator).
    :param save_data_path: pickle file written by run_test(); its
        'saved_data' entry (keyed by sparsity) is visualized.
    """
    import networkx as nx
    import matplotlib.pyplot as plt
    from matplotlib import rc
    plt.rcParams["font.family"] = "Times New Roman"
    plt.rcParams["font.size"] = 18
    rc('text', usetex=True)
    # bug fix: pickle files must be opened in binary mode under Python 3.
    data = pickle.load(open(save_data_path, 'rb'))['saved_data']
    # the grid layout is shared across sparsity levels; s=20 is assumed to
    # be present in saved_data (it is in s_list of main()).
    edges = data[20]['proj_para']['edges']
    height, width = data[20]['height'], data[20]['width']
    p = data[20]['p']
    plt.figure(figsize=(5.0, 5.0))
    for s in data:
        pos, graph = dict(), nx.Graph()
        black_edges = []
        red_edges = []
        red_edge_list = []
        for edge in edges:
            graph.add_edge(edge[0], edge[1])
            if (edge[0], edge[1]) in data[s]['subgraph_edges']:
                red_edges.append('r')
                red_edge_list.append((edge[0], edge[1]))
            else:
                black_edges.append('k')
        for index, (i, j) in enumerate(product(range(height), range(width))):
            graph.add_node(index)
            pos[index] = (j, height - i)
        print('generate subgraph, which has %02d nodes.' % s)
        nx.draw_networkx_nodes(
            graph, pos, node_size=15, nodelist=range(p), linewidths=.5,
            node_color='w', edgecolors='k', font_size=6)
        # color the subgraph nodes by a fresh Gaussian draw (cosmetic only).
        x_values = np.random.normal(loc=0.0, scale=1.0,
                                    size=len(data[s]['subgraph']))
        nx.draw_networkx_nodes(
            graph, pos, node_size=15, nodelist=data[s]['subgraph'],
            linewidths=.5, node_color=x_values, cmap='jet',
            edgecolors='k', font_size=6)
        nx.draw_networkx_edges(
            graph, pos, alpha=0.4, width=0.5, edge_color='k', font_size=6)
        nx.draw_networkx_edges(
            graph, pos, alpha=0.8, width=2.0, edgelist=red_edge_list,
            edge_color='r', font_size=6)
        plt.axis('off')
        fig = plt.gcf()
        fig.set_figheight(1.4)
        fig.set_figwidth(1.4)
        plt.subplots_adjust(0, 0, 1, 1, 0, 0)
        for ax in fig.axes:
            ax.axis('off')
            ax.margins(0.02, 0.02)
            ax.xaxis.set_major_locator(plt.NullLocator())
            ax.yaxis.set_major_locator(plt.NullLocator())
        f_name = root_p + 'results_exp_sr_test01_s_%02d.png' % s
        fig.savefig(f_name, dpi=600, pad_inches=0.0, format='png')
        plt.close()
def main():
    """ Command line entry point.
    Usage:
        python exp_sr_test01.py run_test <num_cpus>   run all trials
        python exp_sr_test01.py show_test             plot the recovery curves
        python exp_sr_test01.py gen_figures           draw subgraph figures
    """
    # list of methods considered
    method_list = ['iht', 'sto-iht', 'graph-iht', 'graph-sto-iht']
    label_list = ['IHT', 'StoIHT', 'GraphIHT', 'GraphStoIHT']
    # 4 different sparsity parameters considered.
    s_list = np.asarray([8, 20, 28, 36])
    # number of measurements list
    n_list = np.arange(5, 251, 5)
    # try 50 different trials
    num_trials = 50
    # tolerance of the algorithm
    tol_algo = 1e-7
    # tolerance of the recovery.
    tol_rec = 1e-6
    # the dimension of the grid graph.
    p = 256
    # the trimmed ratio (5% of the best and worst have been removed).
    trim_ratio = 0.05
    # height and width of the grid graph.
    height, width = 16, 16
    # maximum number of epochs allowed for all methods.
    max_epochs = 500
    # learning rate ( consistent with Needell's paper)
    lr = 1.0
    # TODO config the path by yourself.
    root_p = 'results/'
    if not os.path.exists(root_p):
        os.mkdir(root_p)
    save_data_path = root_p + 'results_exp_sr_test01.pkl'
    # os.sys is an alias of the sys module; argv[1] selects the sub-command.
    if len(os.sys.argv) <= 1:
        print('\n'.join(['please use one of the following commands: ',
                         '1. python exp_sr_test01.py run_test 50',
                         '2. python exp_sr_test01.py show_test',
                         '3. python exp_sr_test01.py gen_figures']))
        exit(0)
    command = os.sys.argv[1]
    if command == 'run_test':
        num_cpus = int(os.sys.argv[2])
        run_test(p=p,
                 lr=lr,
                 height=height,
                 width=width,
                 max_epochs=max_epochs,
                 tol_algo=tol_algo,
                 tol_rec=tol_rec,
                 s_list=s_list,
                 n_list=n_list,
                 trim_ratio=trim_ratio,
                 num_cpus=num_cpus,
                 num_trials=num_trials,
                 method_list=method_list,
                 save_data_path=save_data_path)
    elif command == 'show_test':
        show_test(s_list=s_list,
                  n_list=n_list,
                  method_list=method_list,
                  label_list=label_list,
                  save_data_path=save_data_path)
    elif command == 'gen_figures':
        generate_figures(root_p=root_p,
                         save_data_path=save_data_path)
    else:
        print('\n'.join(['you can try: ',
                         '1. python exp_sr_test01.py run_test 50',
                         '2. python exp_sr_test01.py show_test',
                         '3. python exp_sr_test01.py gen_figures']))
|
## Managing Resources in Storage System Tests
Whenever possible (i.e. when your test does not mutate buckets), you should use
the default bucket available at `StorageTestCase::$bucket`. If you must create a
bucket, use `StorageTestCase::createBucket()` in order to correctly configure
the deletion queue.
Because buckets cannot be deleted unless they are empty, it is sometimes
difficult to ensure that deletion is queued in the correct order. Due to this,
objects should NOT be added to the deletion queue. Instead, they should be
created inside a bucket which was created using
`StorageTestCase::createBucket()`.
When the deletion queue is processed, all buckets created using the
`StorageTestCase::createBucket()` method will be emptied of all objects residing
in them prior to the deletion of the bucket itself.
|
// SPDX-FileCopyrightText: 2021 Lars Geyer-Blaumeiser <[email protected]>
// SPDX-License-Identifier: MIT
package de.lgblaumeiser.ptm.service
import de.lgblaumeiser.ptm.service.analysis.ActivityComputer
import de.lgblaumeiser.ptm.service.analysis.HourComputer
import de.lgblaumeiser.ptm.service.analysis.ProjectComputer
/**
 * Facade over the three time-tracking analysis computers (project, activity,
 * hours). Each run* method analyses the bookings of [username] in the day
 * interval from [firstDay] to [firstDayAfter] (presumably half-open, i.e.
 * firstDayAfter is exclusive -- confirm against the computer implementations)
 * and maps the raw computer output into string-based result elements.
 */
class AnalysisService(val activityService: ActivityService, val bookingService: BookingService) {
    // Underlying analysis engines, wired to the domain services.
    private val projectAnalysis = ProjectComputer(activityService, bookingService)
    private val activityAnalysis = ActivityComputer(activityService, bookingService)
    private val hourAnalysis = HourComputer(bookingService)
    /** Aggregates booked time per project for the given user and period. */
    fun runProjectAnalysis(username: String, firstDay: String, firstDayAfter: String) =
        projectAnalysis.analyze(username, firstDay, firstDayAfter).map {
            ProjectAnalyisResultElement(
                projectId = it.projectId,
                projectName = it.projectName,
                minutes = it.minutesString(),
                percentage = it.percentageString(),
                comment = it.comment
            )
        }.toList()
    /** Aggregates booked time per activity for the given user and period. */
    fun runActivityAnalysis(username: String, firstDay: String, firstDayAfter: String) =
        activityAnalysis.analyze(username, firstDay, firstDayAfter).map {
            ActivityAnalysisResultElement(
                projectId = it.projectId,
                projectName = it.projectName,
                // NOTE(review): assumes every activity-analysis row carries
                // non-null activityId/activityName -- throws NPE otherwise.
                activityId = it.activityId!!,
                activityName = it.activityName!!,
                minutes = it.minutesString(),
                percentage = it.percentageString(),
                comment = it.comment
            )
        }.toList()
    /** Computes per-day presence/work/break/overtime figures for the user. */
    fun runHourAnalysis(username: String, firstDay: String, firstDayAfter: String) =
        hourAnalysis.analyze(username, firstDay, firstDayAfter).map{
            HourAnalysisResultElement(
                bookingday = it.bookingdayString(),
                starttime = it.starttimeString(),
                endtime = it.endtimeString(),
                presence = it.presenceString(),
                worktime = it.worktimeString(),
                breaktime = it.breaktimeString(),
                total = it.totalString(),
                overtime = it.overtimeString(),
                comment = it.comment
            )
        }.toList()
}
/**
 * One row of the per-project analysis result; all values are preformatted
 * strings for presentation.
 *
 * NOTE(review): the class name misspells "Analysis" as "Analyis"; kept
 * unchanged because callers reference the type by this name.
 */
data class ProjectAnalyisResultElement(
    val projectId: String,
    val projectName: String,
    val minutes: String,
    val percentage: String,
    val comment: String
)
/**
 * One row of the per-activity analysis result; all values are preformatted
 * strings for presentation.
 */
data class ActivityAnalysisResultElement(
    val projectId: String,
    val projectName: String,
    val activityId: String,
    val activityName: String,
    val minutes: String,
    val percentage: String,
    val comment: String
)
/**
 * One row of the per-day hour analysis result; all values are preformatted
 * strings for presentation (times, durations and overtime per booking day).
 */
data class HourAnalysisResultElement(
    val bookingday: String,
    val starttime: String,
    val endtime: String,
    val presence: String,
    val worktime: String,
    val breaktime: String,
    val total: String,
    val overtime: String,
    val comment: String
)
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using UnityEngine;
/// <summary>
/// 基于运行系统时间的时钟,可获得对应的时间,或者设置Timer
/// </summary>
class SystemTimer
{
    //----------------------------------------------------------------------------
    // Time retrieval
    //----------------------------------------------------------------------------

    /// <summary>
    /// Gets the current system time as a long value
    /// (Windows FILETIME: 100-nanosecond ticks since 1601-01-01, UTC).
    /// </summary>
    /// <returns>The current UTC time in FILETIME ticks.</returns>
    public static long GetSysTime()
    {
        return DateTime.UtcNow.ToFileTimeUtc();// FILETIME ticks, see GetTimeSpanSeconds(long, long)
    }

    /// <summary>
    /// Gets the difference between two DateTime values.
    /// </summary>
    /// <returns>Elapsed seconds; negative if timeAfter precedes timeBefore.</returns>
    public static double GetTimeSpanSeconds(DateTime timeBefore,DateTime timeAfter)
    {
        TimeSpan span = timeAfter -timeBefore ;
        return span.TotalSeconds;
    }

    /// <summary>
    /// Gets the difference between two FILETIME tick values
    /// (as produced by GetSysTime()).
    /// </summary>
    /// <returns>Elapsed seconds.</returns>
    public static double GetTimeSpanSeconds(long timeBefore, long timeAfter)
    {
        // ToFileTimeUtc produces 100-ns ticks: 1 second = 10,000,000 ticks,
        // counted from the year 1601.
        // (year of a tick value t: (t/3600/10000000/24/365) + 1601)
        double delta = ((double)timeAfter - timeBefore)/10000000;
        return delta;
    }

    /// <summary>
    /// Converts a time value in whole seconds to the "00:00:00" format.
    /// </summary>
    /// <param name="value">Time in seconds.</param>
    /// <returns>hh:mm:ss string (hours may exceed two digits).</returns>
    public static string TimeToStr(int value)
    {
        int hours = value / 3600;
        value = value % 3600;
        int min = value / 60;
        value = value % 60;
        int secs = value;
        return string.Format(
            "{0}:{1}:{2}",
            hours.ToString("00"),
            min.ToString("00"),
            secs.ToString("00")
        );
    }

    /// <summary>
    /// Converts a "00:00:00" formatted string to seconds. Fields that fail
    /// to parse contribute 0 (int.TryParse leaves the out value at 0).
    /// </summary>
    /// <returns>The time in seconds.</returns>
    public static int StrToTime(string str)
    {
        string[] timeStrs = str.Split(':');
        int hours, mins, secs;
        int.TryParse(timeStrs[0], out hours);
        int.TryParse(timeStrs[1], out mins);
        int.TryParse(timeStrs[2], out secs);
        int result= hours * 3600 + mins * 60 + secs;
        return result;
    }

    //----------------------------------------------------------------------------
    // Timers
    //----------------------------------------------------------------------------

    /// <summary>
    /// Lazily created host component used to run coroutines from this
    /// static context.
    /// </summary>
    static MonoBehaviour g_goCoroutineDelegater;

    static void InitCoroutine()
    {
        if(g_goCoroutineDelegater==null)
        {
            // NOTE(review): AddComponent<MonoBehaviour>() targets the abstract
            // base class; in Unity this fails at runtime and leaves the field
            // null. A concrete MonoBehaviour subclass is presumably intended
            // here -- verify.
            g_goCoroutineDelegater = new GameObject("CoroutineDelegater"). AddComponent<MonoBehaviour>();
        }
    }

    /// <summary>
    /// Schedules a one-shot delayed action (callers need no MonoBehaviour).
    /// </summary>
    public static void SetDelayTimer(CallbackFunc cbAction, float timeDelta)
    {
        InitCoroutine();
        g_goCoroutineDelegater.StartCoroutine(GameObjFunc.IDelayDoSth(cbAction,timeDelta));
    }

    /// <summary>
    /// Starts a timer invoking the callback once every interval.
    /// </summary>
    /// <param name="cbUpdateAction">update action.</param>
    /// <param name="timeDelta">Time delta.</param>
    public static void SetUpdateTimer(CallbackFunc cbUpdateAction, float timeDelta)
    {
        InitCoroutine();
        g_goCoroutineDelegater.StartCoroutine(GameObjFunc.IUpdateDo(cbUpdateAction,timeDelta));
    }
}
|
/**
Утилиты
**/
function z_ajax(ajax_params, params) {
    // Loads data via a jQuery AJAX request and reports the outcome through a
    // small thenable-like object with .then / .catch / .finally registration:
    //  - failed HTTP statuses AND an "error" answer in a 200 response are
    //    both routed to the .catch handler;
    //  - the .finally handler runs in every case;
    //  - if the .then handler throws, the exception is forwarded to .catch.
    // ajax_params.init_func(resolve, reject) may run before the request:
    //   return null/undefined to continue; {resolve: v} / {reject: v} to
    //   settle immediately without issuing the request; any other non-null
    //   object means init_func already settled the promise itself.
    // ajax_params.no_check_answer (or params.no_answer) skips the
    // data.answer == "success" check.
    let func_obj = {
        _then_func: null,
        _catch_func: null,
        _finally_func: null,
        then (func) {
            func_obj._then_func = func;
            return func_obj
        },
        catch (func) {
            func_obj._catch_func = func;
            return func_obj
        },
        finally (func) {
            func_obj._finally_func = func;
            return func_obj
        }
    };
    let run_finally = () => {
        let cbk = func_obj._finally_func;
        if (cbk !== undefined && cbk !== null) {
            return cbk()
        }
    };
    let run_catch = (err) => {
        let cbk = func_obj._catch_func;
        if (cbk !== undefined && cbk !== null) {
            cbk(err)
        }
    };
    let init_func = null;
    ajax_params.dataType = "json";
    if (typeof params === "undefined" || params === null) {
        params = {}
    }
    if (typeof ajax_params.init_func !== "undefined") {
        init_func = ajax_params.init_func
    }
    if (typeof ajax_params.no_check_answer !== "undefined") {
        params.no_answer = ajax_params.no_check_answer
    }
    (new Promise((resolve, reject) => {
        if (init_func !== null) {
            let res_init = init_func(resolve, reject);
            if (typeof res_init !== "undefined" && res_init !== null) {
                // fixed: compare typeof against the string "undefined",
                // not the value undefined (which was always true).
                if (typeof res_init.resolve !== "undefined") {
                    resolve(res_init.resolve);
                    return;
                }
                if (typeof res_init.reject !== "undefined") {
                    reject(res_init.reject);
                    return;
                }
                return;
            }
        }
        $.ajax(ajax_params)
            .done((data) => {
                // fixed: the old code referenced done_func/err_func/
                // finall_func, which do not exist in this function; user
                // callbacks are now invoked via func_obj below.
                if (params.no_answer || data.answer == "success") {
                    resolve(data)
                } else {
                    reject(data.error)
                }
            })
            .fail((err) => {
                reject(err.statusText)
            })
    }))
        .then((data) => {
            let cbk = func_obj._then_func;
            try {
                if (cbk !== undefined && cbk !== null) {
                    cbk(data)
                }
            } catch (e) {
                // an exception thrown in .then is forwarded to .catch.
                run_catch(e)
            }
            run_finally()
        }, (err) => {
            // fixed: the old rejection handler called cbk(data) with the
            // undefined variable `data` instead of the rejection value.
            run_catch(err);
            run_finally()
        });
    return func_obj;
}
function z_load_ajax(ajax_params, done_func, err_func, finall_func, params) {
    // Returns a Promise wrapping a jQuery AJAX call made with ajax_params.
    // The JSON response is checked for answer == "success".
    // Callbacks (all optional):
    //   done_func(data)               - success handler; if it returns a
    //     non-null value the promise is rejected with that value instead
    //     (routed through err_func('user', res, res) when err_func is set).
    //   err_func(mode, err_text, err) - failure handler; its return value
    //     becomes the rejection reason. mode is 'server' | 'data' | 'user'.
    //   finall_func()                 - always invoked before settling.
    // Callbacks may also be supplied inside ajax_params (done_func,
    // err_func, finall_func); ajax_params.no_check_answer (or
    // params.no_answer) disables the answer check.
    // ajax_params.init_func(resolve, reject) runs before the request:
    //   return null/undefined    - continue with the request;
    //   {resolve: v}/{reject: v} - settle the promise immediately;
    //   other non-null object    - init_func already settled the promise.
    var init_func=null;
    var chk_callback = (cbk) => {
        if (typeof cbk==="undefined") return null;
        return cbk
    }
    ajax_params.dataType = "json";
    if (typeof params === "undefined" || params===null) {
        params={}
    }
    done_func = chk_callback(done_func)
    if (typeof ajax_params.done_func !== "undefined") {
        done_func = ajax_params.done_func
    }
    err_func = chk_callback(err_func)
    if (typeof ajax_params.err_func !== "undefined") {
        err_func = ajax_params.err_func
    }
    finall_func = chk_callback(finall_func)
    if (typeof ajax_params.finall_func !== "undefined") {
        finall_func = ajax_params.finall_func
    }
    if (typeof ajax_params.init_func !== "undefined") {
        init_func = ajax_params.init_func
    }
    if (typeof ajax_params.no_check_answer !== "undefined") {
        params.no_answer = ajax_params.no_check_answer
    }
    return new Promise((resolve, reject) => {
        if (init_func !== null) {
            let res_init = init_func(resolve, reject);
            if (typeof res_init !=="undefined" && res_init !== null) {
                let res = res_init.resolve;
                if (typeof res !== "undefined") {
                    if (finall_func !== null) finall_func();
                    resolve(res);
                    return;
                }
                res = res_init.reject;
                if (typeof res !== "undefined") {
                    if (finall_func !== null) finall_func();
                    console.log("init function reject");
                    reject(res);
                    return;
                }
                return;
            }
        }
        $.ajax(ajax_params)
            .done((data) => {
                if (params.no_answer || data.answer == "success") {
                    // fixed: done_func is optional and may be null here;
                    // guard before invoking it.
                    let res = (done_func !== null) ? done_func(data) : null;
                    if (typeof res === "undefined" || res == null) {
                        if (finall_func !== null) finall_func();
                        resolve(data);
                        return
                    } else {
                        let eres = null;
                        if (err_func !== null) {
                            eres = err_func('user', res, res)
                        } else {
                            eres = res
                        }
                        if (finall_func !== null) finall_func();
                        reject(eres);
                        return
                    }
                } else {
                    let eres = null;
                    if (err_func !== null) {
                        eres = err_func('data', data.error, data)
                    } else {
                        eres = data.error
                    }
                    if (finall_func !== null) finall_func();
                    reject(eres);
                    return
                }
            })
            .fail((err) => {
                let eres = null;
                if (err_func !== null) {
                    eres = err_func('server', err.statusText, err)
                } else {
                    eres = err.statusText
                }
                if (finall_func !== null) finall_func();
                reject(eres);
                return
            })
    })
};
const storeLoadMixin = {
// Vue mixin для загрузки данных через action store.
// определяет в data элемент data_loading
// и метод store_load_data
//
data () {return {
data_loading: false
}},
methods: {
store_load_data (action, params, done_func, err_func, final_func) {
// done_func, err_func, final_func - необязательные
this.data_loading = true;
this.$store.dispatch(action, params)
.then((res)=>{
this.data_loading = false;
if (typeof done_func !=="undefined") done_func(res);
if (typeof final_func !=="undefined") final_func();
},(err)=>{
if (typeof err_func !=="undefined") {
err_func(err);
} else {
alert(err);
};
this.data_loading = false;
if (typeof final_func !=="undefined") final_func();
})
}
}
};
function z_load_ajax_0(ajax_params, done_func, err_func, finall_func, params) {
    // Legacy Promise wrapper around a jQuery $.ajax call (older variant of
    // the helper above; here all callbacks are required, not optional).
    // Forces dataType to "json". Unless params.no_answer is set, a response
    // counts as successful only when data.answer == "success".
    // Success: done_func(data) runs; if it returns a non-null value the
    // promise is rejected with err_func('user', value, value) instead of
    // resolving. Data-level error: err_func('data', data.error, data).
    // Transport failure: err_func('server', err.statusText, err).
    // The rejection value is whatever err_func returned; finall_func()
    // always runs right before the promise settles.
    ajax_params.dataType = "json";
    if (params == null) {
        params = {};
    }
    return new Promise((resolve, reject) => {
        const fail_with = (mode, text, raw) => {
            const reason = err_func(mode, text, raw);
            finall_func();
            reject(reason);
        };
        $.ajax(ajax_params)
            .done((data) => {
                if (!params.no_answer && data.answer != "success") {
                    fail_with('data', data.error, data);
                    return;
                }
                const res = done_func(data);
                if (res == null) {
                    finall_func();
                    resolve();
                } else {
                    fail_with('user', res, res);
                }
            })
            .fail((err) => {
                fail_with('server', err.statusText, err);
            });
    });
}
|
part of axmvvm.models;
/// A subscription to be used with the MessageService.
///
/// Pairs a message [name] with the [messageHandler] callback that should run
/// whenever a message is published under that name.
class Subscription {
  final String _name;
  final Function(Object parameter) _messageHandler;
  Subscription(this._name, this._messageHandler);
  /// The name of the subscription to listen to messages for.
  ///
  /// Any messages sent with this name will be delivered to the message handler.
  String get name => _name;
  /// The method to call when a message is received for the name.
  ///
  /// The [parameter] contains the payload of the message.
  Function(Object parameter) get messageHandler => _messageHandler;
}
|
using JT809.Protocol.Extensions;
using JT809.Protocol.MessageBody;
using System;
using System.Buffers;
using System.Collections.Generic;
using System.Text;
namespace JT809.Protocol.Formatters.MessageBodyFormatters
{
public class JT809_0x9003_Formatter : IJT809Formatter<JT809_0x9003>
{
public JT809_0x9003 Deserialize(ReadOnlySpan<byte> bytes, out int readSize)
{
int offset = 0;
JT809_0x9003 jT809_0X9003 = new JT809_0x9003();
jT809_0X9003.VerifyCode = (JT809BinaryExtensions.ReadUInt32Little(bytes, ref offset));
readSize = offset;
return jT809_0X9003;
}
public int Serialize(ref byte[] bytes, int offset, JT809_0x9003 value)
{
offset += JT809BinaryExtensions.WriteUInt32Little(bytes, offset, value.VerifyCode);
return offset;
}
}
}
|
use std::collections::BTreeMap;
use std::cmp::{Ord, PartialOrd, Ordering};
use std::alloc::{alloc, dealloc, Layout};
use std::ptr::NonNull;
// I wrote my own Box + vtable after spending hours battling:
// * std::Any and its 'static requirement
// * * This makes the casting safer and easier, but requires 'static making it a non-starter
// * The lack of clarity the compiler has into Box<dyn Trait> objects
// * * Specifically, "this type object may not live long enough"
// * Cleanly retrieving actual values and references to said values out of the erased types
// * * The more I worked at this, the more I feel like I violated aliasing and referencing rules
// * * This also was hairy with trying to move out of the Box<dyn Trait> objects
// Overall, this is certainly not a production-ready library. I would not be surprised at all if
// my type erasure currently allows me to violate all sorts of lifetime requirements. This is just
// intended as a proof-of-concept of how type-erased structs can improve compile times
// Minimal hand-rolled vtable for a type-erased value: how to lay it out in
// memory, how to compare two instances (keys only), and how to drop one.
struct Vtable {
    layout: fn() -> Layout,
    ord: fn(NonNull<u8>, NonNull<u8>) -> Ordering,
    drop: fn(NonNull<u8>)
}
// Layout for `T`, with size and alignment clamped up to at least 1 so that
// even zero-sized types get a real (non-empty) allocation.
fn fixed_layout<T>() -> Layout {
    let size = std::mem::size_of::<T>().max(1);
    let align = std::mem::align_of::<T>().max(1);
    Layout::from_size_align(size, align).expect("Invalid type layouts")
}
// Placeholder for the `ord` slot of value vtables: values are never compared,
// so reaching this is a logic error.
fn no_ord(_: NonNull<u8>, _: NonNull<u8>) -> Ordering {
    unimplemented!()
}
// Compare two erased pointers by reinterpreting them as `&T` and using
// `T: Ord`. Safety: both pointers must reference live, properly aligned `T`s.
fn ord<T: Ord>(a: NonNull<u8>, b: NonNull<u8>) -> Ordering {
    unsafe { a.cast::<T>().as_ref().cmp(b.cast::<T>().as_ref()) }
}
// Run `T`'s destructor on the erased pointer. The slot is left as
// uninitialized memory afterwards; the caller still owns the allocation.
fn do_drop<T>(val: NonNull<u8>) {
    unsafe {
        std::ptr::drop_in_place(val.cast::<T>().as_ptr());
    }
}
// Supplies the two static vtables for a concrete (K, V) pair. Only keys get a
// real `ord`; values use the `no_ord` placeholder since they are never compared.
trait VtableForType<K: Ord + Eq, V> {
    const KTABLE: Vtable = Vtable {
        layout: fixed_layout::<K>,
        ord: ord::<K>,
        drop: do_drop::<K>,
    };
    const VTABLE: Vtable = Vtable {
        layout: fixed_layout::<V>,
        ord: no_ord,
        drop: do_drop::<V>,
    };
}
// An owning, type-erased heap box: a raw allocation plus the static vtable
// that knows how to lay out, compare, and drop whatever lives in it.
struct UsableKeyBox {
    ptr: NonNull<u8>,
    table: &'static Vtable,
}
impl UsableKeyBox {
    /// Allocates storage per the vtable's layout and moves `val` into it.
    ///
    /// `table` must be the vtable generated for `V`; otherwise later
    /// comparisons/drops will reinterpret the bytes as the wrong type.
    fn new<V>(val: V, table: &'static Vtable) -> UsableKeyBox {
        // This layout is used since it will always allocate extra for a zero sized type
        let layout = (table.layout)();
        let ptr = NonNull::new(unsafe { alloc(layout) }).expect("Allocation returned null");
        let real_ptr = ptr.as_ptr() as *mut V;
        unsafe {
            std::ptr::write(real_ptr, val);
        }
        UsableKeyBox {
            ptr,
            table
        }
    }
    /// Frees the allocation WITHOUT running the contained value's destructor.
    ///
    /// Used by `claim` after the value has already been moved out with
    /// `ptr::read`. `self` must be wrapped in `ManuallyDrop` here: previously
    /// `self` was dropped normally at the end of this method, which ran the
    /// `Drop` impl — dropping the already-moved-out value and deallocating
    /// the buffer a second time (a double free).
    #[inline(never)]
    fn free_mem(self) {
        let this = std::mem::ManuallyDrop::new(self);
        let layout = (this.table.layout)();
        unsafe { dealloc(this.ptr.as_ptr(), layout); }
    }
    /// Moves the value out of the box and frees the allocation.
    ///
    /// `V` must be the exact type this box was created with.
    fn claim<V>(self) -> V {
        let ptr = self.ptr.cast::<V>().as_ptr();
        let rval = unsafe {
            std::ptr::read(ptr)
        };
        self.free_mem();
        rval
    }
}
// Runs the contained value's destructor via the vtable, then frees the
// allocation. Only reached for boxes that were not consumed by `claim`.
impl Drop for UsableKeyBox {
    fn drop(&mut self) {
        (self.table.drop)(self.ptr);
        let layout = (self.table.layout)();
        unsafe { dealloc(self.ptr.as_ptr(), layout); }
    }
}
impl Ord for UsableKeyBox {
    fn cmp(&self, other: &UsableKeyBox)-> Ordering {
        // Comparing boxes from different vtables would reinterpret bytes as
        // the wrong type, so insist both sides share the same static vtable.
        assert!(self.table as *const _ == other.table as *const _);
        (self.table.ord)(self.ptr, other.ptr)
    }
}
// Delegates to the total order above; comparison never fails.
impl PartialOrd for UsableKeyBox {
    fn partial_cmp(&self, other: &UsableKeyBox)-> Option<Ordering> {
        Some(self.cmp(other))
    }
}
impl Eq for UsableKeyBox {}
// Equality is defined entirely in terms of the vtable-driven `cmp`.
impl PartialEq for UsableKeyBox {
    fn eq(&self, other: &UsableKeyBox) -> bool {
        matches!(self.cmp(other), Ordering::Equal)
    }
}
// Monomorphization-free core: a single BTreeMap over erased boxes shared by
// every ErasedBTreeMap<K, V> instantiation.
#[derive(Default)]
struct InnerTreeMap {
    map: BTreeMap<UsableKeyBox, UsableKeyBox>,
}
impl InnerTreeMap {
    // #[inline(never)] keeps this code out of the generic wrappers so it is
    // compiled once, which is the point of the type-erasure experiment.
    #[inline(never)]
    fn new() -> InnerTreeMap {
        Self::default()
    }
    // Inserts an erased key/value pair, returning the previous erased value
    // for the key, if any.
    #[inline(never)]
    fn insert(&mut self, key: UsableKeyBox, val: UsableKeyBox) -> Option<UsableKeyBox> {
        self.map.insert(key, val)
    }
}
// Public, typed facade over the erased map. The generic layer only boxes and
// unboxes values; all tree logic lives in the shared InnerTreeMap.
#[derive(Default)]
pub struct ErasedBTreeMap<K: Ord + Eq, V> {
    inner: InnerTreeMap,
    // Ties K and V to this struct for variance/drop purposes.
    // NOTE(review): the marker uses HashMap<K, V> rather than BTreeMap —
    // presumably just to relate K and V; confirm the choice was intentional.
    _marker: std::marker::PhantomData<std::collections::HashMap<K, V>>,
}
impl<K: Ord + Eq, V> VtableForType<K, V> for ErasedBTreeMap<K, V> {}
impl<K: Ord + Eq, V> ErasedBTreeMap<K, V> {
    /// Creates an empty map.
    pub fn new() -> Self {
        ErasedBTreeMap {
            inner: InnerTreeMap::new(),
            _marker: std::marker::PhantomData,
        }
    }
    /// Inserts `key -> val`, returning the previous value for `key`, if any.
    /// Both sides are boxed with the per-type vtables, and any evicted value
    /// is claimed back out as a concrete `V`.
    pub fn insert(&mut self, key: K, val: V) -> Option<V> {
        let key = UsableKeyBox::new(key, &<Self as VtableForType<K, V>>::KTABLE);
        let val = UsableKeyBox::new(val, &<Self as VtableForType<K, V>>::VTABLE);
        self.inner.insert(key, val).map(|ukb| ukb.claim())
    }
}
|
import * as React from 'react'
import styled, { css, StyledComponent } from 'styled-components'
import { Label } from '../Label'
import { fromTheme, StyledProps } from '../utils/styled'
import { preciseEm } from '../utils/styled/isolated'
import { Container } from './Container'
// Default label width (in em) used when `align` is passed as `true`.
const ALIGNED_WIDTH = 20 // em
// Props accepted by the Field component.
export interface FieldProps {
  horizontal?: boolean // lay label and control out on one row instead of stacking
  align?: true | string // label flex-basis: `true` = ALIGNED_WIDTH em, string = explicit CSS size
  nowrap?: boolean // suppress text wrapping inside the field
}
export type StyledFieldProps = FieldProps & StyledProps
// The component type plus the statically attached `Container` sub-component.
type FieldType = StyledComponent<'fieldset', any, StyledFieldProps, never> & {
  Container: typeof Container
}
// Would like to use styled.fieldset but can't due to
// https://github.com/w3c/csswg-drafts/issues/321
/**
 * Form field wrapper. Stacks a Label above its control by default, or places
 * them on one row when `horizontal` is set; `align` then fixes the label's
 * flex-basis so adjacent fields line up. Cast to FieldType so the static
 * `Container` attachment below type-checks.
 */
export const Field = styled.div<StyledFieldProps>`
  display: flex;
  flex-flow: row wrap;
  flex-direction: ${props => props.horizontal ? 'row' : 'column'};
  align-items: ${props => props.horizontal ? 'center' : 'unset'};
  flex: 1;
  white-space: ${props => props.nowrap ? 'nowrap' : 'unset'};
  &:not(:last-child) {
    margin-bottom: ${fromTheme(theme => theme.global.baseSpacing)}em;
  }
  > ${Label}:first-child {
    padding-bottom: ${props => props.horizontal
    ? 'unset'
    : css`${fromTheme(theme => theme.global.baseSpacing / 2)}em`};
    margin-right: ${props => props.horizontal
    ? css`${fromTheme(theme => theme.global.baseSpacing)}em`
    : 'unset'};
    flex-grow: 0;
    flex-shrink: 0;
    flex-basis: ${props => (props.horizontal && props.align)
    ? (props.align === true) ? `${preciseEm(ALIGNED_WIDTH)}em` : props.align
    : 'auto'
  };
    > ${Label} {
      padding-bottom: unset;
    }
  }
  ${props => props.horizontal && css`
    > * {
      flex-grow: 1;
    }
    > *:not(${Label}):not(:last-child) {
      margin-bottom: ${fromTheme(theme => theme.global.baseSpacing)}em;
    }
  `}
` as FieldType
Field.Container = Container
Field.displayName = 'Field'
|
#include <gtest/gtest.h>
// Requires C++20
// Demonstrates properties of captureless lambdas: they are copyable, and
// since C++20 also default-constructible; copies share the same closure type.
TEST(LambdaTypes, StatelessLambdas) {
  auto x = [] {}; // A lambda without captures
  auto y = x; // Assignable
  decltype(y) z; // Default-constructible (C++20 for captureless lambdas)
  static_assert(std::is_same_v<decltype(x), decltype(y)>); // passes
  static_assert(std::is_same_v<decltype(x), decltype(z)>); // passes
}
use crate::{encode_functype, encoders, ComponentSection, ComponentSectionId, EntityType, ValType};
/// Represents a module type.
#[derive(Debug, Clone, Default)]
pub struct ModuleType {
    bytes: Vec<u8>,       // encoded definitions, in insertion order
    num_added: u32,       // total definitions (functions + imports + exports)
    types_added: u32,     // how many of those definitions were types
}
impl ModuleType {
    /// Creates a new module type.
    pub fn new() -> Self {
        Self::default()
    }
    /// Define a function in this module type.
    pub fn function<P, R>(&mut self, params: P, results: R) -> &mut Self
    where
        P: IntoIterator<Item = ValType>,
        P::IntoIter: ExactSizeIterator,
        R: IntoIterator<Item = ValType>,
        R::IntoIter: ExactSizeIterator,
    {
        // 0x01 tags a type definition in this encoding — confirm against the
        // current component binary format spec.
        self.bytes.push(0x01);
        encode_functype(&mut self.bytes, params, results);
        self.num_added += 1;
        self.types_added += 1;
        self
    }
    /// Defines an import in this module type.
    pub fn import(&mut self, module: &str, name: &str, ty: EntityType) -> &mut Self {
        // 0x02 tags an import definition.
        self.bytes.push(0x02);
        self.bytes.extend(encoders::str(module));
        self.bytes.extend(encoders::str(name));
        ty.encode(&mut self.bytes);
        self.num_added += 1;
        self
    }
    /// Defines an export in this module type.
    pub fn export(&mut self, name: &str, ty: EntityType) -> &mut Self {
        // 0x07 tags an export definition.
        self.bytes.push(0x07);
        self.bytes.extend(encoders::str(name));
        ty.encode(&mut self.bytes);
        self.num_added += 1;
        self
    }
    /// Gets the number of types that have been added to this module type.
    pub fn type_count(&self) -> u32 {
        self.types_added
    }
    // Emits the definition count followed by the buffered definition bytes.
    fn encode(&self, bytes: &mut Vec<u8>) {
        bytes.extend(encoders::u32(self.num_added));
        bytes.extend(self.bytes.iter().copied());
    }
}
/// Represents a component type.
#[derive(Debug, Clone, Default)]
pub struct ComponentType {
    bytes: Vec<u8>,       // encoded definitions, in insertion order
    num_added: u32,       // total definitions (types + imports + exports + aliases)
    types_added: u32,     // definitions that introduce a type (definitions + aliases)
}
impl ComponentType {
    /// Creates a new component type.
    pub fn new() -> Self {
        Self::default()
    }
    /// Define a type in this component type.
    ///
    /// The returned encoder must be finished before adding another definition.
    #[must_use = "the encoder must be used to encode the type"]
    pub fn ty(&mut self) -> TypeEncoder {
        // 0x01 tags a type definition; the TypeEncoder appends its payload.
        self.bytes.push(0x01);
        self.num_added += 1;
        self.types_added += 1;
        TypeEncoder(&mut self.bytes)
    }
    /// Defines an import in this component type.
    ///
    /// The type is expected to be an index to a previously defined or aliased type.
    pub fn import(&mut self, name: &str, ty: u32) -> &mut Self {
        // 0x02 tags an import definition.
        self.bytes.push(0x02);
        self.bytes.extend(encoders::str(name));
        self.bytes.extend(encoders::u32(ty));
        self.num_added += 1;
        self
    }
    /// Defines an export in this component type.
    ///
    /// The type is expected to be an index to a previously defined or aliased type.
    pub fn export(&mut self, name: &str, ty: u32) -> &mut Self {
        // 0x07 tags an export definition.
        self.bytes.push(0x07);
        self.bytes.extend(encoders::str(name));
        self.bytes.extend(encoders::u32(ty));
        self.num_added += 1;
        self
    }
    /// Defines an alias to an outer type in this component type.
    pub fn alias_outer_type(&mut self, count: u32, index: u32) -> &mut Self {
        // 0x09 0x02 0x05 tags an outer-type alias — confirm against the
        // current component binary format spec.
        self.bytes.push(0x09);
        self.bytes.push(0x02);
        self.bytes.push(0x05);
        self.bytes.extend(encoders::u32(count));
        self.bytes.extend(encoders::u32(index));
        self.num_added += 1;
        self.types_added += 1;
        self
    }
    /// Gets the number of types that have been added or aliased in this component type.
    pub fn type_count(&self) -> u32 {
        self.types_added
    }
    // Emits the definition count followed by the buffered definition bytes.
    fn encode(&self, bytes: &mut Vec<u8>) {
        bytes.extend(encoders::u32(self.num_added));
        bytes.extend(self.bytes.iter().copied());
    }
}
/// Represents an instance type.
///
/// Mirrors `ComponentType`, minus imports (instances only export).
#[derive(Debug, Clone, Default)]
pub struct InstanceType {
    bytes: Vec<u8>,       // encoded definitions, in insertion order
    num_added: u32,       // total definitions (types + exports + aliases)
    types_added: u32,     // definitions that introduce a type
}
impl InstanceType {
    /// Creates a new instance type.
    pub fn new() -> Self {
        Self::default()
    }
    /// Define a type in this instance type.
    ///
    /// The returned encoder must be finished before adding another definition.
    #[must_use = "the encoder must be used to encode the type"]
    pub fn ty(&mut self) -> TypeEncoder {
        // 0x01 tags a type definition; the TypeEncoder appends its payload.
        self.bytes.push(0x01);
        self.num_added += 1;
        self.types_added += 1;
        TypeEncoder(&mut self.bytes)
    }
    /// Defines an export in this instance type.
    ///
    /// The type is expected to be an index to a previously defined or aliased type.
    pub fn export(&mut self, name: &str, ty: u32) -> &mut Self {
        // 0x07 tags an export definition.
        self.bytes.push(0x07);
        self.bytes.extend(encoders::str(name));
        self.bytes.extend(encoders::u32(ty));
        self.num_added += 1;
        self
    }
    /// Defines an alias to an outer type in this instance type.
    pub fn alias_outer_type(&mut self, count: u32, index: u32) -> &mut Self {
        // Same encoding as ComponentType::alias_outer_type.
        self.bytes.push(0x09);
        self.bytes.push(0x02);
        self.bytes.push(0x05);
        self.bytes.extend(encoders::u32(count));
        self.bytes.extend(encoders::u32(index));
        self.num_added += 1;
        self.types_added += 1;
        self
    }
    /// Gets the number of types that have been added or aliased in this instance type.
    pub fn type_count(&self) -> u32 {
        self.types_added
    }
    // Emits the definition count followed by the buffered definition bytes.
    fn encode(&self, bytes: &mut Vec<u8>) {
        bytes.extend(encoders::u32(self.num_added));
        bytes.extend(self.bytes.iter().copied());
    }
}
/// Used to encode types.
///
/// Holds a mutable borrow of the destination buffer; each method consumes the
/// encoder, so exactly one type is written per instance.
#[derive(Debug)]
pub struct TypeEncoder<'a>(&'a mut Vec<u8>);
impl<'a> TypeEncoder<'a> {
    /// Define a module type.
    pub fn module(self, ty: &ModuleType) {
        // 0x4f: module type form tag in this encoding.
        self.0.push(0x4f);
        ty.encode(self.0);
    }
    /// Define a component type.
    pub fn component(self, ty: &ComponentType) {
        // 0x4e: component type form tag.
        self.0.push(0x4e);
        ty.encode(self.0);
    }
    /// Define an instance type.
    pub fn instance(self, ty: &InstanceType) {
        // 0x4d: instance type form tag.
        self.0.push(0x4d);
        ty.encode(self.0);
    }
    /// Define a function type.
    pub fn function<'b, P, T>(self, params: P, result: impl Into<InterfaceTypeRef>)
    where
        P: IntoIterator<Item = (Option<&'b str>, T)>,
        P::IntoIter: ExactSizeIterator,
        T: Into<InterfaceTypeRef>,
    {
        let params = params.into_iter();
        // 0x4c: function type form tag, then a count-prefixed parameter list.
        self.0.push(0x4c);
        self.0
            .extend(encoders::u32(u32::try_from(params.len()).unwrap()));
        for (name, ty) in params {
            // Each parameter is flagged 0x01 (named, name follows) or 0x00
            // (unnamed), followed by its type reference.
            match name {
                Some(name) => {
                    self.0.push(0x01);
                    self.0.extend(encoders::str(name));
                }
                None => self.0.push(0x00),
            }
            ty.into().encode(self.0);
        }
        result.into().encode(self.0);
    }
    /// Define a value type.
    pub fn value(self, ty: impl Into<InterfaceTypeRef>) {
        // 0x4b: value type form tag.
        self.0.push(0x4b);
        ty.into().encode(self.0);
    }
    /// Define an interface type.
    ///
    /// The returned encoder must be finished before adding another type.
    #[must_use = "the encoder must be used to encode the type"]
    pub fn interface_type(self) -> InterfaceTypeEncoder<'a> {
        InterfaceTypeEncoder(self.0)
    }
}
/// Represents a primitive interface type.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum PrimitiveInterfaceType {
    /// The type is the unit type.
    Unit,
    /// The type is a boolean.
    Bool,
    /// The type is a signed 8-bit integer.
    S8,
    /// The type is an unsigned 8-bit integer.
    U8,
    /// The type is a signed 16-bit integer.
    S16,
    /// The type is an unsigned 16-bit integer.
    U16,
    /// The type is a signed 32-bit integer.
    S32,
    /// The type is an unsigned 32-bit integer.
    U32,
    /// The type is a signed 64-bit integer.
    S64,
    /// The type is an unsigned 64-bit integer.
    U64,
    /// The type is a 32-bit floating point number.
    Float32,
    /// The type is a 64-bit floating point number.
    Float64,
    /// The type is a Unicode character.
    Char,
    /// The type is a string.
    String,
}
impl PrimitiveInterfaceType {
    // Appends this primitive's single-byte encoding to `bytes`.
    fn encode(&self, bytes: &mut Vec<u8>) {
        bytes.push(match self {
            Self::Unit => 0x7f,
            Self::Bool => 0x7e,
            Self::S8 => 0x7d,
            Self::U8 => 0x7c,
            Self::S16 => 0x7b,
            Self::U16 => 0x7a,
            Self::S32 => 0x79,
            Self::U32 => 0x78,
            Self::S64 => 0x77,
            Self::U64 => 0x76,
            Self::Float32 => 0x75,
            Self::Float64 => 0x74,
            Self::Char => 0x73,
            Self::String => 0x72,
        });
    }
}
/// Represents a reference to an interface type.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum InterfaceTypeRef {
    /// The reference is to a primitive type.
    Primitive(PrimitiveInterfaceType),
    /// The reference is to a type index.
    ///
    /// The type index must be to an interface type.
    Type(u32),
}
impl InterfaceTypeRef {
    // Primitives encode as their single-byte tag; indices as a signed 33-bit
    // LEB128 (s33) value, hence the widening cast to i64.
    fn encode(&self, bytes: &mut Vec<u8>) {
        match self {
            Self::Primitive(ty) => ty.encode(bytes),
            Self::Type(index) => bytes.extend(encoders::s33(*index as i64)),
        }
    }
}
// Convenience conversion so APIs can take `impl Into<InterfaceTypeRef>` and
// accept bare primitives.
impl From<PrimitiveInterfaceType> for InterfaceTypeRef {
    fn from(ty: PrimitiveInterfaceType) -> Self {
        Self::Primitive(ty)
    }
}
/// Used for encoding interface types.
///
/// Each method consumes the encoder and writes exactly one interface type
/// (a one-byte form tag followed by its payload) into the borrowed buffer.
#[derive(Debug)]
pub struct InterfaceTypeEncoder<'a>(&'a mut Vec<u8>);
impl InterfaceTypeEncoder<'_> {
    /// Define a primitive interface type.
    pub fn primitive(self, ty: PrimitiveInterfaceType) {
        ty.encode(self.0);
    }
    /// Define a record type.
    pub fn record<'a, F, T>(self, fields: F)
    where
        F: IntoIterator<Item = (&'a str, T)>,
        F::IntoIter: ExactSizeIterator,
        T: Into<InterfaceTypeRef>,
    {
        let fields = fields.into_iter();
        // 0x71: record form tag, then a count-prefixed (name, type) list.
        self.0.push(0x71);
        self.0
            .extend(encoders::u32(fields.len().try_into().unwrap()));
        for (name, ty) in fields {
            self.0.extend(encoders::str(name));
            ty.into().encode(self.0);
        }
    }
    /// Define a variant type.
    pub fn variant<'a, C, T>(self, cases: C)
    where
        C: IntoIterator<Item = (&'a str, T, Option<u32>)>,
        C::IntoIter: ExactSizeIterator,
        T: Into<InterfaceTypeRef>,
    {
        let cases = cases.into_iter();
        // 0x70: variant form tag, then a count-prefixed case list.
        self.0.push(0x70);
        self.0
            .extend(encoders::u32(cases.len().try_into().unwrap()));
        for (name, ty, default_to) in cases {
            self.0.extend(encoders::str(name));
            ty.into().encode(self.0);
            // Optional "defaults-to" case index: 0x01 + index when present,
            // 0x00 when absent.
            if let Some(default) = default_to {
                self.0.push(0x01);
                self.0.extend(encoders::u32(default));
            } else {
                self.0.push(0x00);
            }
        }
    }
    /// Define a list type.
    pub fn list(self, ty: impl Into<InterfaceTypeRef>) {
        self.0.push(0x6f);
        ty.into().encode(self.0);
    }
    /// Define a tuple type.
    pub fn tuple<I, T>(self, types: I)
    where
        I: IntoIterator<Item = T>,
        I::IntoIter: ExactSizeIterator,
        T: Into<InterfaceTypeRef>,
    {
        let types = types.into_iter();
        self.0.push(0x6E);
        self.0
            .extend(encoders::u32(types.len().try_into().unwrap()));
        for ty in types {
            ty.into().encode(self.0);
        }
    }
    /// Define a flags type.
    pub fn flags<'a, I>(self, names: I)
    where
        I: IntoIterator<Item = &'a str>,
        I::IntoIter: ExactSizeIterator,
    {
        self.0.push(0x6D);
        let names = names.into_iter();
        self.0
            .extend(encoders::u32(names.len().try_into().unwrap()));
        for name in names {
            self.0.extend(encoders::str(name));
        }
    }
    /// Define an enum type.
    pub fn enum_type<'a, I>(self, tags: I)
    where
        I: IntoIterator<Item = &'a str>,
        I::IntoIter: ExactSizeIterator,
    {
        let tags = tags.into_iter();
        self.0.push(0x6C);
        self.0.extend(encoders::u32(tags.len().try_into().unwrap()));
        for tag in tags {
            self.0.extend(encoders::str(tag));
        }
    }
    /// Define a union type.
    pub fn union<I, T>(self, types: I)
    where
        I: IntoIterator<Item = T>,
        I::IntoIter: ExactSizeIterator,
        T: Into<InterfaceTypeRef>,
    {
        let types = types.into_iter();
        self.0.push(0x6B);
        self.0
            .extend(encoders::u32(types.len().try_into().unwrap()));
        for ty in types {
            ty.into().encode(self.0);
        }
    }
    /// Define an option type.
    pub fn option(self, ty: impl Into<InterfaceTypeRef>) {
        self.0.push(0x6A);
        ty.into().encode(self.0);
    }
    /// Define an expected type.
    pub fn expected(self, ok: impl Into<InterfaceTypeRef>, error: impl Into<InterfaceTypeRef>) {
        // 0x69: expected form tag, followed by the ok type then the error type.
        self.0.push(0x69);
        ok.into().encode(self.0);
        error.into().encode(self.0);
    }
}
/// An encoder for the type section of WebAssembly components.
///
/// # Example
///
/// ```rust
/// use wasm_encoder::{Component, ComponentTypeSection, PrimitiveInterfaceType};
///
/// let mut types = ComponentTypeSection::new();
///
/// types.function(
///     [
///         (Some("a"), PrimitiveInterfaceType::String),
///         (Some("b"), PrimitiveInterfaceType::String)
///     ],
///     PrimitiveInterfaceType::String
/// );
///
/// let mut component = Component::new();
/// component.section(&types);
///
/// let bytes = component.finish();
/// ```
#[derive(Clone, Debug, Default)]
pub struct ComponentTypeSection {
    bytes: Vec<u8>,      // encoded types, in insertion order
    num_added: u32,      // number of types written so far
}
impl ComponentTypeSection {
    /// Create a new component type section encoder.
    pub fn new() -> Self {
        Self::default()
    }
    /// The number of types in the section.
    pub fn len(&self) -> u32 {
        self.num_added
    }
    /// Determines if the section is empty.
    pub fn is_empty(&self) -> bool {
        self.num_added == 0
    }
    /// Encode a type into this section.
    ///
    /// The returned encoder must be finished before adding another type.
    #[must_use = "the encoder must be used to encode the type"]
    pub fn ty(&mut self) -> TypeEncoder<'_> {
        self.num_added += 1;
        TypeEncoder(&mut self.bytes)
    }
    /// Define a module type in this type section.
    pub fn module(&mut self, ty: &ModuleType) -> &mut Self {
        self.ty().module(ty);
        self
    }
    /// Define a component type in this type section.
    pub fn component(&mut self, ty: &ComponentType) -> &mut Self {
        self.ty().component(ty);
        self
    }
    /// Define an instance type in this type section.
    pub fn instance(&mut self, ty: &InstanceType) -> &mut Self {
        self.ty().instance(ty);
        self
    }
    /// Define a function type in this type section.
    pub fn function<'a, P, T>(
        &mut self,
        params: P,
        result: impl Into<InterfaceTypeRef>,
    ) -> &mut Self
    where
        P: IntoIterator<Item = (Option<&'a str>, T)>,
        P::IntoIter: ExactSizeIterator,
        T: Into<InterfaceTypeRef>,
    {
        self.ty().function(params, result);
        self
    }
    /// Define a value type in this type section.
    pub fn value(&mut self, ty: impl Into<InterfaceTypeRef>) -> &mut Self {
        self.ty().value(ty);
        self
    }
    /// Define an interface type in this type section.
    ///
    /// The returned encoder must be finished before adding another type.
    #[must_use = "the encoder must be used to encode the type"]
    pub fn interface_type(&mut self) -> InterfaceTypeEncoder<'_> {
        self.ty().interface_type()
    }
}
impl ComponentSection for ComponentTypeSection {
    fn id(&self) -> u8 {
        ComponentSectionId::Type.into()
    }
    // Section payload is length-prefixed: byte size of (count + type bodies),
    // then the type count, then the buffered type bytes.
    fn encode<S>(&self, sink: &mut S)
    where
        S: Extend<u8>,
    {
        let num_added = encoders::u32(self.num_added);
        let n = num_added.len();
        sink.extend(
            encoders::u32(u32::try_from(n + self.bytes.len()).unwrap())
                .chain(num_added)
                .chain(self.bytes.iter().copied()),
        );
    }
}
|
<?php declare(strict_types=1);
namespace Lengow\Connector\Storefront\Controller;
use Shopware\Core\Framework\Routing\Annotation\RouteScope;
use Symfony\Component\Routing\Annotation\Route;
use Symfony\Component\HttpFoundation\Request;
use Symfony\Component\HttpFoundation\Response;
use Lengow\Connector\Service\LengowAccess;
use Lengow\Connector\Service\LengowConfiguration;
use Lengow\Connector\Service\LengowExport;
use Lengow\Connector\Service\LengowLog;
use Lengow\Connector\Service\LengowTranslation;
/**
 * Class LengowExportController
 * @package Lengow\Connector\Storefront\Controller
 * @RouteScope(scopes={"storefront"})
 */
class LengowExportController extends LengowAbstractFrontController
{
    /**
     * @var LengowExport lengow export service
     */
    private $lengowExport;

    /**
     * LengowExportController constructor
     *
     * @param LengowAccess $lengowAccess lengow access service
     * @param LengowConfiguration $lengowConfiguration lengow configuration service
     * @param LengowLog $lengowLog lengow log service
     * @param LengowExport $lengowExport lengow export service
     */
    public function __construct(
        LengowAccess $lengowAccess,
        LengowConfiguration $lengowConfiguration,
        LengowLog $lengowLog,
        LengowExport $lengowExport
    )
    {
        parent::__construct($lengowAccess, $lengowConfiguration, $lengowLog);
        $this->lengowExport = $lengowExport;
    }

    /**
     * Export Process
     *
     * @param Request $request Http request
     *
     * @Route("/lengow/export", name="frontend.lengow.export", methods={"GET"})
     *
     * @return Response
     */
    public function export(Request $request): Response
    {
        // A sales channel is mandatory: the feed is always channel-specific.
        $salesChannelName = $this->getSalesChannelName($request);
        if ($salesChannelName === null) {
            $errorMessage = $this->lengowLog->decodeMessage(
                'log.export.specify_sales_channel',
                LengowTranslation::DEFAULT_ISO_CODE
            );
            return new Response($errorMessage, Response::HTTP_BAD_REQUEST);
        }
        $accessErrorMessage = $this->checkAccess($request, true);
        if ($accessErrorMessage !== null) {
            return new Response($accessErrorMessage, Response::HTTP_FORBIDDEN);
        }
        $exportArgs = $this->createGetArgArray($request);
        $this->lengowExport->init($exportArgs);
        // get_params and mode are introspection shortcuts: they answer
        // without generating the feed itself.
        if ($exportArgs[LengowExport::PARAM_GET_PARAMS]) {
            return new Response($this->lengowExport->getExportParams());
        }
        if ($exportArgs[LengowExport::PARAM_MODE]) {
            return new Response((string) $this->modeSize($exportArgs[LengowExport::PARAM_MODE]));
        }
        $this->lengowExport->exec();
        return new Response();
    }

    /**
     * Read an optional boolean flag from the query string
     *
     * @param Request $request Http request
     * @param string $key query parameter name
     *
     * @return bool|null true/false when the parameter is present ('1' means true), null when absent
     */
    private function getBoolQueryParam(Request $request, string $key): ?bool
    {
        $value = $request->query->get($key);
        return $value !== null ? $value === '1' : null;
    }

    /**
     * Read an optional integer value from the query string
     *
     * @param Request $request Http request
     * @param string $key query parameter name
     *
     * @return int|null the integer value when present, null when absent
     */
    private function getIntQueryParam(Request $request, string $key): ?int
    {
        $value = $request->query->get($key);
        return $value !== null ? (int) $value : null;
    }

    /**
     * Get all parameters from request
     * List params
     * string mode Number of products exported
     * string format Format of exported files ('csv','yaml','xml','json')
     * bool stream Stream file (1) or generate a file on server (0)
     * int offset Offset of total product
     * int limit Limit number of exported product
     * bool selection Export product selection (1) or all products (0)
     * bool out_of_stock Export out of stock product (1) Export only product in stock (0)
     * bool inactive Export inactive product (1) or not (0)
     * bool variation Export product variation (1) or not (0)
     * string product_ids List of product id separate with comma (1,2,3)
     * int sales_channel_id Export a specific store with store id
     * string currency Convert prices with a specific currency
     * string language Translate content with a specific language
     * bool log_output See logs (1) or not (0)
     * bool update_export_date Change last export date in data base (1) or not (0)
     * bool get_params See export parameters and authorized values in json format (1) or not (0)
     *
     * @param Request $request Http request
     *
     * @return array
     */
    protected function createGetArgArray(Request $request): array
    {
        return [
            LengowExport::PARAM_MODE => $request->query->get(LengowExport::PARAM_MODE),
            LengowExport::PARAM_FORMAT => $request->query->get(LengowExport::PARAM_FORMAT),
            LengowExport::PARAM_STREAM => $this->getBoolQueryParam($request, LengowExport::PARAM_STREAM),
            LengowExport::PARAM_OFFSET => $this->getIntQueryParam($request, LengowExport::PARAM_OFFSET),
            LengowExport::PARAM_LIMIT => $this->getIntQueryParam($request, LengowExport::PARAM_LIMIT),
            LengowExport::PARAM_SELECTION => $this->getBoolQueryParam($request, LengowExport::PARAM_SELECTION),
            LengowExport::PARAM_OUT_OF_STOCK => $this->getBoolQueryParam($request, LengowExport::PARAM_OUT_OF_STOCK),
            LengowExport::PARAM_VARIATION => $this->getBoolQueryParam($request, LengowExport::PARAM_VARIATION),
            LengowExport::PARAM_INACTIVE => $this->getBoolQueryParam($request, LengowExport::PARAM_INACTIVE),
            LengowExport::PARAM_PRODUCT_IDS => $request->query->get(LengowExport::PARAM_PRODUCT_IDS),
            LengowExport::PARAM_SALES_CHANNEL_ID => $request->query->get(LengowExport::PARAM_SALES_CHANNEL_ID),
            LengowExport::PARAM_CURRENCY => $request->query->get(LengowExport::PARAM_CURRENCY),
            LengowExport::PARAM_LANGUAGE => $request->query->get(LengowExport::PARAM_LANGUAGE),
            LengowExport::PARAM_LOG_OUTPUT => $this->getBoolQueryParam($request, LengowExport::PARAM_LOG_OUTPUT),
            LengowExport::PARAM_UPDATE_EXPORT_DATE => $this->getBoolQueryParam(
                $request,
                LengowExport::PARAM_UPDATE_EXPORT_DATE
            ),
            LengowExport::PARAM_GET_PARAMS => $this->getBoolQueryParam($request, LengowExport::PARAM_GET_PARAMS),
        ];
    }

    /**
     * Get mode size
     *
     * @param string $mode size mode ('size' = exportable products, 'total' = all products)
     *
     * @return int
     */
    protected function modeSize(string $mode): int
    {
        if ($mode === 'size') {
            return $this->lengowExport->getTotalExportProduct();
        }
        if ($mode === 'total') {
            return $this->lengowExport->getTotalProduct();
        }
        return 0;
    }
}
|
using UnityEditor;
namespace UnityToCustomEngineExporter.Editor
{
public class BoolEditorProperty
{
private readonly string _name;
private readonly string _key;
public BoolEditorProperty(string key, string name, bool value)
{
_key = key;
_name = name;
Value = value;
}
public bool Value { get; set; }
public void Toggle()
{
Value = EditorGUILayout.Toggle(_name, Value);
}
public void Load()
{
if (EditorPrefs.HasKey(_key))
Value = EditorPrefs.GetBool(_key);
}
public void Save()
{
EditorPrefs.SetBool(_key, Value);
}
}
} |
package renetik.android.framework.store.property.value
import renetik.android.framework.json.data.CSJsonObject
import renetik.android.framework.store.CSStoreInterface
import kotlin.reflect.KClass
/**
 * Store-backed event property holding a list of JSON objects of type [type].
 *
 * Reads via [CSStoreInterface.getJsonList] under [key], falling back to
 * [default] when the store has no value; writes the list back with
 * [CSStoreInterface.set].
 */
class CSJsonListValueStoreEventProperty<T : CSJsonObject>(
    store: CSStoreInterface,
    key: String,
    val type: KClass<T>,
    val default: List<T> = emptyList(),
    listenStoreChanged: Boolean = false,
    onApply: ((value: List<T>) -> Unit)? = null
) : CSValueStoreEventProperty<List<T>>(store, key, listenStoreChanged, onApply) {
    override val defaultValue = default
    // Cached value is eagerly loaded from the store at construction time.
    override var _value = load()
    override fun get(store: CSStoreInterface) = store.getJsonList(key, type) ?: default
    override fun set(store: CSStoreInterface, value: List<T>) = store.set(key, value)
}
{{-- Test view: renders a minimal one-row table inside the lyttest layout. --}}
@extends('layouts.lyttest')
@section('content')
    <table>
        <thead>
        <th>ID</th>
        </thead>
        <tbody>
        <tr><td>1</td></tr>
        </tbody>
    </table>
@endsection
@section('title','hijo')
; Exercises the operation-call shapes generated by the Q# compiler:
; direct body calls, invocation through a %Callable*, a controlled call,
; and passing a freshly created callable to another operation.
define void @Microsoft__Quantum__Testing__QIR__TestOperationCalls__body() #0 {
entry:
  ; Obtain a callable value and pin it with alias counts while it is
  ; held in a local binding.
  %doNothing = call %Callable* @Microsoft__Quantum__Testing__QIR__ReturnDoNothing__body(i64 1)
  call void @__quantum__rt__capture_update_alias_count(%Callable* %doNothing, i32 1)
  call void @__quantum__rt__callable_update_alias_count(%Callable* %doNothing, i32 1)
  ; Allocate one qubit used by every call below.
  %aux = call %Qubit* @__quantum__rt__qubit_allocate()
  call void @Microsoft__Quantum__Testing__QIR__CNOT__body(%Qubit* %aux, %Qubit* %aux)
  ; Build a 1-element qubit array and pass it to Empty directly.
  %0 = call %Array* @__quantum__rt__array_create_1d(i32 8, i64 1)
  %1 = call i8* @__quantum__rt__array_get_element_ptr_1d(%Array* %0, i64 0)
  %2 = bitcast i8* %1 to %Qubit**
  store %Qubit* %aux, %Qubit** %2, align 8
  call void @Microsoft__Quantum__Testing__QIR__Empty__body(%Array* %0)
  ; Build a second 1-element qubit array, wrap it in a tuple, and invoke
  ; the callable with it (callable argument passing goes via %Tuple*).
  %3 = call %Array* @__quantum__rt__array_create_1d(i32 8, i64 1)
  %4 = call i8* @__quantum__rt__array_get_element_ptr_1d(%Array* %3, i64 0)
  %5 = bitcast i8* %4 to %Qubit**
  store %Qubit* %aux, %Qubit** %5, align 8
  %6 = call %Tuple* @__quantum__rt__tuple_create(i64 ptrtoint (i1** getelementptr (i1*, i1** null, i32 1) to i64))
  %7 = bitcast %Tuple* %6 to { %Array* }*
  %8 = getelementptr inbounds { %Array* }, { %Array* }* %7, i32 0, i32 0
  store %Array* %3, %Array** %8, align 8
  call void @__quantum__rt__callable_invoke(%Callable* %doNothing, %Tuple* %6, %Tuple* null)
  ; Controlled call: control array and target array are passed separately.
  %9 = call %Array* @__quantum__rt__array_create_1d(i32 8, i64 1)
  %10 = call i8* @__quantum__rt__array_get_element_ptr_1d(%Array* %9, i64 0)
  %11 = bitcast i8* %10 to %Qubit**
  store %Qubit* %aux, %Qubit** %11, align 8
  %12 = call %Array* @__quantum__rt__array_create_1d(i32 8, i64 1)
  %13 = call i8* @__quantum__rt__array_get_element_ptr_1d(%Array* %12, i64 0)
  %14 = bitcast i8* %13 to %Qubit**
  store %Qubit* %aux, %Qubit** %14, align 8
  call void @Microsoft__Quantum__Testing__QIR__DoNothing__ctl(%Array* %9, %Array* %12)
  ; Release temporaries and the allocated qubit.
  call void @__quantum__rt__array_update_reference_count(%Array* %0, i32 -1)
  call void @__quantum__rt__array_update_reference_count(%Array* %3, i32 -1)
  call void @__quantum__rt__tuple_update_reference_count(%Tuple* %6, i32 -1)
  call void @__quantum__rt__array_update_reference_count(%Array* %9, i32 -1)
  call void @__quantum__rt__array_update_reference_count(%Array* %12, i32 -1)
  call void @__quantum__rt__qubit_release(%Qubit* %aux)
  ; Create a callable literal and hand it to another operation.
  %15 = call %Callable* @__quantum__rt__callable_create([4 x void (%Tuple*, %Tuple*, %Tuple*)*]* @Microsoft__Quantum__Testing__QIR__ReturnDoNothing, [2 x void (%Tuple*, i32)*]* null, %Tuple* null)
  call void @Microsoft__Quantum__Testing__QIR__TakesSingleTupleArg__body(i64 2, %Callable* %15)
  ; Drop alias pins and reference counts before returning.
  call void @__quantum__rt__capture_update_alias_count(%Callable* %doNothing, i32 -1)
  call void @__quantum__rt__callable_update_alias_count(%Callable* %doNothing, i32 -1)
  call void @__quantum__rt__capture_update_reference_count(%Callable* %doNothing, i32 -1)
  call void @__quantum__rt__callable_update_reference_count(%Callable* %doNothing, i32 -1)
  call void @__quantum__rt__capture_update_reference_count(%Callable* %15, i32 -1)
  call void @__quantum__rt__callable_update_reference_count(%Callable* %15, i32 -1)
  ret void
}
|
from django.db import models
from django.conf import settings
# Create your models here.
class Catagories(models.Model):
    # Product/content category with an optional description and image.
    # NOTE(review): class name is misspelled ("Categories"); renaming would
    # require a migration and caller updates, so it is left as-is.
    title = models.CharField(max_length=150, db_index=True)
    detail = models.TextField(max_length=5000, blank=True, null=True)
    photo = models.ImageField(
        upload_to='catagories_photos/', verbose_name=u"Add image (optional)",
        blank=True, null=True
    )
class Feedback(models.Model):
    # Visitor feedback entry; only the full name is required.
    fullname = models.CharField(max_length=150, db_index=True)
    # NOTE(review): consider EmailField for validation — changing it now
    # would require a migration, so it is only flagged here.
    email = models.CharField(max_length=150, blank=True, null=True)
    question = models.CharField(max_length=150, blank=True, null=True)
class Partner(models.Model):
    # Business-partner contact card with photo and social link.
    name = models.CharField(max_length=150, db_index=True)
    email = models.CharField(max_length=150, db_index=True)
    photo = models.ImageField(
        # NOTE(review): reuses the categories upload folder — confirm intended.
        upload_to='catagories_photos/', verbose_name=u"Add image (optional)",
        blank=True, null=True
    )
    number = models.CharField(max_length=150, db_index=True)
    facebook = models.CharField(max_length=150, db_index=True)
### java-lib
This project module generates a `.module` and a `.pom` file using the
`java-library` and `maven-publish` plugins, serving as an output guide.
`./gradlew -b java-lib/build.gradle.kts publish` |
require 'spec_helper'
# Shared examples verifying that the including class proxies Capybara
# session and modal methods, and handles alert boxes per driver.
# NOTE(review): `reciever` (sic) must be provided as a `let` by the
# including spec — the misspelling is part of that contract, so it stays.
RSpec.shared_examples :capybara_methods_proxy do
  let(:session) { double(:session) }

  # Stub out Capybara's current session and the configured driver name.
  before do
    allow(Capybara).to receive(:current_session) { session }
    allow(Howitzer).to receive(:driver) { driver_name }
    allow(session).to receive(:current_url) { 'google.com' }
  end

  describe '#driver' do
    it 'should proxy #driver method' do
      expect(session).to receive(:driver).once
      reciever.driver
    end
  end

  describe '#text' do
    it 'should proxy #text method' do
      expect(session).to receive(:text).once
      reciever.text
    end
  end

  context 'when capybara session method' do
    it 'should proxy method' do
      expect(session).to receive(:visit).once
      reciever.visit
    end
  end

  context 'when capybara modal method' do
    it 'should proxy method' do
      expect(session).to receive(:dismiss_prompt).with(:some_text).once
      reciever.dismiss_prompt(:some_text)
    end
  end

  describe '#click_alert_box' do
    subject { reciever.click_alert_box(flag_value) }

    # With Selenium, alerts are accepted/dismissed through the real
    # browser's switch_to.alert chain.
    context 'when flag true and correct driver specified' do
      let(:flag_value) { true }
      let(:page) { double }
      let(:alert) { double }
      let(:driver) { double }
      let(:browser) { double }
      let(:switch_to) { double }
      let(:driver_name) { 'selenium' }
      it do
        expect(session).to receive(:driver).ordered.and_return(driver)
        expect(driver).to receive(:browser).ordered.and_return(browser)
        expect(browser).to receive(:switch_to).ordered.and_return(switch_to)
        expect(switch_to).to receive(:alert).ordered.and_return(alert)
        expect(alert).to receive(:accept).once
        subject
      end
    end

    context 'when flag false and correct driver specified' do
      let(:flag_value) { false }
      let(:page) { double }
      let(:alert) { double }
      let(:driver) { double }
      let(:browser) { double }
      let(:switch_to) { double }
      let(:driver_name) { 'selenium' }
      it do
        expect(session).to receive(:driver).ordered.and_return(driver)
        expect(driver).to receive(:browser).ordered.and_return(browser)
        expect(browser).to receive(:switch_to).ordered.and_return(switch_to)
        expect(switch_to).to receive(:alert).ordered.and_return(alert)
        expect(alert).to receive(:dismiss).once
        subject
      end
    end

    # Non-Selenium drivers fall back to overriding window.confirm via JS.
    context 'when flag true and wrong driver specified' do
      let(:flag_value) { true }
      let(:page) { double }
      let(:driver_name) { 'ff' }
      it do
        expect(session).to receive(:evaluate_script).with('window.confirm = function() { return true; }')
        subject
      end
    end

    context 'when flag false and wrong driver specified' do
      let(:driver_name) { 'ff' }
      let(:flag_value) { false }
      let(:page) { double }
      it do
        expect(session).to receive(:evaluate_script).with('window.confirm = function() { return false; }')
        subject
      end
    end
  end
end
|
package kt
/** Accumulates import lines and class definitions and renders them as a Kotlin source file. */
class KotlinCodeBuilder {
    private val imports: MutableList<String> = mutableListOf()
    private val classes: MutableList<KotlinClassBuilder> = mutableListOf()

    /** Record an import line for [file]. */
    fun imports(file: String) {
        imports += "import $file"
    }

    /** Build a class named [name] (optionally extending [superName]) via [fn] and add it to the output. */
    fun writeClass(name: String, superName: String? = null, fn: KotlinClassBuilder.() -> Unit) {
        classes += KotlinClassBuilder(name, superName).apply(fn)
    }

    override fun toString(): String =
        imports.joinToString("\n") + "\n\n" + classes.joinToString("\n\n")
}
/**
 * Builds the source text of a single Kotlin class: constructor parameters,
 * fields, and methods, rendered by [toString].
 */
class KotlinClassBuilder(
    private val name: String,
    private val superName: String?
) {
    private val constructorParameters: MutableList<String> = mutableListOf()
    private val fields: MutableList<String> = mutableListOf()
    private val methods: MutableList<String> = mutableListOf()

    /** Add a constructor `val` parameter; private unless [pub]. */
    fun parameter(name: String, type: String, pub: Boolean = false) {
        constructorParameters.add(("private ".takeIf { !pub } ?: "") + "val $name: $type")
    }

    /** Add an immutable field initialised to [value]; private unless [pub]. */
    fun field(name: String, value: String, pub: Boolean = false) {
        fields.add(("private ".takeIf { !pub } ?: "") + "val $name = $value")
    }

    /** Add a private mutable field initialised to [value]. */
    fun mutableField(name: String, value: String) {
        fields.add("private var $name = $value")
    }

    /** Add a method whose body is produced by [fn]. */
    fun method(name: String, returns: String, vararg parameters: Pair<String, String>,
               fn: KotlinBlockBuilder.() -> Unit) {
        addMethod("", name, returns, parameters, fn)
    }

    /** Add an `override` method whose body is produced by [fn]. */
    fun overrideMethod(name: String, returns: String, vararg parameters: Pair<String, String>,
                       fn: KotlinBlockBuilder.() -> Unit) {
        addMethod("override ", name, returns, parameters, fn)
    }

    // Shared renderer for method()/overrideMethod() (previously duplicated):
    // builds the body via KotlinBlockBuilder and indents it one tab under the
    // signature. The only difference between the two is the [modifier] prefix.
    private fun addMethod(modifier: String, name: String, returns: String,
                          parameters: Array<out Pair<String, String>>,
                          fn: KotlinBlockBuilder.() -> Unit) {
        val generator = KotlinBlockBuilder()
        fn(generator)
        methods.add("${modifier}fun $name(${parameters.joinToString { (n, t) -> "$n: $t" }}): $returns {" +
                "\n\t${generator.toString().replace("\n", "\n\t")}\n}")
    }

    override fun toString(): String {
        val paramsString = constructorParameters.joinToString(", ")
        val fieldsString = fields.joinToString("\n\t")
        val methodsString = methods.joinToString("\n\n").replace("\n", "\n\t")
        return "class $name($paramsString)${superName?.prependIndent(": ") ?: ""} {\n" +
                "\t" + fieldsString + "\n\n" +
                "\t" + methodsString + "\n" +
                "}"
    }
}
/** Accumulates statement lines for a code block, with support for nested `{ }` blocks. */
class KotlinBlockBuilder {
    private val output = StringBuilder()

    /** Append a single statement line. */
    fun writeLine(s: String) {
        output.append(s).append('\n')
    }

    /** Append a nested `s { ... }` block whose body is produced by [fn]. */
    fun block(s: String, fn: KotlinBlockBuilder.() -> Unit) {
        val inner = KotlinBlockBuilder().apply(fn).toString().replace("\n", "\n\t")
        output.append(s).append(" {\n\t").append(inner).append("\n}\n\n")
    }

    override fun toString() = output.toString().trim()
}
|
#include <iostream>
#include <tool/Input.hpp>
namespace tool {
    // Clear the per-frame edge flags; `held` persists across frames.
    void InputState::reset() {
        this->released = false;
        this->pressed = false;
    }

    // Debug printer for an InputState.
    std::ostream &operator<<(std::ostream &os, const InputState &inputState) {
        os << "Pressed: " << (inputState.pressed ? "true" : "false") << std::endl;
        os << "Released: " << (inputState.released ? "true" : "false") << std::endl;
        os << "Held: " << (inputState.held ? "true" : "false") << std::endl;
        return os;
    }

    // Fold an SDL press/release state into this InputState's flags.
    void InputState::update(uint8_t state) {
        if (state == SDL_PRESSED) {
            this->pressed = true;
            this->held = true;
        }
        else if (state == SDL_RELEASED) {
            this->released = true;
            this->held = false;
        }
    }

    // Record relative mouse movement for this frame.
    void Input::handleMouseMotion(const SDL_MouseMotionEvent &event) {
        this->mouseMotion = { event.xrel, event.yrel };
    }

    // Lazily create the per-button state, then apply the press/release.
    void Input::handleMouseButton(const SDL_MouseButtonEvent &event) {
        if (!this->buttons.count(event.button)) {
            this->buttons.emplace(event.button, InputState());
        }
        this->buttons[event.button].update(event.state);
    }

    // Record wheel motion, normalising direction when SDL reports it flipped.
    void Input::handleMouseWheel(const SDL_MouseWheelEvent &event) {
        if (event.direction == SDL_MOUSEWHEEL_NORMAL) {
            this->wheelMotion = { event.x, event.y };
        }
        else {
            this->wheelMotion = { -event.x, -event.y };
        }
    }

    // Lazily create the per-key state, then apply the press/release.
    void Input::handleKeyboard(const SDL_KeyboardEvent &event) {
        if (!this->keys.count(event.keysym.scancode)) {
            this->keys.emplace(event.keysym.scancode, InputState());
        }
        this->keys[event.keysym.scancode].update(event.state);
    }

    // A window-close event flags the input loop to end.
    void Input::handleWindowEvent(SDL_WindowEvent event) {
        this->end = event.event == SDL_WINDOWEVENT_CLOSE;
    }

    // Per-frame reset: clear edge flags on every button/key and zero the
    // accumulated mouse/wheel motion. Call once per frame before polling.
    void Input::reset() {
        for (auto &entry: this->buttons) {
            entry.second.reset();
        }
        for (auto &entry: this->keys) {
            entry.second.reset();
        }
        this->mouseMotion = { 0, 0 };
        this->wheelMotion = { 0, 0 };
    }

    // Dispatch a polled SDL event to the matching handler.
    void Input::handleInput(const SDL_Event &event) {
        switch (event.type) {
            case SDL_MOUSEMOTION:
                this->handleMouseMotion(event.motion);
                break;
            case SDL_MOUSEBUTTONDOWN:
            case SDL_MOUSEBUTTONUP:
                this->handleMouseButton(event.button);
                break;
            case SDL_MOUSEWHEEL:
                this->handleMouseWheel(event.wheel);
                break;
            case SDL_KEYDOWN:
            case SDL_KEYUP:
                this->handleKeyboard(event.key);
                break;
            case SDL_WINDOWEVENT:
                this->handleWindowEvent(event.window);
                break;
            case SDL_QUIT:
                this->end = true;
                break;
        }
    }

    // Query helpers: all return false for buttons/keys never seen.
    GLboolean Input::isPressedButton(uint8_t button) const {
        return this->buttons.count(button) && this->buttons.at(button).pressed;
    }

    GLboolean Input::isPressedKey(SDL_Scancode key) const {
        return this->keys.count(key) && this->keys.at(key).pressed;
    }

    GLboolean Input::isReleasedButton(uint8_t button) const {
        return this->buttons.count(button) && this->buttons.at(button).released;
    }

    GLboolean Input::isReleasedKey(SDL_Scancode key) const {
        return this->keys.count(key) && this->keys.at(key).released;
    }

    GLboolean Input::isHeldButton(uint8_t button) const {
        return this->buttons.count(button) && this->buttons.at(button).held;
    }

    GLboolean Input::isHeldKey(SDL_Scancode key) const {
        return this->keys.count(key) && this->keys.at(key).held;
    }

    // Return a copy of the tracked state, or a default (all-false) state
    // for buttons/keys never seen.
    InputState Input::getInputStateButton(uint8_t button) const {
        if (!this->buttons.count(button)) {
            return InputState();
        }
        return this->buttons.at(button);
    }

    InputState Input::getInputStateKey(SDL_Scancode key) const {
        if (!this->keys.count(key)) {
            return InputState();
        }
        return this->keys.at(key);
    }

    // True once a quit/close event has been received.
    GLboolean Input::ended() const {
        return this->end;
    }

    glm::vec2 Input::getRelativeMotion() const {
        return this->mouseMotion;
    }

    glm::vec2 Input::getWheelMotion() const {
        return this->wheelMotion;
    }
}
|
// Well-known configuration keys used across the app.
export const keys = Object.freeze({
  APP_NAME: 'APP_NAME',
  FLAGS: 'FLAGS',
  RADIX_API_ENVIRONMENT: 'RADIX_API_ENVIRONMENT',
  RADIX_CLUSTER_BASE: 'RADIX_CLUSTER_BASE',
  RADIX_ENVIRONMENT: 'RADIX_ENVIRONMENT',
  RADIX_CLUSTER_NAME: 'RADIX_CLUSTER_NAME',
  RADIX_CLUSTER_TYPE: 'RADIX_CLUSTER_TYPE',
});

// Sources a configuration value may be loaded from.
export const keySources = Object.freeze({
  RADIX_CONFIG_BODY: 'RADIX_CONFIG_BODY',
  RADIX_CONFIG_JSON: 'RADIX_CONFIG_JSON',
  RADIX_CONFIG_URL: 'RADIX_CONFIG_URL',
});
|
-- Weather history table: one row per recorded weather observation,
-- with standard audit columns (creator, timestamps, last-update info).
DROP TABLE IF EXISTS `weather_history`;
CREATE TABLE `weather_history` (
  `id` int NOT NULL,
  `SORT_NUM` int DEFAULT NULL,
  `LAST_UPD_TIME` datetime DEFAULT NULL,
  `LAST_UPD_USER` int DEFAULT NULL,
  `CREAT_TIME` datetime DEFAULT NULL,
  `CREATOR` int DEFAULT NULL COMMENT '上报人',
  `LAST_UPD_IP` varchar(100) DEFAULT NULL,
  `WSDVER` int DEFAULT NULL,
  `crt_time` datetime(0) NULL DEFAULT NULL COMMENT '时间',
  `weatherdes` varchar(255) COMMENT '天气描述',
  PRIMARY KEY (`id`) USING BTREE
) ENGINE=InnoDB DEFAULT CHARSET=utf8;
|
#!/bin/bash
#SBATCH --gres=gpu:1
#SBATCH -c 6
#SBATCH --mail-type=FAIL,END

# When running under SLURM, load CUDA and the conda environment.
# ${SLURM_JOBID+z} expands to "z" only when SLURM_JOBID is set (even if empty).
if [[ ! -z ${SLURM_JOBID+z} ]];
then
    echo "Setting up SLURM env"
    setcuda 10.2
    conda activate python37
else
    echo "Not a SLURM job"
fi

# Fail fast: abort on errors, failed pipe stages, and unset variables.
set -o errexit
set -o pipefail
set -o nounset

# Config file may be passed as the first argument; allexport makes every
# variable defined by it visible to the python process below.
CONFIG_FILE=${1-./configs/config.env}
set -o allexport
source $CONFIG_FILE
set +o allexport

echo ">> Config: "
cat $CONFIG_FILE | tee -a $LOG_FILE

# Data locations, overridable via the config file / environment.
TRAIN_FOLDER=${TRAIN_FOLDER-./data/semmacape/416_empty/}
TEST_FOLDER=${TEST_FOLDER-/share/projects/semmacape/Data_Semmacape_2/416_non_empty_filtered/}

echo "Starting script"
echo $(date)

# Launch training/evaluation; all output is appended to $LOG_FILE as well.
python examples/main.py \
    --train_folder $TRAIN_FOLDER \
    --test_folder $TEST_FOLDER \
    --train_limit $TRAIN_LIMIT \
    --test_limit $TEST_LIMIT \
    --params_path $PARAMS_PATH \
    --oe_folder ./data/coco/ \
    --oe_frequency ${OE_FREQUENCY-2} \
    --n_epochs ${N_EPOCHS-0} \
    --ae_n_epochs ${AE_N_EPOCHS-0} \
    --pretrain \
    --n_svdds ${N_SVDDS-1} \
    --iou_threshold $IOU_THRESHOLD \
    --min_area $MIN_AREA \
    --use_nms \
    ${EXTRA_FLAGS-} \
    | tee -a $LOG_FILE
echo $(date)
|
import {Workbook} from 'exceljs';
import {ExportDataInterface} from '../types/common/export-data.interface';
/** Strategy that writes export data into an exceljs workbook. */
export interface FillerInterface {
  /** Populate `workbook` with `data`; resolves when writing completes. */
  fill(workbook: Workbook, data: ExportDataInterface): Promise<void>;
}
|
-- Join table linking ballots to the candidates that appear on them.
CREATE TABLE v3_0_ballot_candidates (ballot_id INTEGER,
    candidate_id INTEGER);
|
---
published: true
layout: post
date: '2017-03-21 11:25 +0100'
title: geany-plugin-overview
tags: linux
---
In Stretch
sudo apt install geany-plugin-overview

Stretch compared to Jessie
[https://forums.bunsenlabs.org/viewtopic.php?id=3344](https://forums.bunsenlabs.org/viewtopic.php?id=3344)
|
import 'dart:async';
import 'dart:math';
import 'package:aliyun_oss_dart_sdk/src/client_exception.dart';
import 'package:aliyun_oss_dart_sdk/src/common/oss_log.dart';
import 'package:aliyun_oss_dart_sdk/src/common/utils/extension_util.dart';
import 'package:aliyun_oss_dart_sdk/src/exception/lib_exception.dart';
import 'package:aliyun_oss_dart_sdk/src/service_exception.dart';
import 'oss_retry_type.dart';
/// Decides whether a failed OSS request should be retried, and with what
/// backoff delay.
class OSSRetryHandler {
  // Maximum number of retry attempts before giving up.
  int maxRetryCount = 2;

  OSSRetryHandler(int maxRetryCount) {
    setMaxRetryCount(maxRetryCount);
  }

  void setMaxRetryCount(int maxRetryCount) {
    this.maxRetryCount = maxRetryCount;
  }

  /// TODO: tied to the exception types involved; needs a thorough rewrite.
  ///
  /// Client-side errors retry unless cancelled/interrupted/invalid-argument;
  /// service errors retry on 5xx, and clock-skew errors trigger a time fix.
  OSSRetryType shouldRetry(Exception e, int currentRetryCount) {
    if (currentRetryCount >= maxRetryCount) {
      return OSSRetryType.OSSRetryTypeShouldNotRetry;
    }

    if (e is OSSClientException) {
      if (e.isCanceledException()) {
        return OSSRetryType.OSSRetryTypeShouldNotRetry;
      }

      Exception localException = e;
      if (localException is InterruptedException &&
          (localException is! TimeoutException)) {
        OSSLog.logError("[shouldRetry] - is interrupted!");
        return OSSRetryType.OSSRetryTypeShouldNotRetry;
      } else if (localException is ArgumentError) {
        return OSSRetryType.OSSRetryTypeShouldNotRetry;
      }
      OSSLog.logDebug("shouldRetry - " + e.toString());
      return OSSRetryType.OSSRetryTypeShouldRetry;
    } else if (e is OSSServiceException) {
      if (e.errorCode.equalsIgnoreCase("RequestTimeTooSkewed")) {
        return OSSRetryType.OSSRetryTypeShouldFixedTimeSkewedAndRetry;
      } else if (e.statusCode >= 500) {
        return OSSRetryType.OSSRetryTypeShouldRetry;
      } else {
        return OSSRetryType.OSSRetryTypeShouldNotRetry;
      }
    } else {
      return OSSRetryType.OSSRetryTypeShouldNotRetry;
    }
  }

  /// Exponential backoff in milliseconds (200ms * 2^attempt) for plain
  /// retries; no delay for other retry types.
  int timeInterval(int currentRetryCount, OSSRetryType retryType) {
    switch (retryType) {
      case OSSRetryType.OSSRetryTypeShouldRetry:
        return (pow(2, currentRetryCount) * 200).toInt();
      default:
        return 0;
    }
  }
}
|
import test from 'ava';
import chalk from 'chalk';
import { info, erro, warn } from './log';
// Each logger helper prefixes its message with a colored badge;
// with no argument it still emits the badge and separator.
test('info', t => {
  t.is(info('teste'), `${chalk.bgBlue(' INFO ')}: teste`);
});

test('info without args', t => {
  t.is(info(), `${chalk.bgBlue(' INFO ')}: `);
});

test('erro', t => {
  t.is(erro('teste'), `${chalk.bgRed(' ERRO ')}: teste`);
});

test('erro without args', t => {
  t.is(erro(), `${chalk.bgRed(' ERRO ')}: `);
});

test('warn', t => {
  t.is(warn('teste'), `${chalk.keyword('orange').inverse(' WARN ')}: teste`);
});

test('warn without args', t => {
  t.is(warn(), `${chalk.keyword('orange').inverse(' WARN ')}: `);
});
import * as fromActions from './chain-list-page.actions';
import * as fromReducers from './chain-list-page.reducers';
// Reducer specs for the chain list page: loading flags, error propagation,
// and item-list updates for load/delete/create actions.
describe('chain-list-page: reducers', () => {
  it('should return with the initial state.', () => {
    expect(fromReducers.reducer(undefined, new fromActions.NoopChainAction())).toBe(fromReducers.initialState);
  });

  it('should return with the previous state', () => {
    const previousState = { items: [], loading: false, error: ''};
    expect(fromReducers.reducer(previousState, new fromActions.NoopChainAction())).toBe(previousState);
  });

  // LoadChains starts a request: only the loading flag flips.
  it('should set loading true', () => {
    const previousState = { items: [], loading: false, error: ''};
    expect(fromReducers.reducer(previousState, new fromActions.LoadChainsAction())
    ).toEqual({
      ...previousState,
      loading: true,
    });
  });

  it('should set the error message and set loading false', () => {
    const previousState = { items: [], loading: true, error: ''};
    expect(fromReducers.reducer(previousState, new fromActions.LoadChainsFailAction({
      message: 'Something went wrong.'
    }))
    ).toEqual({
      ...previousState,
      loading: false,
      error: 'Something went wrong.'
    });
  });

  it('should set the items and set loading false', () => {
    const items = [{
      id: 'id1',
      name: 'Chain 1'
    }, {
      id: 'id2',
      name: 'Chain 2'
    }];
    const previousState = { items, loading: true, error: ''};
    expect(
      fromReducers.reducer(previousState, new fromActions.LoadChainsSuccessAction(items))
    ).toEqual({
      ...previousState,
      loading: false,
      items
    });
  });

  // Delete success removes the chain with the given id from the list.
  it('should delete the chain', () => {
    const itemList = [{
      id: 'id1',
      name: 'Chain 1'
    }, {
      id: 'id2',
      name: 'Chain 2'
    }];
    const previousState = { items: itemList, loading: true, error: ''};
    expect(
      fromReducers.reducer(previousState, new fromActions.DeleteChainSuccessAction('id2'))
    ).toEqual({
      ...previousState,
      loading: false,
      items: [{
        id: 'id1',
        name: 'Chain 1'
      }]
    });
  });

  it('should set the delete error message and set loading false', () => {
    const previousState = { items: [], loading: true, error: ''};
    expect(fromReducers.reducer(previousState, new fromActions.DeleteChainFailAction({
      message: 'Something went wrong.'
    }))
    ).toEqual({
      ...previousState,
      loading: false,
      error: 'Something went wrong.'
    });
  });

  // Create success appends the new chain to the list.
  it('should create a chain', () => {
    const itemList = [{
      id: 'id1',
      name: 'Chain 1'
    }];
    const previousState = { items: itemList, loading: true, error: ''};
    expect(
      fromReducers.reducer(previousState, new fromActions.CreateChainSuccessAction({id: 'id2', name: 'Chain 2'}))
    ).toEqual({
      ...previousState,
      loading: false,
      items: [{
        id: 'id1',
        name: 'Chain 1'
      }, {
        id: 'id2',
        name: 'Chain 2'
      }]
    });
  });

  it('should set the create error message and set loading false', () => {
    const previousState = { items: [], loading: true, error: ''};
    expect(fromReducers.reducer(previousState, new fromActions.CreateChainFailAction({
      message: 'Something went wrong.'
    }))
    ).toEqual({
      ...previousState,
      loading: false,
      error: 'Something went wrong.'
    });
  });
});
// Selector specs: substate extraction and item projection.
describe('chain-list-page: selectors', () => {
  it('should return with substate', () => {
    const expected = {
      items: [],
      loading: false,
      error: ''
    };
    const state = { 'chain-list-page': expected };
    expect(fromReducers.getChainListPageState(state)).toEqual(expected);
  });

  it('should return with types', () => {
    const expected = [];
    const state = {
      'chain-list-page': {
        items: expected
      }
    };
    expect(fromReducers.getChains(state)).toEqual(expected);
  });
});
|
module Jekyll
  class Site
    # Introduce complement to {::Jekyll::Site#find_converter_instance} for generators.
    # Returns the first generator matched by +type+ (case equality) or raises.
    def find_generator_instance type
      match = generators.find { |gen| type === gen }
      match || (raise %(No Generators found for #{type}))
    end
  end
end unless Jekyll::Site.method_defined? :find_generator_instance
|
using SnapsLibrary;
using Newtonsoft.Json;
/// <summary>
/// Interactive pizza order counter. Totals are persisted as JSON in
/// roaming storage so they survive across runs.
/// </summary>
class Ch10_07_PizzaPicker
{
    // Per-topping order counters, serialized to/from JSON.
    class PizzaDetails
    {
        public int CheeseAndTomatoCount = 0;
        public int pepperoniCount = 0;
        public int chickenCount = 0;
        public int vegetarianCount = 0;
    }

    public void StartProgram()
    {
        string SAVE_NAME = "pizzaChoice.json";

        PizzaDetails pizzaDetails;

        string json = SnapsEngine.FetchStringFromRoamingStorage(SAVE_NAME);

        if (json == null)
        {
            // No stored pizza details - make an empty one
            pizzaDetails = new PizzaDetails();
        }
        else
        {
            // Read the pizza counts from last time
            pizzaDetails = JsonConvert.DeserializeObject<PizzaDetails>(json);
        }

        SnapsEngine.SetTitleString("Select Pizza");

        // repeatedly ask for pizza selections (loop never exits by design)
        while (true)
        {
            string toppingChoice = SnapsEngine.SelectFrom5Buttons("Cheese and Tomato",
                "Pepperoni",
                "Chicken", "Vegetarian",
                "Show Totals");

            // Bump the counter matching the chosen topping.
            if (toppingChoice == "Cheese and Tomato")
                pizzaDetails.CheeseAndTomatoCount = pizzaDetails.CheeseAndTomatoCount + 1;
            if (toppingChoice == "Pepperoni")
                pizzaDetails.pepperoniCount = pizzaDetails.pepperoniCount + 1;
            if (toppingChoice == "Chicken")
                pizzaDetails.chickenCount = pizzaDetails.chickenCount + 1;
            if (toppingChoice == "Vegetarian")
                pizzaDetails.vegetarianCount = pizzaDetails.vegetarianCount + 1;

            if (toppingChoice == "Show Totals")
            {
                string result = "Order Totals:\n" +
                    pizzaDetails.CheeseAndTomatoCount.ToString() + " Cheese and Tomato\n" +
                    pizzaDetails.pepperoniCount.ToString() + " Pepperoni\n" +
                    pizzaDetails.chickenCount.ToString() + " Chicken\n" +
                    pizzaDetails.vegetarianCount.ToString() + " Vegetarian\n";
                SnapsEngine.DisplayString(result);

                string reply = SnapsEngine.SelectFrom2Buttons("Done", "Reset");
                if (reply == "Reset")
                {
                    pizzaDetails.CheeseAndTomatoCount = 0;
                    pizzaDetails.pepperoniCount = 0;
                    pizzaDetails.chickenCount = 0;
                    pizzaDetails.vegetarianCount = 0;
                }
                // clear the total display from the screen
                SnapsEngine.DisplayString("");
            }

            // Persist the updated counts after every interaction.
            json = JsonConvert.SerializeObject(pizzaDetails);
            SnapsEngine.SaveStringToRoamingStorage(itemName: SAVE_NAME, itemValue: json);
        }
    }
}
---
title: Flashlight
---
# Flashlight
The flashlight is a simple item used to light up a small area. It never runs out of battery.
**It is not a light source for specific types of ghosts that care about lights.**
## During the hunt
When the hunt begins, your flashlight will flicker.
## Tips
- Turn it off during a hunt to hide from the ghost more effectively.
- Place it on your hip to keep the surrounding area lit at all times.
|
namespace Miruken.AspNetCore.SignalR.Test.Site
{
using Api;
using Callback;
using Microsoft.Extensions.Logging;
using Tests;
// Handles player lifecycle notifications delivered over the SignalR hub
// and logs connection events.
public class PlayerListener : Handler
{
    private readonly ILogger _logger;

    public PlayerListener(ILogger logger)
    {
        _logger = logger;
    }

    // Logs a created player; disconnects the hub when the player is "Pele"
    // (test-site behavior).
    [Handles]
    public void Added(
        PlayerResponse response,
        HubConnectionInfo connectionInfo,
        IHandler composer)
    {
        var player = response.Player;
        _logger.LogInformation($"Player {player.Id} created ({player.Name})");
        if (player.Name == "Pele")
            composer.DisconnectHub(connectionInfo.Url);
    }

    [Handles]
    public void Reconnecting(HubReconnecting reconnecting)
    {
        var connectionInfo = reconnecting.ConnectionInfo;
        _logger.LogInformation($"Client {connectionInfo.Id} reconnecting to {connectionInfo.Url}");
    }

    [Handles]
    public void Reconnected(HubReconnected reconnected)
    {
        var connectionInfo = reconnected.ConnectionInfo;
        _logger.LogInformation($"Client {connectionInfo.Id} reconnected to {connectionInfo.Url}");
    }

    [Handles]
    public void Closed(HubClosed closed)
    {
        var connectionInfo = closed.ConnectionInfo;
        _logger.LogInformation($"Client disconnected from {connectionInfo.Url}");
    }
}
}
|
import mediaGallery from 'media-gallery';
// Round-trip specs for the media gallery storage API
// (save / load / count / clear).
describe('In media gallery', () => {
  // Start every spec from an empty gallery.
  beforeEach((done) => {
    mediaGallery.clear().then(done);
  });

  describe('When I call "count" after saving 2 files', () => {
    let count;
    beforeEach((done) => {
      mediaGallery.save('somefile1.mp3', createRandomArrayBuffer(32))
        .then(() => mediaGallery.save('somefile2.mp3', createRandomArrayBuffer(32)))
        .then(() => mediaGallery.count())
        .then(c => count = c)
        .then(done);
    });
    it('should return 2', () => {
      expect(count).toBe(2);
    });
  });

  describe('When I call "clear" after saving 2 files', () => {
    let count;
    beforeEach((done) => {
      mediaGallery.save('somefile1.mp3', createRandomArrayBuffer(32))
        .then(() => mediaGallery.save('somefile2.mp3', createRandomArrayBuffer(32)))
        .then(() => mediaGallery.clear())
        .then(() => mediaGallery.count())
        .then(c => count = c)
        .then(done);
    });
    it('should have a count of 0', () => {
      expect(count).toBe(0);
    });
  });

  // Saved bytes must come back identical on load.
  describe('When I call "save" with a file name and reload the buffer', () => {
    let inputArrayBuffer = createRandomArrayBuffer(32);
    let outputArrayBuffer;
    beforeEach((done) => {
      mediaGallery.save('somefile.mp3', inputArrayBuffer)
        .then(() => mediaGallery.load('somefile.mp3'))
        .then(buffer => outputArrayBuffer = buffer)
        .then(done);
    });
    it('should return the same data', () => {
      expect(areEqual(inputArrayBuffer, outputArrayBuffer)).toBe(true);
    });
  });

  describe('When I call "load" with a file name that does not exist', () => {
    let arrayBuffer;
    beforeEach((done) => {
      mediaGallery.load('somefile.mp3')
        .then(buffer => arrayBuffer = buffer)
        .then(done);
    });
    it('should return undefined', () => {
      expect(arrayBuffer).toBeUndefined();
    });
  });
});
///// Helper functions
// Build an ArrayBuffer of `length` random bytes.
function createRandomArrayBuffer(length) {
  const bytes = new Int8Array(length);
  for (let i = 0; i < length; i += 1) {
    bytes[i] = Math.floor(Math.random() * 256);
  }
  return bytes.buffer;
}
// Byte-for-byte comparison of two ArrayBuffers.
function areEqual(arrayBuffer1, arrayBuffer2) {
  if (arrayBuffer1.byteLength !== arrayBuffer2.byteLength) {
    return false;
  }
  const a = new Int8Array(arrayBuffer1);
  const b = new Int8Array(arrayBuffer2);
  return a.every((byte, i) => byte === b[i]);
}
|
// Load config for its side effects before exposing the API.
require ('./lib/config.js');

// Public surface of the package.
module.exports = {
  checker: require ('./lib/checker.js'),
  sendMail: require ('./lib/helper.js').sendMail
}
|
package creek
import (
"compress/gzip"
"fmt"
"io"
"os"
"path/filepath"
"sync"
"time"
)
// Logger defines our custom Logger type. It is an io.Writer that appends
// to Filename and rotates (with gzip compression of the old file) once
// the file would exceed MaxSize megabytes.
type Logger struct {
	Filename string // the file to log to
	MaxSize  int64  // max file size in MB
	file     *os.File   // open handle to the current log file; nil until first write
	size     int64      // bytes written to the current file so far
	mu       sync.Mutex // serializes Write/rotate across goroutines
}
// New creates a new creek logger that writes to filename and rotates
// once the file would exceed maxSize megabytes.
func New(filename string, maxSize int64) *Logger {
	logger := Logger{
		Filename: filename,
		MaxSize:  maxSize,
	}
	return &logger
}
// Write satisfies the io.Writer interface. It lazily opens the log file
// on first use, rotates when the write would exceed the size limit, and
// tracks the running file size. Safe for concurrent use (guarded by l.mu).
func (l *Logger) Write(p []byte) (n int, err error) {
	// Lock the mutex.
	l.mu.Lock()
	defer l.mu.Unlock()

	writeLen := int64(len(p))

	// If the data to write exceeds our max file size, error out.
	if writeLen > l.maxSize() {
		return 0, fmt.Errorf("Write length %d exceeds maximum file size %d", writeLen, l.maxSize())
	}

	// Get current log file.
	if l.file == nil {
		if err = l.openExistingOrNew(len(p)); err != nil {
			return 0, err
		}
	}

	// If writing the new data will go over our max file size, rotate the log file.
	if l.size+writeLen > l.maxSize() {
		if err := l.rotate(); err != nil {
			return 0, err
		}
	}

	// Write to the log file.
	n, err = l.file.Write(p)
	l.size += int64(n)

	return n, err
}
// close closes the log file if it's open and clears the handle so the
// next Write reopens (or recreates) it. Caller must hold l.mu.
func (l *Logger) close() error {
	if l.file == nil {
		return nil
	}
	err := l.file.Close()
	l.file = nil
	return err
}
// rotate closes the current log file and starts a fresh one; the old
// file is backed up (and compressed) by openNew. Caller must hold l.mu.
func (l *Logger) rotate() error {
	if err := l.close(); err != nil {
		return err
	}
	return l.openNew()
}
// compressLogFile compresses a log file.
func compressLogFile(name string) {
// Open the given log file for reading.
file, err := os.Open(name)
if err != nil {
fmt.Fprintf(os.Stderr, "Could not open log file for compression: %s\n", err)
return
}
defer file.Close()
// Create a file to save the compressed data to.
filegz, err := os.OpenFile(name+".gz", os.O_CREATE|os.O_WRONLY, os.FileMode(0644))
if err != nil {
fmt.Fprintf(os.Stderr, "Could not create file for compression: %s\n", err)
return
}
defer filegz.Close()
// Create new gzip Writer.
gz := gzip.NewWriter(filegz)
defer gz.Close()
// Compress the log file.
if _, err = io.Copy(gz, file); err != nil {
fmt.Fprintf(os.Stderr, "Error compressing log file: %s\n", err)
return
}
// Remove the old file.
if err = os.Remove(name); err != nil {
fmt.Fprintf(os.Stderr, "Error removing old log file: %s\n", err)
return
}
return
}
// openExistingOrNew tries to open the existing log file for appending,
// rotating first if the pending write of writeLen bytes would exceed the
// limit. Falls back to creating a new file when the existing one is
// missing or cannot be opened. Caller must hold l.mu.
func (l *Logger) openExistingOrNew(writeLen int) error {
	// Get or create the log file.
	info, err := os.Stat(l.Filename)
	if os.IsNotExist(err) {
		return l.openNew()
	}
	if err != nil {
		return fmt.Errorf("Error getting log file info: %s", err)
	}

	// See if we should rotate the log file.
	if info.Size()+int64(writeLen) >= l.maxSize() {
		return l.rotate()
	}

	// Try to open the current log file.
	file, err := os.OpenFile(l.Filename, os.O_APPEND|os.O_WRONLY, os.FileMode(0644))
	if err != nil {
		// If we fail to open, just ignore and open a new one.
		return l.openNew()
	}
	l.file = file
	l.size = info.Size()

	return nil
}
// openNew tries to open a new log file, creating a backup if one
// already exists. The backup is compressed asynchronously; the new file
// inherits the old file's permission mode. Caller must hold l.mu.
func (l *Logger) openNew() error {
	// Create the log file directories.
	err := os.MkdirAll(filepath.Dir(l.Filename), 0744)
	if err != nil {
		return fmt.Errorf("Could not create directories for new log file: %s", err)
	}

	mode := os.FileMode(0644)
	info, err := os.Stat(l.Filename)
	if err == nil {
		// Copy mode from the old log file.
		mode = info.Mode()

		// Rename existing log file as backup.
		backup := backupName(l.Filename)
		if err := os.Rename(l.Filename, backup); err != nil {
			return fmt.Errorf("Could not rename log file: %s", err)
		}

		// Compress the backup log file.
		// NOTE(review): fire-and-forget — a crash before the goroutine
		// finishes can leave an uncompressed backup behind.
		go compressLogFile(backup)
	}

	file, err := os.OpenFile(l.Filename, os.O_CREATE|os.O_WRONLY|os.O_TRUNC, mode)
	if err != nil {
		return fmt.Errorf("Could not open new log file: %s", err)
	}

	// Update the instance file info.
	l.file = file
	l.size = 0

	return nil
}
// backupName returns a new backup name for a log file.
func backupName(name string) string {
// Get the parts of the filepath.
dir := filepath.Dir(name)
filename := filepath.Base(name)
ext := filepath.Ext(filename)
prefix := filename[:len(filename)-len(ext)]
// Get a timestamp in RFC3339 format (2006-01-02T15:04:05Z07:00).
timestamp := time.Now().UTC().Format(time.RFC3339)
// Return the full path and filename with timestamp.
return filepath.Join(dir, fmt.Sprintf("%s-%s%s", prefix, timestamp, ext))
}
// maxSize returns the rollover threshold in bytes (MaxSize is given in MB).
func (l *Logger) maxSize() int64 {
	const megabyte int64 = 1024 * 1024
	return l.MaxSize * megabyte
}
|
// $Id$
//! \file
//! Minimal decoders for MIME transfer encodings and charset conversion.
//! Each function reads sz bytes from data and hands back the decoded
//! result via *ret (presumably newly allocated — confirm against the
//! implementation); the ssize_t return carries the output length, with a
//! negative value presumably signalling failure. Where an err pointer is
//! taken it receives additional error information.
/*************** base primitives ***********************/
//! Decode base64 input.
ssize_t codecs_from_base64(const uint8_t* data,size_t sz,uint8_t** ret,uint32_t* err);
//! Decode quoted-printable input.
ssize_t codecs_from_qp(const uint8_t* data,size_t sz,uint8_t** ret,uint32_t* err);
//! Decode uuencoded input. /TODO!
ssize_t codecs_from_uu(const uint8_t* data,size_t sz,uint8_t** ret,uint32_t* err);
//! Convert data from charset `from` to UTF-8.
ssize_t codecs_to_utf8(const uint8_t* data,size_t sz,const char* from,uint8_t** ret,uint32_t* err);
//! Decode with an explicitly supplied codec type `enc` and charset `cs`.
ssize_t codecs_from(int32_t enc,const char* cs,const uint8_t* data,size_t sz,uint8_t** ret);
//! Parse RFC 2047-style encoded words of the form =?koi8-r?q?something?=
ssize_t codecs_word(const uint8_t* data,size_t sz,uint8_t** ret);
|
---
layout: post
title: 肉食者鄙,未能远谋
---
> 我什么都不知道。——川
在伟大的党中央作出关闭著名违法网站Gmail这一重大决定之前,我几乎就要成为一只自干五了。
不久,在人民群众的大力拥护下,Wikipedia, Google Scholar, Google Code, GitHub等海外反华势力的主要据点相继被英明的党中央铲除了。
最神奇的是,Python的官网和Ruby Gems也从中国消失了?
据我推测, Python被封杀是因为驻华分裂势力使用的通讯工具GoAgent是用Python编写的? 唉,真是醉了。
后来, Wikipedia, Github和Python的官网都相继解封了。
据传闻, 是因为Github对我朝软件产业的发展造成了一定影响?
不过, 我在内网还是连我自己的电子邮件都看不了, 体现了社会主义伟大的优越性。
好吧,没关系。大不了不用Gmail了,邮箱而已,我们有网易163。
谷歌学术搜索也不用了,我们有CNKI和万方。
用不着Facebook, 我们有QQ空间。
不用Twitter, 我们有新浪微博。
没有Youtube, 我们还有Youku。
只要你不介意花Q币买些难看的黄钻壁纸,
不介意你在一年当中某几天发的微博其实只有你自己能看到,
不介意半分钟的视频里两分钟的广告。
那么,在新中国成立65周年之际,我们可以有把握地说,中华民族伟大复兴圆梦的时间越来越近了。
能如此接近中华民族伟大复兴目标,是在中国共产党领导下、在全国各族人民努力下,通过走中国特色社会主义道路实现的,彰显的是马克思主义的真理性、社会主义的优越性。这是中国共产党人对于拥有五千年历史的中国所作出的一大突出贡献,将永远彪炳史册。
今天,对于中国道路、中国模式、中国崛起、中国力量等,不仅中国人在广泛谈论,国际社会也是津津乐道。面对未来,我们更应坚定中国特色社会主义道路自信、理论自信、制度自信。
回望近代历史上中华民族屈辱的历史,展望未来“两个一百年”时刻的光明前景,作为这个国家的一分子,我们应该对新中国成立65年来取得的伟大成就深感欣慰,对未来的发展前景满怀期待。
我们每一个人都应为此努力、为此点赞!
|
/*
Copyright 2022 rev1e
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
use std::io::{self, Write};
use colored::Colorize;
use crate::{game::{cell::{Cells, CellType}, position::Position}, config::Config};
/// Column labels used to address board cells (A, B, C, ...).
pub const LETTERS: &str = "ABCDEFGHIJKLMNOPQRSTUVWXYZ";
// handles i/o: renders the board to the terminal and reads player input
pub struct Display<'a> {
    config: &'a Config
}
impl<'a> Display<'a> {
    /// Creates a display bound to the given game configuration.
    pub fn new(config: &'a Config) -> Self {
        Self {
            config
        }
    }

    /// Clears the terminal and moves the cursor to the top-left corner
    /// using ANSI escape sequences.
    pub fn clear_screen(&self) {
        print!("{esc}[2J{esc}[1;1H", esc = 27 as char);
    }

    /// Prints the prompt and reads one line of player input from stdin.
    ///
    /// Returns the line without its terminator. Unlike a single `pop()`,
    /// the loop also strips a Windows `\r` and does not eat a real
    /// character when stdin ends without a trailing newline.
    pub fn get_input(&self, mines_left: i32) -> String {
        // print prompt
        print!("({} mines left) -> ", mines_left);
        io::stdout().lock().flush().unwrap();

        let mut input = String::new();
        io::stdin().read_line(&mut input).unwrap();

        // remove the line terminator ("\n" or "\r\n"), if present
        while input.ends_with('\n') || input.ends_with('\r') {
            input.pop();
        }

        input
    }

    /// Prints the column letter row shown above and below the board.
    fn print_column_labels(&self) {
        print!("     ");
        for x in 0..(self.config.width - 1) {
            print!("{} ", LETTERS.chars().nth(x).unwrap());
        }
        println!("{}", LETTERS.chars().nth(self.config.width - 1).unwrap());
    }

    /// Prints a horizontal rule spanning the rendered board width.
    fn print_rule(&self) {
        print!("     ");
        for _ in 0..(self.config.width * 2) {
            print!("-");
        }
        print!("\n");
    }

    /// Renders the whole board with coordinate labels on all four sides.
    pub fn render_board(&self, map: &Cells) {
        self.print_column_labels();
        self.print_rule();

        for y in 0..self.config.height {
            print!("{:02} | ", y);

            for x in 0..self.config.width {
                let cell = map.idx(Position::new(x, y));

                // Flags and hidden cells mask the underlying content.
                if cell.flag {
                    print!("{} ", "!".red());
                    continue;
                }
                if cell.hidden {
                    print!("# ");
                    continue;
                }

                match cell.ctype {
                    CellType::Mine => print!("{} ", "*".red()),
                    CellType::Empty => print!("{} ", ".".bright_black()),
                    CellType::Number(n) => {
                        // Color-code numbers by how dangerous they are.
                        if n < 3 {
                            print!("{} ", format!("{}", n).bright_green());
                        } else if n < 5 {
                            print!("{} ", format!("{}", n).yellow());
                        } else {
                            print!("{} ", format!("{}", n).bright_red());
                        }
                    }
                }
            }

            println!("| {:02}", y);
        }

        self.print_rule();
        self.print_column_labels();
    }

    /// Prints command help and waits for the player to press enter.
    pub fn print_help(&self) {
        println!("Help:");
        println!("<pos> -> guess");
        println!("f <pos>, flag <pos>, f<pos> -> flag position");
        println!("r -> reveal all possible");
        println!("quit, exit, q -> exit game");
        println!("help, h, ? -> print this message");

        print!("Press enter to continue..");
        io::stdout().flush().unwrap();
        io::stdin().read_line(&mut String::new()).unwrap();
    }
}
|
module Chess
  class Game
    attr_accessor :board, :player1, :player2

    # Files a-h map to x columns 0-7.
    FILES = %w[a b c d e f g h].freeze
    # Ranks 8-1 map to y rows 0-7 (rank 8 is row 0 of the board array).
    RANKS = %w[8 7 6 5 4 3 2 1].freeze

    def initialize(player1, player2)
      @player1 = player1
      @player2 = player2
      @board = Board.new
      @board.new_game
    end

    # Converts algebraic notation (e.g. "e4") into board-array indices.
    # Returns [y, x]; an unrecognised file or rank leaves that component
    # nil and prints an error message, as before.
    def coordinate_parser(coord)
      x = FILES.index(coord[0])
      puts "Error invalid x coordinates" if x.nil?

      y = RANKS.index(coord[1])
      puts "Error invalid y coordinates" if y.nil?

      [y, x]
    end

    # True when any pawn has reached the far rank for its color.
    def pawn_promote?
      white = board.board[0].any? { |cell| (cell.instance_of? Pawn) && cell.color == "white" }
      black = board.board[7].any? { |cell| (cell.instance_of? Pawn) && cell.color == "black" }
      white || black
    end

    # Prompts for a piece and replaces the pawn at pawn_coord with it,
    # re-prompting on invalid input.
    def pawn_promotion(pawn_coord) #could not figure out how to test this one
      y, x = pawn_coord
      color = @board.board[y][x].color
      puts "Your pawn can be promoted!"
      puts "What do you want to promote your pawn to?"
      puts "Queen"
      puts "Rook"
      puts "Bishop"
      puts "Knight"
      promote_to = $stdin.gets.chomp
      case promote_to
      when "Queen"  then @board.board[y][x] = Queen.new(color)
      when "Rook"   then @board.board[y][x] = Rook.new(color)
      when "Bishop" then @board.board[y][x] = Bishop.new(color)
      when "Knight" then @board.board[y][x] = Knight.new(color)
      else
        puts "Please input a valid pawn_promotion"
        pawn_promotion(pawn_coord)
      end
    end
  end
end
|
package ru.maximkulikov.goodgame.api.chatmodels;
import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
import lombok.Data;
/**
* @author Maxim Kulikov
* @since 10.01.2017
*/
@Data
@JsonIgnoreProperties(ignoreUnknown = true)
public class ReqPrivateMessageContainer implements ReqChatObject {

    // Fixed message-type discriminator for this request.
    private String type = "send_private_message";

    // Payload describing the private message to send.
    private ReqPrivateMessage data;

    /**
     * Builds a container wrapping a single private chat message.
     *
     * @param channelId channel_id
     * @param userId    user_id
     * @param text      text
     */
    public ReqPrivateMessageContainer(final String channelId, final String userId, final String text) {
        this.data = new ReqPrivateMessage(channelId, userId, text);
    }
}
|
-module(swidden_middleware).
-export([failure/2, failure/3]).
%% Reply with HTTP 400 and a JSON body {"error_type": Type}.
failure(Req, Type) when is_binary(Type) ->
    Headers = #{<<"content-type">> => <<"application/json">>},
    Body = jsone:encode(#{error_type => Type}),
    cowboy_req:reply(400, Headers, Body, Req).
%% Reply with HTTP 400 and a JSON body carrying both the error type and a
%% structured reason map.
failure(Req, Type, Reason) when is_binary(Type) andalso is_map(Reason) ->
    Headers = #{<<"content-type">> => <<"application/json">>},
    Body = jsone:encode(#{error_type => Type, error_reason => Reason}),
    cowboy_req:reply(400, Headers, Body, Req).
|
import 'package:flutter/material.dart';
import 'package:rad_onc_project/data/global_data.dart' as datas;
import 'package:rad_onc_project/data/particle_data.dart' as particles;
import 'package:rad_onc_project/functions/preferences_functions.dart'
as funcPrefs;
import 'package:rad_onc_project/functions/spline_functions.dart' as splines;
import 'package:rad_onc_project/widgets/rad_app_bar.dart';
import 'package:rad_onc_project/widgets/text_fields.dart' as fields;
/// Monitor-unit (MU) calculator page: collects particle, dose, field
/// sizes, optional block fraction and depth, then shows the computed
/// base MU in a dialog.
class MUCalcApp extends StatefulWidget {
  static const String routeName = '/mu-calc-app';

  const MUCalcApp({Key? key}) : super(key: key);

  // Relative vertical flex for: input area, spacer, compute button, spacer.
  static const List<int> flexVertical = [7, 2, 1, 1];
  // Initial values shown in the input text fields.
  static const String defaultDose = '200';
  static const String defaultFieldSize = '10';
  static const String defaultDepth = '1.5';
  static const String defaultBlock = '0';
  // Inclusive [min, max] validation limits for each numeric input.
  static const List<double> limitsDose = [0, 1000];
  static const List<double> limitsFieldSize = [5, 35];
  static const List<double> limitsBlock = [0, 80];
  static const List<double> limitsDepth = [0, 35];
  // Compute-button geometry and error snackbar duration (milliseconds).
  static const double fractionWidthButton = 0.8;
  static const double radiusButton = 10;
  static const int durationSnack = 600;

  @override
  _MUCalcAppState createState() => _MUCalcAppState();
}
/// State for [MUCalcApp]: owns the input controllers and the PDD tables
/// loaded from preferences, and builds the input form.
class _MUCalcAppState extends State<MUCalcApp> {
  // PDD tables keyed by particle name, then by field-size / units entry;
  // loaded asynchronously in [initPreferences].
  Map<String, Map<String, List<double>>> _mapPdd = {};
  // Index of the selected particle in particles.listStrParticle.
  int _iParticle = 0;
  TextEditingController _controllerDose =
      TextEditingController(text: MUCalcApp.defaultDose);
  TextEditingController _controllerX =
      TextEditingController(text: MUCalcApp.defaultFieldSize);
  TextEditingController _controllerY =
      TextEditingController(text: MUCalcApp.defaultFieldSize);
  TextEditingController _controllerDepth =
      TextEditingController(text: MUCalcApp.defaultDepth);
  TextEditingController _controllerBlock = TextEditingController(text: '');
  // Whether the "block" input is enabled (checkbox state).
  bool _isBlock = false;

  // Loads the PDD tables from preferences and rebuilds once available.
  Future<void> initPreferences() async {
    _mapPdd = await funcPrefs.readPreferences(context);
    setState(() {});
  }

  @override
  void initState() {
    initPreferences();
    super.initState();
  }

  @override
  void dispose() {
    // Controllers must be disposed to release their resources.
    _controllerDose.dispose();
    _controllerX.dispose();
    _controllerY.dispose();
    _controllerDepth.dispose();
    _controllerBlock.dispose();
    super.dispose();
  }

  @override
  Widget build(BuildContext context) {
    // Nothing to render until the PDD tables have loaded.
    bool canBuild = _mapPdd.isNotEmpty;
    if (canBuild) {
      String particle = particles.listStrParticle[_iParticle];
      // Depth units are encoded after the '-' in one of the table keys.
      String depthUnits = _mapPdd[particle]!
          .keys
          .where((element) => element.contains('-'))
          .first
          .split('-')[1];
      TextStyle styleLabel = Theme.of(context).textTheme.headline1!;
      return SafeArea(
        child: Scaffold(
          resizeToAvoidBottomInset: false,
          appBar: RadAppBar(
            strAppTitle: datas.mapAppNames[2]![3],
          ),
          body: Column(
            children: [
              Expanded(
                flex: MUCalcApp.flexVertical[0],
                child: Column(
                  mainAxisAlignment: MainAxisAlignment.spaceEvenly,
                  children: [
                    // Particle selector row (photon energies only).
                    Row(
                      children: [
                        Expanded(
                            flex: 2,
                            child: Text(
                              'Particle:',
                              style: styleLabel,
                            )),
                        Expanded(
                          flex: 1,
                          child: DropdownButton<int>(
                            value: _iParticle,
                            items: particles.listStrParticle
                                .where((element) => element.endsWith('X'))
                                .map((e) => DropdownMenuItem(
                                    value: particles.listStrParticle.indexOf(e),
                                    child: Text(
                                      e,
                                      style:
                                          Theme.of(context).textTheme.headline2,
                                      textAlign: TextAlign.center,
                                    )))
                                .toList(),
                            dropdownColor: Colors.blueGrey,
                            onChanged: (index) {
                              setState(() {
                                _iParticle = index!;
                              });
                            },
                          ),
                        ),
                      ],
                    ),
                    // Prescribed dose input row.
                    Row(
                      crossAxisAlignment: CrossAxisAlignment.start,
                      children: [
                        Expanded(
                            child: Text(
                          'Dose [cGy]:',
                          style: styleLabel,
                        )),
                        Expanded(
                            child:
                                fields.textFieldDose(context, _controllerDose)),
                      ],
                    ),
                    // Field size (X and Y) input row.
                    Row(
                      crossAxisAlignment: CrossAxisAlignment.start,
                      children: [
                        Expanded(
                            flex: 2,
                            child: Text(
                              'Fields [cm]:',
                              style: styleLabel,
                            )),
                        Expanded(
                            flex: 1,
                            child: fields.textFieldFraction(
                                context, _controllerX)),
                        Expanded(
                            flex: 1,
                            child: fields.textFieldFraction(
                                context, _controllerY)),
                      ],
                    ),
                    // Optional block percentage row; the text field is only
                    // active while the checkbox is ticked.
                    Row(
                      crossAxisAlignment: CrossAxisAlignment.start,
                      children: [
                        Flexible(
                          child: Container(
                            color: Colors.grey,
                            child: Checkbox(
                              value: _isBlock,
                              onChanged: (val) {
                                setState(() {
                                  _isBlock = val!;
                                  if (_isBlock) {
                                    _controllerBlock.text =
                                        MUCalcApp.defaultBlock;
                                  } else {
                                    _controllerBlock.text = '';
                                  }
                                });
                              },
                            ),
                          ),
                        ),
                        Expanded(
                          flex: 4,
                          child: Text(
                            'Block [%]:',
                            style: styleLabel,
                            textAlign: TextAlign.center,
                          ),
                        ),
                        Expanded(
                          flex: 3,
                          child: IgnorePointer(
                            ignoring: !_isBlock,
                            child: fields.textFieldFraction(
                                context, _controllerBlock),
                          ),
                        ),
                      ],
                    ),
                    // Depth input row (units come from the PDD table key).
                    Row(
                      crossAxisAlignment: CrossAxisAlignment.start,
                      children: [
                        Expanded(
                            child: Text(
                          'Depth [$depthUnits]:',
                          style: styleLabel,
                        )),
                        Expanded(
                            child: fields.textFieldDose(
                                context, _controllerDepth)),
                      ],
                    ),
                  ],
                ),
              ),
              Spacer(
                flex: MUCalcApp.flexVertical[1],
              ),
              // Compute button: validates inputs and shows the result dialog.
              Expanded(
                flex: MUCalcApp.flexVertical[2],
                child: Container(
                  width: MediaQuery.of(context).size.width *
                      MUCalcApp.fractionWidthButton,
                  decoration: BoxDecoration(
                    color: Colors.green,
                    borderRadius: BorderRadius.circular(MUCalcApp.radiusButton),
                  ),
                  child: TextButton(
                    onPressed: () async {
                      await computeOutputs(
                        context,
                        particles.listStrParticle[_iParticle],
                        _mapPdd,
                        _controllerDose,
                        _controllerX,
                        _controllerY,
                        _controllerBlock,
                        _controllerDepth,
                        _isBlock,
                      );
                    },
                    child: Text(
                      'Compute',
                      style: Theme.of(context).textTheme.headline2,
                    ),
                  ),
                ),
              ),
              Spacer(
                flex: MUCalcApp.flexVertical[3],
              ),
            ],
          ),
        ),
      );
    } else {
      return Container();
    }
  }
}
/// Side of the equivalent square for a rectangular field:
/// 2ab/(a+b), i.e. 4·area / perimeter.
double getEquivalentSquare(double length1, double length2) {
  final area = length1 * length2;
  final sumOfSides = length1 + length2;
  return 2 * area / sumOfSides;
}
/// Scales the equivalent square by the open (unblocked) area fraction.
double getEffectiveEquivalentSquare(double fractionAreaOpen, double eqSqr) {
  final scaledSquare = fractionAreaOpen * eqSqr;
  return scaledSquare;
}
/// Interpolates the PDD value (percent) at [depth] for the effective
/// equivalent square [effEqSqr], blending the PDD curves of the two
/// tabulated field sizes that bracket it.
///
/// Returns null when [effEqSqr] falls outside the tabulated field-size
/// range or when either depth interpolation fails.
double? getFieldSizeInterpolatedPddN(
    Map<String, Map<String, List<double>>> mapPdd,
    String particle,
    double effEqSqr,
    double depth) {
  // Keys without '-' are field sizes; the one with '-' encodes units.
  List<String> listFieldsUnits = mapPdd[particle]!.keys.toList();
  List<double> fieldSizes = listFieldsUnits
      .where((element) => !element.contains('-'))
      .map((e) => double.parse(e))
      .toList();
  // Bracketing field-size indices (assumes fieldSizes is ascending —
  // TODO confirm against how preferences are written).
  int iField1 = fieldSizes.indexWhere((element) => element > effEqSqr);
  int iField0 = iField1 - 1;
  if (iField1 != -1 && iField0 != -1) {
    double fieldSize0 = fieldSizes[iField0];
    double fieldSize1 = fieldSizes[iField1];
    // Linear weight of the lower bracket field size.
    double linearWeight0 =
        1 - ((effEqSqr - fieldSize0) / (fieldSize1 - fieldSize0));
    // PDD depth/value curves for both bracket field sizes.
    List<dynamic> pddParameters0 =
        funcPrefs.getPddParameters(mapPdd, particle, iField0);
    List<double> pddDepths0 = pddParameters0[2];
    List<double> pddValues0 = pddParameters0[3];
    List<dynamic> pddParameters1 =
        funcPrefs.getPddParameters(mapPdd, particle, iField1);
    List<double> pddDepths1 = pddParameters1[2];
    List<double> pddValues1 = pddParameters1[3];
    // Interpolate each curve at the requested depth, then blend.
    double? pddN0 =
        splines.getInterpolatedNFromLists(pddDepths0, pddValues0, depth);
    double? pddN1 =
        splines.getInterpolatedNFromLists(pddDepths1, pddValues1, depth);
    if (pddN0 != null && pddN1 != null) {
      return linearWeight0 * pddN0 + (1 - linearWeight0) * pddN1;
    } else {
      return null;
    }
  } else {
    return null;
  }
}
/// Base monitor units before any further corrections:
/// dose / (Sc · Sp · PDD/100), where [nPdd] is a percentage.
double getBaseMU(double dose, double sC, double sP, double nPdd) {
  final pddFraction = nPdd / 100;
  final outputFactor = sC * sP * pddFraction;
  return dose / outputFactor;
}
/// Returns true when every controller's text parses as a double.
///
/// Uses [double.tryParse] instead of catching [FormatException] from
/// [double.parse], avoiding exceptions as control flow; acceptance is
/// identical (tryParse returns null exactly where parse would throw).
bool areInputsValid(List<TextEditingController> controllers) {
  return controllers
      .every((controller) => double.tryParse(controller.text) != null);
}
/// Range-check helper: true when [n] lies within the inclusive
/// [limits] pair ([min, max]).
bool _isWithinLimits(double n, List<double> limits) =>
    n >= limits[0] && n <= limits[1];

/// Validates each numeric input against its configured limit pair.
///
/// Returns flags in the order: dose, field X, field Y, block, depth.
/// The block entry is always true when [isBlock] is false (the block
/// field is empty and ignored then). Texts are assumed parseable —
/// callers check [areInputsValid] first.
List<bool> inputsWithinLimits(
    TextEditingController controllerDose,
    TextEditingController controllerX,
    TextEditingController controllerY,
    TextEditingController controllerBlock,
    bool isBlock,
    TextEditingController controllerDepth) {
  return [
    _isWithinLimits(double.parse(controllerDose.text), MUCalcApp.limitsDose),
    _isWithinLimits(
        double.parse(controllerX.text), MUCalcApp.limitsFieldSize),
    _isWithinLimits(
        double.parse(controllerY.text), MUCalcApp.limitsFieldSize),
    // Only parse the block text while the block input is active.
    isBlock
        ? _isWithinLimits(
            double.parse(controllerBlock.text), MUCalcApp.limitsBlock)
        : true,
    _isWithinLimits(double.parse(controllerDepth.text), MUCalcApp.limitsDepth),
  ];
}
/// Validates the form inputs, computes the scatter factors, interpolated
/// PDD and base MU, and shows either a results dialog or an error
/// snackbar.
Future<void> computeOutputs(
    BuildContext context,
    String particle,
    Map<String, Map<String, List<double>>> mapPdd,
    TextEditingController controllerDose,
    TextEditingController controllerX,
    TextEditingController controllerY,
    TextEditingController controllerBlock,
    TextEditingController controllerDepth,
    bool isBlock,
    ) async {
  bool isError = false;
  String strError = '';
  // Step 1: every relevant field must parse as a number (block only when
  // the block input is active).
  if (!areInputsValid(isBlock
      ? [
          controllerDose,
          controllerX,
          controllerY,
          controllerBlock,
          controllerDepth
        ]
      : [controllerDose, controllerX, controllerY, controllerDepth])) {
    isError = true;
    strError = 'Invalid inputs';
  } else {
    // Step 2: values must lie within the configured limits; report the
    // first violated limit.
    List<bool> withinLimits = inputsWithinLimits(controllerDose, controllerX,
        controllerY, controllerBlock, isBlock, controllerDepth);
    List<String> limitErrors = [
      'Dose out of limit',
      'Field size out of limit',
      'Field size out of limit',
      'Block out of limit',
      'Depth out of limit'
    ];
    if (!withinLimits.every((element) => element)) {
      isError = true;
      strError = limitErrors[withinLimits.indexOf(false)];
    }
  }
  if (!isError) {
    // Step 3: equivalent square of the open field, then the effective
    // equivalent square after removing the blocked fraction.
    double eqSqr = getEquivalentSquare(
        double.parse(controllerX.text), double.parse(controllerY.text));
    double effEqSqr = getEffectiveEquivalentSquare(
        isBlock ? 1 - double.parse(controllerBlock.text) / 100 : 1, eqSqr);
    // Collimator scatter uses the open equivalent square, patient
    // scatter the effective one.
    double? scatterCollimator = splines.getInterpolatedNFromLists(
        particles.scatterCollimator[particle]!.keys.toList(),
        particles.scatterCollimator[particle]!.values.toList(),
        eqSqr);
    double? scatterPatient = splines.getInterpolatedNFromLists(
        particles.scatterPatient[particle]!.keys.toList(),
        particles.scatterPatient[particle]!.values.toList(),
        effEqSqr);
    if (scatterCollimator != null && scatterPatient != null) {
      double? nPdd = getFieldSizeInterpolatedPddN(
          mapPdd, particle, effEqSqr, double.parse(controllerDepth.text));
      if (nPdd != null) {
        double baseMU = getBaseMU(double.parse(controllerDose.text),
            scatterCollimator, scatterPatient, nPdd);
        // Step 4: show all intermediate factors and the base MU.
        await showDialog(
          context: context,
          builder: (context) {
            return Dialog(
              child: Container(
                decoration: BoxDecoration(
                  color: Colors.black,
                  border: Border.all(color: Colors.green, width: 4),
                ),
                width: MediaQuery.of(context).size.width * 0.8,
                height: MediaQuery.of(context).size.height * 0.6,
                child: Column(
                  mainAxisAlignment: MainAxisAlignment.spaceEvenly,
                  children: [
                    Text(
                      'Eq-Sqr [cm]\n${eqSqr.toStringAsFixed(2)}',
                      style: Theme.of(context).textTheme.headline1,
                      textAlign: TextAlign.center,
                    ),
                    Text(
                      'Eff-Eq-Sqr [cm]\n${effEqSqr.toStringAsFixed(2)}',
                      style: Theme.of(context).textTheme.headline1,
                      textAlign: TextAlign.center,
                    ),
                    Divider(
                      color: Colors.green,
                      thickness: 2,
                    ),
                    Text(
                      'Sc: ${scatterCollimator.toStringAsFixed(3)}',
                      style: Theme.of(context).textTheme.headline1,
                      textAlign: TextAlign.center,
                    ),
                    Text(
                      'Sp: ${scatterPatient.toStringAsFixed(3)}',
                      style: Theme.of(context).textTheme.headline1,
                      textAlign: TextAlign.center,
                    ),
                    Text(
                      'PDD [%]: ${nPdd.toStringAsFixed(1)}',
                      style: Theme.of(context).textTheme.headline1,
                      textAlign: TextAlign.center,
                    ),
                    Divider(
                      color: Colors.green,
                      thickness: 2,
                    ),
                    Text(
                      'Base MU: ${baseMU.toStringAsFixed(0)}',
                      style: Theme.of(context).textTheme.headline2,
                      textAlign: TextAlign.center,
                    ),
                  ],
                ),
              ),
            );
          },
        );
      } else {
        isError = true;
        strError = 'Invalid PDD computed';
      }
    } else {
      isError = true;
      strError = 'Incorrect scatter computed';
    }
  }
  // Any failure along the way surfaces as a short snackbar.
  if (isError) {
    ScaffoldMessenger.of(context).showSnackBar(
      SnackBar(
        content: Text(
          strError,
          style: Theme.of(context).textTheme.headline1,
        ),
        duration: Duration(milliseconds: MUCalcApp.durationSnack),
      ),
    );
  }
}
|
package com.birdsgenesis.dto.nft;
import com.birdsgenesis.dto.meta.Metadata;
import com.birdsgenesis.dto.meta.NftAttribute;
import lombok.Getter;
import lombok.Setter;
@Getter
@Setter
public class Bird extends Nft {

    // One attribute per trait category of the bird NFT.
    private NftAttribute background;
    private NftAttribute eyes;
    private NftAttribute headWear;
    private NftAttribute beaks;
    private NftAttribute body;
    private NftAttribute bodyWear;
    private NftAttribute earFeathers;
    private NftAttribute extras;

    /**
     * Builds a bird from its raw token metadata.
     *
     * @param metadata source metadata for this token
     * @throws IllegalAccessException propagated from the {@link Nft}
     *         superclass constructor (presumably reflection-based
     *         attribute mapping — confirm against Nft)
     */
    public Bird(Metadata metadata) throws IllegalAccessException {
        super(metadata);
    }
}
|
import { mapStateToProps } from 'pages/Series';
// Snapshot test: mapStateToProps should derive the page props from a
// minimal series slice (empty shows list).
describe('containers/series', () => {
  it('should return the right stuff', () => {
    expect(mapStateToProps({ series: {
      shows: {
        items: [],
      },
    } })).toMatchSnapshot();
  });
});
|
import classes from './Cart.module.css';
import { useContext } from 'react';
import Modal from '../UI/Modal';
import CardItem from './CartItem/CartItem';
import { useSelector } from 'react-redux';
import { useDispatch } from 'react-redux';
import { cartActions } from '../../store/cart-slice';
const Cart = (props) => {
const dispatch = useDispatch();
const cartItemsState = useSelector((state) => state.cart.items);
console.log('CART -----> ', cartItemsState);
const cartAmt = useSelector((state) => state.cart.totalAmount);
console.log('CART AMT: ', cartAmt);
const totalAmt = `$${cartAmt.toFixed(2)}`;
const hasItems = Object.keys(cartItemsState).length > 0;
const cardItemRemoveHandler = (id) => {
dispatch(
cartActions.removeItem({
data: { id },
})
);
};
const cardItemAddHandler = (props) => {
dispatch(
cartActions.addItem({
data: {
id: props.id,
name: props.name,
amount: 1,
price: props.price,
},
})
);
};
const cartItems = (
<ul className={classes['cart-items']}>
{Object.values(cartItemsState).map((item) => (
<CardItem
key={item.id}
name={item.name}
amount={item.amount}
price={item.price}
onRemove={cardItemRemoveHandler.bind(null, item.id)}
onAdd={cardItemAddHandler.bind(null, item)}
/>
))}
</ul>
);
const CartVisiblityHandler = () => {
props.cartHandler(false);
};
let buttonMsg = 'Order';
if (!props.login) {
buttonMsg = 'Login in to order';
}
const checkOutHandler = () => {
props.loginHandler();
};
return (
<Modal cartHandler={props.cartHandler}>
{cartItems}
<div className={classes.total}>
<span>Total Amount</span>
<span>{totalAmt}</span>
</div>
<div className={classes.actions}>
<button
onClick={CartVisiblityHandler}
className={classes['button--alt']}
>
Close
</button>
{hasItems && (
<button onClick={checkOutHandler} className={classes.button}>
{buttonMsg}
</button>
)}
</div>
</Modal>
);
};
export default Cart;
|
package sensordata
//tag::logic[]
import akka.grpc.scaladsl.ServerReflection
//end::logic[]
import akka.http.scaladsl.server.Directives._
import akka.http.scaladsl.server.Route
import akka.http.scaladsl.server.directives.RouteDirectives
import cloudflow.akkastream._
//tag::logic[]
import cloudflow.akkastream.util.scaladsl.GrpcServerLogic
//end::logic[]
import cloudflow.streamlets._
import cloudflow.streamlets.proto.ProtoOutlet
//tag::logic[]
import sensordata.grpc.{ SensorData, SensorDataService, SensorDataServiceHandler }
//end::logic[]
//tag::logic[]
// Streamlet that serves the SensorData gRPC service (plus server
// reflection) and forwards received messages to the protobuf outlet.
class SensorDataIngress extends AkkaServerStreamlet {
  // ...
  //end::logic[]
  // Outlet distributing SensorData across partitions round-robin.
  val out = ProtoOutlet[SensorData]("out", RoundRobinPartitioner)
  def shape = StreamletShape.withOutlets(out)
  //tag::logic[]
  override def createLogic = new GrpcServerLogic(this) {
    override def handlers() =
      List(SensorDataServiceHandler.partial(new SensorDataServiceImpl(sinkRef(out))), ServerReflection.partial(List(SensorDataService)))
  }
  //end::logic[]
}
|
import Cascade from '../cascade/Cascade';
import { observable } from '../cascade/Decorators';
import { Component } from './Component';
// Test fixture: a view model with one observable property whose change
// should trigger a re-render of components that read it.
class ViewModel {
    runs: number = 0;
    @observable info: string = 'test';
}
interface IParentProps {
    viewModel: ViewModel;
}

// Test fixture: renders a Child bound to the view model's observable
// `info` field.
class Parent extends Component<IParentProps> {
    render() {
        return (
            <div id="parent">
                <Child id="child" info={this.props.viewModel.info}>
                    text
                </Child>
            </div>
        );
    }
}
interface IChildProps {
    id: string;
    info: string;
}

// Test fixture: leaf component echoing its `info` prop into a text node.
class Child extends Component<IChildProps> {
    render() {
        return <div id={this.props.id}>Custom Component - {this.props.info}</div>;
    }
}
describe('Component', function () {
    it('should update when observables change', async function () {
        const viewModel = new ViewModel();
        const container = document.createElement('div');
        // Rendered text captured before and after the observable changes.
        const runs: string[] = [];
        //document.body.appendChild(container);
        Cascade.render(container, <Parent viewModel={viewModel} />);
        // `let` + reassignment replaces the former duplicate `var` declaration.
        let child = container.querySelector('#child');
        runs.push((child.childNodes[1] as Text).data);
        await Cascade.set(viewModel, 'info', 'abcd');
        child = container.querySelector('#child');
        runs.push((child.childNodes[1] as Text).data);
        expect(runs[0]).toBe('test');
        expect(runs[1]).toBe('abcd');
    });
});
|
package de.trundicho.warp.reader.core.view.api.timer;
import de.trundicho.warp.reader.core.controller.WarpUpdater;
public interface WarpTimer extends WarpReaderTimer {

    /**
     * Advances to the next warp step, delegating the update to the given
     * {@link WarpUpdater}.
     *
     * @param warpUpdater receiver of the next warp update
     */
    void doNextWarp(WarpUpdater warpUpdater);
}
|
#!/usr/bin/env bash
# App Center pre-build hook: substitutes the bundle identifier and app
# version placeholders in Info.plist.

# Variables
PLIST_FILE="$APPCENTER_SOURCE_DIRECTORY/StatusChecker.iOS/Info.plist"
APPVERSION_FILE="$APPCENTER_SOURCE_DIRECTORY/appversion.txt"
# ./Variables

# Replacing Info.plist
if [ -e "$PLIST_FILE" ]
then
    echo "Updating configuration in Info.plist"
    # Quote all expansions so paths/values containing spaces do not
    # word-split or glob.
    sed -i '' 's/$(CFBundleIdentifier)/'"$PLIST_CFBUNDLEIDENTIFIER"'/' "$PLIST_FILE"

    if [ -e "$APPVERSION_FILE" ]
    then
        # First line of appversion.txt is the version number.
        read -r appVersionNumber < "$APPVERSION_FILE"
        echo "Updating App VersionNumber [$appVersionNumber]"
        sed -i '' 's/$(AppVersionNumber)/'"$appVersionNumber"'/' "$PLIST_FILE"
    fi
fi
# ./Replacing Info.plist
export 'src/blocks/async.dart';
export 'src/blocks/for.dart';
export 'src/blocks/html.dart';
export 'src/blocks/if.dart';
export 'src/blocks/key.dart';
export 'src/blocks/slot.dart';
export 'src/blocks/subcomponent.dart';
export 'src/core/component.dart' hide parentComponent;
export 'src/core/fragment.dart';
export 'src/core/snapshot.dart';
export 'src/core/value.dart';
export 'src/core/watchable.dart';
export 'src/dom/dom.dart';
export 'src/dom/mutation_observer.dart';
|
<?php
namespace Ekyna\Component\Commerce\Order\EventListener;
use Ekyna\Component\Commerce\Common\Model\SaleInterface;
use Ekyna\Component\Commerce\Exception\InvalidArgumentException;
use Ekyna\Component\Commerce\Order\Event\OrderEvents;
use Ekyna\Component\Commerce\Order\Model\OrderShipmentInterface;
use Ekyna\Component\Commerce\Shipment\EventListener\AbstractShipmentListener;
use Ekyna\Component\Resource\Event\ResourceEventInterface;
/**
* Class OrderShipmentListener
* @package Ekyna\Component\Commerce\Order\EventListener
* @author Etienne Dauvergne <[email protected]>
*/
class OrderShipmentListener extends AbstractShipmentListener
{
    /**
     * Schedules an order "content change" event for the given sale.
     *
     * @inheritdoc
     */
    protected function scheduleSaleContentChangeEvent(SaleInterface $sale)
    {
        $this->persistenceHelper->scheduleEvent(OrderEvents::CONTENT_CHANGE, $sale);
    }

    /**
     * Extracts the shipment from the resource event, enforcing that it is
     * an order shipment.
     *
     * @inheritdoc
     */
    protected function getShipmentFromEvent(ResourceEventInterface $event)
    {
        $resource = $event->getResource();

        if (!$resource instanceof OrderShipmentInterface) {
            throw new InvalidArgumentException("Expected instance of OrderShipmentInterface");
        }

        return $resource;
    }

    /**
     * Property path from a shipment to its owning sale (order).
     *
     * @inheritdoc
     */
    protected function getSalePropertyPath()
    {
        return 'order';
    }
}
|
import { AlunoModel } from './../Models/Aluno.model.js'
export class AlunosController {
constructor(service, view) {
view.render(service.alunos)
this.view = view
this.service = service
}
add(aluno) {
this.service.add(new AlunoModel(aluno))
this.view.render(this.service.alunos)
}
search(name) {
const data = this.service.search(name)
this.view.render(data)
}
} |
import { IdentityKeyPair } from './utils';
interface RefreshOptions {
  gaiaUrl: string;
}

/**
 * An account identity: its key pair, address and usernames, plus helpers
 * for deriving app keys and producing auth responses.
 */
export interface Identity {
  keyPair: IdentityKeyPair;
  address: string;
  usernames: string[];
  defaultUsername?: string;
  profile?: Profile;
  /** Resolves the URL of this identity's profile (presumably on the given Gaia hub — confirm in implementation). */
  profileUrl(gaiaUrl: string): Promise<string>;
  /** Derives the app-scoped private key for the given app domain. */
  appPrivateKey(appDomain: string): string;
  /** Fetches the names owned by this identity. */
  fetchNames(): Promise<string[]>;
  /** Refreshes cached identity state from the network. */
  refresh(opts: RefreshOptions): void;
  /** Builds a signed auth response granting the app access. */
  makeAuthResponse(options: {
    appDomain: string;
    gaiaUrl: string;
    transitPublicKey: string;
    scopes: string[] | undefined;
    stxAddress: string | undefined;
  }): Promise<string>;
}

// schema.org constants used to tag profile JSON documents.
const PERSON_TYPE = 'Person';
const CONTEXT = 'http://schema.org';
const IMAGE_TYPE = 'ImageObject';

export interface ProfileImage {
  '@type': typeof IMAGE_TYPE;
  name: string;
  contentUrl: string;
}

export interface Profile {
  '@type': typeof PERSON_TYPE;
  '@context': typeof CONTEXT;
  // App storage bucket URLs keyed by app origin.
  apps?: {
    [origin: string]: string;
  };
  appsMeta?: {
    [origin: string]: {
      publicKey: string;
      storage: string;
    };
  };
  name?: string;
  image?: ProfileImage[];
  [key: string]: any;
}
|
<?php
// Session bootstrap: requires an authenticated session, then exposes the
// session data as local variables for the including page.

//Validado o login — check FIRST so no session data is read (or leaked)
// for unauthenticated requests.
if( !isset($_SESSION['logado']) || $_SESSION['logado'] != true) {
    unset($_SESSION['logado']);
    $_SESSION['mensagem'] = "É necessário estar logado para entrar";
    header("location: ../login/entrar.php");
    exit;
}

// Null coalescing avoids undefined-index notices while yielding the same
// null value a missing key produced before.
$empresa = $_SESSION['empresa'] ?? null;
$nomeUsuario = $_SESSION['usuario'] ?? null;
$statusUsuario = $_SESSION['status'] ?? null;
$funcao = $_SESSION['funcao'] ?? null;
$nivel = $_SESSION['nivel'] ?? null;
$nomeEquipe = $_SESSION['equipe'] ?? null;
$nomePrestadora = $_SESSION['prestadora'] ?? null;
$acessoSP = $_SESSION['acessoSP'] ?? null;
$acessoBR = $_SESSION['acessoBR'] ?? null;
$acessoSL = $_SESSION['acessoSL'] ?? null;
$acessoRX = $_SESSION['acessoRX'] ?? null;
$acessoMT = $_SESSION['acessoMT'] ?? null;
$acessoUTI = $_SESSION['acessoUTI'] ?? null;
$acessoPROPOSTA = $_SESSION['acessoPROPOSTA'] ?? null;
$acessoPROSPECTS = $_SESSION['acessoPROSPECTS'] ?? null;
$acessoME = $_SESSION['acessoME'] ?? null;
$acessoCE = $_SESSION['acessoCE'] ?? null;
$acessoCLARO = $_SESSION['acessoCLARO'] ?? null;
$acessoOportunidadeClaro = $_SESSION['acessoOportunidadeClaro'] ?? null;
$acessoCANC = $_SESSION['acessoCANC'] ?? null;
$acessoMultibase = $_SESSION['acessoMultibase'] ?? null;
$acessoOportunidadeSite = $_SESSION['acessoOportunidadeSite'] ?? null;
$acessoOportunidadeSAC = $_SESSION['acessoOportunidadeSAC'] ?? null;
$acessoLEADSITE = $_SESSION['acessoLEADSITE'] ?? null;
$acessoVENDARS = $_SESSION['acessoVENDARS'] ?? null;
$acessoVIVO = $_SESSION['acessoVIVO'] ?? null;
$acessoTIM = $_SESSION['acessoTIM'] ?? null;
$acessoNET = $_SESSION['acessoNET'] ?? null;
$acessoHUGHES = $_SESSION['acessoHUGHES'] ?? null;
$acessoGERAL = $_SESSION['acessoGERAL'] ?? null;
$acessoPARCEIROS = $_SESSION['acessoPARCEIROS'] ?? null;
$acessoTODOS = $_SESSION['acessoTODOS'] ?? null;
$limpar = '.';
//echo $limpar;
//exit;
|
import os, glob, shelve
from xlrd import open_workbook, XL_CELL_TEXT, XL_CELL_NUMBER, XL_CELL_DATE
import matplotlib.pylab as plt
#SRCDIR="""\\nsls2fs\Accelerator\MagnetMeasurements\RotatingCoil\Latest_Data"""
#SRCDIR="""Y:\MagnetMeasurements\RotatingCoil\Latest_Data"""
SRCDIR="./MagnetMeasurements/RotatingCoil/Latest_Data"
# Expected header rows of a rotating-coil XLS file, in sheet order:
# (row label in column 0, value converter, expected xlrd cell type).
rotcoil_header = [
    ("Magnet Type", str, XL_CELL_TEXT),
    ("Alias", str, XL_CELL_TEXT),
    ("Vendor ID", str, XL_CELL_TEXT),
    ("Serial Number", int, XL_CELL_NUMBER),
    ("Measuring_Coil_ID", int, XL_CELL_NUMBER),
    ("Reference_Radius", float, XL_CELL_NUMBER),
    ("Magnet Notes", str, XL_CELL_TEXT),
    ("LoginName", str, XL_CELL_TEXT),
    ("Conditioning Current", float, XL_CELL_NUMBER),
    ("", None, None),
    ("Measured_at_Location", str, XL_CELL_TEXT)]
# NOTE(review): file_header duplicates rotcoil_header entry-for-entry —
# consider deriving one from the other if they are meant to stay in sync.
file_header = [
    ("Magnet Type", str, XL_CELL_TEXT),
    ("Alias", str, XL_CELL_TEXT),
    ("Vendor ID", str, XL_CELL_TEXT),
    ("Serial Number", int, XL_CELL_NUMBER),
    ("Measuring_Coil_ID", int, XL_CELL_NUMBER),
    ("Reference_Radius", float, XL_CELL_NUMBER),
    ("Magnet Notes", str, XL_CELL_TEXT),
    ("LoginName", str, XL_CELL_TEXT),
    ("Conditioning Current", float, XL_CELL_NUMBER),
    ("", None, None),
    ("Measured_at_Location", str, XL_CELL_TEXT)]
# Data columns of the measurement table: (column name, converter,
# expected xlrd cell type). a1-a21/b1-b21 are harmonic coefficients.
rotcoil_col = [
    ("Measured_at_Location", str, XL_CELL_TEXT),
    ("Run Number", int, XL_CELL_NUMBER),
    ("SubDevice", str, XL_CELL_TEXT),
    ("Current_1", float, XL_CELL_NUMBER),
    ("Current_2", float, XL_CELL_NUMBER),
    ("Current_3", float, XL_CELL_NUMBER),
    ("Up_Dn1", str, XL_CELL_TEXT),
    ("Up_Dn2", str, XL_CELL_TEXT),
    ("Up_Dn3", str, XL_CELL_TEXT),
    ("AnalysisNum", int, XL_CELL_NUMBER),
    ("Int_Trans_Func", float, XL_CELL_NUMBER),
    ("OriginOffset_X", float, XL_CELL_NUMBER),
    ("OriginOffset_Y", float, XL_CELL_NUMBER),
    ("B_ref_Int", float, XL_CELL_NUMBER),
    ("Roll_Angle", float, XL_CELL_NUMBER),
    ("Meas_notes", str, XL_CELL_TEXT),
    ("Meas_Date_Time", str, XL_CELL_TEXT),
    ("Author", str, XL_CELL_TEXT),
    ("a1", float, XL_CELL_NUMBER),
    ("a2", float, XL_CELL_NUMBER),
    ("a3", float, XL_CELL_NUMBER),
    ("a4", float, XL_CELL_NUMBER),
    ("a5", float, XL_CELL_NUMBER),
    ("a6", float, XL_CELL_NUMBER),
    ("a7", float, XL_CELL_NUMBER),
    ("a8", float, XL_CELL_NUMBER),
    ("a9", float, XL_CELL_NUMBER),
    ("a10", float, XL_CELL_NUMBER),
    ("a11", float, XL_CELL_NUMBER),
    ("a12", float, XL_CELL_NUMBER),
    ("a13", float, XL_CELL_NUMBER),
    ("a14", float, XL_CELL_NUMBER),
    ("a15", float, XL_CELL_NUMBER),
    ("a16", float, XL_CELL_NUMBER),
    ("a17", float, XL_CELL_NUMBER),
    ("a18", float, XL_CELL_NUMBER),
    ("a19", float, XL_CELL_NUMBER),
    ("a20", float, XL_CELL_NUMBER),
    ("a21", float, XL_CELL_NUMBER),
    ("b1", float, XL_CELL_NUMBER),
    ("b2", float, XL_CELL_NUMBER),
    ("b3", float, XL_CELL_NUMBER),
    ("b4", float, XL_CELL_NUMBER),
    ("b5", float, XL_CELL_NUMBER),
    ("b6", float, XL_CELL_NUMBER),
    ("b7", float, XL_CELL_NUMBER),
    ("b8", float, XL_CELL_NUMBER),
    ("b9", float, XL_CELL_NUMBER),
    ("b10", float, XL_CELL_NUMBER),
    ("b11", float, XL_CELL_NUMBER),
    ("b12", float, XL_CELL_NUMBER),
    ("b13", float, XL_CELL_NUMBER),
    ("b14", float, XL_CELL_NUMBER),
    ("b15", float, XL_CELL_NUMBER),
    ("b16", float, XL_CELL_NUMBER),
    ("b17", float, XL_CELL_NUMBER),
    ("b18", float, XL_CELL_NUMBER),
    ("b19", float, XL_CELL_NUMBER),
    ("b20", float, XL_CELL_NUMBER),
    ("b21", float, XL_CELL_NUMBER),
    ("Data Issues", int, XL_CELL_NUMBER)]
# Known sub-device names appearing in the measurement files.
devices = ['Vert Field Dipole', 'Hor Field Dipole',
           'Skew Quad', 'Sextupole', 'Quadrupole']
def rotcoil_col_index(cols):
    """Map data-column names to their indices in the ``rotcoil_col`` table.

    Parameters
    ----------
    cols : iterable of str
        Column names as they appear in the measurement sheet.

    Returns
    -------
    list of int
        Index of each name within the module-level ``rotcoil_col`` table.

    Raises
    ------
    RuntimeError
        If a name does not appear in ``rotcoil_col``.
    """
    idx = []
    for c in cols:
        for j, h in enumerate(rotcoil_col):
            if h[0] == c:
                idx.append(j)
                break
        else:
            # for/else: the inner loop finished without a break, i.e. no match
            raise RuntimeError("unknown data column: {0}".format(c))
    return idx
def _rc_read_header(fname):
    """Read the header block of a rotating-coil XLS file as a dict.

    The first sheet is expected to carry one header entry per row: the name
    in column 0 and the value in column 1, in the exact order given by the
    module-level ``rotcoil_header`` table.

    Parameters
    ----------
    fname : str
        Path to the XLS file.

    Returns
    -------
    dict
        Header name -> converted value (rows with an empty value cell are
        skipped).

    Raises
    ------
    RuntimeError
        If a row's name or cell type disagrees with ``rotcoil_header``.
    """
    # read the whole file ourselves so the handle is closed deterministically
    # (the previous open(...).read() leaked the file object)
    with open(fname, 'rb') as f:
        fstr = f.read()
    wb = open_workbook(file_contents=fstr)
    sht = wb.sheet_by_index(0)
    ret = {}
    for i, h in enumerate(rotcoil_header):
        cname, cfc, ctype2 = h
        # column 0 must carry the expected header name for this row
        if sht.cell(i, 0).value != cname:
            raise RuntimeError("Different Header info: {0} {1}".format(
                sht.cell(i, 0), h))
        # a non-empty value cell must have the expected xlrd cell type
        if sht.cell(i, 1).value and sht.cell(i, 1).ctype != ctype2:
            raise RuntimeError("Different Header info: {0} {1}".format(
                sht.cell(i, 0), h))
        elif sht.cell(i, 1).value:
            # convert with the declared conversion function (int/float/str)
            ret[cname] = cfc(sht.cell(i, 1).value)
    return ret
# check data header
def scan_rotating_coil(srcdir=SRCDIR, nmax=None, shelve_file="headers.shelve"):
    """Walk ``srcdir`` and collect each measurement file's XLS header.

    Parameters
    ----------
    srcdir : str
        Root directory to scan recursively.
    nmax : int or None
        If given, stop once more than ``nmax`` files have been read.
    shelve_file : str
        Path of the shelve database the headers are cached in
        (defaults to the previously hard-coded ``headers.shelve``).

    Returns
    -------
    dict
        Mapping of file path -> header dict (see ``_rc_read_header``).
    """
    recs = {}
    for root, dirs, files in os.walk(srcdir):
        for f in files:
            # zip archives are containers, not measurement sheets
            if f.endswith(".zip"):
                continue
            fname = os.path.join(root, f)
            recs[fname] = _rc_read_header(fname)
            if nmax and len(recs) > nmax:
                break
        if nmax and len(recs) > nmax:
            break
    # cache the result so later runs can skip the (slow) directory scan;
    # close the shelve even if the write fails
    d = shelve.open(shelve_file)
    try:
        d["headers"] = recs
    finally:
        d.close()
    return recs
|
## 0.0.1+2
* Fix `\frac` line positioning.
## 0.0.1+1
* Expanded color support to include HTML color keywords and hexadecimal strings.
* Improved documentation.
## 0.0.1
* Initial release of the CaTeX package.
## 0.0.0
* Placeholder version.
|
export default function validate(values, props) {
let { attributes } = props;
values.members === undefined ? (values.members = {}) : "";
const errors = { members: {} };
if (!values.name) {
errors.name = "Name is required";
}
if (attributes.find(el => el.name === values.name)) {
errors.name = `Another attribute already owns the name ${values.name}, please provide a different one.`;
}
if (!values.members.unit_measurement) {
errors.members["unit_measurement"] = "Unit of Measurement is required";
}
if (!values.members.min_range) {
errors.members["min_range"] = "Min range is required";
}
if (!values.members.max_range) {
errors.members["max_range"] = "Max range is required";
}
if (
parseFloat(values.members.max_range) < parseFloat(values.members.min_range)
) {
errors.members["max_range"] = "Max range must be greater than min range";
}
if (
parseFloat(values.members.min_range) > parseFloat(values.members.max_range)
) {
errors.members["min_range"] = "Min range must be lower than max range";
}
if (values.members.min_range && values.members.max_range) {
if (
!_.inRange(
parseFloat(values.members.precision),
parseFloat(values.members.min_range),
parseFloat(values.members.max_range) + 1
)
) {
errors.members[
"precision"
] = `Precision must be in a range between ${values.members
.min_range} and ${values.members.max_range}`;
}
if (
!_.inRange(
parseFloat(values.members.accuracy),
parseFloat(values.members.min_range),
parseFloat(values.members.max_range )+ 1
)
) {
errors.members["accuracy"] = `Accuracy must be in a range between ${values
.members.min_range} and ${values.members.max_range}`;
}
}
return errors;
}
|
begin_unit|revision:0.9.5;language:Java;cregit-version:0.0.1
begin_package
DECL|package|org.jabref.logic.cleanup
package|package
name|org
operator|.
name|jabref
operator|.
name|logic
operator|.
name|cleanup
package|;
end_package
begin_import
import|import
name|java
operator|.
name|nio
operator|.
name|file
operator|.
name|Paths
import|;
end_import
begin_import
import|import
name|java
operator|.
name|util
operator|.
name|ArrayList
import|;
end_import
begin_import
import|import
name|java
operator|.
name|util
operator|.
name|Collections
import|;
end_import
begin_import
import|import
name|java
operator|.
name|util
operator|.
name|List
import|;
end_import
begin_import
import|import
name|java
operator|.
name|util
operator|.
name|Objects
import|;
end_import
begin_import
import|import
name|java
operator|.
name|util
operator|.
name|Optional
import|;
end_import
begin_import
import|import
name|org
operator|.
name|jabref
operator|.
name|logic
operator|.
name|util
operator|.
name|io
operator|.
name|FileUtil
import|;
end_import
begin_import
import|import
name|org
operator|.
name|jabref
operator|.
name|model
operator|.
name|FieldChange
import|;
end_import
begin_import
import|import
name|org
operator|.
name|jabref
operator|.
name|model
operator|.
name|cleanup
operator|.
name|CleanupJob
import|;
end_import
begin_import
import|import
name|org
operator|.
name|jabref
operator|.
name|model
operator|.
name|database
operator|.
name|BibDatabaseContext
import|;
end_import
begin_import
import|import
name|org
operator|.
name|jabref
operator|.
name|model
operator|.
name|entry
operator|.
name|BibEntry
import|;
end_import
begin_import
import|import
name|org
operator|.
name|jabref
operator|.
name|model
operator|.
name|entry
operator|.
name|LinkedFile
import|;
end_import
begin_import
import|import
name|org
operator|.
name|jabref
operator|.
name|model
operator|.
name|metadata
operator|.
name|FilePreferences
import|;
end_import
begin_class
DECL|class|RelativePathsCleanup
specifier|public
class|class
name|RelativePathsCleanup
implements|implements
name|CleanupJob
block|{
DECL|field|databaseContext
specifier|private
specifier|final
name|BibDatabaseContext
name|databaseContext
decl_stmt|;
DECL|field|filePreferences
specifier|private
specifier|final
name|FilePreferences
name|filePreferences
decl_stmt|;
DECL|method|RelativePathsCleanup (BibDatabaseContext databaseContext, FilePreferences filePreferences)
specifier|public
name|RelativePathsCleanup
parameter_list|(
name|BibDatabaseContext
name|databaseContext
parameter_list|,
name|FilePreferences
name|filePreferences
parameter_list|)
block|{
name|this
operator|.
name|databaseContext
operator|=
name|Objects
operator|.
name|requireNonNull
argument_list|(
name|databaseContext
argument_list|)
expr_stmt|;
name|this
operator|.
name|filePreferences
operator|=
name|Objects
operator|.
name|requireNonNull
argument_list|(
name|filePreferences
argument_list|)
expr_stmt|;
block|}
annotation|@
name|Override
DECL|method|cleanup (BibEntry entry)
specifier|public
name|List
argument_list|<
name|FieldChange
argument_list|>
name|cleanup
parameter_list|(
name|BibEntry
name|entry
parameter_list|)
block|{
name|List
argument_list|<
name|LinkedFile
argument_list|>
name|fileList
init|=
name|entry
operator|.
name|getFiles
argument_list|()
decl_stmt|;
name|List
argument_list|<
name|LinkedFile
argument_list|>
name|newFileList
init|=
operator|new
name|ArrayList
argument_list|<>
argument_list|()
decl_stmt|;
name|boolean
name|changed
init|=
literal|false
decl_stmt|;
for|for
control|(
name|LinkedFile
name|fileEntry
range|:
name|fileList
control|)
block|{
name|String
name|oldFileName
init|=
name|fileEntry
operator|.
name|getLink
argument_list|()
decl_stmt|;
name|String
name|newFileName
init|=
name|FileUtil
operator|.
name|relativize
argument_list|(
name|Paths
operator|.
name|get
argument_list|(
name|oldFileName
argument_list|)
argument_list|,
name|databaseContext
operator|.
name|getFileDirectoriesAsPaths
argument_list|(
name|filePreferences
argument_list|)
argument_list|)
operator|.
name|toString
argument_list|()
decl_stmt|;
name|LinkedFile
name|newFileEntry
init|=
name|fileEntry
decl_stmt|;
if|if
condition|(
operator|!
name|oldFileName
operator|.
name|equals
argument_list|(
name|newFileName
argument_list|)
condition|)
block|{
name|newFileEntry
operator|=
operator|new
name|LinkedFile
argument_list|(
name|fileEntry
operator|.
name|getDescription
argument_list|()
argument_list|,
name|newFileName
argument_list|,
name|fileEntry
operator|.
name|getFileType
argument_list|()
argument_list|)
expr_stmt|;
name|changed
operator|=
literal|true
expr_stmt|;
block|}
name|newFileList
operator|.
name|add
argument_list|(
name|newFileEntry
argument_list|)
expr_stmt|;
block|}
if|if
condition|(
name|changed
condition|)
block|{
name|Optional
argument_list|<
name|FieldChange
argument_list|>
name|change
init|=
name|entry
operator|.
name|setFiles
argument_list|(
name|newFileList
argument_list|)
decl_stmt|;
if|if
condition|(
name|change
operator|.
name|isPresent
argument_list|()
condition|)
block|{
return|return
name|Collections
operator|.
name|singletonList
argument_list|(
name|change
operator|.
name|get
argument_list|()
argument_list|)
return|;
block|}
else|else
block|{
return|return
name|Collections
operator|.
name|emptyList
argument_list|()
return|;
block|}
block|}
return|return
name|Collections
operator|.
name|emptyList
argument_list|()
return|;
block|}
block|}
end_class
end_unit
|
// Line-chart helper: renders `datas` (y values) against `names` (x labels)
// as a filled Chart.js line chart on the canvas with the given id.
// ttl is the chart title, ttl2 the dataset legend label.
function createLine(id, names, datas, ttl, ttl2) {
    var presets = window.chartColors;
    var utils = Samples.utils;

    var options = {
        maintainAspectRatio: false,
        spanGaps: true,
        elements: {
            line: {
                tension: 0.4
            }
        },
        plugins: {
            filler: {
                propagate: false
            }
        },
        scales: {
            xAxes: [{
                ticks: {
                    autoSkip: false,
                    maxRotation: 0
                }
            }]
        }
    };

    // reset the random seed so utils-derived values stay reproducible
    utils.srand(8);
    new Chart(id, {
        type: 'line',
        data: {
            labels: names,
            datasets: [{
                backgroundColor: utils.transparentize(presets.blue),
                borderColor: presets.blue,
                data: datas,
                label: ttl2,
                // fill the area between the line and the chart origin
                fill: 'start'
            }]
        },
        options: utils.merge(options, {
            title: {
                text: ttl,
                display: true
            }
        })
    });
}
// Pie-chart helper: renders `dataList` (values, labelled by `names`) as an
// ECharts pie chart inside the DOM element with the given id. Each slice's
// label gets its percentage of the total appended on a new line.
function createPei(id, names, dataList) {
    // grand total of all slices, used to express each one as a percentage
    var count = 0;
    for (var i in dataList) {
        count += dataList[i];
    }

    // Format a value as its percentage of the total, e.g. "12.3%" — the first
    // four characters of the decimal expansion, as before. slice() replaces
    // the deprecated substr(); both are identical for a 0 start index.
    function getBl(data) {
        return ((data / count) * 100).toString().slice(0, 4) + '%';
    }

    // Build the ECharts pie series data: one {value, name} entry per slice.
    function GetPieData(names, dataList) {
        var data_arr = [];
        for (var i2 in dataList) {
            data_arr.push({
                value: dataList[i2],
                name: names[i2] + "\n" + getBl(dataList[i2])
            });
        }
        return data_arr;
    }

    // Returns the ECharts option object. Only the 'pie' branch is used by this
    // helper; the bar/line branch is kept as a customizable template.
    var getOption = function(chartType) {
        var chartOption = chartType == 'pie' ?
            {
                calculable: false,
                series: [{
                    name: '访问来源',
                    type: 'pie',
                    radius: '65%',
                    center: ['50%', '50%'],
                    data: GetPieData(names, dataList)
                }]
            } : {
                legend: {
                    data: ['价格'] // customize here
                },
                grid: {
                    x: 35,
                    x2: 10,
                    y: 30,
                    y2: 25
                },
                toolbox: {
                    show: false,
                    feature: {
                        mark: {
                            show: true
                        },
                        dataView: {
                            show: true,
                            readOnly: false
                        },
                        magicType: {
                            show: true,
                            type: ['line', 'bar']
                        },
                        restore: {
                            show: true
                        },
                        saveAsImage: {
                            show: true
                        }
                    }
                },
                calculable: false,
                xAxis: [{
                    type: 'category',
                    // customize here
                    data: ['1月', '2月', '3月', '4月', '5月', '6月', '7月', '8月', '9月', '10月', '11月', '12月']
                }],
                yAxis: [{
                    type: 'value',
                    splitArea: {
                        show: true
                    }
                }],
                series: [{
                    name: '价格', // customize here
                    type: chartType,
                    data: [2.0, 4.9, 7.0, 23.2, 25.6, 76.7, 135.6, 162.2, 32.6, 20.0, 6.4, 3.3] // customize here
                }]
            };
        return chartOption;
    };
    var byId = function(id) {
        return document.getElementById(id);
    };
    var pieChart = echarts.init(byId(id));
    pieChart.setOption(getOption('pie'));
}
|
# Introduction to Load Testing with Gatling
Source-code from the Introduction to Load Testing with [Gatling](http://gatling.io) articles.
<li>The first article can be found here: https://www.ivankrizsan.se/2016/04/16/introduction-to-load-testing-with-gatling-part-1/</li>
<li>The second article can be found here: https://www.ivankrizsan.se/2016/04/24/introduction-to-load-testing-with-gatling-part-2/</li>
<li>The third article can be found here: https://www.ivankrizsan.se/2016/04/26/introduction-to-load-testing-with-gatling-part-3/</li>
<li>The fourth and final article can be found here: https://www.ivankrizsan.se/2016/05/06/introduction-to-load-testing-with-gatling-part-4/</li>
|
package org.gotson.komga.infrastructure.datasource
import com.zaxxer.hikari.HikariDataSource
import org.gotson.komga.infrastructure.configuration.KomgaProperties
import org.springframework.boot.autoconfigure.jdbc.DataSourceProperties
import org.springframework.boot.context.properties.ConfigurationProperties
import org.springframework.boot.jdbc.DataSourceBuilder
import org.springframework.context.annotation.Bean
import org.springframework.context.annotation.Configuration
import org.springframework.context.annotation.Primary
import org.springframework.data.jdbc.repository.config.AbstractJdbcConfiguration
import org.springframework.data.relational.core.dialect.Dialect
import org.springframework.data.relational.core.dialect.H2Dialect
import org.springframework.jdbc.core.namedparam.NamedParameterJdbcOperations
import javax.sql.DataSource
// Wires up the two JDBC data sources used by the application: a primary
// SQLite database (path taken from KomgaProperties) and a secondary H2
// database configured from spring.datasource properties.
@Configuration
class DataSourcesConfiguration(
  private val komgaProperties: KomgaProperties
) : AbstractJdbcConfiguration() {

  // Primary SQLite pool. foreign_keys=on turns on FK enforcement in SQLite.
  @Bean("sqliteDataSource")
  @Primary
  fun sqliteDataSource(): DataSource =
    (
      DataSourceBuilder.create()
        .apply {
          driverClassName("org.sqlite.JDBC")
          url("jdbc:sqlite:${komgaProperties.database.file}?foreign_keys=on;")
        }.type(HikariDataSource::class.java)
        .build() as HikariDataSource
      )
      // pool capped at a single connection (presumably because SQLite allows
      // only one writer at a time -- TODO confirm)
      .apply { maximumPoolSize = 1 }

  // Binds spring.datasource.* properties for the secondary H2 database.
  @Bean
  @Primary
  @ConfigurationProperties(prefix = "spring.datasource")
  fun h2DataSourceProperties() = DataSourceProperties()

  @Bean("h2DataSource")
  fun h2DataSource(): DataSource =
    h2DataSourceProperties().initializeDataSourceBuilder().type(HikariDataSource::class.java).build()

  // Spring Data JDBC dialect used for generated queries.
  @Bean
  override fun jdbcDialect(operations: NamedParameterJdbcOperations): Dialect = H2Dialect.INSTANCE
}
|
package com.hendraanggrian.javapoet.dsl
import com.google.common.truth.Truth
import com.squareup.javapoet.TypeName
import com.hendraanggrian.javapoet.asTypeName
import kotlin.test.Test
class TypeNameHandlerTest {
    // Handler under test, backed by a plain mutable list so its final
    // contents can be asserted directly.
    private val list = TypeNameHandler(mutableListOf())

    @Test
    fun test() {
        // Each supported add style should normalise its argument to a TypeName.
        list += TypeName.CHAR       // raw JavaPoet TypeName
        list += Double::class.java  // java.lang.Class
        list += Boolean::class      // Kotlin KClass
        list.add<String>()          // reified type parameter
        Truth.assertThat(list).containsExactly(
            TypeName.CHAR,
            Double::class.java.asTypeName(),
            Boolean::class.asTypeName(),
            String::class.asTypeName()
        )
    }
}
-- Exercise charset() on string literals under several session character
-- sets; each query prints the reported charset plus an OK/NOK self-check.
set names iso88591;
select charset('abc'), if(charset('abc')='iso88591', 'OK','NOK');
select charset(''), if(charset('')='iso88591', 'OK','NOK');
select charset('~*. '), if(charset('~*. ')='iso88591', 'OK','NOK');
-- same literals must now report utf8
set names utf8;
select charset('abc'), if(charset('abc')='utf8', 'OK','NOK');
select charset(''), if(charset('')='utf8', 'OK','NOK');
select charset('~*. '), if(charset('~*. ')='utf8', 'OK','NOK');
-- and euckr
set names euckr;
select charset('abc'), if(charset('abc')='euckr', 'OK','NOK');
select charset(''), if(charset('')='euckr', 'OK','NOK');
select charset('~*. '), if(charset('~*. ')='euckr', 'OK','NOK');
-- restore the initial session charset
set names iso88591;
|
package com.example.androiddevchallenge.ui.components
import androidx.compose.material.MaterialTheme
import androidx.compose.runtime.Composable
import com.example.androiddevchallenge.R
// Picks the welcome illustration drawable matching the active theme.
@Composable
fun logoResource(): Int =
    when {
        MaterialTheme.colors.isLight -> R.drawable.ic_light_welcome_illos
        else -> R.drawable.ic_dark_welcome_illos
    }
// Picks the welcome-screen background drawable matching the active theme.
@Composable
fun welcomeBackgroundResource(): Int =
    when {
        MaterialTheme.colors.isLight -> R.drawable.ic_light_welcome_bg
        else -> R.drawable.ic_dark_welcome_bg
    }
// Picks the Bloom logo drawable matching the active theme.
@Composable
fun bloomLogoRessource(): Int =
    when {
        MaterialTheme.colors.isLight -> R.drawable.ic_light_logo
        else -> R.drawable.ic_dark_logo
    }
|
from wtforms.widgets import HTMLString
class InlineWidget(object):
    """
    Renders a list of fields inline.

    Intended for fields that wrap many inner fields as subfields: the widget
    iterates the field to reach the subfields and calls each one to render it.

    If ``prefix_label`` is true, every subfield's label is emitted before the
    rendered subfield; otherwise after it (useful for radios or checkboxes).
    """

    def __init__(self, prefix_label=True):
        self.prefix_label = prefix_label

    def __call__(self, field, **kwargs):
        # default the HTML id to the field's own id unless the caller set one
        kwargs.setdefault('id', field.id)
        parts = ['<div>']
        for subfield in field:
            # choose label-first or control-first ordering per prefix_label
            pair = (
                (subfield.label, subfield(**kwargs))
                if self.prefix_label
                else (subfield(**kwargs), subfield.label)
            )
            parts.append('<span>%s %s</span> ' % pair)
        parts.append('</div>')
        return HTMLString(''.join(parts))
|
<?php
namespace PHPfriends\SimplePdf\LowLevelParts;
use PHPfriends\SimplePdf\Common\GetAliasInterface;
// Represents a PDF content stream: text and drawing operators accumulated as
// a raw string and emitted as a stream object by dump().
class Content implements PartInterface
{
    use LazyReferenceTrait;

    /** @var string raw PDF content-stream operators accumulated so far */
    protected $stream;

    /**
     * Starts with an empty content stream.
     */
    public function __construct()
    {
        $this->stream = '';
    }

    /**
     * Replaces the whole content stream with the given operators.
     *
     * @param string $stream
     */
    public function setStream($stream)
    {
        $this->stream = $stream;
    }

    /**
     * Appends raw operators to the content stream.
     *
     * @param string $stream
     */
    public function addStream($stream)
    {
        $this->stream .= $stream;
    }

    /**
     * Appends a PDF text block (BT ... ET) that shows $text at ($x, $y).
     *
     * @param float $x horizontal position of the text origin
     * @param float $y vertical position of the text origin
     * @param GetAliasInterface|string $font font resource alias, or an object exposing one
     * @param float $fontSize
     * @param string $text text to show; assumed already escaped for PDF string syntax -- TODO confirm
     * @param array $options keys handled by developOptions() ('word_spacing', 'char_spacing')
     */
    public function addText($x, $y, $font, $fontSize, $text, $options = [])
    {
        // accept either a plain alias string or anything carrying an alias
        $fontName = ($font instanceof GetAliasInterface) ? $font->getAlias() : $font;
        $this->stream .= sprintf(
            "BT\r\n%.2F %.2F Td\r\n/%s %d Tf\r\n%s(%s) Tj\r\nET\r\n",
            $x, $y,
            $fontName, $fontSize,
            $this->developOptions($options),
            $text
        );
    }

    /**
     * Appends a stroked rectangle outline.
     *
     * @param float $x
     * @param float $y
     * @param float $w
     * @param float $h
     * @param string $color stroke color as a PDF RGB triple string, e.g. '0 0 0'
     * @param int $stroke line width in user-space units
     */
    public function addRectangle($x, $y, $w, $h, $color = '0 0 0', $stroke = 4)
    {
        // q/Q save and restore the graphics state around the stroke settings
        $this->stream .= sprintf(
            "%d w\r\nq\r\n%s RG\r\n%f %f %f %f re\r\nS\r\nQ\r\n",
            $stroke,
            $color,
            $x, $y, $w, $h
        );
    }

    /**
     * Translates the supported option keys into their PDF text-state operators.
     *
     * @param array $options
     * @return string operator lines, empty when no supported key is present
     */
    private function developOptions($options)
    {
        $result = '';
        foreach ($options as $optionName => $optionValue){
            switch ($optionName){
                case 'word_spacing':
                    $result .= $optionValue." Tw\n";
                    break;
                case 'char_spacing':
                    $result .= $optionValue." Tc\n";
                    break;
            }
        }
        return $result;
    }

    /**
     * Serialises the stream as a PDF stream object with a Length entry.
     *
     * NOTE(review): Length is computed from the untrimmed stream while the
     * emitted body is trim()med, so the two may disagree -- verify.
     *
     * @return string
     */
    public function dump()
    {
        $header = new Dictionary();
        $result = "stream\r\n".trim($this->stream)."\r\nendstream\r\n";
        $header->addItem('Length', new PdfNumber(strlen($this->stream)));
        $result = $header->dump()."\r\n".$result;
        return $result;
    }

    /**
     * Stable identity string based on the dumped content.
     *
     * @return string
     */
    public function __toString()
    {
        return 'Content '.md5($this->dump());
    }
}
# Welcome to the Hippo Project
[Visit us! Hippoapp](http://fathomless-journey-9978.herokuapp.com/) |
<?php
declare(strict_types=1);
namespace Antidot\Logger\Container\Config;
use Antidot\Logger\Application\Http\Middleware\ExceptionLoggerMiddleware;
use Antidot\Logger\Application\Http\Middleware\RequestLoggerMiddleware;
use Antidot\Logger\Container\MonologFactory;
use Psr\Log\LoggerInterface;
// Exposes the dependency configuration of the logger package to the
// application container.
class ConfigProvider
{
    /**
     * Builds the container configuration: two invokable middlewares plus a
     * factory-created PSR-3 logger.
     *
     * @return array<mixed>
     */
    public function __invoke(): array
    {
        $services = [
            RequestLoggerMiddleware::class => RequestLoggerMiddleware::class,
            ExceptionLoggerMiddleware::class => ExceptionLoggerMiddleware::class,
        ];
        $factories = [
            LoggerInterface::class => MonologFactory::class,
        ];

        return [
            'services' => $services,
            'factories' => $factories,
        ];
    }
}
|
#
# Table structure for table 'radippool'
#
# Pool of IP addresses available for lease, together with the session
# details of the current lease and its expiry time (presumably the
# FreeRADIUS sqlippool schema -- TODO confirm against the module docs).
#
CREATE TABLE radippool (
  id int(11) unsigned NOT NULL auto_increment, -- surrogate key
  pool_name varchar(30) NOT NULL, -- which named pool this address belongs to
  framedipaddress varchar(15) NOT NULL default '', -- the leasable IPv4 address
  nasipaddress varchar(15) NOT NULL default '', -- NAS currently holding the lease
  calledstationid VARCHAR(30) NOT NULL,
  callingstationid VARCHAR(30) NOT NULL,
  expiry_time DATETIME NULL default NULL, -- NULL while the address is free
  username varchar(64) NOT NULL default '',
  pool_key varchar(30) NOT NULL, -- session identifier tying the lease to a user session
  PRIMARY KEY (id)
) ENGINE=InnoDB;
|
module Dependent.HListSpec where
import Basic.TypeOf (typeOf)
import Dependent.HList (HList (..))
import Test.Hspec (Spec, describe, it, shouldBe)
-- | Top-level spec aggregating all HList test groups.
spec_HList :: Spec
spec_HList = describe "HListSpec" $ do
  construction

-- | Checks that the type-level index list of an 'HList' renders via 'typeOf'
-- in the expected "Int : Bool : Char : []" form.
construction :: Spec
construction = describe "Construction" $ do
  it "typeOf @(HList (Int : Bool : Char : [])) shouldBe Int : Bool : Char : []"
    $ typeOf @(HList (Int ': Bool ': Char ': '[])) `shouldBe` "Int : Bool : Char : []"
|
/*
* Copyright (C) 2015 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.android.messaging.ui;
import android.graphics.drawable.ColorDrawable;
import android.os.Bundle;
import androidx.appcompat.app.ActionBar;
import androidx.appcompat.app.AppCompatActivity;
import android.view.ActionMode;
import android.view.Menu;
import android.view.MenuInflater;
import android.view.MenuItem;
import android.view.View;
import com.android.messaging.R;
import com.android.messaging.util.BugleActivityUtil;
import com.android.messaging.util.ImeUtil;
import com.android.messaging.util.LogUtil;
import com.android.messaging.util.UiUtils;
import java.util.HashSet;
import java.util.Set;
/**
 * Base class for app activities that use an action bar. Responsible for logging/telemetry/other
 * needs that will be common for all activities. We can break out the common code if/when we need
 * a version that doesn't use an actionbar.
 *
 * <p>Also acts as the {@link ImeUtil.ImeStateHost}, inferring soft-keyboard visibility from
 * display-height changes and broadcasting it to registered observers, and hosts a lightweight
 * {@link CustomActionMode} that reuses the main action bar instead of overlaying it.</p>
 */
public class BugleActionBarActivity extends AppCompatActivity implements ImeUtil.ImeStateHost {
    // Tracks the list of observers opting in for IME state change.
    private final Set<ImeUtil.ImeStateObserver> mImeStateObservers = new HashSet<>();

    // Tracks the soft keyboard display state
    private boolean mImeOpen;

    // The ActionMode that represents the modal contextual action bar, using our own implementation
    // rather than the built in contextual action bar to reduce jank
    private CustomActionMode mActionMode;

    // The menu for the action bar
    private Menu mActionBarMenu;

    // Used to determine if a onDisplayHeightChanged was due to the IME opening or rotation of the
    // device
    private int mLastScreenHeight;

    @Override
    protected void onCreate(final Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);

        if (UiUtils.redirectToPermissionCheckIfNeeded(this)) {
            return;
        }

        mLastScreenHeight = getResources().getDisplayMetrics().heightPixels;
        if (LogUtil.isLoggable(LogUtil.BUGLE_TAG, LogUtil.VERBOSE)) {
            LogUtil.v(LogUtil.BUGLE_TAG, this.getLocalClassName() + ".onCreate");
        }
    }

    @Override
    protected void onStart() {
        super.onStart();
        if (LogUtil.isLoggable(LogUtil.BUGLE_TAG, LogUtil.VERBOSE)) {
            LogUtil.v(LogUtil.BUGLE_TAG, this.getLocalClassName() + ".onStart");
        }
    }

    @Override
    protected void onRestart() {
        // BUG FIX: this previously called super.onStop(), which re-ran the stop logic during a
        // restart and skipped the framework's onRestart handling. Delegate to the correct
        // lifecycle callback.
        super.onRestart();
        if (LogUtil.isLoggable(LogUtil.BUGLE_TAG, LogUtil.VERBOSE)) {
            LogUtil.v(LogUtil.BUGLE_TAG, this.getLocalClassName() + ".onRestart");
        }
    }

    @Override
    protected void onResume() {
        super.onResume();
        if (LogUtil.isLoggable(LogUtil.BUGLE_TAG, LogUtil.VERBOSE)) {
            LogUtil.v(LogUtil.BUGLE_TAG, this.getLocalClassName() + ".onResume");
        }
        BugleActivityUtil.onActivityResume(this, BugleActionBarActivity.this);
    }

    @Override
    protected void onPause() {
        super.onPause();
        if (LogUtil.isLoggable(LogUtil.BUGLE_TAG, LogUtil.VERBOSE)) {
            LogUtil.v(LogUtil.BUGLE_TAG, this.getLocalClassName() + ".onPause");
        }
    }

    @Override
    protected void onStop() {
        super.onStop();
        if (LogUtil.isLoggable(LogUtil.BUGLE_TAG, LogUtil.VERBOSE)) {
            LogUtil.v(LogUtil.BUGLE_TAG, this.getLocalClassName() + ".onStop");
        }
    }

    private boolean mDestroyed;

    @Override
    protected void onDestroy() {
        super.onDestroy();
        mDestroyed = true;
    }

    public boolean getIsDestroyed() {
        return mDestroyed;
    }

    @Override
    public void onDisplayHeightChanged(final int heightSpecification) {
        int screenHeight = getResources().getDisplayMetrics().heightPixels;

        if (screenHeight != mLastScreenHeight) {
            // Appears to be an orientation change, don't fire ime updates
            mLastScreenHeight = screenHeight;
            LogUtil.v(LogUtil.BUGLE_TAG, this.getLocalClassName() + ".onDisplayHeightChanged " +
                    " screenHeight: " + screenHeight + " lastScreenHeight: " + mLastScreenHeight +
                    " Skipped, appears to be orientation change.");
            return;
        }
        final ActionBar actionBar = getSupportActionBar();
        if (actionBar != null && actionBar.isShowing()) {
            screenHeight -= actionBar.getHeight();
        }
        final int height = View.MeasureSpec.getSize(heightSpecification);

        final boolean imeWasOpen = mImeOpen;
        // Heuristic: treat a >100px gap between screen and content height as the IME being open.
        mImeOpen = screenHeight - height > 100;
        if (LogUtil.isLoggable(LogUtil.BUGLE_TAG, LogUtil.VERBOSE)) {
            LogUtil.v(LogUtil.BUGLE_TAG, this.getLocalClassName() + ".onDisplayHeightChanged " +
                    "imeWasOpen: " + imeWasOpen + " mImeOpen: " + mImeOpen + " screenHeight: " +
                    screenHeight + " height: " + height);
        }

        if (imeWasOpen != mImeOpen) {
            for (final ImeUtil.ImeStateObserver observer : mImeStateObservers) {
                observer.onImeStateChanged(mImeOpen);
            }
        }
    }

    @Override
    public void registerImeStateObserver(final ImeUtil.ImeStateObserver observer) {
        mImeStateObservers.add(observer);
    }

    @Override
    public void unregisterImeStateObserver(final ImeUtil.ImeStateObserver observer) {
        mImeStateObservers.remove(observer);
    }

    @Override
    public boolean isImeOpen() {
        return mImeOpen;
    }

    @Override
    public boolean onCreateOptionsMenu(final Menu menu) {
        mActionBarMenu = menu;
        // While an action mode is active it owns the menu contents.
        if (mActionMode != null &&
                mActionMode.getCallback().onCreateActionMode(mActionMode, menu)) {
            return true;
        }
        return false;
    }

    @Override
    public boolean onPrepareOptionsMenu(final Menu menu) {
        mActionBarMenu = menu;
        if (mActionMode != null &&
                mActionMode.getCallback().onPrepareActionMode(mActionMode, menu)) {
            return true;
        }
        return super.onPrepareOptionsMenu(menu);
    }

    @Override
    public boolean onOptionsItemSelected(final MenuItem menuItem) {
        // Give an active action mode first chance at the click.
        if (mActionMode != null &&
                mActionMode.getCallback().onActionItemClicked(mActionMode, menuItem)) {
            return true;
        }
        switch (menuItem.getItemId()) {
            case android.R.id.home:
                // Up/home dismisses the action mode when one is showing.
                if (mActionMode != null) {
                    dismissActionMode();
                    return true;
                }
        }
        return super.onOptionsItemSelected(menuItem);
    }

    @Override
    public ActionMode startActionMode(final ActionMode.Callback callback) {
        mActionMode = new CustomActionMode(callback);
        supportInvalidateOptionsMenu();
        invalidateActionBar();
        return mActionMode;
    }

    public void dismissActionMode() {
        if (mActionMode != null) {
            mActionMode.finish();
            mActionMode = null;
            invalidateActionBar();
        }
    }

    public ActionMode getActionMode() {
        return mActionMode;
    }

    protected ActionMode.Callback getActionModeCallback() {
        if (mActionMode == null) {
            return null;
        }

        return mActionMode.getCallback();
    }

    /**
     * Receives and handles action bar invalidation request from sub-components of this activity.
     *
     * <p>Normally actions have sole control over the action bar, but in order to support seamless
     * transitions for components such as the full screen media picker, we have to let it take over
     * the action bar and then restore its state afterwards</p>
     *
     * <p>If a fragment does anything that may change the action bar, it should call this method
     * and then it is this method's responsibility to figure out which component "controls" the
     * action bar and delegate the updating of the action bar to that component</p>
     */
    public final void invalidateActionBar() {
        if (mActionMode != null) {
            mActionMode.updateActionBar(getSupportActionBar());
        } else {
            updateActionBar(getSupportActionBar());
        }
    }

    protected void updateActionBar(final ActionBar actionBar) {
        actionBar.setHomeAsUpIndicator(null);
    }

    /**
     * Custom ActionMode implementation which allows us to just replace the contents of the main
     * action bar rather than overlay over it
     */
    private class CustomActionMode extends ActionMode {
        private CharSequence mTitle;
        private CharSequence mSubtitle;
        private View mCustomView;
        private final Callback mCallback;

        public CustomActionMode(final Callback callback) {
            mCallback = callback;
        }

        @Override
        public void setTitle(final CharSequence title) {
            mTitle = title;
        }

        @Override
        public void setTitle(final int resId) {
            mTitle = getResources().getString(resId);
        }

        @Override
        public void setSubtitle(final CharSequence subtitle) {
            mSubtitle = subtitle;
        }

        @Override
        public void setSubtitle(final int resId) {
            mSubtitle = getResources().getString(resId);
        }

        @Override
        public void setCustomView(final View view) {
            mCustomView = view;
        }

        @Override
        public void invalidate() {
            invalidateActionBar();
        }

        @Override
        public void finish() {
            mActionMode = null;
            mCallback.onDestroyActionMode(this);
            supportInvalidateOptionsMenu();
            invalidateActionBar();
        }

        @Override
        public Menu getMenu() {
            return mActionBarMenu;
        }

        @Override
        public CharSequence getTitle() {
            return mTitle;
        }

        @Override
        public CharSequence getSubtitle() {
            return mSubtitle;
        }

        @Override
        public View getCustomView() {
            return mCustomView;
        }

        @Override
        public MenuInflater getMenuInflater() {
            return BugleActionBarActivity.this.getMenuInflater();
        }

        public Callback getCallback() {
            return mCallback;
        }

        public void updateActionBar(final ActionBar actionBar) {
            actionBar.setDisplayOptions(ActionBar.DISPLAY_HOME_AS_UP);
            actionBar.setDisplayShowTitleEnabled(false);
            actionBar.setDisplayShowCustomEnabled(false);
            mActionMode.getCallback().onPrepareActionMode(mActionMode, mActionBarMenu);
            actionBar.setBackgroundDrawable(new ColorDrawable(
                    getResources().getColor(R.color.contextual_action_bar_background_color)));
            actionBar.setHomeAsUpIndicator(R.drawable.ic_cancel_small_dark);
            actionBar.show();
        }
    }
}
|
require "rails_admin_password_edit/engine"
# Namespace module for the gem; the engine require above and the RailsAdmin
# action registered below attach the actual behaviour.
module RailsAdminPasswordEdit
end
require 'rails_admin/config/actions'
module RailsAdmin
  module Config
    module Actions
      # Custom RailsAdmin member action that lets an administrator change the
      # password of the user attached to a Profile record.
      class PasswordEdit < Base
        RailsAdmin::Config::Actions.register(self)

        # Member action: shown on individual records, not on collections.
        register_instance_option :member? do
          true
        end

        register_instance_option :link_icon do
          'icon-user'
        end

        # Restrict the action to Profile records.
        register_instance_option :only do
          [Profile]
        end

        register_instance_option :controller do
          proc do
            if request.put?
              # PUT: apply the submitted password to the profile's user.
              @edited_object = @object.user
              @edited_object.password = params[:password]
              @edited_object.password_confirmation = params[:password_confirmation]
              if @edited_object.save
                # re-sign-in so the session survives the credential change
                sign_in @edited_object, :bypass => true
                notice = t('admin.flash.successful', name: nil, action: t("admin.actions.password_edit.done"))
                redirect_to dashboard_path, flash: {success: notice}
              else
                # validation failed: re-render the form with errors
                render action: @action.template_name
              end
            else
              # GET: set up the User model config so the template can render
              # the password form for the associated user.
              @edited_abstract_model = RailsAdmin.config('User').abstract_model
              @edited_model_config = @edited_abstract_model.try(:config)
              @edited_object = @object.user
              render action: @action.template_name
            end
          end
        end

        register_instance_option :http_methods do
          [:get, :put]
        end
      end
    end
  end
end
|
import { NestedPageMetadata, PageMetadata } from '@docfy/core/lib/types';
import flatNested from './flat-nested';
/**
 * Finds the page before or after the page with the given URL in the flattened
 * page tree. Returns undefined when the URL is unknown or the neighbour falls
 * off either end of the list.
 */
export default function findPreviousOrNextPage(
  url: string,
  isPrevious: boolean,
  nested: NestedPageMetadata
): PageMetadata | undefined {
  const pages = flatNested(nested);
  // match the URL with or without a trailing slash
  const currentIndex = pages.findIndex(
    (page) => page.url === url || page.url === `${url}/`
  );

  if (currentIndex === -1) {
    return undefined;
  }

  const offset = isPrevious ? -1 : 1;
  return pages[currentIndex + offset];
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.