#|
This file is a part of Maiden
(c) 2017 Shirakumo http://tymoon.eu ([email protected])
Author: Nicolas Hafner <[email protected]>
|#
(in-package #:maiden-user)
(defpackage #:lichat-cmd
(:nicknames #:org.shirakumo.maiden.clients.lichat.cmd)
(:use)
(:export
#:update
#:ping
#:pong
#:connect
#:disconnect
#:register
#:join
#:leave
#:create
#:kick
#:pull
#:permissions
#:message
#:users
#:channels
#:user-info
#:failure
#:malformed-update
#:connection-unstable
#:too-many-connections
#:update-failure
#:update-id
#:invalid-update
#:username-mismatch
#:incompatible-version
#:invalid-password
#:no-such-profile
#:username-taken
#:no-such-channel
#:already-in-channel
#:not-in-channel
#:channelname-taken
#:bad-name
#:insufficient-permissions
#:invalid-permissions
#:no-such-user
#:too-many-updates))
(defpackage #:lichat-rpl
(:nicknames #:org.shirakumo.maiden.clients.lichat.rpl)
(:use)
(:export
#:update
#:ping
#:pong
#:connect
#:disconnect
#:register
#:join
#:leave
#:create
#:kick
#:pull
#:permissions
#:message
#:users
#:channels
#:user-info
#:failure
#:malformed-update
#:connection-unstable
#:too-many-connections
#:update-failure
#:update-id
#:invalid-update
#:username-mismatch
#:incompatible-version
#:invalid-password
#:no-such-profile
#:username-taken
#:no-such-channel
#:already-in-channel
#:not-in-channel
#:channelname-taken
#:bad-name
#:insufficient-permissions
#:invalid-permissions
#:no-such-user
#:too-many-updates))
(defpackage #:maiden-lichat
(:nicknames #:org.shirakumo.maiden.clients.lichat)
(:use #:cl #:maiden #:maiden-networking #:maiden-client-entities)
(:export
#:lichat-user
#:lichat-channel
#:lichat-client
#:username
#:password
#:bridge))
|
package com.mhuman.movieplot.data.local.setting
import com.mhuman.movieplot.data.local.SettingDataSource
import com.mhuman.movieplot.network.model.SettingInfoResult
import io.reactivex.disposables.Disposable
class SettingInfoRepository(
private val dataSource: SettingDataSource
) : SettingDataSource {
override fun saveSettingInfo(
settingInfo: List<SettingInfoResult>,
success: () -> Unit,
failed: () -> Unit
): Disposable = dataSource.saveSettingInfo(settingInfo, success, failed)
override fun getSettingInfoList(
success: (List<SettingInfoResult>) -> Unit,
failed: () -> Unit
): Disposable = dataSource.getSettingInfoList(success, failed)
} |
class TutorsController < ApplicationController
before_action :find_tutor, only: [:show, :edit, :update]
def search
if params[:tutor]
methods = params[:tutor][:methods]
@tutors = Tutor.send_chain(methods)
else
@tutors = Tutor.all
end
end
def index
  if params[:tutor]
    methods = params[:tutor][:methods]
    @tutors = Tutor.send_chain(methods)
  else
    @tutors = Tutor.all
  end
  respond_to do |f|
    f.html { render :index }
    f.json { render json: @tutors }
  end
end
def new
@tutor = Tutor.new
end
def create
  @tutor = Tutor.new(tutor_params)
  if @tutor.save
    render json: @tutor
  else
    flash[:error] = "Please correct errors"
    @tutors = Tutor.all
    render :index
  end
end
def show
@tutor = Tutor.find(params[:id])
Tutor.subject(params[:name])
respond_to do |f|
f.html {render :show}
f.json {render json: @tutor}
end
end
def edit
end
def update
  if @tutor.update(tutor_params)
    redirect_to tutor_path(@tutor)
  else
    render :edit
  end
end
def destroy
@tutor = Tutor.find(params[:id])
@tutor.destroy
redirect_to tutors_path
end
private
def find_tutor
@tutor = Tutor.find_by(id: params[:id])
end
def tutor_params
params.require(:tutor).permit(:name, :email, :subject, :bio, :gender)
end
end
|
<?php
// Restrict the template name to a bare file name to avoid path traversal.
$name = basename($_GET['name'] ?? '');
$file = 'templates/' . $name;
$contents = ($name !== '' && is_file($file)) ? file_get_contents($file) : false;
$xml = $contents !== false ? simplexml_load_string($contents) : false;
if ($xml === false) {
    http_response_code(404);
    exit;
}
$resp = (string)$xml->ResponseType;
$attr = array();
foreach ($xml->QueryParams->Param as $param) {
    $attr[(string)$param->attributes()] = (string)$param;
}
echo json_encode(array('resp' => $resp, 'attr' => $attr));
|
package snakesladders.consumers
import scala.concurrent.duration._
import scala.language.postfixOps
import akka.actor.ActorSystem
import akka.testkit.{ImplicitSender, TestKit}
import org.scalatest.{BeforeAndAfterAll, Matchers, WordSpecLike}
import snakesladders.domain.events.{CurrentPlayerChanged, GameEvent, PlayerMustRollAgain}
import snakesladders.domain.models.Players.Player
import snakesladders.domain.services.GameService.RollDiceForPlayer
class ComputerPlayerSpecs
extends TestKit(ActorSystem("ComputerPlayerSpecs"))
with ImplicitSender
with WordSpecLike
with Matchers
with BeforeAndAfterAll {
override def afterAll(): Unit = {
TestKit.shutdownActorSystem(system)
}
val humanPlayer = Player("p1")
val computerPlayer = Player("p2", computer = true)
val computer = system.actorOf(ComputerPlayer.props(testActor))
system.eventStream.subscribe(computer, classOf[GameEvent])
"ComputerPlayer" when {
"received the CurrentPlayerChanged event for computer player" should {
"send RollDiceForPlayer to the game service" in {
system.eventStream.publish(CurrentPlayerChanged(computerPlayer))
expectMsg(RollDiceForPlayer(computerPlayer))
}
}
"received the CurrentPlayerChanged event for human player" should {
"not send RollDiceForPlayer to the game service" in {
val computer = system.actorOf(ComputerPlayer.props(testActor))
system.eventStream.publish(CurrentPlayerChanged(humanPlayer))
expectNoMsg(1 second)
}
}
"received the PlayerMustRollAgain event for computer player" should {
"send RollDiceForPlayer to the game service" in {
val computer = system.actorOf(ComputerPlayer.props(testActor))
system.eventStream.publish(PlayerMustRollAgain(computerPlayer))
expectMsg(RollDiceForPlayer(computerPlayer))
}
}
"received the PlayerMustRollAgain event for human player" should {
"not send RollDiceForPlayer to the game service" in {
val computer = system.actorOf(ComputerPlayer.props(testActor))
system.eventStream.publish(PlayerMustRollAgain(humanPlayer))
expectNoMsg(1 second)
}
}
}
}
|
# Voids
Voids was created to match the existing PHP datatypes. It is 100% worthless; don't use it. |
package conf
import (
"time"
)
type Looker interface {
Lookup(name string) Flag
}
type Mapper interface {
Looker
Parse()
GetUint16(name string) (uint16, bool)
GetDuration(name string) (time.Duration, bool)
}
type Flag interface {
Changed() bool
}
type mapper func(name string) Flag
type Uint16 interface {
GetUint16() (uint16, bool)
}
func (m mapper) GetUint16(name string) (uint16, bool) {
if f := m(name); f != nil {
if v, ok := f.(Uint16); ok {
return v.GetUint16()
}
}
return 0, false
}
type Duration interface {
GetDuration() (time.Duration, bool)
}
func (m mapper) GetDuration(name string) (time.Duration, bool) {
if f := m(name); f != nil {
if v, ok := f.(Duration); ok {
return v.GetDuration()
}
}
return 0, false
}
|
using System;
using System.Collections.Generic;
namespace IoTConnect.Model
{
/// <summary>
/// Device result.
/// </summary>
public class SingleResult
{
/// <summary>
/// Device guid.
/// </summary>
public string Guid { get; set; }
/// <summary>
/// Device Unique Id.
/// </summary>
public string UniqueId { get; set; }
/// <summary>
/// Display name.
/// </summary>
public string DisplayName { get; set; }
/// <summary>
/// Is Device Connected?
/// </summary>
public bool IsConnected { get; set; }
/// <summary>
/// Device Last Activity Date.
/// </summary>
public DateTime? LastActivityDate { get; set; }
/// <summary>
/// Device Status.
/// </summary>
public bool IsActive { get; set; }
/// <summary>
/// firmware upgrade guid.
/// </summary>
public string FirmwareUpgradeGuid { get; set; }
/// <summary>
/// Device template guid.
/// </summary>
public string DeviceTemplateGuid { get; set; }
/// <summary>
/// Device entity guid.
/// </summary>
public string EntityGuid { get; set; }
/// <summary>
/// Certificate guid.
/// </summary>
public string CertificateGuid { get; set; }
/// <summary>
/// Certificate Name.
/// </summary>
public string CertificateName { get; set; }
/// <summary>
/// Certificate Type.
/// </summary>
public string CertificateType { get; set; }
/// <summary>
/// Device Template Auth Type.
/// </summary>
public int DeviceTemplateAuthType { get; set; }
/// <summary>
/// Entity Name.
/// </summary>
public string EntityName { get; set; }
/// <summary>
/// Device Template Name.
/// </summary>
public string DeviceTemplateName { get; set; }
/// <summary>
/// Note.
/// </summary>
public string Note { get; set; }
/// <summary>
/// Device Image.
/// </summary>
public string Image { get; set; }
/// <summary>
/// Company Id.
/// </summary>
public string CpId { get; set; }
/// <summary>
/// Is Device Acquired?
/// </summary>
public int IsAcquired { get; set; }
/// <summary>
/// Firmware Avail.
/// </summary>
public int FirmwareAvail { get; set; }
/// <summary>
/// Parent Device guid.
/// </summary>
public string ParentDeviceGuid { get; set; }
/// <summary>
/// Tag.
/// </summary>
public string Tag { get; set; }
/// <summary>
/// Endorsement Key.
/// </summary>
public string EndorsementKey { get; set; }
/// <summary>
/// Parent Device Unique Id.
/// </summary>
public string ParentDeviceUniqueId { get; set; }
/// <summary>
/// Is Parent Acquired?
/// </summary>
public int IsParentAcquired { get; set; }
/// <summary>
/// Is Device Simulator On?
/// </summary>
public int IsSimulatorOn { get; set; }
/// <summary>
/// Type 2 support available?
/// </summary>
public bool IsType2Support { get; set; }
/// <summary>
/// Edge Support available?
/// </summary>
public bool IsEdgeSupport { get; set; }
/// <summary>
/// Is allotted Device?
/// </summary>
public int IsAllottedDevice { get; set; }
/// <summary>
/// Allotted Device Permission.
/// </summary>
public AllottedDevicePermission AllottedDevicePermission { get; set; }
/// <summary>
/// Image URL.
/// </summary>
public string ImageUrl { get; set; }
/// <summary>
/// Device Properties.
/// </summary>
public List<object> Properties { get; set; }
/// <summary>
/// Device Broker guid.
/// </summary>
public string BrokerGuid { get; set; }
}
public class AllottedDevicePermission
{
public bool? View { get; set; }
public bool? Operation { get; set; }
}
}
|
//! CAN and FD-CAN support.
//!
//! The module implements CAN and CAN-FD support using the [fdcan] crate.
//!
//! # Message RAM
//!
//! The Message RAM allocation is fixed as follows
//!
//! | Section | Size
//! | --- | ---
//! | Standard 11-bit filters | 28 elements
//! | Extended 29-bit filters | 8 elements
//! | Rx FIFO 0 | 3 elements
//! | Rx FIFO 1 | 3 elements
//! | Tx Buffers | 3 elements
//!
//! # Usage
//!
//! In `Cargo.toml`
//! ```toml
//! fdcan = "^0.1"
//! ```
//!
//! Initialisation
//! ```
//! use stm32h7xx_hal::prelude::*;
//! use fdcan::{
//! config::NominalBitTiming,
//! filter::{StandardFilter, StandardFilterSlot},
//! id::StandardId,
//! FdCan,
//! };
//!
//! let mut can: FdCan<_, fdcan::ConfigMode> = dp.FDCAN1.fdcan(tx, rx, fdcan_prec);
//! ```
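//!
//! A possible next step (following the [fdcan] crate's configuration API) is to
//! set the nominal bit timing and switch to normal operation. This is only a
//! sketch: the timing values below are placeholders and must be derived from
//! the FDCAN kernel clock of your particular clock configuration.
//! ```
//! use core::num::{NonZeroU16, NonZeroU8};
//! use fdcan::config::NominalBitTiming;
//!
//! // Placeholder bit timing; compute real values from the FDCAN kernel clock.
//! can.set_nominal_bit_timing(NominalBitTiming {
//!     prescaler: NonZeroU16::new(4).unwrap(),
//!     seg1: NonZeroU8::new(13).unwrap(),
//!     seg2: NonZeroU8::new(2).unwrap(),
//!     sync_jump_width: NonZeroU8::new(1).unwrap(),
//! });
//! let mut can = can.into_normal();
//! ```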
//!
//! [fdcan]: https://docs.rs/fdcan
use crate::gpio::gpioa::{PA11, PA12};
use crate::gpio::gpiob::{PB12, PB13, PB5, PB6, PB8, PB9};
use crate::gpio::gpiod::{PD0, PD1};
use crate::gpio::gpioh::{PH13, PH14};
use crate::gpio::Alternate;
use crate::rcc::{rec, rec::ResetEnable};
/// Storage type for the CAN controller
#[derive(Debug)]
pub struct Can<FDCAN> {
rb: FDCAN,
}
impl<FDCAN> Can<FDCAN> {
/// Returns a reference to the inner peripheral
fn inner(&self) -> &FDCAN {
&self.rb
}
}
/// Extension trait for CAN controller
pub trait CanExt: Sized
where
Can<Self>: fdcan::Instance,
{
fn fdcan<TX, RX>(
self,
_tx: TX,
_rx: RX,
prec: rec::Fdcan,
) -> fdcan::FdCan<Can<Self>, fdcan::ConfigMode>
where
TX: sealed::Tx<Self>,
RX: sealed::Rx<Self>,
{
self.fdcan_unchecked(prec)
}
fn fdcan_unchecked(
self,
prec: rec::Fdcan,
) -> fdcan::FdCan<Can<Self>, fdcan::ConfigMode>;
}
/// Configure Message RAM layout on H7 to match the fixed size used on G4
///
/// These are protected bits, write access is only possible when bit CCE and bit
/// INIT for FDCAN_CCCR are set to 1
macro_rules! message_ram_layout {
($can:ident, $start_word_addr:expr) => {
use fdcan::message_ram::*;
let mut word_adr: u16 = $start_word_addr;
// 11-bit filter
$can.sidfc
.modify(|_, w| unsafe { w.flssa().bits(word_adr) });
word_adr += STANDARD_FILTER_MAX as u16;
// 29-bit filter
$can.xidfc
.modify(|_, w| unsafe { w.flesa().bits(word_adr) });
word_adr += 2 * EXTENDED_FILTER_MAX as u16;
// Rx FIFO 0
$can.rxf0c.modify(|_, w| unsafe {
w.f0sa()
.bits(word_adr)
.f0s()
.bits(RX_FIFO_MAX)
.f0wm()
.bits(RX_FIFO_MAX)
});
word_adr += 18 * RX_FIFO_MAX as u16;
// Rx FIFO 1
$can.rxf1c.modify(|_, w| unsafe {
w.f1sa()
.bits(word_adr)
.f1s()
.bits(RX_FIFO_MAX)
.f1wm()
.bits(RX_FIFO_MAX)
});
word_adr += 18 * RX_FIFO_MAX as u16;
// Rx buffer - see below
// Tx event FIFO
$can.txefc.modify(|_, w| unsafe {
w.efsa()
.bits(word_adr)
.efs()
.bits(TX_EVENT_MAX)
.efwm()
.bits(TX_EVENT_MAX)
});
word_adr += 2 * TX_EVENT_MAX as u16;
// Tx buffers
$can.txbc.modify(|_, w| unsafe {
w.tbsa().bits(word_adr).tfqs().bits(TX_FIFO_MAX)
});
word_adr += 18 * TX_FIFO_MAX as u16;
// Rx Buffer - not used
$can.rxbc.modify(|_, w| unsafe { w.rbsa().bits(word_adr) });
// TX event FIFO?
// Trigger memory?
// Set the element sizes to 16 bytes
$can.rxesc.modify(|_, w| unsafe {
w.rbds().bits(0b111).f1ds().bits(0b111).f0ds().bits(0b111)
});
$can.txesc.modify(|_, w| unsafe { w.tbds().bits(0b111) });
};
}
mod sealed {
/// A TX pin configured for CAN communication
pub trait Tx<FDCAN> {}
/// An RX pin configured for CAN communication
pub trait Rx<FDCAN> {}
}
/// Implements sealed::{Tx,Rx} for pins associated with a CAN peripheral
macro_rules! pins {
($PER:ident =>
(TX: [ $($( #[ $pmetatx:meta ] )* $tx:ty),+ $(,)? ],
RX: [ $($( #[ $pmetarx:meta ] )* $rx:ty),+ $(,)? ])) => {
$(
$( #[ $pmetatx ] )*
impl sealed::Tx<crate::stm32::$PER> for $tx {}
)+
$(
$( #[ $pmetarx ] )*
impl sealed::Rx<crate::stm32::$PER> for $rx {}
)+
};
}
pins! {
FDCAN1 => (
TX: [
PA12<Alternate<9>>,
PB9<Alternate<9>>,
PD1<Alternate<9>>,
PH13<Alternate<9>>
],
RX: [
PA11<Alternate<9>>,
PB8<Alternate<9>>,
PD0<Alternate<9>>,
PH14<Alternate<9>>
]
)
}
pins! {
FDCAN2 => (
TX: [
PB6<Alternate<9>>,
PB13<Alternate<9>>
],
RX: [
PB5<Alternate<9>>,
PB12<Alternate<9>>
]
)
}
mod fdcan1 {
use super::{rec, Can, CanExt, ResetEnable};
use crate::stm32::FDCAN1;
impl Can<FDCAN1> {
pub fn fdcan1(
rb: FDCAN1,
prec: rec::Fdcan,
) -> fdcan::FdCan<Self, fdcan::ConfigMode> {
prec.enable(); // Enable APB1 peripheral clock
// Initialisation and RAM layout configuration
let mut fdcan = fdcan::FdCan::new(Self { rb }).into_config_mode();
let can = fdcan.instance().inner();
message_ram_layout!(can, 0x000);
fdcan
}
}
impl CanExt for FDCAN1 {
fn fdcan_unchecked(
self,
prec: rec::Fdcan,
) -> fdcan::FdCan<Can<Self>, fdcan::ConfigMode> {
Can::fdcan1(self, prec)
}
}
unsafe impl fdcan::Instance for Can<FDCAN1> {
const REGISTERS: *mut fdcan::RegisterBlock = FDCAN1::ptr() as *mut _;
}
unsafe impl fdcan::message_ram::Instance for Can<FDCAN1> {
const MSG_RAM: *mut fdcan::message_ram::RegisterBlock =
(0x4000_ac00 as *mut _);
}
}
mod fdcan2 {
use super::{rec, Can, CanExt, ResetEnable};
use crate::stm32::FDCAN2;
impl Can<FDCAN2> {
pub fn fdcan2(
rb: FDCAN2,
prec: rec::Fdcan,
) -> fdcan::FdCan<Self, fdcan::ConfigMode> {
prec.enable(); // Enable APB1 peripheral clock
// Initialisation and RAM layout configuration
let mut fdcan = fdcan::FdCan::new(Self { rb }).into_config_mode();
let can = fdcan.instance().inner();
message_ram_layout!(can, 0x400); // + 1k words = 4kB
fdcan
}
}
impl CanExt for FDCAN2 {
fn fdcan_unchecked(
self,
prec: rec::Fdcan,
) -> fdcan::FdCan<Can<Self>, fdcan::ConfigMode> {
Can::fdcan2(self, prec)
}
}
unsafe impl fdcan::Instance for Can<FDCAN2> {
const REGISTERS: *mut fdcan::RegisterBlock = FDCAN2::ptr() as *mut _;
}
unsafe impl fdcan::message_ram::Instance for Can<FDCAN2> {
const MSG_RAM: *mut fdcan::message_ram::RegisterBlock =
((0x4000_ac00 + 0x1000) as *mut _); // FDCAN1 + 4kB
}
}
|
use ethabi;
use futures::{Async, Future, Poll};
use serde;
use std::mem;
use crate::contract;
use crate::contract::tokens::Detokenize;
use crate::helpers;
use crate::rpc;
use crate::types::Bytes;
use crate::Error as ApiError;
#[derive(Debug)]
enum ResultType<T, F> {
Decodable(helpers::CallFuture<Bytes, F>, ethabi::Function),
Simple(helpers::CallFuture<T, F>),
Constant(Result<T, contract::Error>),
Done,
}
/// A standard function (RPC) call result.
/// Takes any type which is deserializable from JSON,
/// a function definition and a future which yields that type.
#[derive(Debug)]
pub struct CallFuture<T, F> {
inner: ResultType<T, F>,
}
impl<T, F> From<crate::helpers::CallFuture<T, F>> for CallFuture<T, F> {
fn from(inner: crate::helpers::CallFuture<T, F>) -> Self {
CallFuture {
inner: ResultType::Simple(inner),
}
}
}
impl<T, F, E> From<E> for CallFuture<T, F>
where
E: Into<contract::Error>,
{
fn from(e: E) -> Self {
CallFuture {
inner: ResultType::Constant(Err(e.into())),
}
}
}
/// Function-specific bytes-decoder future.
/// Takes any type which is deserializable from `Vec<ethabi::Token>`,
/// a function definition and a future which yields that type.
#[derive(Debug)]
pub struct QueryResult<T, F> {
inner: ResultType<T, F>,
}
impl<T, F, E> From<E> for QueryResult<T, F>
where
E: Into<contract::Error>,
{
fn from(e: E) -> Self {
QueryResult {
inner: ResultType::Constant(Err(e.into())),
}
}
}
impl<T, F> QueryResult<T, F> {
/// Create a new `QueryResult` wrapping the inner future.
pub fn new(inner: helpers::CallFuture<Bytes, F>, function: ethabi::Function) -> Self {
QueryResult {
inner: ResultType::Decodable(inner, function),
}
}
}
impl<T: Detokenize, F> Future for QueryResult<T, F>
where
F: Future<Item = rpc::Value, Error = ApiError>,
{
type Item = T;
type Error = contract::Error;
fn poll(&mut self) -> Poll<Self::Item, Self::Error> {
if let ResultType::Decodable(ref mut inner, ref function) = self.inner {
let bytes: Bytes = try_ready!(inner.poll());
return Ok(Async::Ready(T::from_tokens(
function.decode_output(&bytes.0)?,
)?));
}
match mem::replace(&mut self.inner, ResultType::Done) {
ResultType::Constant(res) => res.map(Async::Ready),
_ => panic!("Unsupported state"),
}
}
}
impl<T: serde::de::DeserializeOwned, F> Future for CallFuture<T, F>
where
F: Future<Item = rpc::Value, Error = ApiError>,
{
type Item = T;
type Error = contract::Error;
fn poll(&mut self) -> Poll<Self::Item, Self::Error> {
if let ResultType::Simple(ref mut inner) = self.inner {
let hash: T = try_ready!(inner.poll());
return Ok(Async::Ready(hash));
}
match mem::replace(&mut self.inner, ResultType::Done) {
ResultType::Constant(res) => res.map(Async::Ready),
_ => panic!("Unsupported state"),
}
}
}
|
#ifndef _FACE_CENTERFACE_H_
#define _FACE_CENTERFACE_H_
#include <memory>
#include <string>
#include <vector>
#include "MNN/Interpreter.hpp"
#include "MNN/ImageProcess.hpp"
#include "MNN/MNNDefine.h"
#include "MNN/Tensor.hpp"
#include "opencv2/core.hpp"
#include "opencv2/imgproc.hpp"
struct FaceInfo {
cv::Rect face_;
float score_;
float keypoints_[10];
};
class Centerface {
public:
Centerface();
~Centerface();
int Init(const char* model_path);
int Detect(const cv::Mat& img_src, std::vector<FaceInfo>* faces);
private:
// Converts the BGR input to RGBA in a freshly allocated buffer.
// Note: the buffer is allocated with new[]; ownership passes to the caller.
uint8_t* GetImage(const cv::Mat& img_src) {
  uchar* data_ptr = new uchar[img_src.total() * 4];
  cv::Mat img_tmp(img_src.size(), CV_8UC4, data_ptr);
  cv::cvtColor(img_src, img_tmp, cv::COLOR_BGR2RGBA, 4);
  return (uint8_t*)img_tmp.data;
}
float InterRectArea(const cv::Rect& a, const cv::Rect& b) {
cv::Point left_top = cv::Point(MAX(a.x, b.x), MAX(a.y, b.y));
cv::Point right_bottom = cv::Point(MIN(a.br().x, b.br().x), MIN(a.br().y, b.br().y));
cv::Point diff = right_bottom - left_top;
return (MAX(diff.x + 1, 0) * MAX(diff.y + 1, 0));
}
int ComputeIOU(const cv::Rect& rect1,
const cv::Rect& rect2, float* iou,
const std::string& type = "UNION") {
float inter_area = InterRectArea(rect1, rect2);
if (type == "UNION") {
*iou = inter_area / (rect1.area() + rect2.area() - inter_area);
}
else {
*iou = inter_area / MIN(rect1.area(), rect2.area());
}
return 0;
}
int NMS(const std::vector<FaceInfo>& faces,
std::vector<FaceInfo>* result, const float& threshold,
const std::string& type = "UNION") {
result->clear();
if (faces.size() == 0)
return -1;
std::vector<size_t> idx(faces.size());
for (unsigned i = 0; i < idx.size(); i++) {
idx[i] = i;
}
while (idx.size() > 0) {
int good_idx = idx[0];
result->push_back(faces[good_idx]);
std::vector<size_t> tmp = idx;
idx.clear();
for (unsigned i = 1; i < tmp.size(); i++) {
int tmp_i = tmp[i];
float iou = 0.0f;
ComputeIOU(faces[good_idx].face_, faces[tmp_i].face_, &iou, type);
if (iou <= threshold)
idx.push_back(tmp_i);
}
}
  return 0;
}
private:
bool initialized_;
std::shared_ptr<MNN::CV::ImageProcess> pretreat_;
std::shared_ptr<MNN::Interpreter> centerface_interpreter_;
MNN::Session* centerface_sess_ = nullptr;
MNN::Tensor* input_tensor_ = nullptr;
const float meanVals_[3] = { 0.0f, 0.0f, 0.0f };
const float normVals_[3] = { 1.0f, 1.0f, 1.0f };
const float scoreThreshold_ = 0.5f;
const float nmsThreshold_ = 0.5f;
};
#endif // !_FACE_CENTERFACE_H_
|
default rel
section .data
extern _rax
extern _rcx
extern _rdx
extern _rbx
extern _rsp
extern _rbp
extern _rsi
extern _rdi
extern _r8
extern _r9
extern _r10
extern _r11
extern _r12
extern _r13
extern _r14
extern _r15
extern _rip
extern _reg_size8
extern __rax
extern __rcx
extern __rdx
extern __rbx
extern __rsp
extern __rbp
extern __rsi
extern __rdi
extern __r8
extern __r9
extern __r10
extern __r11
extern __r12
extern __r13
extern __r14
extern __r15
extern __rip
extern _tls1
section .text
global print
global _check_register
global _initialize_v_regs
global _test_on_real_cpu
global __clone
global __mmap
extern _exec
extern _hello_world
%include "constant.asm"
print:
push rbp
lea r10,[_reg_size8+0x0a]
mov r9,r8
call print1
mov r8,r9
sar r8,8
mov r9,r8
call print1
mov r8,r9
sar r8,8
mov r9,r8
call print1
mov r8,r9
sar r8,8
mov r9,r8
call print1
call _reg_save
call _write
call _reg_regain
pop rbp
ret
print1:
push rbp
sub r10,1
mov r9b,r8b
call print1.f1
mov byte [r10],r8b
mov r8b,r9b
sar r8b,4
call print1.f1
sub r10,1
mov byte [r10],r8b
;; mov byte [2+reg_size8],0x31
pop rbp
ret
;; and r8b,0xf0
.f1:
and r8b,0x0f
cmp r8b,0x0a
jae print1.more_than_0x0a
jmp print1.less_than_0x0a
.more_than_0x0a:
add r8b,0x57
ret
.less_than_0x0a:
add r8b,0x30
ret
__set_to_virtual_register:
mov [__rax],rax
mov [__rcx],rcx
mov [__rdx],rdx
mov [__rbx],rbx
mov [__rsp],rsp
mov [__rbp],rbp
mov [__rsi],rsi
mov [__r8 ],r8
mov [__r9 ],r9
mov [__r10],r10
mov [__r11],r11
mov [__r12],r12
mov [__r13],r13
mov [__r14],r14
mov [__r15],r15
ret
_set_to_virtual_register:
mov [_rax],rax
mov [_rcx],rcx
mov [_rdx],rdx
mov [_rbx],rbx
mov [_rsp],rsp
mov [_rbp],rbp
mov [_rsi],rsi
mov [_r8 ],r8
mov [_r9 ],r9
mov [_r10],r10
mov [_r11],r11
mov [_r12],r12
mov [_r13],r13
mov [_r14],r14
mov [_r15],r15
ret
;; mov [_rip],rip
_set_to_real_register:
mov rax,[_rax]
mov rcx,[_rcx]
mov rdx,[_rdx]
mov rbx,[_rbx]
;; mov rsp,[_rsp]
;; mov rbp,[_rbp]
mov rsi,[_rsi]
mov rdi,[_rdi]
mov r8, [_r8 ]
mov r9, [_r9 ]
mov r10,[_r10]
mov r11,[_r11]
mov r12,[_r12]
mov r13,[_r13]
mov r14,[_r14]
mov r15,[_r15]
ret
;;; set_to_real_register
;;; jump to instruction that you want to jump on
;;; set_to_virtual register after the operation
test_f1:
push rax
_test_on_real_cpu:
;; set the starting address of test subject
push rbp
mov [_rip],rdi
add rdi,rsi
;; insert return at the end of it
mov byte [rdi],0xc3
;; mov byte [rdi],0x48
;; inc rdi
;; mov byte [rdi],0xb8
;; inc rdi
;; lea rbx,[_test_on_real_cpu.done]
;; mov qword [rdi],rbx
;; add rdi,8
;; mov byte [rdi],0xff
;; inc rdi
;; mov byte [rdi],0xe0
;; inc rdi
;; set all of registers but rip
mov [_rsp],rsp
call _set_to_real_register
mov rsp,[_rsp]
call [_rip]
.done:
call __set_to_virtual_register
mov rsp,[_rsp]
pop rbp
ret
_test_on_v_cpu:
call _exec
call _set_to_real_register
;; call [test_sub_addr]
call _set_to_virtual_register
_initialize_v_regs:
mov dword [_rax],0
mov dword [_rcx],0
mov dword [_rdx],0
mov dword [_rbx],0
mov dword [_rsp],0
mov dword [_rbp],0
mov dword [_rsi],0
mov dword [_rdi],0
mov dword [_r8 ],0
mov dword [_r9 ],0
mov dword [_r10],0
mov dword [_r11],0
mov dword [_r12],0
mov dword [_r13],0
mov dword [_r14],0
mov dword [_r15],0
ret
_check_register:
mov rax,0x100014000
db 0x02
db 0x00
db 0x02
db 0x00
db 0x02
db 0x00
mov [_rbp],rbp
mov [_rax],rax
mov rax,rsp
push rsp
mov rbp,rsp
lea rsp,[_rbp]
;; push %rsp
push rax
;; push %rip
push qword [rbp + 8]
;; push eflags
pushf
push r15
push r14
push r13
push r12
push r11
push r10
push r9
push r8
push rdi
push rsi
push rbx
push rdx
push rcx
mov rsp,rbp
pop rsp
ret
_reg_save:
mov r12,rax
mov r13,rdi
mov r14,rsi
mov r15,rdx
ret
_reg_regain:
mov rax,r12
mov rdi,r13
mov rsi,r14
mov rdx,r15
ret
_write:
mov rax, SYS_write
%ifidn __OUTPUT_FORMAT__, macho64
add rax,0x2000000
%endif
mov rdi, STDOUT
lea rsi, [_reg_size8]
mov rdx, 0x0b
;; rcx,r8,r9 is another register
;; mov rdx, _reg_size8.len
syscall
ret
_exit:
mov rdi, 0
mov rax, SYS_exit
syscall
__clone:
;; mov rdi,_do1
;; push rdi
;; mov qword [rsi],0x10
;; lea rsi, [rsi + STACK_SIZE - 8]
;; mov [rsi],rdi
;; pop qword [rsi]
;; THREAD_FLAGS
;; mov rdi,CLONE_VM | CLONE_VFORK
mov rdi, CLONE_VM | CLONE_CHILD_SETTID | CLONE_PARENT_SETTID | CLONE_SETTLS | CLONE_DETACHED | CLONE_PARENT| CLONE_FS;
;; | CLONE_FS | CLONE_FILES;
;; | CLONE_THREAD | CLONE_SIGHAND
;; | CLONE_CHILD_SETTID
mov r10,rcx
mov rax, SYS_clone
syscall
ret
;; void *stack_create(void)
__mmap:
;; mov rdi, 0
;; mov rsi, STACK_SIZE
;; mov rdx, PROT_WRITE | PROT_READ | PROT_EXEC
mov r10, rcx
;; MAP_ANONYMOUS | MAP_PRIVATE
;; | MAP_GROWSDOWN
;; mov r8, -1
;; mov r9, 0
mov rax, SYS_mmap
syscall
ret
;;; you can let a thread sleep on an address with futex.
;;; 1st :: futex addr
;;; 2nd :: FUTEX_WAIT
;;; 3rd :: expected value (the thread sleeps while *addr == expected)
;;; 4th :: timeout (NULL to block indefinitely)
;; __futex:
;; mov rax,SYS_futex
;; ret
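;;; a minimal FUTEX_WAIT sketch (Linux only; assumes FUTEX_WAIT is defined in
;;; constant.asm next to SYS_futex -- it is not shown in this file):
;; __futex_wait:
;;     ;; rdi = futex addr, rdx = expected value (caller-supplied)
;;     mov rsi, FUTEX_WAIT
;;     xor r10, r10              ; no timeout, block indefinitely
;;     mov rax, SYS_futex
;;     syscall
;;     ret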
|
import { context, trace } from '@opentelemetry/api';
export function Span(name?: string) {
return (
target: any,
propertyKey: string,
propertyDescriptor: PropertyDescriptor,
) => {
const method = propertyDescriptor.value;
// eslint-disable-next-line no-param-reassign
propertyDescriptor.value = function PropertyDescriptor(...args: any[]) {
const currentSpan = trace.getSpan(context.active());
const tracer = trace.getTracer('default');
// Fall back to the active context when there is no current span.
const parentContext = currentSpan
? trace.setSpan(context.active(), currentSpan)
: context.active();
return context.with(parentContext, () => {
const span = tracer.startSpan(
name || `${target.constructor.name}.${propertyKey}`,
);
if (method.constructor.name === 'AsyncFunction') {
return method.apply(this, args).finally(() => {
span.end();
});
}
const result = method.apply(this, args);
span.end();
return result;
});
};
};
}
|
import Diagrams.Prelude
import Diagrams.Backend.Cairo.CmdLine
type D = AnnDiagram Cairo V2 Double Any
p :: Trail V2 Double
p = fromOffsets [(1,2), (1,-5)]
burst n = close . mconcat . take n . iterate (rotateBy (-1/(fromIntegral n))) $ p
sun = (strokeT $ burst 25)
# lineJoin LineJoinRound
# fc yellow
# lc red
# lw 1
# centerXY
dia = mconcat . reverse . take 15 . iterate (scale 0.8) $ sun
main = defaultMain (pad 1.1 dia) |
import { performance } from 'perf_hooks';
import { Context } from '@azure/functions';
import { Request, Response } from 'express';
export function setHeaders(res: Response, context: Context) {
context.res = context.res || ({} as Record<string, any>);
context.res.headers = Object.assign(
res.getHeaders() || {},
context.res.headers,
);
}
export function createRequestLog(
req: Request,
res: Response,
context: Context,
) {
const reqMethod = req.method.toUpperCase();
const reqSegments = context.req?.params.segments || '';
const reqQuery = Object.keys(req.query)
.map((key) => `${key}=${req.query[key]}`)
.join('&');
const reqUri = '/' + reqSegments + (reqQuery ? '?' + reqQuery : '');
const resStatusCode = res.statusCode;
const logs: (string | number)[] = [reqMethod, reqUri, resStatusCode];
const { startAt }: { startAt: number } = context.bindingData.sys;
if (startAt) {
const elapsedTime = Math.round((performance.now() - startAt) * 1000) / 1000;
logs.push(`${elapsedTime}msec`);
}
context.log.info(logs.join(' '));
}
export function setStartAt(context: Context) {
context.bindingData.sys['startAt'] = performance.now();
}
|
---
path: /blog/my-first-post
date: '2017-11-07'
title: My first blog post edited
---
|
# flappy-bird_clone
TAMZ course project in JS.
You can try it [here](http://cmoud94.tode.cz/).
# License
This project uses [MIT License](LICENSE).
|
package com.andres.multiwork.pc.utils;
public interface Importer {
public void importData();
}
|
import * as assert from 'assert'
import { configureWorkflow, MemoryPersistenceProvider, ConsoleLogger } from '@huksley/workflow-es'
import { emitter, SampleWorkflow } from '../src/main'
describe('main.ts', () => {
it('can start workflow', async () => {
return new Promise(async resolve => {
const config = configureWorkflow()
assert.ok(config)
const persistence = new MemoryPersistenceProvider()
config.useLogger(new ConsoleLogger())
config.usePersistence(persistence)
const host = config.getHost()
assert.ok(host)
host.registerWorkflow(SampleWorkflow)
await host.start()
emitter.on('ping', () => {
console.info('Sending event')
host.publishEvent('myEvent', '0', 'Hi!')
})
emitter.on('done', () => {
console.info('Workflow done')
assert.ok(true)
resolve(true)
})
const id = await host.startWorkflow('test1', 1, {})
assert.ok(id)
console.log('Started workflow: ' + id)
})
}).timeout(10000)
})
|
const { Event } = require("klasa");
/*
* Event watches for deleted starboard messages and updates the data as not to cause
* Margarine to crash if another reaction was added to the original message.
*/
module.exports = class extends Event {
constructor(...args) {
super(...args, {
name: "starboardWatch",
enabled: true,
event: "messageDelete"
});
}
async run(message) {
// Message is not in a guild; do nothing.
if (!message.guild) { return; }
let sbConfig = message.guild.settings.starboard;
if (sbConfig.sbCache.includes(message.id)) {
var index = sbConfig.sbCache.indexOf(message.id);
var msgLink = sbConfig.msgCache[index];
message.guild.settings.update("starboard.msgCache", msgLink, { action: "remove" }).then(() => {
message.guild.settings.update("starboard.sbCache", message.id, { action: "remove" });
});
}
}
}; |
const { prime, load, flush } = require('../lib/load')
const wait = t => new Promise(r => setTimeout(r, t))
module.exports = async (test, assert) => {
test('runs', async () => {
let i = 0
const loader = async () => {
await wait(100)
return { value: 'runs' }
}
function component () {
i++
const data = load(loader, { key: 'runs' })
return data ? data.value : null
}
const { content, data } = await flush(component)
assert(content === 'runs')
assert(data.runs.value === 'runs')
assert(i === 2)
})
test('nested - cached', async () => {
let i = 0
const loader = async () => {
await wait(100)
return { value: 'nested' }
}
function child () {
i++
const data = load(loader, { key: 'nested' })
return data ? data.value : null
}
function entry () {
i++
const data = load(loader, { key: 'nested' })
return data ? child() : null
}
const { content, data } = await flush(entry)
assert(content === 'nested')
assert(data.nested.value === 'nested')
assert(i === 3)
})
test('nested - not cached', async () => {
let i = 0
const loader = async () => {
await wait(100)
return { value: 'nested' }
}
function child () {
i++
const data = load(loader, { key: 'nested_child' })
return data ? data.value : null
}
function entry () {
i++
const data = load(loader, { key: 'nested_entry' })
return data ? child() : null
}
const { content, data } = await flush(entry)
assert(content === 'nested')
assert(data.nested_entry.value === 'nested')
assert(data.nested_child.value === 'nested')
assert(i === 5)
})
test('no recursion on error', async () => {
let i = 0
const loader = async () => {
await wait(100)
throw 'error'
return { value: 'runs' }
}
function component () {
i++
const data = load(loader, { key: 'runs' })
return data ? data.value : null
}
await flush(component)
assert(i === 1)
})
test('prime', async () => {
let i = 0
const loader = async () => {
return { value: 'val' }
}
function component () {
i++
const data = load(loader, { key: 'runs' })
return data ? data.value : null
}
prime('runs', 'val')
await flush(component)
assert(i === 1)
})
test('catches sync and async errors', async () => {
let one = 0
let two = 0
const asyncLoader = async () => {
throw 'async'
return { value: 'async' }
}
const syncLoader = () => {
throw 'sync'
return { value: 'sync' }
}
function asyncComponent () {
one++
const data = load(asyncLoader, { key: 'async' })
return data ? data.value : null
}
function syncComponent () {
two++
const data = load(syncLoader, { key: 'async' })
return data ? data.value : null
}
await flush(asyncComponent)
assert(one === 2)
await flush(syncComponent)
assert(two === 1)
})
}
|
---
layout: post
title: "Linux 操作系统:zip压缩"
subtitle: "《Linux 操作系统》zip 用法介绍"
date: 2020-04-04 15:36:00
author: "chanweiyan"
header-img: "img/cwy/post-bg/unix-linux.jpg"
catalog: true
tags:
- Linux
- Shell
---
#### Getting help
```bash
zip -h
unzip -h
man zip
man unzip
```
#### Compressing with zip
```bash
# Compress the etc.tar archive and the hello.bak directory (with all of its files) in the current directory
zip -r compress.zip etc.tar hello.bak/
# Compress the file production.sql
zip production.sql.zip production.sql
```
#### Extracting with unzip
```bash
# Extract the archive
unzip production.sql.zip
# Extract everything in compress.zip except etc.tar into the /tmp directory
unzip compress.zip -d /tmp -x etc.tar
```
#### Viewing archive contents
Using the `-Z` option:
```bash
unzip -Z compress.zip
```
Using `zipinfo`:
```bash
zipinfo compress.zip
```
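You can also get a plain listing of the entries (names, sizes, dates) with `unzip -l`:
```bash
unzip -l compress.zip
```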
|
import { BagItem } from '@/types/bagItem'
import { useState, useContext, createContext } from 'react'
interface Result {
title: string
message: string
status: string
}
interface BagContextInterface {
bag: BagItem[]
fetchBag: () => Promise<void>
fetchBagOnSignIn: () => Promise<void>
addToBag: (item: BagItem, signedIn?: boolean) => Promise<Result>
removeFromBag: (item: BagItem, signedIn?: boolean) => Promise<Result>
}
export const bagContext = createContext<BagContextInterface | null>(null)
export function ProvideBag({ children }: { children: React.ReactNode }): JSX.Element {
const bag = useProvideBag()
return <bagContext.Provider value={bag}>{children}</bagContext.Provider>
}
// eslint-disable-next-line @typescript-eslint/explicit-module-boundary-types
export const useBag = () => {
return useContext(bagContext)
}
const useProvideBag = () => {
const [bag, setBag] = useState<BagItem[]>([])
const fetchBag = async () => {
try {
const result = await fetch('/api/bag/')
const data = await result.json()
setBag(data.bag)
} catch (error) {
console.log(error)
}
}
const fetchBagOnSignIn = async () => {
if (bag.length > 0) {
// Combine guest shopping bag with users saved bag
try {
const response = await fetch('/api/bag/', {
method: 'PUT',
headers: {
'Content-Type': 'application/json',
},
body: JSON.stringify({ bag, route: 'fetchBagOnSignIn' }),
})
const data = await response.json()
setBag(data.bag)
} catch {
console.log('there was an error fetching your shopping bag')
}
} else {
try {
await fetchBag()
} catch {
console.log('there was an error fetching your shopping bag')
}
}
}
const addToBag = async (item: BagItem, signedIn = false) => {
if (signedIn) {
try {
const response = await fetch('/api/bag/', {
method: 'POST',
headers: {
'Content-Type': 'application/json',
},
body: JSON.stringify(item),
})
const data = await response.json()
setBag(data.bag)
return {
title: 'Item Added!',
message: 'Your item has been added to your shopping bag',
status: 'success',
}
} catch {
return {
title: 'Error!',
message: 'There was an error adding your item',
status: 'error',
}
}
} else {
const filteredBag = bag.filter((product: BagItem) => product.id !== item.id)
setBag([...filteredBag, item])
return {
title: 'Item Added!',
message: 'Your item has been added to your shopping bag',
status: 'success',
}
}
}
const removeFromBag = async (item: BagItem, signedIn = false) => {
if (!signedIn) {
const updatedBag = bag.filter((el: BagItem) => el.id !== item.id)
setBag(updatedBag)
return {
title: 'Item Removed!',
message: 'Your item has been removed from your shopping bag',
status: 'success',
}
} else {
try {
const response = await fetch('/api/bag/', {
method: 'PUT',
headers: {
'Content-Type': 'application/json',
},
body: JSON.stringify(item),
})
const data = await response.json()
const updatedBag = bag.filter((el: BagItem) => el.id !== item.id)
setBag(updatedBag)
return {
title: 'Item Removed!',
message: data.message,
status: 'success',
}
} catch {
return {
title: 'Error!',
message: 'There was an error removing your item',
status: 'error',
}
}
}
}
return {
bag,
fetchBag,
fetchBagOnSignIn,
addToBag,
removeFromBag,
} as const
}
|
namespace MyMovieDb.App.Common.Constants
{
public static class ActionNames
{
//public const string
}
}
|
# ACF Color Palette


|
using System;
namespace Qkmaxware.Geometry.Coordinates {
/// <summary>
/// 3D vector in cylindrical coordinates
/// </summary>
public class CylindricalCoordinate {
/// <summary>
/// Distance from the origin
/// </summary>
/// <value>distance</value>
public double Distance {get; private set;}
/// <summary>
/// Azimuthal angle around the vertical axis
/// </summary>
/// <value>angle</value>
public double AzimuthalAngle {get; private set;}
/// <summary>
/// Height of the point
/// </summary>
/// <value>height</value>
public double Altitude {get; private set;}
/// <summary>
/// Create an zero coordinate point
/// </summary>
/// <returns>cylindrical coordinate</returns>
public CylindricalCoordinate() : this(0,0,0) {}
/// <summary>
/// Create a specific coordinate point
/// </summary>
/// <param name="distance">distance</param>
/// <param name="azimuth">azimuthal angle</param>
/// <param name="height">height</param>
public CylindricalCoordinate(double distance, double azimuth, double height) {
this.Distance = distance;
this.AzimuthalAngle = azimuth;
this.Altitude = height;
}
/// <summary>
/// Convert a cylindrical coordinate to a cartesian one
/// </summary>
/// <param name="coord">cylindrical coordinate</param>
public static implicit operator Vec3 (CylindricalCoordinate coord) {
var r = coord.Distance;
var CosPhi = Math.Cos(coord.AzimuthalAngle);
var SinPhi = Math.Sin(coord.AzimuthalAngle);
var x = r * CosPhi;
var y = r * SinPhi;
var z = coord.Altitude;
return new Vec3(x, y, z);
}
/// <summary>
/// Convert a cartesian coordinate to a cylindrical one
/// </summary>
/// <param name="coord">cartesian coordinate</param>
public static implicit operator CylindricalCoordinate (Vec3 coord) {
var r = Math.Sqrt(coord.X * coord.X + coord.Y * coord.Y);
var phi = Math.Atan2(coord.Y, coord.X);
var z = coord.Z;
return new CylindricalCoordinate(r, phi, z);
}
}
} |
function route(pathName) {
console.log("pathName:" + pathName);
}
exports.route = route; |
def read_file(file_name)
file = File.open(file_name, "r")
data = file.read
file.close
return data
end
file_name = "input.txt"
arr = read_file(file_name).split(",").map(&:to_i)
min_fuel = 999999999999999
for j in arr.min..arr.max
fuel = 0
for i in 0...arr.size
r = (arr[i] - j).abs
fuel += (r * (r + 1)) / 2
end
min_fuel = [min_fuel, fuel].min
end
p min_fuel |
class Symbol
def to_string(prev="")
"#{prev}#{self.to_s}"
end
def sanitize
self.to_s.sanitize
end
end |
// This file was generated by go generate; DO NOT EDIT
package language
// NumCompactTags is the number of common tags. The maximum tag is
// NumCompactTags-1.
const NumCompactTags = 710
var specialTags = []Tag{ // 2 elements
0: {lang: 0x61, region: 0x6d, script: 0x0, pVariant: 0x5, pExt: 0xe, str: "ca-ES-valencia"},
1: {lang: 0x9a, region: 0x132, script: 0x0, pVariant: 0x5, pExt: 0x5, str: "en-US-u-va-posix"},
} // Size: 72 bytes
var coreTags = map[uint32]uint16{
0x0: 0, // und
0x00a00000: 3, // af
0x00a000d0: 4, // af-NA
0x00a0015e: 5, // af-ZA
0x00b00000: 6, // agq
0x00b00051: 7, // agq-CM
0x00d00000: 8, // ak
0x00d0007e: 9, // ak-GH
0x01100000: 10, // am
0x0110006e: 11, // am-ET
0x01500000: 12, // ar
0x01500001: 13, // ar-001
0x01500022: 14, // ar-AE
0x01500038: 15, // ar-BH
0x01500061: 16, // ar-DJ
0x01500066: 17, // ar-DZ
0x0150006a: 18, // ar-EG
0x0150006b: 19, // ar-EH
0x0150006c: 20, // ar-ER
0x01500095: 21, // ar-IL
0x01500099: 22, // ar-IQ
0x0150009f: 23, // ar-JO
0x015000a6: 24, // ar-KM
0x015000aa: 25, // ar-KW
0x015000ae: 26, // ar-LB
0x015000b7: 27, // ar-LY
0x015000b8: 28, // ar-MA
0x015000c7: 29, // ar-MR
0x015000df: 30, // ar-OM
0x015000eb: 31, // ar-PS
0x015000f1: 32, // ar-QA
0x01500106: 33, // ar-SA
0x01500109: 34, // ar-SD
0x01500113: 35, // ar-SO
0x01500115: 36, // ar-SS
0x0150011a: 37, // ar-SY
0x0150011e: 38, // ar-TD
0x01500126: 39, // ar-TN
0x0150015b: 40, // ar-YE
0x01c00000: 41, // as
0x01c00097: 42, // as-IN
0x01d00000: 43, // asa
0x01d0012d: 44, // asa-TZ
0x01f00000: 45, // ast
0x01f0006d: 46, // ast-ES
0x02400000: 47, // az
0x0241d000: 48, // az-Cyrl
0x0241d031: 49, // az-Cyrl-AZ
0x0244f000: 50, // az-Latn
0x0244f031: 51, // az-Latn-AZ
0x02a00000: 52, // bas
0x02a00051: 53, // bas-CM
0x02f00000: 54, // be
0x02f00046: 55, // be-BY
0x03100000: 56, // bem
0x0310015f: 57, // bem-ZM
0x03300000: 58, // bez
0x0330012d: 59, // bez-TZ
0x03800000: 60, // bg
0x03800037: 61, // bg-BG
0x04900000: 62, // bm
0x049000c1: 63, // bm-ML
0x04b00000: 64, // bn
0x04b00034: 65, // bn-BD
0x04b00097: 66, // bn-IN
0x04c00000: 67, // bo
0x04c00052: 68, // bo-CN
0x04c00097: 69, // bo-IN
0x05000000: 70, // br
0x05000076: 71, // br-FR
0x05300000: 72, // brx
0x05300097: 73, // brx-IN
0x05400000: 74, // bs
0x0541d000: 75, // bs-Cyrl
0x0541d032: 76, // bs-Cyrl-BA
0x0544f000: 77, // bs-Latn
0x0544f032: 78, // bs-Latn-BA
0x06100000: 79, // ca
0x06100021: 80, // ca-AD
0x0610006d: 81, // ca-ES
0x06100076: 82, // ca-FR
0x0610009c: 83, // ca-IT
0x06400000: 84, // ce
0x06400104: 85, // ce-RU
0x06600000: 86, // cgg
0x0660012f: 87, // cgg-UG
0x06c00000: 88, // chr
0x06c00132: 89, // chr-US
0x06f00000: 90, // ckb
0x06f00099: 91, // ckb-IQ
0x06f0009a: 92, // ckb-IR
0x07900000: 93, // cs
0x0790005d: 94, // cs-CZ
0x07d00000: 95, // cu
0x07d00104: 96, // cu-RU
0x07f00000: 97, // cy
0x07f00079: 98, // cy-GB
0x08000000: 99, // da
0x08000062: 100, // da-DK
0x08000080: 101, // da-GL
0x08300000: 102, // dav
0x083000a2: 103, // dav-KE
0x08500000: 104, // de
0x0850002d: 105, // de-AT
0x08500035: 106, // de-BE
0x0850004d: 107, // de-CH
0x0850005f: 108, // de-DE
0x085000b0: 109, // de-LI
0x085000b5: 110, // de-LU
0x08800000: 111, // dje
0x088000d2: 112, // dje-NE
0x08b00000: 113, // dsb
0x08b0005f: 114, // dsb-DE
0x08e00000: 115, // dua
0x08e00051: 116, // dua-CM
0x09000000: 117, // dyo
0x09000112: 118, // dyo-SN
0x09200000: 119, // dz
0x09200042: 120, // dz-BT
0x09300000: 121, // ebu
0x093000a2: 122, // ebu-KE
0x09400000: 123, // ee
0x0940007e: 124, // ee-GH
0x09400120: 125, // ee-TG
0x09900000: 126, // el
0x0990005c: 127, // el-CY
0x09900085: 128, // el-GR
0x09a00000: 129, // en
0x09a00001: 130, // en-001
0x09a0001a: 131, // en-150
0x09a00024: 132, // en-AG
0x09a00025: 133, // en-AI
0x09a0002c: 134, // en-AS
0x09a0002d: 135, // en-AT
0x09a0002e: 136, // en-AU
0x09a00033: 137, // en-BB
0x09a00035: 138, // en-BE
0x09a00039: 139, // en-BI
0x09a0003c: 140, // en-BM
0x09a00041: 141, // en-BS
0x09a00045: 142, // en-BW
0x09a00047: 143, // en-BZ
0x09a00048: 144, // en-CA
0x09a00049: 145, // en-CC
0x09a0004d: 146, // en-CH
0x09a0004f: 147, // en-CK
0x09a00051: 148, // en-CM
0x09a0005b: 149, // en-CX
0x09a0005c: 150, // en-CY
0x09a0005f: 151, // en-DE
0x09a00060: 152, // en-DG
0x09a00062: 153, // en-DK
0x09a00063: 154, // en-DM
0x09a0006c: 155, // en-ER
0x09a00070: 156, // en-FI
0x09a00071: 157, // en-FJ
0x09a00072: 158, // en-FK
0x09a00073: 159, // en-FM
0x09a00079: 160, // en-GB
0x09a0007a: 161, // en-GD
0x09a0007d: 162, // en-GG
0x09a0007e: 163, // en-GH
0x09a0007f: 164, // en-GI
0x09a00081: 165, // en-GM
0x09a00088: 166, // en-GU
0x09a0008a: 167, // en-GY
0x09a0008b: 168, // en-HK
0x09a00094: 169, // en-IE
0x09a00095: 170, // en-IL
0x09a00096: 171, // en-IM
0x09a00097: 172, // en-IN
0x09a00098: 173, // en-IO
0x09a0009d: 174, // en-JE
0x09a0009e: 175, // en-JM
0x09a000a2: 176, // en-KE
0x09a000a5: 177, // en-KI
0x09a000a7: 178, // en-KN
0x09a000ab: 179, // en-KY
0x09a000af: 180, // en-LC
0x09a000b2: 181, // en-LR
0x09a000b3: 182, // en-LS
0x09a000bd: 183, // en-MG
0x09a000be: 184, // en-MH
0x09a000c4: 185, // en-MO
0x09a000c5: 186, // en-MP
0x09a000c8: 187, // en-MS
0x09a000c9: 188, // en-MT
0x09a000ca: 189, // en-MU
0x09a000cc: 190, // en-MW
0x09a000ce: 191, // en-MY
0x09a000d0: 192, // en-NA
0x09a000d3: 193, // en-NF
0x09a000d4: 194, // en-NG
0x09a000d7: 195, // en-NL
0x09a000db: 196, // en-NR
0x09a000dd: 197, // en-NU
0x09a000de: 198, // en-NZ
0x09a000e4: 199, // en-PG
0x09a000e5: 200, // en-PH
0x09a000e6: 201, // en-PK
0x09a000e9: 202, // en-PN
0x09a000ea: 203, // en-PR
0x09a000ee: 204, // en-PW
0x09a00105: 205, // en-RW
0x09a00107: 206, // en-SB
0x09a00108: 207, // en-SC
0x09a00109: 208, // en-SD
0x09a0010a: 209, // en-SE
0x09a0010b: 210, // en-SG
0x09a0010c: 211, // en-SH
0x09a0010d: 212, // en-SI
0x09a00110: 213, // en-SL
0x09a00115: 214, // en-SS
0x09a00119: 215, // en-SX
0x09a0011b: 216, // en-SZ
0x09a0011d: 217, // en-TC
0x09a00123: 218, // en-TK
0x09a00127: 219, // en-TO
0x09a0012a: 220, // en-TT
0x09a0012b: 221, // en-TV
0x09a0012d: 222, // en-TZ
0x09a0012f: 223, // en-UG
0x09a00131: 224, // en-UM
0x09a00132: 225, // en-US
0x09a00136: 226, // en-VC
0x09a00139: 227, // en-VG
0x09a0013a: 228, // en-VI
0x09a0013c: 229, // en-VU
0x09a0013f: 230, // en-WS
0x09a0015e: 231, // en-ZA
0x09a0015f: 232, // en-ZM
0x09a00161: 233, // en-ZW
0x09b00000: 234, // eo
0x09b00001: 235, // eo-001
0x09c00000: 236, // es
0x09c00003: 237, // es-003
0x09c0001e: 238, // es-419
0x09c0002b: 239, // es-AR
0x09c0003e: 240, // es-BO
0x09c00050: 241, // es-CL
0x09c00053: 242, // es-CO
0x09c00055: 243, // es-CR
0x09c00058: 244, // es-CU
0x09c00064: 245, // es-DO
0x09c00067: 246, // es-EA
0x09c00068: 247, // es-EC
0x09c0006d: 248, // es-ES
0x09c00084: 249, // es-GQ
0x09c00087: 250, // es-GT
0x09c0008d: 251, // es-HN
0x09c00092: 252, // es-IC
0x09c000cd: 253, // es-MX
0x09c000d6: 254, // es-NI
0x09c000e0: 255, // es-PA
0x09c000e2: 256, // es-PE
0x09c000e5: 257, // es-PH
0x09c000ea: 258, // es-PR
0x09c000ef: 259, // es-PY
0x09c00118: 260, // es-SV
0x09c00132: 261, // es-US
0x09c00133: 262, // es-UY
0x09c00138: 263, // es-VE
0x09e00000: 264, // et
0x09e00069: 265, // et-EE
0x0a000000: 266, // eu
0x0a00006d: 267, // eu-ES
0x0a100000: 268, // ewo
0x0a100051: 269, // ewo-CM
0x0a300000: 270, // fa
0x0a300023: 271, // fa-AF
0x0a30009a: 272, // fa-IR
0x0a500000: 273, // ff
0x0a500051: 274, // ff-CM
0x0a500082: 275, // ff-GN
0x0a5000c7: 276, // ff-MR
0x0a500112: 277, // ff-SN
0x0a700000: 278, // fi
0x0a700070: 279, // fi-FI
0x0a900000: 280, // fil
0x0a9000e5: 281, // fil-PH
0x0ac00000: 282, // fo
0x0ac00062: 283, // fo-DK
0x0ac00074: 284, // fo-FO
0x0ae00000: 285, // fr
0x0ae00035: 286, // fr-BE
0x0ae00036: 287, // fr-BF
0x0ae00039: 288, // fr-BI
0x0ae0003a: 289, // fr-BJ
0x0ae0003b: 290, // fr-BL
0x0ae00048: 291, // fr-CA
0x0ae0004a: 292, // fr-CD
0x0ae0004b: 293, // fr-CF
0x0ae0004c: 294, // fr-CG
0x0ae0004d: 295, // fr-CH
0x0ae0004e: 296, // fr-CI
0x0ae00051: 297, // fr-CM
0x0ae00061: 298, // fr-DJ
0x0ae00066: 299, // fr-DZ
0x0ae00076: 300, // fr-FR
0x0ae00078: 301, // fr-GA
0x0ae0007c: 302, // fr-GF
0x0ae00082: 303, // fr-GN
0x0ae00083: 304, // fr-GP
0x0ae00084: 305, // fr-GQ
0x0ae0008f: 306, // fr-HT
0x0ae000a6: 307, // fr-KM
0x0ae000b5: 308, // fr-LU
0x0ae000b8: 309, // fr-MA
0x0ae000b9: 310, // fr-MC
0x0ae000bc: 311, // fr-MF
0x0ae000bd: 312, // fr-MG
0x0ae000c1: 313, // fr-ML
0x0ae000c6: 314, // fr-MQ
0x0ae000c7: 315, // fr-MR
0x0ae000ca: 316, // fr-MU
0x0ae000d1: 317, // fr-NC
0x0ae000d2: 318, // fr-NE
0x0ae000e3: 319, // fr-PF
0x0ae000e8: 320, // fr-PM
0x0ae00100: 321, // fr-RE
0x0ae00105: 322, // fr-RW
0x0ae00108: 323, // fr-SC
0x0ae00112: 324, // fr-SN
0x0ae0011a: 325, // fr-SY
0x0ae0011e: 326, // fr-TD
0x0ae00120: 327, // fr-TG
0x0ae00126: 328, // fr-TN
0x0ae0013c: 329, // fr-VU
0x0ae0013d: 330, // fr-WF
0x0ae0015c: 331, // fr-YT
0x0b500000: 332, // fur
0x0b50009c: 333, // fur-IT
0x0b800000: 334, // fy
0x0b8000d7: 335, // fy-NL
0x0b900000: 336, // ga
0x0b900094: 337, // ga-IE
0x0c100000: 338, // gd
0x0c100079: 339, // gd-GB
0x0c700000: 340, // gl
0x0c70006d: 341, // gl-ES
0x0d100000: 342, // gsw
0x0d10004d: 343, // gsw-CH
0x0d100076: 344, // gsw-FR
0x0d1000b0: 345, // gsw-LI
0x0d200000: 346, // gu
0x0d200097: 347, // gu-IN
0x0d600000: 348, // guz
0x0d6000a2: 349, // guz-KE
0x0d700000: 350, // gv
0x0d700096: 351, // gv-IM
0x0da00000: 352, // ha
0x0da0007e: 353, // ha-GH
0x0da000d2: 354, // ha-NE
0x0da000d4: 355, // ha-NG
0x0dc00000: 356, // haw
0x0dc00132: 357, // haw-US
0x0de00000: 358, // he
0x0de00095: 359, // he-IL
0x0df00000: 360, // hi
0x0df00097: 361, // hi-IN
0x0ec00000: 362, // hr
0x0ec00032: 363, // hr-BA
0x0ec0008e: 364, // hr-HR
0x0ed00000: 365, // hsb
0x0ed0005f: 366, // hsb-DE
0x0f000000: 367, // hu
0x0f000090: 368, // hu-HU
0x0f100000: 369, // hy
0x0f100027: 370, // hy-AM
0x0f600000: 371, // id
0x0f600093: 372, // id-ID
0x0f800000: 373, // ig
0x0f8000d4: 374, // ig-NG
0x0f900000: 375, // ii
0x0f900052: 376, // ii-CN
0x10000000: 377, // is
0x1000009b: 378, // is-IS
0x10100000: 379, // it
0x1010004d: 380, // it-CH
0x1010009c: 381, // it-IT
0x10100111: 382, // it-SM
0x10500000: 383, // ja
0x105000a0: 384, // ja-JP
0x10700000: 385, // jgo
0x10700051: 386, // jgo-CM
0x10900000: 387, // jmc
0x1090012d: 388, // jmc-TZ
0x10e00000: 389, // ka
0x10e0007b: 390, // ka-GE
0x11000000: 391, // kab
0x11000066: 392, // kab-DZ
0x11300000: 393, // kam
0x113000a2: 394, // kam-KE
0x11800000: 395, // kde
0x1180012d: 396, // kde-TZ
0x11a00000: 397, // kea
0x11a00059: 398, // kea-CV
0x12500000: 399, // khq
0x125000c1: 400, // khq-ML
0x12800000: 401, // ki
0x128000a2: 402, // ki-KE
0x12c00000: 403, // kk
0x12c000ac: 404, // kk-KZ
0x12d00000: 405, // kkj
0x12d00051: 406, // kkj-CM
0x12e00000: 407, // kl
0x12e00080: 408, // kl-GL
0x12f00000: 409, // kln
0x12f000a2: 410, // kln-KE
0x13000000: 411, // km
0x130000a4: 412, // km-KH
0x13200000: 413, // kn
0x13200097: 414, // kn-IN
0x13300000: 415, // ko
0x133000a8: 416, // ko-KP
0x133000a9: 417, // ko-KR
0x13500000: 418, // kok
0x13500097: 419, // kok-IN
0x13e00000: 420, // ks
0x13e00097: 421, // ks-IN
0x13f00000: 422, // ksb
0x13f0012d: 423, // ksb-TZ
0x14000000: 424, // ksf
0x14000051: 425, // ksf-CM
0x14100000: 426, // ksh
0x1410005f: 427, // ksh-DE
0x14700000: 428, // kw
0x14700079: 429, // kw-GB
0x14a00000: 430, // ky
0x14a000a3: 431, // ky-KG
0x14e00000: 432, // lag
0x14e0012d: 433, // lag-TZ
0x15100000: 434, // lb
0x151000b5: 435, // lb-LU
0x15700000: 436, // lg
0x1570012f: 437, // lg-UG
0x15e00000: 438, // lkt
0x15e00132: 439, // lkt-US
0x16100000: 440, // ln
0x16100029: 441, // ln-AO
0x1610004a: 442, // ln-CD
0x1610004b: 443, // ln-CF
0x1610004c: 444, // ln-CG
0x16200000: 445, // lo
0x162000ad: 446, // lo-LA
0x16500000: 447, // lrc
0x16500099: 448, // lrc-IQ
0x1650009a: 449, // lrc-IR
0x16600000: 450, // lt
0x166000b4: 451, // lt-LT
0x16800000: 452, // lu
0x1680004a: 453, // lu-CD
0x16a00000: 454, // luo
0x16a000a2: 455, // luo-KE
0x16b00000: 456, // luy
0x16b000a2: 457, // luy-KE
0x16d00000: 458, // lv
0x16d000b6: 459, // lv-LV
0x17700000: 460, // mas
0x177000a2: 461, // mas-KE
0x1770012d: 462, // mas-TZ
0x17d00000: 463, // mer
0x17d000a2: 464, // mer-KE
0x17f00000: 465, // mfe
0x17f000ca: 466, // mfe-MU
0x18000000: 467, // mg
0x180000bd: 468, // mg-MG
0x18100000: 469, // mgh
0x181000cf: 470, // mgh-MZ
0x18200000: 471, // mgo
0x18200051: 472, // mgo-CM
0x18900000: 473, // mk
0x189000c0: 474, // mk-MK
0x18a00000: 475, // ml
0x18a00097: 476, // ml-IN
0x18c00000: 477, // mn
0x18c000c3: 478, // mn-MN
0x19300000: 479, // mr
0x19300097: 480, // mr-IN
0x19700000: 481, // ms
0x1970003d: 482, // ms-BN
0x197000ce: 483, // ms-MY
0x1970010b: 484, // ms-SG
0x19800000: 485, // mt
0x198000c9: 486, // mt-MT
0x19a00000: 487, // mua
0x19a00051: 488, // mua-CM
0x1a200000: 489, // my
0x1a2000c2: 490, // my-MM
0x1a600000: 491, // mzn
0x1a60009a: 492, // mzn-IR
0x1aa00000: 493, // naq
0x1aa000d0: 494, // naq-NA
0x1ab00000: 495, // nb
0x1ab000d8: 496, // nb-NO
0x1ab0010e: 497, // nb-SJ
0x1ad00000: 498, // nd
0x1ad00161: 499, // nd-ZW
0x1b000000: 500, // ne
0x1b000097: 501, // ne-IN
0x1b0000d9: 502, // ne-NP
0x1b900000: 503, // nl
0x1b90002f: 504, // nl-AW
0x1b900035: 505, // nl-BE
0x1b90003f: 506, // nl-BQ
0x1b90005a: 507, // nl-CW
0x1b9000d7: 508, // nl-NL
0x1b900114: 509, // nl-SR
0x1b900119: 510, // nl-SX
0x1ba00000: 511, // nmg
0x1ba00051: 512, // nmg-CM
0x1bb00000: 513, // nn
0x1bb000d8: 514, // nn-NO
0x1bc00000: 515, // nnh
0x1bc00051: 516, // nnh-CM
0x1c500000: 517, // nus
0x1c500115: 518, // nus-SS
0x1ca00000: 519, // nyn
0x1ca0012f: 520, // nyn-UG
0x1ce00000: 521, // om
0x1ce0006e: 522, // om-ET
0x1ce000a2: 523, // om-KE
0x1cf00000: 524, // or
0x1cf00097: 525, // or-IN
0x1d000000: 526, // os
0x1d00007b: 527, // os-GE
0x1d000104: 528, // os-RU
0x1d200000: 529, // pa
0x1d205000: 530, // pa-Arab
0x1d2050e6: 531, // pa-Arab-PK
0x1d22e000: 532, // pa-Guru
0x1d22e097: 533, // pa-Guru-IN
0x1e200000: 534, // pl
0x1e2000e7: 535, // pl-PL
0x1e800000: 536, // prg
0x1e800001: 537, // prg-001
0x1e900000: 538, // ps
0x1e900023: 539, // ps-AF
0x1ea00000: 540, // pt
0x1ea00029: 541, // pt-AO
0x1ea00040: 542, // pt-BR
0x1ea00059: 543, // pt-CV
0x1ea00089: 544, // pt-GW
0x1ea000c4: 545, // pt-MO
0x1ea000cf: 546, // pt-MZ
0x1ea000ec: 547, // pt-PT
0x1ea00116: 548, // pt-ST
0x1ea00124: 549, // pt-TL
0x1ec00000: 550, // qu
0x1ec0003e: 551, // qu-BO
0x1ec00068: 552, // qu-EC
0x1ec000e2: 553, // qu-PE
0x1f700000: 554, // rm
0x1f70004d: 555, // rm-CH
0x1fc00000: 556, // rn
0x1fc00039: 557, // rn-BI
0x1fe00000: 558, // ro
0x1fe000ba: 559, // ro-MD
0x1fe00102: 560, // ro-RO
0x20000000: 561, // rof
0x2000012d: 562, // rof-TZ
0x20200000: 563, // ru
0x20200046: 564, // ru-BY
0x202000a3: 565, // ru-KG
0x202000ac: 566, // ru-KZ
0x202000ba: 567, // ru-MD
0x20200104: 568, // ru-RU
0x2020012e: 569, // ru-UA
0x20500000: 570, // rw
0x20500105: 571, // rw-RW
0x20600000: 572, // rwk
0x2060012d: 573, // rwk-TZ
0x20a00000: 574, // sah
0x20a00104: 575, // sah-RU
0x20b00000: 576, // saq
0x20b000a2: 577, // saq-KE
0x20f00000: 578, // sbp
0x20f0012d: 579, // sbp-TZ
0x21800000: 580, // se
0x21800070: 581, // se-FI
0x218000d8: 582, // se-NO
0x2180010a: 583, // se-SE
0x21a00000: 584, // seh
0x21a000cf: 585, // seh-MZ
0x21c00000: 586, // ses
0x21c000c1: 587, // ses-ML
0x21d00000: 588, // sg
0x21d0004b: 589, // sg-CF
0x22100000: 590, // shi
0x2214f000: 591, // shi-Latn
0x2214f0b8: 592, // shi-Latn-MA
0x221cc000: 593, // shi-Tfng
0x221cc0b8: 594, // shi-Tfng-MA
0x22300000: 595, // si
0x223000b1: 596, // si-LK
0x22500000: 597, // sk
0x2250010f: 598, // sk-SK
0x22700000: 599, // sl
0x2270010d: 600, // sl-SI
0x22d00000: 601, // smn
0x22d00070: 602, // smn-FI
0x23000000: 603, // sn
0x23000161: 604, // sn-ZW
0x23200000: 605, // so
0x23200061: 606, // so-DJ
0x2320006e: 607, // so-ET
0x232000a2: 608, // so-KE
0x23200113: 609, // so-SO
0x23400000: 610, // sq
0x23400026: 611, // sq-AL
0x234000c0: 612, // sq-MK
0x2340014a: 613, // sq-XK
0x23500000: 614, // sr
0x2351d000: 615, // sr-Cyrl
0x2351d032: 616, // sr-Cyrl-BA
0x2351d0bb: 617, // sr-Cyrl-ME
0x2351d103: 618, // sr-Cyrl-RS
0x2351d14a: 619, // sr-Cyrl-XK
0x2354f000: 620, // sr-Latn
0x2354f032: 621, // sr-Latn-BA
0x2354f0bb: 622, // sr-Latn-ME
0x2354f103: 623, // sr-Latn-RS
0x2354f14a: 624, // sr-Latn-XK
0x24100000: 625, // sv
0x24100030: 626, // sv-AX
0x24100070: 627, // sv-FI
0x2410010a: 628, // sv-SE
0x24200000: 629, // sw
0x2420004a: 630, // sw-CD
0x242000a2: 631, // sw-KE
0x2420012d: 632, // sw-TZ
0x2420012f: 633, // sw-UG
0x24b00000: 634, // ta
0x24b00097: 635, // ta-IN
0x24b000b1: 636, // ta-LK
0x24b000ce: 637, // ta-MY
0x24b0010b: 638, // ta-SG
0x25200000: 639, // te
0x25200097: 640, // te-IN
0x25400000: 641, // teo
0x254000a2: 642, // teo-KE
0x2540012f: 643, // teo-UG
0x25700000: 644, // th
0x25700121: 645, // th-TH
0x25b00000: 646, // ti
0x25b0006c: 647, // ti-ER
0x25b0006e: 648, // ti-ET
0x25e00000: 649, // tk
0x25e00125: 650, // tk-TM
0x26600000: 651, // to
0x26600127: 652, // to-TO
0x26900000: 653, // tr
0x2690005c: 654, // tr-CY
0x26900129: 655, // tr-TR
0x27800000: 656, // twq
0x278000d2: 657, // twq-NE
0x27b00000: 658, // tzm
0x27b000b8: 659, // tzm-MA
0x27d00000: 660, // ug
0x27d00052: 661, // ug-CN
0x27f00000: 662, // uk
0x27f0012e: 663, // uk-UA
0x28500000: 664, // ur
0x28500097: 665, // ur-IN
0x285000e6: 666, // ur-PK
0x28600000: 667, // uz
0x28605000: 668, // uz-Arab
0x28605023: 669, // uz-Arab-AF
0x2861d000: 670, // uz-Cyrl
0x2861d134: 671, // uz-Cyrl-UZ
0x2864f000: 672, // uz-Latn
0x2864f134: 673, // uz-Latn-UZ
0x28700000: 674, // vai
0x2874f000: 675, // vai-Latn
0x2874f0b2: 676, // vai-Latn-LR
0x287d3000: 677, // vai-Vaii
0x287d30b2: 678, // vai-Vaii-LR
0x28b00000: 679, // vi
0x28b0013b: 680, // vi-VN
0x29000000: 681, // vo
0x29000001: 682, // vo-001
0x29300000: 683, // vun
0x2930012d: 684, // vun-TZ
0x29500000: 685, // wae
0x2950004d: 686, // wae-CH
0x2aa00000: 687, // xog
0x2aa0012f: 688, // xog-UG
0x2b000000: 689, // yav
0x2b000051: 690, // yav-CM
0x2b200000: 691, // yi
0x2b200001: 692, // yi-001
0x2b300000: 693, // yo
0x2b30003a: 694, // yo-BJ
0x2b3000d4: 695, // yo-NG
0x2bb00000: 696, // zgh
0x2bb000b8: 697, // zgh-MA
0x2bc00000: 698, // zh
0x2bc32000: 699, // zh-Hans
0x2bc32052: 700, // zh-Hans-CN
0x2bc3208b: 701, // zh-Hans-HK
0x2bc320c4: 702, // zh-Hans-MO
0x2bc3210b: 703, // zh-Hans-SG
0x2bc33000: 704, // zh-Hant
0x2bc3308b: 705, // zh-Hant-HK
0x2bc330c4: 706, // zh-Hant-MO
0x2bc3312c: 707, // zh-Hant-TW
0x2be00000: 708, // zu
0x2be0015e: 709, // zu-ZA
}
// Total table size 4328 bytes (4KiB); checksum: C9659787
|
//! A hash map implementation with consistent ordering.
//!
//! The types in this module are commonly used as the underlying data structure
//! of arbitrary objects found in JSON API data.
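//!
//! A minimal usage sketch, using only the methods documented below (the
//! per-method examples cover each operation in more detail):
//!
//! ```
//! # extern crate json_api;
//! #
//! # fn main() {
//! use json_api::value::Map;
//! let mut map = Map::new();
//!
//! map.insert("x", 1);
//! map.insert("y", 2);
//!
//! assert_eq!(map.get("x"), Some(&1));
//! assert_eq!(map.len(), 2);
//!
//! // entries are visited in insertion order
//! for (key, value) in &map {
//!     println!("key: {} value: {}", key, value);
//! }
//! # }
//! ```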
use std::fmt::{self, Debug, Formatter};
use std::hash::Hash;
use std::iter::FromIterator;
use std::ops::RangeFull;
use ordermap::{self, OrderMap};
use serde::de::{Deserialize, Deserializer};
use serde::ser::{Serialize, Serializer};
use crate::value::collections::Equivalent;
use crate::value::{Key, Value};
/// A hash map implementation with consistent ordering.
#[derive(Clone, Eq, PartialEq)]
pub struct Map<K = Key, V = Value>
where
K: Eq + Hash,
{
inner: OrderMap<K, V>,
}
impl<K, V> Map<K, V>
where
K: Eq + Hash,
{
/// Creates an empty `Map`.
///
/// # Example
///
/// ```
/// # extern crate json_api;
/// #
/// # fn main() {
/// use json_api::value::{Key, Map, Value};
/// let mut map = Map::<Key, Value>::new();
/// # }
/// ```
pub fn new() -> Self {
Default::default()
}
/// Creates a new empty `Map`, with specified capacity.
///
/// # Example
///
/// ```
/// # extern crate json_api;
/// #
/// # use json_api::Error;
/// # use json_api::value::Map;
/// #
/// # fn example() -> Result<(), Error> {
/// let mut map = Map::with_capacity(2);
///
/// map.insert("x", 1);
/// map.insert("y", 2);
///
/// // The next insert will likely require reallocation...
/// map.insert("z", 3);
/// #
/// # Ok(())
/// # }
/// #
/// # fn main() {
/// # example().unwrap();
/// # }
/// ```
pub fn with_capacity(capacity: usize) -> Self {
let inner = OrderMap::with_capacity(capacity);
Map { inner }
}
/// Returns the number of key-value pairs the map can hold without
/// reallocating.
///
/// # Example
///
/// ```
/// # extern crate json_api;
/// #
/// # use json_api::value::{Key, Map, Value};
/// #
/// # fn main() {
/// let map = Map::<Key, Value>::with_capacity(2);
/// assert!(map.capacity() >= 2);
/// # }
/// ```
pub fn capacity(&self) -> usize {
self.inner.capacity()
}
/// Clears the map, removing all key-value pairs. Keeps the allocated memory
/// for reuse.
///
/// # Example
///
/// ```
/// # extern crate json_api;
/// #
/// # use json_api::value::Map;
/// #
/// # fn main() {
/// let mut map = Map::new();
///
/// map.insert("x", 1);
/// map.clear();
/// assert!(map.is_empty());
/// # }
/// ```
pub fn clear(&mut self) {
self.inner.clear();
}
/// Returns true if the map contains a value for the specified key.
///
/// # Example
///
/// ```
/// # extern crate json_api;
/// #
/// # use json_api::value::Map;
/// #
/// # fn main() {
/// let mut map = Map::new();
///
/// map.insert(1, "a");
/// assert_eq!(map.contains_key(&1), true);
/// assert_eq!(map.contains_key(&2), false);
/// # }
/// ```
pub fn contains_key<Q: ?Sized>(&self, key: &Q) -> bool
where
Q: Equivalent<K> + Hash,
{
self.inner.contains_key(key)
}
/// Clears the map, returning all key-value pairs as an iterator. Keeps the
/// allocated memory for reuse.
///
/// # Example
///
/// ```
/// # extern crate json_api;
/// #
/// # use json_api::value::Map;
/// #
/// # fn main() {
/// let mut map = Map::new();
///
/// map.insert("x", 1);
/// map.insert("y", 2);
///
/// for (key, value) in map.drain(..) {
/// assert!(key == "x" || key == "y");
/// assert!(value == 1 || value == 2);
/// }
///
/// assert!(map.is_empty());
/// # }
/// ```
pub fn drain(&mut self, range: RangeFull) -> Drain<K, V> {
let iter = self.inner.drain(range);
Drain { iter }
}
/// Returns a reference to the value corresponding to the key.
///
/// # Example
///
/// ```
/// # extern crate json_api;
/// #
/// # use json_api::value::Map;
/// #
/// # fn main() {
/// let mut map = Map::new();
///
/// map.insert("x", 1);
///
/// assert_eq!(map.get("x"), Some(&1));
/// assert_eq!(map.get("y"), None);
/// # }
/// ```
pub fn get<Q: ?Sized>(&self, key: &Q) -> Option<&V>
where
Q: Equivalent<K> + Hash,
{
self.inner.get(key)
}
/// Inserts a key-value pair into the map.
///
/// If the map already had a value for the key, the old value is returned in
/// `Some`; otherwise, `None` is returned.
///
/// # Example
///
/// ```
/// # extern crate json_api;
/// #
/// # use json_api::value::Map;
/// #
/// # fn main() {
/// let mut map = Map::new();
///
/// assert_eq!(map.insert("x", 1), None);
/// assert_eq!(map.insert("x", 2), Some(1));
/// # }
/// ```
pub fn insert(&mut self, key: K, value: V) -> Option<V> {
self.inner.insert(key, value)
}
/// Return an iterator visiting all the key-value pairs of the map in the
/// order in which they were inserted.
///
/// # Example
///
/// ```
/// # extern crate json_api;
/// #
/// # use json_api::value::Map;
/// #
/// # fn main() {
/// let mut map = Map::new();
///
/// map.insert("a", 1);
/// map.insert("b", 2);
/// map.insert("c", 3);
///
/// for (key, value) in map.iter() {
/// println!("key: {} value: {}", key, value);
/// }
/// # }
/// ```
pub fn iter(&self) -> Iter<K, V> {
let iter = self.inner.iter();
Iter { iter }
}
/// Return an iterator visiting all the key-value pairs of the map in the
/// order in which they were inserted, with mutable references to the
/// values.
///
/// # Example
///
/// ```
/// # extern crate json_api;
/// #
/// # use json_api::value::Map;
/// #
/// # fn main() {
/// let mut map = Map::new();
///
/// map.insert("a", 1);
/// map.insert("b", 2);
/// map.insert("c", 3);
///
/// for (_, value) in map.iter_mut() {
/// *value += 1;
/// }
///
/// for (key, value) in &map {
/// println!("key: {} value: {}", key, value);
/// }
/// # }
/// ```
pub fn iter_mut(&mut self) -> IterMut<K, V> {
let iter = self.inner.iter_mut();
IterMut { iter }
}
/// Return an iterator visiting all keys in the order in which they were
/// inserted.
///
/// # Example
///
/// ```
/// # extern crate json_api;
/// #
/// # use json_api::value::Map;
/// #
/// # fn main() {
/// let mut map = Map::new();
///
/// map.insert("a", 1);
/// map.insert("b", 2);
/// map.insert("c", 3);
///
/// for key in map.keys() {
/// println!("{}", key);
/// }
/// # }
/// ```
pub fn keys(&self) -> Keys<K, V> {
let iter = self.inner.keys();
Keys { iter }
}
/// Return the number of key-value pairs in the map.
///
/// # Example
///
/// ```
/// # extern crate json_api;
/// #
/// # use json_api::value::Map;
/// #
/// # fn main() {
/// let mut map = Map::new();
/// assert_eq!(map.len(), 0);
///
/// map.insert("x", 1);
/// assert_eq!(map.len(), 1);
/// # }
/// ```
pub fn len(&self) -> usize {
self.inner.len()
}
/// Returns true if the map contains no elements.
///
/// # Example
///
/// ```
/// # extern crate json_api;
/// #
/// # use json_api::value::Map;
/// #
/// # fn main() {
/// let mut map = Map::new();
/// assert!(map.is_empty());
///
/// map.insert("x", 1);
/// assert!(!map.is_empty());
/// # }
/// ```
pub fn is_empty(&self) -> bool {
self.len() == 0
}
/// Removes a key from the map, returning the value at the key if the key
/// was previously in the map.
///
/// # Example
///
/// ```
/// # extern crate json_api;
/// #
/// # use json_api::value::Map;
/// #
/// # fn main() {
/// let mut map = Map::new();
///
/// map.insert("x", 1);
///
/// assert_eq!(map.remove("x"), Some(1));
/// assert_eq!(map.remove("x"), None);
/// # }
/// ```
pub fn remove<Q: ?Sized>(&mut self, key: &Q) -> Option<V>
where
Q: Equivalent<K> + Hash,
{
self.inner.remove(key)
}
/// Reserves capacity for at least `additional` more elements to be inserted
/// in the `Map`. The collection may reserve more space to avoid frequent
/// reallocations.
///
/// # Note
///
/// This method has yet to be fully implemented in the [`ordermap`] crate.
///
/// # Example
///
/// ```
/// # extern crate json_api;
/// #
/// # use json_api::value::{Key, Map, Value};
/// #
/// # fn main() {
/// let mut map = Map::<Key, Value>::new();
/// map.reserve(10);
/// # }
/// ```
///
/// [`ordermap`]: https://docs.rs/ordermap
pub fn reserve(&mut self, additional: usize) {
self.inner.reserve(additional);
}
/// Return an iterator visiting all values in the order in which they were
/// inserted.
///
/// # Example
///
/// ```
/// # extern crate json_api;
/// #
/// # use json_api::value::Map;
/// #
/// # fn main() {
/// let mut map = Map::new();
///
/// map.insert("a", 1);
/// map.insert("b", 2);
/// map.insert("c", 3);
///
/// for value in map.values() {
/// println!("{}", value);
/// }
/// # }
/// ```
pub fn values(&self) -> Values<K, V> {
let iter = self.inner.values();
Values { iter }
}
/// Return an iterator visiting all values mutably in the order in which
/// they were inserted.
///
/// # Example
///
/// ```
/// # extern crate json_api;
/// #
/// # use json_api::value::Map;
/// #
/// # fn main() {
/// let mut map = Map::new();
///
/// map.insert("a", 1);
/// map.insert("b", 2);
/// map.insert("c", 3);
///
/// for value in map.values_mut() {
/// *value += 1;
/// }
///
/// for value in map.values() {
/// println!("{}", value);
/// }
/// # }
/// ```
pub fn values_mut(&mut self) -> ValuesMut<K, V> {
let iter = self.inner.values_mut();
ValuesMut { iter }
}
}
impl<K, V> Debug for Map<K, V>
where
K: Debug + Eq + Hash,
V: Debug,
{
fn fmt(&self, f: &mut Formatter) -> fmt::Result {
f.debug_map().entries(self).finish()
}
}
impl<K, V> Default for Map<K, V>
where
K: Eq + Hash,
{
fn default() -> Self {
let inner = Default::default();
Map { inner }
}
}
impl<K, V> Extend<(K, V)> for Map<K, V>
where
K: Eq + Hash,
{
fn extend<I>(&mut self, iter: I)
where
I: IntoIterator<Item = (K, V)>,
{
self.inner.extend(iter);
}
}
impl<K, V> FromIterator<(K, V)> for Map<K, V>
where
K: Eq + Hash,
{
fn from_iter<I>(iter: I) -> Self
where
I: IntoIterator<Item = (K, V)>,
{
let inner = OrderMap::from_iter(iter);
Map { inner }
}
}
impl<K, V> IntoIterator for Map<K, V>
where
K: Eq + Hash,
{
type Item = (K, V);
type IntoIter = IntoIter<K, V>;
fn into_iter(self) -> Self::IntoIter {
let iter = self.inner.into_iter();
IntoIter { iter }
}
}
impl<'a, K, V> IntoIterator for &'a Map<K, V>
where
K: Eq + Hash,
{
type Item = (&'a K, &'a V);
type IntoIter = Iter<'a, K, V>;
fn into_iter(self) -> Self::IntoIter {
self.iter()
}
}
impl<'a, K, V> IntoIterator for &'a mut Map<K, V>
where
K: Eq + Hash,
{
type Item = (&'a K, &'a mut V);
type IntoIter = IterMut<'a, K, V>;
fn into_iter(self) -> Self::IntoIter {
self.iter_mut()
}
}
impl<'de, K, V> Deserialize<'de> for Map<K, V>
where
K: Deserialize<'de> + Eq + Hash,
V: Deserialize<'de>,
{
fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
where
D: Deserializer<'de>,
{
OrderMap::deserialize(deserializer).map(|inner| Map { inner })
}
}
impl<K, V> Serialize for Map<K, V>
where
K: Eq + Hash + Serialize,
V: Serialize,
{
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
where
S: Serializer,
{
self.inner.serialize(serializer)
}
}
/// A draining iterator over the entries of a `Map`.
pub struct Drain<'a, K: 'a, V: 'a> {
iter: ordermap::Drain<'a, K, V>,
}
impl<'a, K, V> Iterator for Drain<'a, K, V> {
type Item = (K, V);
fn next(&mut self) -> Option<Self::Item> {
self.iter.next()
}
fn size_hint(&self) -> (usize, Option<usize>) {
self.iter.size_hint()
}
}
/// An iterator over the entries of a `Map`.
pub struct Iter<'a, K: 'a, V: 'a> {
iter: ordermap::Iter<'a, K, V>,
}
impl<'a, K, V> Iterator for Iter<'a, K, V> {
type Item = (&'a K, &'a V);
fn next(&mut self) -> Option<Self::Item> {
self.iter.next()
}
fn count(self) -> usize {
self.iter.len()
}
fn last(mut self) -> Option<Self::Item> {
self.next_back()
}
fn nth(&mut self, n: usize) -> Option<Self::Item> {
self.iter.nth(n)
}
fn size_hint(&self) -> (usize, Option<usize>) {
self.iter.size_hint()
}
}
impl<'a, K, V> DoubleEndedIterator for Iter<'a, K, V> {
fn next_back(&mut self) -> Option<Self::Item> {
self.iter.next_back()
}
}
impl<'a, K, V> ExactSizeIterator for Iter<'a, K, V> {
fn len(&self) -> usize {
self.iter.len()
}
}
/// A mutable iterator over the entries of a `Map`.
pub struct IterMut<'a, K: 'a, V: 'a> {
iter: ordermap::IterMut<'a, K, V>,
}
impl<'a, K, V> Iterator for IterMut<'a, K, V> {
type Item = (&'a K, &'a mut V);
fn next(&mut self) -> Option<Self::Item> {
self.iter.next()
}
fn count(self) -> usize {
self.iter.len()
}
fn last(mut self) -> Option<Self::Item> {
self.next_back()
}
fn nth(&mut self, n: usize) -> Option<Self::Item> {
self.iter.nth(n)
}
fn size_hint(&self) -> (usize, Option<usize>) {
self.iter.size_hint()
}
}
impl<'a, K, V> DoubleEndedIterator for IterMut<'a, K, V> {
fn next_back(&mut self) -> Option<Self::Item> {
self.iter.next_back()
}
}
impl<'a, K, V> ExactSizeIterator for IterMut<'a, K, V> {
fn len(&self) -> usize {
self.iter.len()
}
}
/// An owning iterator over the entries of a `Map`.
pub struct IntoIter<K, V> {
iter: ordermap::IntoIter<K, V>,
}
impl<K, V> Iterator for IntoIter<K, V> {
type Item = (K, V);
fn next(&mut self) -> Option<Self::Item> {
self.iter.next()
}
fn count(self) -> usize {
self.iter.len()
}
fn last(mut self) -> Option<Self::Item> {
self.next_back()
}
fn nth(&mut self, n: usize) -> Option<Self::Item> {
self.iter.nth(n)
}
fn size_hint(&self) -> (usize, Option<usize>) {
self.iter.size_hint()
}
}
impl<K, V> DoubleEndedIterator for IntoIter<K, V> {
fn next_back(&mut self) -> Option<Self::Item> {
self.iter.next_back()
}
}
impl<K, V> ExactSizeIterator for IntoIter<K, V> {
fn len(&self) -> usize {
self.iter.len()
}
}
/// An iterator over the keys of a `Map`.
pub struct Keys<'a, K: 'a, V: 'a> {
iter: ordermap::Keys<'a, K, V>,
}
impl<'a, K, V> Iterator for Keys<'a, K, V> {
type Item = &'a K;
fn next(&mut self) -> Option<Self::Item> {
self.iter.next()
}
fn count(self) -> usize {
self.iter.len()
}
fn last(mut self) -> Option<Self::Item> {
self.next_back()
}
fn nth(&mut self, n: usize) -> Option<Self::Item> {
self.iter.nth(n)
}
fn size_hint(&self) -> (usize, Option<usize>) {
self.iter.size_hint()
}
}
impl<'a, K, V> DoubleEndedIterator for Keys<'a, K, V> {
fn next_back(&mut self) -> Option<Self::Item> {
self.iter.next_back()
}
}
impl<'a, K, V> ExactSizeIterator for Keys<'a, K, V> {
fn len(&self) -> usize {
self.iter.len()
}
}
/// An iterator over the values of a `Map`.
pub struct Values<'a, K: 'a, V: 'a> {
iter: ordermap::Values<'a, K, V>,
}
impl<'a, K, V> Iterator for Values<'a, K, V> {
type Item = &'a V;
fn next(&mut self) -> Option<Self::Item> {
self.iter.next()
}
fn count(self) -> usize {
self.iter.len()
}
fn last(mut self) -> Option<Self::Item> {
self.next_back()
}
fn nth(&mut self, n: usize) -> Option<Self::Item> {
self.iter.nth(n)
}
fn size_hint(&self) -> (usize, Option<usize>) {
self.iter.size_hint()
}
}
impl<'a, K, V> DoubleEndedIterator for Values<'a, K, V> {
fn next_back(&mut self) -> Option<Self::Item> {
self.iter.next_back()
}
}
impl<'a, K, V> ExactSizeIterator for Values<'a, K, V> {
fn len(&self) -> usize {
self.iter.len()
}
}
/// A mutable iterator over the values of a `Map`.
pub struct ValuesMut<'a, K: 'a, V: 'a> {
iter: ordermap::ValuesMut<'a, K, V>,
}
impl<'a, K, V> Iterator for ValuesMut<'a, K, V> {
type Item = &'a mut V;
fn next(&mut self) -> Option<Self::Item> {
self.iter.next()
}
fn count(self) -> usize {
self.iter.len()
}
fn last(mut self) -> Option<Self::Item> {
self.next_back()
}
fn nth(&mut self, n: usize) -> Option<Self::Item> {
self.iter.nth(n)
}
fn size_hint(&self) -> (usize, Option<usize>) {
self.iter.size_hint()
}
}
impl<'a, K, V> DoubleEndedIterator for ValuesMut<'a, K, V> {
fn next_back(&mut self) -> Option<Self::Item> {
self.iter.next_back()
}
}
impl<'a, K, V> ExactSizeIterator for ValuesMut<'a, K, V> {
fn len(&self) -> usize {
self.iter.len()
}
}
|
let playedDivs = [...document.getElementsByTagName("div")]
.filter(div => div.hasAttribute("played") && div.title)
.map(div => div.textContent)
// .map(element => console.warn('2', element))
let clipBoardValue = `// ${window.location}\n\n`
clipBoardValue += `let artistsToLoad = ${JSON.stringify(playedDivs)};\n\n`
clipBoardValue += `[...document.getElementsByTagName("div")].forEach(div => {
div.onclick = () => div.hasAttribute('played') ? div.removeAttribute('played') : div.setAttribute('played', true)
artistsToLoad.includes(div.textContent) && div.setAttribute("played", true)
})`
let el = document.createElement('textarea')
el.value = clipBoardValue
el.setAttribute('readonly', '')
el.style.cssText = 'position: absolute; left: -9999px'
document.body.appendChild(el)
el.select()
document.execCommand('copy')
document.body.removeChild(el)
|
import { Injectable } from '@angular/core';
import {HttpClient} from '@angular/common/http';
let url = 'ailab-manager/v1';
@Injectable()
export class TagService {
constructor(private http:HttpClient) {
}
getTags(){
return this.http.get(`${url}/front/curriculumPath/count`);
}
}
|
using Deltix.Timebase.Api.Utilities.Binding;
namespace TimebaseSample
{
public class TypeMap
{
public static readonly TypeLoader TYPE_LOADER = new TypeLoader();
static TypeMap()
{
TYPE_LOADER.AddType(typeof(Activity));
}
}
}
|
package com.lernoscio.rover;
import com.lernoscio.rover.environment.Coordinates;
import com.lernoscio.rover.environment.Direction;
import com.lernoscio.rover.environment.Room;
import com.lernoscio.rover.environment.Door;
import com.lernoscio.rover.environment.Stair;
import org.junit.Assert;
import org.junit.Test;
public class RoverTest {
@Test
public void canProvideCurrentLocationAsString() {
//Given
Room room = new Room("A",5,5);
Coordinates startingPosition = new Coordinates(3,3);
//When
Rover rover = new Rover(room, startingPosition, Direction.N);
//then
Assert.assertEquals("A 3 3 N", rover.currentLocation());
}
@Test
public void canRotateLeft() {
//Given
Room room = new Room("A",5,5);
Coordinates startingPosition = new Coordinates(1,2);
Rover rover = new Rover(room, startingPosition, Direction.N);
//When
rover.turnLeft();
//then
Assert.assertEquals("A 1 2 W", rover.currentLocation());
}
@Test
public void canRotateRight() {
//Given
Room room = new Room("A",5,5);
Coordinates startingPosition = new Coordinates(1,2);
Rover rover = new Rover(room, startingPosition, Direction.N);
//When
rover.turnRight();
//then
Assert.assertEquals("A 1 2 E", rover.currentLocation());
}
@Test
public void canTurnAround() {
//Given
Room room = new Room("A",5,5);
Coordinates startingPosition = new Coordinates(1,2);
Rover rover = new Rover(room, startingPosition, Direction.N);
//When
rover.turnAround();
//then
Assert.assertEquals("A 1 2 S", rover.currentLocation());
}
@Test
public void canMove() {
//Given
Room room = new Room("A",5,5);
Coordinates startingPosition = new Coordinates(1,2);
Rover rover = new Rover(room, startingPosition, Direction.N);
//When
rover.move();
//then
Assert.assertEquals("A 1 3 N", rover.currentLocation());
}
@Test
public void canRunCommandToRotateRight() {
//Given
Room room = new Room("A",5,5);
Coordinates startingPosition = new Coordinates(1,2);
Rover rover = new Rover(room, startingPosition, Direction.N);
//When
rover.run("R");
//then
Assert.assertEquals("A 1 2 E", rover.currentLocation());
}
@Test
public void canRunCommandToRotateLeft() {
//Given
Room room = new Room("A",5,5);
Coordinates startingPosition = new Coordinates(1,2);
Rover rover = new Rover(room, startingPosition, Direction.N);
//When
rover.run("L");
//then
Assert.assertEquals("A 1 2 W", rover.currentLocation());
}
@Test
public void canRunCommandToTurnAround() {
//Given
Room room = new Room("A",5,5);
Coordinates startingPosition = new Coordinates(1,2);
Rover rover = new Rover(room, startingPosition, Direction.N);
//When
rover.run("T");
//then
Assert.assertEquals("A 1 2 S", rover.currentLocation());
}
@Test
public void canRunCommandToMove() {
//Given
Room room = new Room("A",5,5);
Coordinates startingPosition = new Coordinates(1,2);
Rover rover = new Rover(room, startingPosition, Direction.N);
//When
rover.run("M");
//then
Assert.assertEquals("A 1 3 N", rover.currentLocation());
}
@Test
public void canRunCommandWithMultipleInstructions() {
//Given
Room room = new Room("A",5,5);
Coordinates startingPosition = new Coordinates(3,3);
Rover rover = new Rover(room, startingPosition, Direction.W);
//When
rover.run("TMMRMMRMRRM");
//then
Assert.assertEquals("A 5 1 E", rover.currentLocation());
}
@Test
public void wontDriveBeyondWall() {
//Given
Room room = new Room("A",5,5);
Coordinates startingPosition = new Coordinates(3,3);
Rover rover = new Rover(room, startingPosition, Direction.N);
//When
rover.run("MMMMMMMMMMR");
//then
Assert.assertEquals("A 3 5 E", rover.currentLocation());
}
@Test
public void canRunCommandAndStopInFrontOfDoor() {
//Given
Room room1 = new Room("X",5,5);
Room room2 = new Room("Y",4,4);
Coordinates room1DoorCoordinates = new Coordinates(2,0);
Coordinates room2DoorCoordinates = new Coordinates(2,3);
Door door = new Door("XY",Direction.S,room1,room1DoorCoordinates,room2,room2DoorCoordinates);
room1.addDoor(door);
room2.addDoor(door);
Coordinates startingPosition = new Coordinates(3,3);
Rover rover = new Rover(room1, startingPosition, Direction.N);
//When
rover.run("LMLMMM");
//then
Assert.assertEquals("X 2 0 S", rover.currentLocation());
}
@Test
public void canRunCommandAndSwitchRooms() {
//Given
Room room1 = new Room("X",5,5);
Room room2 = new Room("Y",4,4);
Coordinates room1DoorCoordinates = new Coordinates(2,0);
Coordinates room2DoorCoordinates = new Coordinates(2,3);
Door door = new Door("XY",Direction.S,room1,room1DoorCoordinates,room2,room2DoorCoordinates);
room1.addDoor(door);
room2.addDoor(door);
Coordinates startingPosition = new Coordinates(3,3);
Rover rover = new Rover(room1, startingPosition, Direction.N);
//When
rover.run("LMLMMMM");
//then
Assert.assertEquals("Y 2 3 S", rover.currentLocation());
}
@Test
public void canRunCommandAndStopOnStairs() {
//Given
Room room1 = new Room("X",5,5);
Room room2 = new Room("Y",4,4);
Coordinates room1StairCoordinates = new Coordinates(2,0);
Coordinates room2StairCoordinates = new Coordinates(2,3);
Stair stair = new Stair("XY",5,Direction.S,room1,room1StairCoordinates,room2,room2StairCoordinates);
room1.addStair(stair);
room2.addStair(stair);
Coordinates startingPosition = new Coordinates(3,3);
Rover rover = new Rover(room1, startingPosition, Direction.S);
//When
rover.run("RMLMMMMMMMM");
//then
Assert.assertEquals("XY 0 -4 S", rover.currentLocation());
}
@Test
public void canRunCommandAndSwitchRoomsViaStairs() {
//Given
Room room1 = new Room("X",5,5);
Room room2 = new Room("Y",4,4);
Coordinates room1StairCoordinates = new Coordinates(2,0);
Coordinates room2StairCoordinates = new Coordinates(2,3);
Stair stair = new Stair("XY",5,Direction.S,room1,room1StairCoordinates,room2,room2StairCoordinates);
room1.addStair(stair);
room2.addStair(stair);
Coordinates startingPosition = new Coordinates(3,3);
Rover rover = new Rover(room1, startingPosition, Direction.S);
//When
rover.run("RMLMMMMMMMMM");
//then
Assert.assertEquals("Y 2 3 S", rover.currentLocation());
}
}
|
<?php
use Src\BookCrawlers\TenlongCrawler;
/**
* Class TenlongCrawlerTest
*
 * For how to use the crawler package, see: @link https://github.com/voku/simple_html_dom
 *
 * Please complete the following use cases:
 * 1. The user collects a book titled 「領域驅動設計」; the system performs a keyword search against Tenlong Books via the crawler; the crawler finds the book and extracts its details (a single book)
 * 2. The user searches for books in the 「Object-oriented 物件導向」 category; the system performs a category search against Tenlong Books via the crawler; the crawler finds the books and extracts their details (multiple books)
 *
 * Please complete the test cases according to the requirement specs (given, when, then) derived from the use cases.
*/
class TenlongCrawlerTest
{
/**
     * Use case:
     *
     * The user collects a book titled 「領域驅動設計」; the system performs a keyword search against Tenlong Books via the crawler; the crawler finds the book and extracts its details (a single book)
     *
     * Please complete the test case according to the requirement specs (given, when, then) derived from the use case.
     *
     * Tips1: Run composer install first to install the crawler package
     * Tips2: Please wrap the crawler package: @link https://github.com/voku/simple_html_dom#quick-start
     * Tips3: Target URL for the crawler: https://www.tenlong.com.tw/search?utf8=✓&keyword=領域驅動設計
     * Tips4: Encapsulate complex implementation logic inside the object under test as much as possible
     * Tips5: Design interfaces that are as readable as possible
*/
public function testXXX()
{
        /** @given The title of the book to collect: 領域驅動設計 */
        $bookName = '領域驅動設計';
        /** @when The system performs a keyword search against Tenlong Books via the crawler */
        // Target URL for the crawler: https://www.tenlong.com.tw/search?utf8=✓&keyword=領域驅動設計
        $crawler = new TenlongCrawler();
        /** @then The crawler finds the book and extracts its details */
        // Full title: 領域驅動設計:軟體核心複雜度的解決方法 (Domain-Driven Design: Tackling Complexity in the Heart of Software)
        // Language: Traditional Chinese
        // Author: Eric Evans
        // Publisher: 博碩文化
        // Publication date: 2019-05-13
        // Price: 530
        // Link: https://www.tenlong.com.tw/products/9789864343874?list_name=srh
        // Thumbnail: https://cf-assets2.tenlong.com.tw/products/images/000/130/774/medium/9789864343874_bc.jpg?1554177163
}
/**
     * TODO: Complete the second use case
     *
     * Target URL for the crawler: https://www.tenlong.com.tw/categories/object-oriented
*/
} |
<?
/* this ALL needs to be PHP!! otherwise it won't load in JS properly
and this doesn't do anything at all
*/
?> |
package etcd
import (
"context"
"github.com/rancher/support-bundle-kit/pkg/simulator/certs"
"go.etcd.io/etcd/clientv3"
"io/ioutil"
"os"
"testing"
"time"
)
// TestRunEmbeddedEtcdWithoutCerts will run an embedded ETCD server without TLS and try to create and read a kv pair
func TestRunEmbeddedEtcdWithoutCerts(t *testing.T) {
dir, err := ioutil.TempDir("/tmp", "etcd-")
if err != nil {
t.Fatalf("error creating etcd temp directory %v", err)
}
defer os.RemoveAll(dir)
ctx, cancel := context.WithTimeout(context.Background(), 120*time.Second)
defer cancel()
// run an insecure etcd server
e, err := RunEmbeddedEtcd(ctx, dir, nil)
if err != nil {
t.Fatalf("error running embedded etcd server %v", err)
}
etcdClient, err := clientv3.New(clientv3.Config{
Endpoints: e.Endpoints,
DialTimeout: 5 * time.Second,
})
if err != nil {
t.Fatalf("error creating etcd client %v", err)
}
defer etcdClient.Close()
// put a value
_, err = etcdClient.Put(ctx, "test", "true")
if err != nil {
t.Fatalf("error putting key test into etcd %v", err)
}
// get the value
resp, err := etcdClient.Get(ctx, "test")
if err != nil {
t.Fatalf("error fetching key test from etcd %v", err)
}
if len(resp.Kvs) != 1 {
t.Fatalf("expected to find 1 key-value but got %d", len(resp.Kvs))
}
for _, kv := range resp.Kvs {
if string(kv.Key) != "test" {
t.Fatalf("expected key test got %s", kv.Key)
}
if string(kv.Value) != "true" {
t.Fatalf("expected key test got %s", kv.Key)
}
}
}
// TestRunEmbeddedEtcdWithCerts will run an embedded ETCD server with TLS and try to create and read a kv pair
func TestRunEmbeddedEtcdWithCerts(t *testing.T) {
dir, err := ioutil.TempDir("/tmp", "etcd-")
if err != nil {
t.Fatalf("error creating etcd temp directory %v", err)
}
defer os.RemoveAll(dir)
ctx, cancel := context.WithTimeout(context.Background(), 120*time.Second)
defer cancel()
certificates, err := certs.GenerateCerts([]string{"localhost", "127.0.0.1"}, dir)
if err != nil {
t.Fatalf("error generating certs %v", err)
}
// run etcd server
e, err := RunEmbeddedEtcd(ctx, dir, certificates)
if err != nil {
t.Fatalf("error running embedded etcd server %v", err)
}
etcdClient, err := clientv3.New(clientv3.Config{
Endpoints: e.Endpoints,
DialTimeout: 5 * time.Second,
TLS: e.TLS,
})
if err != nil {
t.Fatalf("error creating etcd client %v", err)
}
defer etcdClient.Close()
// put a value
_, err = etcdClient.Put(ctx, "test", "true")
if err != nil {
t.Fatalf("error putting key test into etcd %v", err)
}
// get the value
resp, err := etcdClient.Get(ctx, "test")
if err != nil {
t.Fatalf("error fetching key test from etcd %v", err)
}
if len(resp.Kvs) != 1 {
t.Fatalf("expected to find 1 key-value but got %d", len(resp.Kvs))
}
for _, kv := range resp.Kvs {
if string(kv.Key) != "test" {
t.Fatalf("expected key test got %s", kv.Key)
}
if string(kv.Value) != "true" {
t.Fatalf("expected key test got %s", kv.Key)
}
}
}
|
filterList([], [], _).
filterList([E|T], R, E) :- filterList(T, R, E).
filterList([H|T], [H|R], E) :- not(H = E), filterList(T, R, E).
listMember([H], H).
listMember([H|_], H).
listMember([H|T], M) :- not(H = M), listMember(T, M).
prod([], 1).
prod([_|T], N) :- prod(T, N).
prod([H|T], N) :- not(0 is N mod H), prod(T, N).
prod([H|T], N) :- 0 is N mod H, M is N div H, prod(T, M).
prod([H|T], N) :- 0 is N mod H, M is N div H, prod([H|T], M).
p(L, 0) :- listMember(L, 0).
p(L, N) :- N > 0, filterList(L, P, 1), prod(P, N). |
/*
* Copyright 2020 Google LLC.
*
* This code may only be used under the BSD style license found at
* http://polymer.github.io/LICENSE.txt
*
* Code distributed by Google as part of this project is also subject to an additional IP rights
* grant found at
* http://polymer.github.io/PATENTS.txt
*/
package arcs.android.storage.service
import arcs.android.storage.decodeStoreOptions
import arcs.core.storage.DirectStoreMuxerImpl
import arcs.core.storage.DriverFactory
import arcs.core.storage.StorageKey
import arcs.core.storage.UntypedDirectStoreMuxer
import arcs.core.storage.WriteBackProvider
import arcs.core.util.statistics.TransactionStatisticsImpl
import arcs.flags.BuildFlagDisabledError
import arcs.flags.BuildFlags
import java.util.concurrent.ConcurrentHashMap
import kotlinx.coroutines.CoroutineScope
import kotlinx.coroutines.launch
/**
* Implementation of the [IMuxedStorageService] AIDL interface. Responsible for forwarding messages
 * to [DirectStoreMuxer]s and back again.
*/
class MuxedStorageServiceImpl(
private val scope: CoroutineScope,
private val stats: TransactionStatisticsImpl,
private val driverFactory: DriverFactory,
private val writeBackProvider: WriteBackProvider,
private val devToolsProxy: DevToolsProxyImpl?
) : IMuxedStorageService.Stub() {
init {
if (!BuildFlags.ENTITY_HANDLE_API) {
throw BuildFlagDisabledError("ENTITY_HANDLE_API")
}
}
// TODO(b/162747024): Replace this with an LruCache so its size doesn't grow unbounded.
private val directStoreMuxers = ConcurrentHashMap<StorageKey, UntypedDirectStoreMuxer>()
override fun openMuxedStorageChannel(
encodedStoreOptions: ByteArray,
channelCallback: IStorageChannelCallback,
messageCallback: IMessageCallback
) {
val storeOptions = encodedStoreOptions.decodeStoreOptions()
val directStoreMuxer = directStoreMuxers.computeIfAbsent(storeOptions.storageKey) {
DirectStoreMuxerImpl(
storageKey = storeOptions.storageKey,
backingType = storeOptions.type,
scope = scope,
driverFactory = driverFactory,
writeBackProvider = writeBackProvider,
devTools = devToolsProxy
)
}
scope.launch {
channelCallback.onCreate(
MuxedStorageChannelImpl.create(directStoreMuxer, scope, stats, messageCallback)
)
}
}
}
|
echo "MySQL Database Connection Configuration"
read -p "DB_HOST: " db_host
read -p "DB_NAME: " db_name
read -p "DB_USERNAME: " db_username
read -p "DB_PASSWORD: " db_password
echo "Creating database based on variables"
mysql << EOF
CREATE DATABASE IF NOT EXISTS $db_name;
EOF
echo "Database name created"
mysql << EOF
CREATE USER "$db_username"@"$db_host" IDENTIFIED WITH mysql_native_password BY "$db_password";
EOF
echo "Database user created"
mysql << EOF
GRANT ALL PRIVILEGES ON * . * TO "$db_username"@"$db_host";
EOF
echo "Database permission created"
mysql << EOF
FLUSH PRIVILEGES;
EOF
echo "Database flush privileges successful"
echo
|
export default {
noWobble: { stiffness: 170, damping: 26 },
gentle: { stiffness: 120, damping: 14 },
wobbly: { stiffness: 180, damping: 12 },
stiff: { stiffness: 210, damping: 20 }
};
|
use seq_io::prelude::*; // needed to import necessary traits
use seq_io::fastq::{Reader};
use std::io;
use std::collections::HashMap;
use fasthash::RandomState;
use fasthash::sea;
fn main() {
let mut reader = Reader::new(io::stdin());
let s = RandomState::<sea::Hash64>::new();
let mut counts = HashMap::with_hasher(s);
while let Some(result) = reader.next() {
let rec = result.expect("reading error");
let id = rec.id().unwrap();
let tid = id.split(";").next().unwrap().split("/").nth(1).unwrap().to_string();
*counts.entry(tid).or_insert(0) += 1;
}
println!("tname\ttrue_count");
// Iterate over everything.
for (tn, c) in &counts{
println!("{}\t{}", tn, c);
}
}
|
# SBX JSON-RPC library
A sub package of [sbx](http://www.github.com/techworker/sbx)
This is the JSON-RPC communication library for the SBX family. It provides
access to remote endpoints related to the [PascalCoin](https://www.pascalcoin.org)
blockchain project.
## Installation
`npm install @pascalcoin-sbx/json-rpc`
## Usage
This library implements all JSON-RPC functionalities of PascalCoin. It provides
full-featured objects you can use to work with the responses more conveniently.
See [the RPC docs](docs/rpc.md) for more info.
### Creating a Client instance
To start requesting a node you need to create a new client instance.
```js
const sbxRpc = require('@pascalcoin-sbx/json-rpc');
// create an rpc client for a local wallet
const rpcClient = sbxRpc.Client.factory('http://127.0.0.1:4003');
```
The resulting `rpcClient` variable can now be used to execute requests against
the node.
## Actions
Each rpc method in the client object returns a specific action type that
itself has functionality to finetune the call before it gets send to the node
and returns data.
The execution will return a promise, the successful callback will have a
single parameter which is an array.
The array consists of 2 values:
- Index 0: The raw returned result.
- Index 1: A method to transform the raw result into a rich object. It is not
always necessary to work with rich objects, as the transformation takes some time,
so you can choose when to do the transformation.
There are 4 different action types returned by the client calls.
### BaseAction
A simple action type that has no finetuning, since you already provided all
tunable parameters in the call to the method itself.
For example the `getAccount` method returns a `BaseAction`. `getAccount`
expects a single parameter (the account number), nothing else. So there is
nothing else to finetune.
A `BaseAction` only has one method to execute a call: `execute`. This method
will return a promise.
**Example:**
```js
const sbxRpc = require('@pascalcoin-sbx/json-rpc');
const rpcClient = sbxRpc.Client.factory('http://127.0.0.1:4003');
rpcClient.getAccount({account: 123, depth: 1000000})
.execute()
.then(([accountObj, transform]) => {
console.log(accountObj); // raw
console.log(transform(accountObj)); // rich object
});
```
### PagedAction
A `PagedAction` is returned whenever there is a possibility to page results.
It has the following options to finetune a call:
- `action.start` - sets the start offset
- `action.max` - sets the maximum number of results that are returned.
A `PagedAction` has 3 methods to execute the call to the API:
#### execute
This method will execute the action based on the given parameters.
**Example**
```js
// ...
let action = rpcClient.getAccountOperations({account: 123, depth: 1000000});
action.start = 50; // offset
action.max = 10; // limit
action.execute()
.then(([operations, transform]) => {
console.log(operations); // raw
console.log(operations.map(op => transform(op)));
});
```
#### executeAll
This method will try to fetch all data, dynamically incrementing the
`action.start` value. It can be useful, but with a lot of data
it will take some time and consume a lot of resources. The result is an
array with all found data.
**Example**
```js
// ...
let action = rpcClient.getAccountOperations({account: 123, depth: 1000000});
// fine tune paging and startpoint (optional)
action.start = 50; // offset
action.max = 10; // limit
action.executeAll()
.then(([operations, transform]) => {
// ALL operations for account
console.log(operations); // raw
console.log(operations.map(op => transform(op)));
});
```
#### executeAllReport
This method will try to fetch all data, dynamically incrementing the
`action.start` value. Executing this method lets you be a bit more reactive,
as you can set a callback function that gets called whenever a chunk of data
is loaded by a single call.
**Example**
```js
// ...
let action = rpcClient.getAccountOperations({account: 123, depth: 1000000});
// fine tune paging and startpoint (optional)
action.start = 50; // offset
action.max = 10; // limit
let countAll = 0;
action.executeAllReport(([chunkedOperations, transform]) => {
// raw array
console.log(chunkedOperations);
// array of rich objects
console.log(chunkedOperations.map(op => transform(op)));
// example, increment a counter to report progress for example
countAll += chunkedOperations.length;
}).then(() => {
console.log(`${countAll} operations found and reported, finished`);
});
```
Executing `PagedAction`s that try to return all data can overload
the node when too many requests are executed without letting the
node rest a bit.
There are several parameters to the `executeAll` and `executeAllReport`
methods to control this behaviour:
1. `restEach` If this parameter is given, the action will rest after every
`restEach` executed remote calls.
2. `restSeconds` In combination with `restEach`, this value determines the
number of seconds to rest.
3. `restCallback` In combination with `restEach`, this callback is called
whenever the action rests.
```js
// ...
let action = rpcClient.getAccountOperations({account: 123, depth: 1000000});
// fine tune paging and startpoint (optional)
action.start = 50; // offset
action.max = 10; // limit
let countAll = 0;
// start execution and let it rest for 30 seconds after number of
// calls % 10 === 0. Report to console.
action.executeAllReport(([chunkedOperations, transform]) => {
// raw array
console.log(chunkedOperations);
// array of rich objects
console.log(chunkedOperations.map(op => transform(op)));
// example, increment a counter to report progress for example
countAll += chunkedOperations.length;
}, 10, 30, () => console.log('resting..')).then(() => {
console.log(`${countAll} operations found and reported, finished`);
});
```
It is also possible to implement your own way to fence the concurrency,
e.g. with PQueue, as sketched below. Have a look at the factory method
`sbxRpc.Client.factory` and build your own variant on top of it.
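For illustration, here is a rough sketch that throttles the calls with the
third-party [`p-queue`](https://github.com/sindresorhus/p-queue) package. Note
that `p-queue` is not part of SBX and its import style differs between package
versions, so treat this as an assumption rather than a fixed recipe.
```js
// older CommonJS releases of p-queue export the class directly
const PQueue = require('p-queue');
const sbxRpc = require('@pascalcoin-sbx/json-rpc');
const rpcClient = sbxRpc.Client.factory('http://127.0.0.1:4003');
// allow at most 2 requests against the node at any point in time
const queue = new PQueue({concurrency: 2});
const accounts = [1, 2, 3, 4, 5];
Promise.all(
  accounts.map(account => queue.add(() => rpcClient.getAccount({account}).execute()))
).then(results => {
  results.forEach(([accountObj, transform]) => {
    console.log(transform(accountObj));
  });
});
```
This keeps the node from being hammered while still letting you fan out many
requests from your own code.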
### OperationAction
This action will give you some finetuning about common tasks when remotely
executing operations.
An `OperationAction` only has one method to execute a call: `execute`. This method
will return a promise.
The action has the following options to finetune a call:
`action.withPayload()`
Sets the payload, the payload method and either the password to encrypt
the payload or the public key. This call is optional, the default payload
will be empty.
`action.withFee()`
Sets the fee used for the operation. Default is no fee. This call is
optional, the default fee is 0.
`action.withMinFee()`
Sets the fee to the available minimum fee. This call is optional, the
default fee is 0.
```js
// ...
let action = rpcClient.sendTo({sender: 123, target: 456, amount: 0.00015});
action.withFee(0.0001); // 1 molina fee
// or..
action.withMinFee(); // 1 molina fee
// no encryption
action.withPayload('techworker');
// payload with password encryption
action.withPayload('techworker', 'pwd', 'test123');
// payload with public key encryption (@pascalcoin-sbx/common.Types.Keys.PublicKey)
action.withPayload('techworker', 'pubkey', null, publicKey);
// execute
action.execute().then(([operation, transform]) => {
// validate if operation is good
let richOperation = transform(operation);
if(richOperation.valid) {
console.log('All fine..');
} else {
console.log('Something went wrong.');
}
});
```
### SignOperationAction
This action will be returned whenever the call is a signing operation.
An `SignOperationAction` only has one method to execute a call: `execute`. This
method will return a promise.
The action has the following options to finetune a call:
`action.withLastNOperation()`
This call is mandatory. It sets the last NOperation of the account.
`action.withRawOperations()`
This call is optional. When signing multiple operations, you can set
the previous sign result and the new one will be appended.
**Example**
```js
// ...
let action = rpcClient.signSendTo({sender: 123, target: 456, senderPubkey: X, targetPubkey: y, amount: 0.0001});
action.withFee(0.0001); // 1 molina fee
// or..
action.withMinFee(); // 1 molina fee
// no encryption
action.withPayload('techworker');
// payload with password encryption
action.withPayload('techworker', 'pwd', 'test123');
// payload with public key encryption (@pascalcoin-sbx/common.Types.Keys.PublicKey)
action.withPayload('techworker', 'pubkey', null, publicKey);
// set last n_operation
action.withLastNOperation(1);
// set raw ops from previous call results
action.withRawOperations('');
// execute
action.execute().then(([operation, transform]) => {
// validate if operation is good
let richOperation = transform(operation);
if(richOperation.valid) {
console.log('All fine..');
} else {
console.log('Something went wrong.');
}
});
```
## CookBook
You can use `Promise.all` to wait for multiple calls:
```js
// ...
let actions = [];
for(let account = 1; account < 5; account++) {
let action = rpcClient.getAccount({account});
actions.push(action.execute());
}
Promise.all(actions).then(([...result]) => {
result.forEach(([accountObj, transform]) => {
console.log(accountObj);
console.log(transform(accountObj));
});
});
```
|
<?php
namespace App\Http\Controllers;
use App\Http\Controllers\Controller;
use App\Models\User;
use Illuminate\Http\Request;
use Illuminate\Support\Facades\Auth;
use Illuminate\Support\Facades\Hash;
use Illuminate\Support\Facades\Validator;
class ProfileController extends Controller
{
public function getProfileDetail()
{
$class = User::where("id", Auth::user()->id)
->first();
if (!$class) {
return $this->responseError("Data Tidak Ditemukan");
}
return $this->responseOK(User::mapData($class));
}
public function editProfile(Request $request)
{
$id = Auth::user()->id;
$validator = Validator::make($request->all(), [
'email' => "email|unique:tbl_users,email,$id",
'username' => "unique:tbl_users,username,$id",
'whatsapp' => "unique:tbl_users,whatsapp,$id",
]);
if ($validator->fails()) {
return $this->responseInvalidInput($validator->errors());
}
$data = User::find(Auth::user()->id);
if (!$data) {
return $this->responseError("User Tidak Ditemukan");
}
if ($request->name) {
$data->name = $request->name;
}
if ($request->email) {
$data->email = $request->email;
}
if ($request->username) {
$data->username = $request->username;
}
if ($request->birth_date) {
$data->birth_date = $request->birth_date;
}
if ($request->province_id) {
$data->province_id = $request->province_id;
}
if ($request->nip) {
$data->nip = $request->nip;
}
if ($request->province_name) {
$data->province_name = $request->province_name;
}
if ($request->city_id) {
$data->city_id = $request->city_id;
}
if ($request->city_name) {
$data->city_name = $request->city_name;
}
if ($request->school) {
$data->school = $request->school;
}
if ($request->grade_level) {
$data->grade_level = $request->grade_level;
}
if ($request->whatsapp) {
$data->whatsapp = $request->whatsapp;
}
if ($request->parent_phone_number) {
$data->parent_phone_number = $request->parent_phone_number;
}
if($request->profile_image) {
$data->profile_image = $this->uploadImage($request->profile_image);
}
if ($request->old_password) {
if ($request->password) {
if (Hash::check($request->old_password, $data->password)) {
if ($request->password) {
if ($request->re_password) {
if ($request->password == $request->re_password) {
$hashed = Hash::make($request->password);
$data->password = $hashed;
} else {
return $this->responseError("Password baru anda tidak sama !");
}
} else if (!$request->re_password) {
return $this->responseError("Masukan ulang password baru anda !");
}
} else if (!$request->password) {
return $this->responseError("Masukan password baru anda !");
}
} else if (!$request->password) {
return $this->responseError("Password anda tidak cocok !");
}
} else {
return $this->responseError("Masukan Password Baru Anda !");
}
}
$data->save();
return $this->responseOK(User::mapData($data));
}
}
|
<?php
/**
* Request.
*
* Handler to output ListTemplate.
*
* PHP Version 7
*
* @category Response
* @package Alexa
* @subpackage Response\Display
* @author Peter McDonald <[email protected]>
* @license https://opensource.org/licenses/MIT MIT
* @link https://alexabible.com
* @since 1.0.0
*/
declare(strict_types=1);
namespace Alexa_Bible\Response\Display;
/**
* Handler for creating a ListTemplate. Should not be called directly.
*
 * Class ListTemplate.
*
* @category Response
* @package Alexa
* @subpackage Response\Display
* @author Peter McDonald <[email protected]>
* @license https://opensource.org/licenses/MIT MIT
* @link https://alexabible.com
*/
class ListTemplate extends Display
{
protected $listItems = [];
/**
* ListTemplate constructor.
*
* @param string $type Type of template to use.
* @param string $title Title to be displayed for
* the template.
* @param bool $backButtonVisible Show or hide the back button.
* @param string|null $token Token used for sessions.
*/
protected function __construct(
string $type,
string $title,
bool $backButtonVisible = true,
string $token = null
) {
parent::__construct($type, $title, $backButtonVisible, $token);
}
/**
* Setter for adding ListItems
*
* @param ListItem ...$listItem Array of ListItem's to add.
*
* @return null
*/
public function addListItem(ListItem ...$listItem)
{
$this->listItems = array_merge($this->listItems, $listItem);
return null;
}
/**
* Compiles ListTemplate object ready for output.
*
* Function compile.
*
* @return \stdClass
*/
public function compile():\stdClass
{
$result = new \stdClass();
//TODO complete implementation
$directives = new \stdClass();
if ($this->hint !== null) {
$hint = new \stdClass();
$hint->type = 'Hint';
$hint->hint = new \stdClass();
$hint->hint->type = 'PlainText';
$hint->hint->text = $this->hint;
$directives = [];
$directives[] = $hint;
}
return $result;
}
} |
/*
* @Author: 时光弧线
* @Date: 2017-12-30 11:55:19
* @Last Modified by: 时光弧线
* @Last Modified time: 2017-12-30 13:13:07
*/
package tcplibrary
import (
"golang.org/x/net/websocket"
)
/* Connection handling for websocket connections; it involves parsing the packet contents, so it lives in its own file */
// Connection handler function for websocket connections
func (ws *WebSocketServer) handleConn(conn *Conn) {
defer func() {
if r := recover(); r != nil {
globalLogger.Errorf("%T", r)
}
}()
// Channel for received messages
messageChannel := make(chan interface{}, DefaultMessageChannelSize)
go ws.handleMessage(conn, messageChannel)
// Read from the websocket in a loop
for {
select {
case <-ws.ctx.Done():
globalLogger.Infof("ws handleConn收到ctx.Done()")
return
default:
// Parse the packet transmitted over the websocket
defaultPacket := new(DefaultPacket)
err := ws.packet.GetWebsocketCodec().Receive(conn.Conn.(*websocket.Conn), defaultPacket)
if err != nil {
globalLogger.Errorf(err.Error())
// Close the connection and report the error
ws.closeConn(conn, err)
return
}
// Write the data to the channel
messageChannel <- defaultPacket
}
}
}
|
import { FolderListModule } from './folder-list.module';
describe('FolderListModule', () => {
let folderListModule: FolderListModule;
beforeEach(() => {
folderListModule = new FolderListModule();
});
it('should create an instance', () => {
expect(folderListModule).toBeTruthy();
});
});
|
object fmSizesPosition: TfmSizesPosition
Left = 258
Top = 262
BorderStyle = bsToolWindow
Caption = '{Sizes and position}'
ClientHeight = 184
ClientWidth = 370
Color = clBtnFace
Font.Charset = DEFAULT_CHARSET
Font.Color = clWindowText
Font.Name = 'Segoe UI'
Font.Size = 9
Font.Quality = fqClearTypeNatural
Font.Style = []
OldCreateOrder = False
PixelsPerInch = 96
TextHeight = 13
object GroupBox1: TGroupBox
Left = 8
Top = 8
Width = 161
Height = 137
Caption = '{Anchors}'
TabOrder = 0
object c_left: TCheckBox
Left = 16
Top = 24
Width = 129
Height = 17
Caption = '{to Left side}'
TabOrder = 0
end
object c_top: TCheckBox
Left = 16
Top = 48
Width = 129
Height = 17
Caption = '{to Top}'
TabOrder = 1
end
object c_right: TCheckBox
Left = 16
Top = 80
Width = 129
Height = 17
Caption = '{to Right side}'
TabOrder = 2
end
object c_bottom: TCheckBox
Left = 16
Top = 104
Width = 129
Height = 17
Caption = '{to Bottom}'
TabOrder = 3
end
end
object GroupBox2: TGroupBox
Left = 176
Top = 8
Width = 185
Height = 137
Caption = '{Sizes && Position}'
TabOrder = 1
object Label1: TLabel
Left = 16
Top = 24
Width = 10
Height = 13
Caption = 'X:'
Font.Charset = DEFAULT_CHARSET
Font.Color = clWindowText
Font.Name = 'Segoe UI'
Font.Size = 9
Font.Style = [fsBold]
ParentFont = False
end
object Label2: TLabel
Left = 16
Top = 48
Width = 10
Height = 13
Caption = 'Y:'
Font.Charset = DEFAULT_CHARSET
Font.Color = clWindowText
Font.Name = 'Segoe UI'
Font.Size = 9
Font.Style = [fsBold]
ParentFont = False
end
object Label3: TLabel
Left = 16
Top = 80
Width = 14
Height = 13
Caption = 'W:'
Font.Charset = DEFAULT_CHARSET
Font.Color = clWindowText
Font.Name = 'Segoe UI'
Font.Size = 9
Font.Style = [fsBold]
ParentFont = False
end
object Label4: TLabel
Left = 16
Top = 104
Width = 11
Height = 13
Caption = 'H:'
Font.Charset = DEFAULT_CHARSET
Font.Color = clWindowText
Font.Name = 'Segoe UI'
Font.Size = 9
Font.Style = [fsBold]
ParentFont = False
end
object e_x: TEdit
Left = 32
Top = 24
Width = 137
Height = 21
TabOrder = 0
Text = '0'
end
object e_y: TEdit
Left = 32
Top = 48
Width = 137
Height = 21
TabOrder = 1
Text = '0'
end
object e_w: TEdit
Left = 32
Top = 80
Width = 137
Height = 21
TabOrder = 2
Text = '0'
end
object e_h: TEdit
Left = 32
Top = 104
Width = 137
Height = 21
TabOrder = 3
Text = '0'
end
end
object BitBtn1: TBitBtn
Left = 264
Top = 152
Width = 99
Height = 25
Caption = '{ok}'
ModalResult = 1
TabOrder = 2
end
object BitBtn2: TBitBtn
Left = 160
Top = 151
Width = 99
Height = 25
Caption = '{cancel}'
ModalResult = 2
TabOrder = 3
end
end
|
namespace Platform.VirtualFileSystem
{
public interface ITempIdentityFileService
: IService
{
IFile GetTempFile();
IFile GetOriginalFile();
}
}
|
import { INotificationsState } from './notifications.state';
import { notificationsAdapter } from './notifications.adapter';
export const NotificationsDefaultState: INotificationsState = notificationsAdapter.getInitialState({
ids: [],
entities: {}
});
|
package com.soyle.stories.domain.scene.events
import com.soyle.stories.domain.scene.Scene
abstract class SceneEvent {
abstract val sceneId: Scene.Id
} |
import {React, useState } from "react";
import { useNavigate } from "react-router";
const Class = () => {
const [item, setItem] = useState({
class_id: 1,
class_name: "Morning Zen",
class_duration: "1 hour",
max_class_size: 15,
class_date: "2021-11-17T05:00:00.000Z",
start_time: "08:00:00",
class_location: "Central Park",
instructor: "tom",
intensity_level: "beginner",
type_description: "yoga",
number_registered: 2
})
return (
<div >
<div className="box">
<div key={item.class_id} >
<h4>Class name: {item.class_name}</h4>
<p>Instructor name: {item.instructor}</p>
<p>Class date: {item.class_date}</p>
<p>Class time: {item.start_time}</p>
<p>Class Duration: {item.class_duration}</p>
<p>Class Location: {item.class_location}</p>
<p>Class Intensity: {item.intensity_level}</p>
<p>Currently enrolled in class: {item.number_registered}</p>
<p>Max class Participants: {item.max_class_size}</p>
</div>
</div>
        </div>
    )
}
export default Class; |
import * as React from 'react';
import { connect } from 'react-redux';
import { Redirect } from 'react-router-dom';
import { createSelector } from 'reselect';
import { redirected } from '../store/actions/redirect';
import { getSwipe } from '../store/selectors/swipe';
import { getRedirect } from '../store/selectors/user';
export interface RedirectorStateProps {
to?: string;
swipe?: string;
}
export interface RedirectorDispatchProps {
redirected: typeof redirected;
}
export type RedirectorProps = RedirectorStateProps & RedirectorDispatchProps;
export interface RedirectorLocalState {
timeout: NodeJS.Timeout | null;
redirect?: string;
}
export class UnconnectedRedirector extends React.Component<
RedirectorProps,
RedirectorLocalState
> {
constructor(props: RedirectorProps) {
super(props);
this.state = {
timeout: null
};
}
public componentDidMount() {
const { to, redirected: handleRedirected, swipe } = this.props;
const { timeout, redirect } = this.state;
if (to && !timeout && !redirect) {
this.setState({
timeout: setTimeout(
() => {
const $body = document.getElementsByTagName('body');
try {
$body[0].setAttribute('style', '');
} catch (error) {
// do nothing
}
handleRedirected(undefined);
this.setState({ timeout: null, redirect: to });
scrollTo(0, 100);
},
swipe === 'left' ? 500 : 6000
)
});
}
}
public componentDidUpdate() {
this.componentDidMount();
const { redirect } = this.state;
if (redirect) {
this.setState({
redirect: undefined
});
}
}
public render() {
const { redirect } = this.state;
if (redirect) {
return <Redirect to={redirect} push />;
}
return <React.Fragment />;
}
}
export const mapStateToProps = createSelector(
[getRedirect, getSwipe],
(to, swipe) => ({
swipe,
to
})
);
export const actionCreators = {
redirected
};
export const Redirector = connect(
mapStateToProps,
actionCreators
)(UnconnectedRedirector);
|
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
var InstructionType;
(function (InstructionType) {
InstructionType[InstructionType["set"] = 0] = "set";
InstructionType[InstructionType["add"] = 1] = "add";
InstructionType[InstructionType["remove"] = 2] = "remove";
})(InstructionType = exports.InstructionType || (exports.InstructionType = {}));
//# sourceMappingURL=InstructionType.js.map |
# Copyright 2014 PerfKitBenchmarker Authors. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Module containing build tools installation and cleanup functions."""
import logging
from absl import flags
from perfkitbenchmarker import errors
from perfkitbenchmarker import os_types
FLAGS = flags.FLAGS
flags.DEFINE_string('gcc_version', None, 'Version of gcc to use. Benchmarks '
'that utilize gcc compilation should ensure reinstallation '
'of GCC. Default is set by the OS package manager.')
def YumInstall(vm):
"""Installs build tools on the VM."""
vm.InstallPackageGroup('Development Tools')
def AptInstall(vm):
"""Installs build tools on the VM."""
vm.InstallPackages('build-essential git libtool autoconf automake')
if FLAGS.gcc_version:
Reinstall(vm, version=FLAGS.gcc_version)
def GetVersion(vm, pkg):
"""Get version of package using -dumpversion."""
out, _ = vm.RemoteCommand(
'{pkg} -dumpversion'.format(pkg=pkg), ignore_failure=True)
return out.rstrip()
def GetVersionInfo(vm, pkg):
"""Get compiler version info for package using --version."""
out, _ = vm.RemoteCommand(
'{pkg} --version'.format(pkg=pkg), ignore_failure=True)
# return first line of pkg --version
return out.splitlines()[0] if out else None
def Reinstall(vm, version='4.7'):
"""Install specific version of gcc.
Args:
vm: VirtualMachine object.
version: string. GCC version.
Raises:
Error: If this is ran on a non debian based system.
"""
# TODO(user): Make this work on yum based systems.
if vm.BASE_OS_TYPE != os_types.DEBIAN:
raise errors.Error('Updating GCC only works on Debian based systems.')
vm.Install('ubuntu_toolchain')
for pkg in ('gcc', 'gfortran', 'g++'):
version_string = GetVersion(vm, pkg)
if version in version_string:
logging.info('Have expected version of %s: %s', pkg, version_string)
continue
else:
new_pkg = pkg + '-' + version
vm.InstallPackages(new_pkg)
vm.RemoteCommand('sudo rm -f /usr/bin/{pkg}'.format(pkg=pkg))
vm.RemoteCommand('sudo ln -s /usr/bin/{new_pkg} /usr/bin/{pkg}'.format(
new_pkg=new_pkg, pkg=pkg))
logging.info('Updated version of %s: Old: %s New: %s', pkg,
version_string, GetVersion(vm, pkg))
|
package main
import (
"bufio"
"fmt"
"regexp"
"os"
"strconv"
)
func main() {
if len(os.Args) < 2 {
return
}
infile, err := os.Open(os.Args[1])
if err != nil { return }
defer infile.Close()
scanner := bufio.NewScanner(infile)
reOuter, _ := regexp.Compile(`^(\w+) \((\d+)\)(?: -> )?(.*)`)
reInner, _ := regexp.Compile(`\w+`)
allPrograms := make(map[string]int)
topPrograms := make(map[string][]string)
notBottomPrograms := make(map[string]struct{})
for scanner.Scan() {
line := scanner.Text()
rOuter := reOuter.FindAllStringSubmatch(line, -1)
weight, _ := strconv.Atoi(rOuter[0][2])
a := rOuter[0][1]
allPrograms[a] = weight
if len(rOuter[0][3]) > 0 {
rInner := reInner.FindAllString(rOuter[0][3], -1)
for _, b := range rInner {
topPrograms[a] = append(topPrograms[a], b)
notBottomPrograms[b] = struct{}{}
}
}
}
sumWeights := make(map[string]int)
var computeWeightSum func(s string) int
computeWeightSum = func(s string) int {
v, ok := sumWeights[s]
if !ok {
v = allPrograms[s]
for _, onTop := range topPrograms[s] {
v += computeWeightSum(onTop)
}
sumWeights[s] = v
}
return v
}
root := "x"
for p := range allPrograms {
_, ok := notBottomPrograms[p]
if !ok {
root = p
break
}
}
computeWeightSum(root)
wrongNode := ""
for {
//fmt.Println(root)
// Build histogram of next-level weights
topWeights := make(map[int]int)
for _, onTop := range topPrograms[root] {
topWeights[sumWeights[onTop]]++
}
// Find out-of-balance child
next := ""
for k, v := range topWeights {
if v == 1 {
for _, onTop := range topPrograms[root] {
if k == sumWeights[onTop] {
next = onTop
break
}
}
break
}
}
// If no child is out of balance, we are at the goal...
if next == "" {
wrongNode = root
break
} else {
// ...else we recurse
root = next
}
}
for p := range allPrograms {
for _, c := range topPrograms[p] {
if c == wrongNode {
topWeights := make(map[int]int)
for _, onTop := range topPrograms[p] {
topWeights[sumWeights[onTop]]++
}
for k, v := range topWeights {
if v != 1 {
targetWeight := k
for _, onTop := range topPrograms[wrongNode] {
targetWeight -= sumWeights[onTop]
}
fmt.Println(targetWeight)
return
}
}
}
}
}
fmt.Println()
}
|
package datasource
import (
"testing"
"github.com/stretchr/testify/assert"
)
func TestNullDataSource(t *testing.T) {
d := NewNullDataSource()
assert.True(t, d.IsInitialized())
ch := make(chan struct{})
d.Start(ch)
_, ok := <-ch
assert.False(t, ok)
assert.Nil(t, d.Close())
}
|
;;;; Spy
;;;;
;;;; Peek into the internal state of the compositor.
(defpackage :mezzano.gui.spy
(:use :cl)
(:export #:spy #:spawn)
(:local-nicknames (:sync :mezzano.sync)
(:gui :mezzano.gui)
(:comp :mezzano.gui.compositor)
(:font :mezzano.gui.font)
(:widgets :mezzano.gui.widgets)
(:sup :mezzano.supervisor)))
(in-package :mezzano.gui.spy)
(defclass spy-window ()
((%window :initarg :window :reader window)
(%redraw :initarg :redraw :accessor redraw)
(%frame :initarg :frame :reader frame)
(%text-pane :initarg :text-pane :reader text-pane))
(:default-initargs :redraw t))
(defgeneric dispatch-event (app event)
(:method (app event)))
(defmethod dispatch-event (app (event comp:window-activation-event))
(let ((frame (frame app)))
(setf (widgets:activep frame) (comp:state event))
(widgets:draw-frame frame)))
(defmethod dispatch-event (app (event comp:mouse-event))
(handler-case
(widgets:frame-mouse-event (frame app) event)
(widgets:close-button-clicked ()
(throw 'quit nil))))
(defmethod dispatch-event (app (event comp:resize-request-event))
(let* ((win (window app))
(old-width (comp:width win))
(old-height (comp:height win))
(new-width (max 100 (comp:width event)))
(new-height (max 100 (comp:height event))))
(when (or (not (eql old-width new-width))
(not (eql old-height new-height)))
(let ((new-framebuffer (mezzano.gui:make-surface
new-width new-height)))
(widgets:resize-frame (frame app) new-framebuffer)
(comp:resize-window
win new-framebuffer
:origin (comp:resize-origin event))))))
(defmethod dispatch-event (app (event comp:resize-event))
(let* ((fb (comp:window-buffer (window app)))
(new-width (mezzano.gui:surface-width fb))
(new-height (mezzano.gui:surface-height fb)))
(multiple-value-bind (left right top bottom)
(widgets:frame-size (frame app))
(widgets:resize-widget
(text-pane app)
fb
left top
(- new-width left right)
(- new-height top bottom))))
(setf (redraw app) t))
(defmethod dispatch-event (app (event comp:window-close-event))
(throw 'quit nil))
(defmethod dispatch-event (app (event comp:quit-event))
(throw 'quit nil))
(defparameter *spy-refresh-interval* 1/5)
(defparameter *spy-immediate-mode* nil
"Enable instant updates whenever the compositor itself receives an event.")
(defparameter *spy-report-mouse-window* t
"If true, describe the mouse window, not the active window.")
(defun update-spy (spy)
(declare (ignore spy))
(let ((*print-pretty* t))
(format t "Mouse: ~Dx~D ~6,'0B ptr: ~S~%"
comp::*mouse-x* comp::*mouse-y* comp::*mouse-buttons*
comp::*mouse-pointer*)
(format t "Mouse win: ~S~%" comp::(window-at-point *mouse-x* *mouse-y*))
(format t "Prev mouse win: ~S~%" comp::*previous-mouse-window*)
(format t "Drag ~S~% ~Dx~D origin: ~Dx~D passive: ~S resize: ~S ~Dx~D ~Dx~D~%"
comp::*drag-window* comp::*drag-x* comp::*drag-y*
comp::*drag-x-origin* comp::*drag-y-origin*
comp::*passive-drag* comp::*resize-origin*
comp::*prev-resize-rect-x* comp::*prev-resize-rect-y*
comp::*prev-resize-rect-w* comp::*prev-resize-rect-h*)
(format t "Keymap: ~S~%" comp::*current-keymap*)
(format t "Key mods: ~:S~%" comp::*keyboard-modifier-state*)
(format t "Windows: ~:S~%" comp::*window-list*)
(format t "M-Tab ~S ~:S~%" comp::*m-tab-active* comp::*m-tab-list*)
(format t "Postprocess: ~S~%" comp::*postprocess-matrix*)
(format t "Active window: ~S~%" comp::*active-window*)
(describe (if *spy-report-mouse-window*
comp::(window-at-point *mouse-x* *mouse-y*)
comp::*active-window*))))
(defun spy ()
(with-simple-restart (abort "Close spy")
(catch 'quit
(let ((font (font:open-font
font:*default-monospace-font*
font:*default-monospace-font-size*))
(mbox (sync:make-mailbox :capacity 50)))
(comp:with-window (window mbox 640 700)
(let* ((framebuffer (comp:window-buffer window))
(frame (make-instance 'widgets:frame
:framebuffer framebuffer
:title "Spy"
:close-button-p t
:resizablep t
:damage-function (widgets:default-damage-function window)
:set-cursor-function (widgets:default-cursor-function window)))
(spy (make-instance 'spy-window
:window window
:frame frame))
(text-pane (make-instance 'widgets:text-widget
:font font
:framebuffer framebuffer
:x-position (nth-value 0 (widgets:frame-size frame))
:y-position (nth-value 2 (widgets:frame-size frame))
:width (- (comp:width window)
(nth-value 0 (widgets:frame-size frame))
(nth-value 1 (widgets:frame-size frame)))
:height (- (comp:height window)
(nth-value 2 (widgets:frame-size frame))
(nth-value 3 (widgets:frame-size frame)))
:damage-function (lambda (&rest args)
(declare (ignore args))
(loop
(let ((ev (sync:mailbox-receive mbox :wait-p nil)))
(when (not ev) (return))
(dispatch-event spy ev)))))))
(setf (comp:name window) spy)
(setf (slot-value spy '%text-pane) text-pane)
(widgets:draw-frame frame)
(comp:damage-window window
0 0
(comp:width window)
(comp:height window))
(loop
(when (redraw spy)
(let ((*standard-output* text-pane))
(setf (redraw spy) nil)
(widgets:reset *standard-output*)
(ignore-errors
(update-spy spy))
(comp:damage-window window
0 0
(comp:width window)
(comp:height window))))
(when (not (redraw spy))
;; Spy on the compositor's event queue too, so we refresh
;; immediately when it gets an input or damage event.
(if *spy-immediate-mode*
(sync:wait-for-objects-with-timeout
*spy-refresh-interval*
comp::*event-queue*
mbox)
(sync:wait-for-objects-with-timeout
*spy-refresh-interval*
mbox))
(let ((evt (sync:mailbox-receive mbox :wait-p nil)))
(cond (evt
(dispatch-event spy evt))
(t (setf (redraw spy) t))))))))))))
(defun spawn ()
(mezzano.supervisor:make-thread 'spy
:name "Spy"
:initial-bindings `((*terminal-io* ,(make-instance 'mezzano.gui.popup-io-stream:popup-io-stream
:title "Spy console"))
(*standard-input* ,(make-synonym-stream '*terminal-io*))
(*standard-output* ,(make-synonym-stream '*terminal-io*))
(*error-output* ,(make-synonym-stream '*terminal-io*))
(*trace-output* ,(make-synonym-stream '*terminal-io*))
(*debug-io* ,(make-synonym-stream '*terminal-io*))
(*query-io* ,(make-synonym-stream '*terminal-io*)))))
|
require 'rails_helper'
RSpec.describe TagsController, type: :controller do
describe '#index' do
before do
create(:blog)
@tag = create(:tag)
@tag.contents << create(:article)
end
describe 'normally' do
before do
get 'index'
end
specify { expect(response).to be_success }
specify { expect(response).to render_template('tags/index') }
specify { expect(assigns(:tags)).to match_array([@tag]) }
end
describe 'with views rendered' do
render_views
it 'works' do
get 'index'
end
end
end
describe 'showing a single tag' do
before do
create(:blog)
@tag = create(:tag, name: 'Foo')
end
def do_get
get 'show', params: { id: 'foo' }
end
describe 'with some articles' do
before do
@articles = create_list :article, 2
@tag.contents << @articles
end
it 'should be successful' do
do_get
expect(response).to be_success
end
it 'should retrieve the correct set of articles' do
do_get
expect(assigns[:articles].map(&:id).sort).to eq(@articles.map(&:id).sort)
end
it 'should render :show by default' do
do_get
expect(response).to render_template(:show)
end
it 'should render the tag template if present' do
# NOTE: Stubbing Object under test :-(.
allow(controller).to receive(:template_exists?).and_return(true)
allow(controller).to receive(:render)
do_get
expect(controller).to have_received(:render).with('foo')
end
it 'assigns the correct page title' do
do_get
expect(assigns[:page_title]).to eq 'Tag: foo | test blog'
end
it 'assigns the correct description' do
do_get
expect(assigns(:description)).to eq 'foo | test blog | test subtitle'
end
it 'should render the atom feed for /articles/tag/foo.atom' do
get 'show', params: { id: 'foo', format: 'atom' }
expect(response).to render_template('articles/index_atom_feed', layout: false)
end
it 'should render the rss feed for /articles/tag/foo.rss' do
get 'show', params: { id: 'foo', format: 'rss' }
expect(response).to render_template('articles/index_rss_feed', layout: false)
end
end
describe 'without articles' do
it 'raises RecordNotFound' do
expect { get 'show', params: { id: 'foo' } }.
to raise_error ActiveRecord::RecordNotFound
end
end
end
describe 'showing tag "foo"' do
render_views
let!(:blog) { create(:blog) }
let(:parsed_body) { Capybara.string(response.body) }
before(:each) do
create(:tag, name: 'foo', contents: [create(:article)])
get 'show', params: { id: 'foo' }
end
it 'should have good rss feed link in head' do
rss_link = parsed_body.find "head>link[href='http://test.host/tag/foo.rss']", visible: false
aggregate_failures do
expect(rss_link['rel']).to eq 'alternate'
expect(rss_link['type']).to eq 'application/rss+xml'
expect(rss_link['title']).to eq 'RSS'
end
end
it 'should have good atom feed link in head' do
atom_link = parsed_body.find "head>link[href='http://test.host/tag/foo.atom']", visible: false
aggregate_failures do
expect(atom_link['rel']).to eq 'alternate'
expect(atom_link['type']).to eq 'application/atom+xml'
expect(atom_link['title']).to eq 'Atom'
end
end
it 'should have a canonical URL' do
expect(response.body).to have_selector("head>link[href='#{blog.base_url}/tag/foo']", visible: false)
end
end
describe 'showing a non-existent tag' do
it 'should signal not found' do
create(:blog)
expect { get 'show', params: { id: 'thistagdoesnotexist' } }.
to raise_error ActiveRecord::RecordNotFound
end
end
describe 'password protected article' do
render_views
it 'article in tag should be password protected' do
create(:blog)
article = create(:article, password: 'password')
create(:tag, name: 'foo', contents: [article])
get 'show', params: { id: 'foo' }
assert_select('input[id="article_password"]')
end
end
describe 'SEO Options' do
before(:each) do
@blog = create(:blog)
@a = create(:article)
@foo = create(:tag, name: 'foo', contents: [@a])
end
describe 'keywords' do
it 'does not assign keywords when the blog has no keywords' do
get 'show', params: { id: 'foo' }
expect(assigns(:keywords)).to eq ''
end
it "assigns the blog's keywords if present" do
@blog.meta_keywords = 'foo, bar'
@blog.save
get 'show', params: { id: 'foo' }
expect(assigns(:keywords)).to eq 'foo, bar'
end
end
end
end
|
/*Lab2: Shell-Part2
Name: Kristian Villanueva
Last Modification: 10/8/17
*/
#ifndef MYTOC_H_
#define MYTOC_H_
char ** mytoc(char *str, char delim);
#endif /* MYTOC_H_ */
|
package provider
import (
"fmt"
)
type CodeError struct {
code int
message string
err error
}
func (e CodeError) Error() string {
return fmt.Sprintf(" [%d] :%s (%s)", e.code, e.message, e.err.Error())
}
|
(defproject freqt "0.3.1"
:description "Implementation of the freqt (frequent subtree discovery) algorithm."
:license "ljos.mit-license.org"
:dependencies [[org.clojure/clojure "1.5.1"]]
:profiles {:dev {:dependencies [[midje "1.5.1"]]}})
|
# Copyright 2014, Jeff Buttars, A10 Networks.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import acos_client.errors as acos_errors
import acos_client.v30.base as base
class HealthMonitor(base.BaseV30):
# Valid method objects
ICMP = 'icmp'
TCP = 'tcp'
HTTP = 'http'
HTTPS = 'https'
url_prefix = "/health/monitor/"
_method_objects = {
ICMP: {
"icmp": 1
},
HTTP: {
"http": 1,
"http-port": 80,
"http-expect": 1,
"http-response-code": "200",
"http-url": 1,
"url-type": "GET",
"url-path": "/",
},
HTTPS: {
"https": 1,
"web-port": 443,
"https-expect": 1,
"https-response-code": "200",
"https-url": 1,
"url-type": "GET",
"url-path": "/",
"disable-sslv2hello": 0
},
TCP: {
"method-tcp": 1,
"tcp-port": 80
},
}
def get(self, name, **kwargs):
return self._get(self.url_prefix + name, **kwargs)
def _set(self, action, name, mon_method, interval, timeout, max_retries,
method=None, url=None, expect_code=None, port=None, update=False,
**kwargs):
params = {
"monitor": {
"name": name,
"retry": int(max_retries),
"interval": int(interval),
"timeout": int(timeout),
"method": {
mon_method: self._method_objects[mon_method]
}
}
}
if method:
params['monitor']['method'][mon_method]['url-type'] = method
if url:
params['monitor']['method'][mon_method]['url-path'] = url
if expect_code:
k = "%s-response-code" % mon_method
params['monitor']['method'][mon_method][k] = str(expect_code)
if port:
if mon_method == self.HTTPS:
k = 'web-port'
else:
k = '%s-port' % mon_method
params['monitor']['method'][mon_method][k] = int(port)
if update:
action += name
self._post(action, params, **kwargs)
def create(self, name, mon_type, interval, timeout, max_retries,
method=None, url=None, expect_code=None, port=None, **kwargs):
try:
self.get(name)
except acos_errors.NotFound:
pass
else:
raise acos_errors.Exists()
self._set(self.url_prefix, name, mon_type, interval, timeout,
max_retries, method, url, expect_code, port, **kwargs)
def update(self, name, mon_type, interval, timeout, max_retries,
method=None, url=None, expect_code=None, port=None, **kwargs):
self.get(name) # We want a NotFound if it does not exist
self._set(self.url_prefix, name, mon_type, interval, timeout,
max_retries, method, url, expect_code, port, update=True,
**kwargs)
def delete(self, name):
self._delete(self.url_prefix + name)
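# Hypothetical usage sketch (client construction happens elsewhere in acos_client
# and is assumed here); it only exercises the method signatures defined above:
#   monitor.create('hm-web', HealthMonitor.HTTP, interval=5, timeout=5,
#                  max_retries=3, method='GET', url='/healthz',
#                  expect_code=200, port=8080)
#   monitor.update('hm-web', HealthMonitor.HTTP, 10, 5, 3)
#   monitor.delete('hm-web')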
|
package org.getalp.dbnary.languages.fra.morphology;
public class Utils {
public static String standardizePronunciation(String p) {
p = p.trim();
if (p.startsWith("\\"))
p = p.substring(1);
if (p.endsWith("\\"))
p = p.substring(0, p.length() - 1);
return p;
}
}
|
#!/bin/sh
#
# Copyright (c) 2015 Christian Couder
# MIT Licensed; see the LICENSE file in this repository.
#
test_description="Test docker image"
. lib/test-lib.sh
# if in travis CI on OSX, docker is not available
if ! test_have_prereq DOCKER; then
skip_all='skipping docker tests, docker not available'
test_done
fi
test_expect_success "'docker --version' works" '
docker --version >actual
'
test_expect_success "'docker --version' output looks good" '
egrep "^Docker version" actual
'
test_expect_success "current user is in the 'docker' group" '
groups | egrep "\bdocker\b"
'
TEST_TRASH_DIR=$(pwd)
TEST_SCRIPTS_DIR=$(dirname "$TEST_TRASH_DIR")
TEST_TESTS_DIR=$(dirname "$TEST_SCRIPTS_DIR")
APP_ROOT_DIR=$(dirname "$TEST_TESTS_DIR")
test_expect_success "docker image build succeeds" '
docker_build "$TEST_TESTS_DIR/Dockerfile" "$APP_ROOT_DIR" >actual
'
test_expect_success "docker image build output looks good" '
SUCCESS_LINE=$(egrep "^Successfully built" actual) &&
IMAGE_ID=$(expr "$SUCCESS_LINE" : "^Successfully built \(.*\)") ||
test_fsh cat actual
'
test_expect_success "docker image runs" '
DOC_ID=$(docker_run "$IMAGE_ID")
'
test_expect_success "docker image gateway is up" '
docker_exec "$DOC_ID" "wget --retry-connrefused --waitretry=1 --timeout=30 -t 30 \
-q -O - http://localhost:8080/version >/dev/null"
'
test_expect_success "docker image API is up" '
docker_exec "$DOC_ID" "wget --retry-connrefused --waitretry=1 --timeout=30 -t 30 \
-q -O - http://localhost:5001/api/v0/version >/dev/null"
'
test_expect_success "simple ipfs add/cat can be run in docker container" '
expected="Hello Worlds" &&
HASH=$(docker_exec "$DOC_ID" "echo $(cat expected) | ipfs add | cut -d' ' -f2") &&
docker_exec "$DOC_ID" "ipfs cat $HASH" >actual &&
test_cmp expected actual
'
test_expect_success "stop docker container" '
docker_stop "$DOC_ID"
'
test_done
|
#!/bin/sh
###
set -e
#
if [ $(whoami) != "root" ]; then
echo "${0} should be run as root or via sudo."
exit
fi
#
INAME="badapple"
CNAME="${INAME}_container"
#
###
# Stop and clean up.
docker stop ${CNAME}
docker ps -a
docker rm ${CNAME}
docker rmi ${INAME}
#
|
class AddAutoIncrementInPodcast < ActiveRecord::Migration[6.0]
def change
change_column :podcasts, :id, :integer, limit: 8, auto_increment: true
end
end
|
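// Maps EEG attention readings (data.eSense.attention) from the given receiver to a
// non-negative speed; getSpeed() returns the latest value received.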
class SpeedSource {
constructor(eegReceiver) {
this.eegReceiver = eegReceiver
this.speed = 0
this.eegReceiver.on('data', data => {
if (this.stopping) return
this.speed = data.eSense.attention
if (this.speed < 0) this.speed = 0
console.log(`eeg: attention=${data.eSense.attention}`)
})
this.eegReceiver.connect()
}
getSpeed() {
return this.speed
}
}
module.exports = SpeedSource
|
+++
title = "Menu"
description = ""
weight = 1
+++
A slideout menu plugin for Advanced Slides to quickly jump to any slide by title.

{{%alert%}} To enable Menu you have to enable the Plugin in Advanced Slides Settings{{%/alert%}} |
package twitch
import (
"fmt"
"math/rand"
"time"
"github.com/maliur/sodaville/database"
)
func lsCmd(event *IRCEvent, db *database.Database) (string, error) {
msg, err := db.GetAllCommands()
if err != nil {
return "", err
}
return msg, nil
}
func addCmd(event *IRCEvent, db *database.Database) (string, error) {
err := db.InsertCommand(event.NewCmd, event.Arg, false)
if err != nil {
return "", err
}
return fmt.Sprintf("command %s added", event.NewCmd), nil
}
func delCmd(event *IRCEvent, db *database.Database) (string, error) {
err := db.DeleteCommand(event.NewCmd)
if err != nil {
return "", err
}
return fmt.Sprintf("command %s deleted", event.NewCmd), nil
}
func HandleDice() string {
rand.Seed(time.Now().UnixNano())
return fmt.Sprintf("%d", rand.Intn(100))
}
func HandleCmd(event *IRCEvent, db *database.Database) (string, error) {
switch event.Action {
case "ls":
return lsCmd(event, db)
case "add":
return addCmd(event, db)
case "del":
return delCmd(event, db)
}
return "$cmd <action ls|add|del> <command name if add|del> <response for add command if applicable>", nil
}
|
________________________________________________________________________
This file is part of Logtalk <https://logtalk.org/>
Copyright 1998-2021 Paulo Moura <[email protected]>
SPDX-License-Identifier: Apache-2.0
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
________________________________________________________________________
CONTRIBUTING
============
Thanks for considering contributing to Logtalk. All contributions are most
welcome, including code patches, bug reports, documentation fixes, feature
suggestions, portability improvements, new libraries, new examples, and
improved support for text editors and IDEs. No contribution is too small.
The [ACKNOWLEDGMENTS.md](ACKNOWLEDGMENTS.md) file lists past contributions
and their authors. Looking forward to seeing your name there. Contributors
are expected to follow the [Code of Conduct](CODE_OF_CONDUCT.md).
How to make a contribution
--------------------------
For bug reports and suggestions, create a ticket in our issue tracker at:
https://github.com/LogtalkDotOrg/logtalk3/issues
If you prefer to discuss your contribution first with other developers,
you can join us for live discussion at our chat room:
https://gitter.im/LogtalkDotOrg/logtalk3
In alternative, you can also join our discussion forums at:
https://forums.logtalk.org/
For contributing improvements and fixes to the Handbook, open it in your
web browser, click on the link on the top right that says "Edit on GitHub",
make the proposed changes, and submit them as a pull request.
In the specific case of developer tools and libraries documentation, note
that their Handbook sections are automatically generated from the tool and
library directory `NOTES.md` files.
In the case of code contributions, you are required to follow the coding
guidelines described at:
https://github.com/LogtalkDotOrg/logtalk3/wiki/Coding-Style-Guidelines
Submit your code contribution by forking Logtalk, working on a topic branch,
and creating a pull request. If you're not familiar with pull request based
contributions to open source projects, see e.g.
https://egghead.io/courses/how-to-contribute-to-an-open-source-project-on-github
When contributing compiler, runtime, or library enhancements, ensure there
are no regressions by testing your contribution. For details, see:
https://github.com/LogtalkDotOrg/logtalk3/wiki/Testing
Ideally, new code should come with a comprehensive test set. For writing tests,
see our unit testing framework documentation at:
https://github.com/LogtalkDotOrg/logtalk3/blob/master/tools/lgtunit/NOTES.md
Please note that **portability** is a main Logtalk feature. As a general rule,
new code should depend only on standard or de facto standard features available
from the supported backend Prolog systems. But contributions that enhance the
integration with a particular backend Prolog system are also sought, as long
as they don't break support for other systems.
Integrating your contribution
-----------------------------
Major code and documentation contributions require the contributor to sign-off
that they adhere to the Developer Certificate of Origin (DCO):
https://developercertificate.org
Contributions should be made available under the Logtalk license without any
additional terms or conditions. Contributions using other licensing terms may
also be distributed with Logtalk with the understanding that the terms of their
use depend solely on the authors' chosen license terms and may require a
separate, independent agreement between users and authors.
|
using System.Collections;
using System.Collections.Generic;
using UnityEngine;
public class MeleeAttack : Move
{
public List<GameObject> HitBoxes;
// Start is called before the first frame update
void Start()
{
foreach (GameObject hitBox in HitBoxes)
{
hitBox.GetComponent<HitBox>().AttachedMove = this;
}
foreach (Vector2 noTurnTime in NoTurnTimes)
{
StartCoroutine(SetNoTurn(noTurnTime));
}
}
// Update is called once per frame
void Update()
{
moveTime += Time.deltaTime;
foreach (Vector2 time in NoMovementTimes)
{
if (moveTime < time[0] || moveTime > time[1])
{
CanMove = true;
}
else
{
CanMove = false;
}
}
HitDelay -= Time.deltaTime;
if (moveTime >= MaxMoveTime || AttachedExemon.GetComponent<BattleEntity>().ActiveMove != this)
{
Destroy(gameObject);
}
foreach (Vector2 time in lockedTimes)
{
if (moveTime >= time[0] && moveTime <= time[1])
{
canExitAttack = false;
}
else
{
canExitAttack = true;
}
}
foreach (GameObject hitBox in HitBoxes)
{
var hitBoxScript = hitBox.GetComponent<HitBox>();
if ((moveTime >= hitBoxScript.StartTime) && (moveTime <= hitBoxScript.EndTime))
{
hitBoxScript.IsActive = true;
}
else
{
hitBoxScript.IsActive = false;
}
}
}
}
|
using UnityEngine;
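// Singleton that owns a persistent AudioSource and delegates playback to a Playlist.
// Hypothetical usage sketch (the AudioSource prefab and playlist name are assumed):
//   AudioController.GetInstance().LoadPlaylist(audioSourcePrefab, "MainTheme");
//   AudioController.GetInstance().Update(Time.deltaTime);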
public class AudioController {
private static AudioController INSTANCE;
private AudioSource source;
private Playlist playlist;
public static AudioController GetInstance() {
if (INSTANCE == null) {
INSTANCE = new AudioController();
}
return INSTANCE;
}
public void LoadPlaylist(AudioSource source, string name) {
if (this.source == null) {
this.source = GameObject.Instantiate(source) as AudioSource;
GameObject.DontDestroyOnLoad(this.source);
this.playlist = new Playlist(this.source);
}
this.playlist.Play(name);
}
public void Update(float delta) {
playlist.Update(delta);
}
}
|
/*
State management for token persistence in the application, mainly the ACCESSTOKEN.
*/
import { StoreFromSlice, PersistOptionsSlice } from '../utils/store';
export interface TokenSlice {
// states
accessToken: string;
// actions
setAccessToken: (accessToken: string) => void;
// partial actions
_clearAccessToken: () => void;
_isInitialValueAsAccessToken: () => boolean;
// persist options
_persistToken: PersistOptionsSlice<TokenSlice, PersistedTokenSlice>;
}
interface PersistedTokenSlice {
// states
accessToken: string;
}
export default function createTokenSlice<IStore extends TokenSlice = TokenSlice>(
...[set, get]: Parameters<StoreFromSlice<IStore, TokenSlice>>
): ReturnType<StoreFromSlice<IStore, TokenSlice>> {
return {
// states
accessToken: '',
// actions
setAccessToken: (accessToken: string) => set({ accessToken }),
// partial actions
_clearAccessToken: () => set({ accessToken: '' }),
_isInitialValueAsAccessToken: () => get().accessToken === '',
// persist options
_persistToken: {
partialize: (state: TokenSlice) => ({
accessToken: state.accessToken,
}),
toMerge: (persistedState: PersistedTokenSlice, currentState: TokenSlice) => {
const { accessToken } = persistedState;
return {
accessToken,
};
},
},
};
}
|
package utils;
import java.io.IOException;
import java.io.InputStream;
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.SQLException;
import java.util.Properties;
public class ConnectionUtil {
private static ConnectionUtil cu = null;
private static Properties props;
//private constructor so we can control the creation
private ConnectionUtil(){
props = new Properties();
InputStream dbProps = ConnectionUtil.class.getClassLoader().getResourceAsStream("connection.properties");
try{
props.load(dbProps);
}
catch (IOException e){
e.printStackTrace();
}
}
public static synchronized ConnectionUtil getConnectionUtil(){
if(cu==null){
cu = new ConnectionUtil();
return cu;
}
else return cu;
}
public Connection getConnection(){
Connection con = null;
try {
Class.forName(props.getProperty("driver"));
}
catch(ClassNotFoundException e){
e.printStackTrace();
}
String url = props.getProperty("url");
String username = props.getProperty("username");
String password = props.getProperty("password");
try {
con = DriverManager.getConnection(url, username, password);
}
catch (SQLException e){
e.printStackTrace();
}
return con;
}
/*
public static void main(String []args){
Connection con = ConnectionUtil.getConnectionUtil().getConnection();
if(con == null){
System.out.println("something went wrong.");
}
else System.out.println("connection successful.");
}
*/
} |
## 1.3.2
- Updated `eslint-config-adidas-es8` semver to patch.
## 1.3.1
- Disabled `prefer-named-capture-group`.
# 1.3.0
- Updated ESLint to version 7.
- Added rules:
- `prefer-named-capture-group`
- `prefer-object-spread`
## 1.2.1
- Updated `eslint-config-adidas-es8` semver to patch.
# 1.2.0
- Updated ESLint to version 6.
# 1.1.0
- Updated ESLint to version 5.
- Updated `eslint-config-adidas-es8` dependency using major range operator.
## 1.0.1
- Renamed folder to match package name.
- Added npm package badge to README.
# 1.0.0
- Initial version: `eslint-config-adidas-es9`.
|
/*
* Copyright (c) 2011, the Dart project authors. Please see the AUTHORS file
* for details. All rights reserved. Use of this source code is governed by a
* BSD-style license that can be found in the LICENSE file.
*/
/**
* @assertion We define inherited(J, K) to be
* the set of members m such that all of the following hold:
* • m is accessible to K and
* • A is a direct superinterface of J and either
* – A declares a member m or
* – m is a member of inherited(A, K).
* • m is not overridden by J.
* ...
* Otherwise, if the static types T1,...,Tk of the members m1,...,mk are not
* identical, then there must be a member mx such that Tx <: Ti, 1 ≤ x ≤ k for
* all i ∈ 1..k, or a static type warning occurs. The member that is inherited
 * is mx, if it exists; otherwise: let numberOfPositionals(f) denote the
 * number of positional parameters of a function f, and let
 * numberOfRequiredParams(f) denote the number of required parameters of a
 * function f. Furthermore, let s denote the set of all named parameters of
 * the m1,...,mk. Then let
 * h = max(numberOfPositionals(mi)),
 * r = min(numberOfRequiredParams(mi)), i ∈ 1..k.
* Then I has a method named n, with r required parameters of type dynamic,
* h positional parameters of type dynamic, named parameters s of type dynamic
* and return type dynamic.
* @description Checks that there's no static warning produced when the
* subinterface member being accessed is actually inherited. Members tested
* include variables, methods, getters, setters and operators. There's no way
* to check whether the resulting inherited method signature is actually as
* described above, but we can check that it's inherited at all.
* @static-clean
* @author rodionov
*/
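// Illustrative reading of the rule above: for method2, SI1 declares
// `int method2(C v, [D o])` and SI2 declares `num method2(C v, [D o])`;
// since `int Function(C, [D]) <: num Function(C, [D])`, the member that I
// inherits is SI1's method2, with return type int.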
import '../../../../Utils/expect.dart';
class C {}
class D extends C {}
abstract class SI1 {
int method(num v, String p, {int o1, Pattern o2});
int method2(C v, [D o]);
int get gett0r;
void set sett0r(int v);
C operator +(C v);
}
abstract class SI2 {
void method(num v, String p, {int o1, Pattern o2});
num method2(C v, [D o]);
num get gett0r;
void set sett0r(num v);
C operator +(C v);
}
abstract class I implements SI1, SI2 {}
main() {
I i = null;
Expect.throws(() {i.method(null, null, o1:null, o2:null);}, (e) => e is NoSuchMethodError);
Expect.throws(() {var v = i.method2(null, null);}, (e) => e is NoSuchMethodError);
Expect.throws(() {num n = i.gett0r;}, (e) => e is NoSuchMethodError);
Expect.throws(() {i.sett0r = null;}, (e) => e is NoSuchMethodError);
Expect.throws(() {var v = i + null;}, (e) => e is NoSuchMethodError);
}
|
-- --------------------------------------------------------
-- Host: 127.0.0.1
-- Server version: 5.1.53-community-log - MySQL Community Server (GPL)
-- Server OS: Win64
-- HeidiSQL Version: 8.3.0.4694
-- --------------------------------------------------------
/*!40101 SET @OLD_CHARACTER_SET_CLIENT=@@CHARACTER_SET_CLIENT */;
/*!40101 SET NAMES utf8 */;
/*!40014 SET @OLD_FOREIGN_KEY_CHECKS=@@FOREIGN_KEY_CHECKS, FOREIGN_KEY_CHECKS=0 */;
/*!40101 SET @OLD_SQL_MODE=@@SQL_MODE, SQL_MODE='NO_AUTO_VALUE_ON_ZERO' */;
-- Dumping database structure for lazada_app
CREATE DATABASE IF NOT EXISTS `lazada_app` /*!40100 DEFAULT CHARACTER SET latin1 */;
USE `lazada_app`;
-- Dumping structure for table lazada_app.posts
CREATE TABLE IF NOT EXISTS `posts` (
`id` int(11) NOT NULL AUTO_INCREMENT,
`title` varchar(50) NOT NULL DEFAULT '0',
`body` varchar(100) NOT NULL DEFAULT '0',
`updated_at` datetime DEFAULT NULL,
`created_at` datetime DEFAULT NULL,
PRIMARY KEY (`id`)
) ENGINE=InnoDB AUTO_INCREMENT=13 DEFAULT CHARSET=latin1;
-- Dumping data for table lazada_app.posts: ~11 rows (approximately)
/*!40000 ALTER TABLE `posts` DISABLE KEYS */;
INSERT INTO `posts` (`id`, `title`, `body`, `updated_at`, `created_at`) VALUES
(1, 'test', 'test', NULL, NULL),
(2, 'test title', 'test body', NULL, NULL),
(3, 'test title', 'test body', NULL, NULL),
(4, 'test title', 'test body', NULL, NULL),
(5, 'test title', 'test body', NULL, NULL),
(6, 'hello title', 'hello body', '2015-07-23 13:16:10', '2015-07-23 13:16:10'),
(8, 'hello title', 'hello body', '2015-07-23 13:19:16', '2015-07-23 13:19:16'),
(9, 'hello title', 'hello body', '2015-07-23 13:19:28', '2015-07-23 13:19:28'),
(10, 'hello title', 'hello body', '2015-07-23 13:20:16', '2015-07-23 13:20:16'),
(12, 'hello', 'A Search Engine', '2015-07-23 13:21:23', '2015-07-23 13:21:23');
/*!40000 ALTER TABLE `posts` ENABLE KEYS */;
-- Dumping structure for table lazada_app.post_tag
CREATE TABLE IF NOT EXISTS `post_tag` (
`id` int(11) NOT NULL AUTO_INCREMENT,
`post_id` int(11) NOT NULL DEFAULT '0',
`tag_id` int(11) NOT NULL DEFAULT '0',
`updated_at` datetime DEFAULT NULL,
`created_at` datetime DEFAULT NULL,
PRIMARY KEY (`id`)
) ENGINE=MyISAM AUTO_INCREMENT=3 DEFAULT CHARSET=latin1;
-- Dumping data for table lazada_app.post_tag: 2 rows
/*!40000 ALTER TABLE `post_tag` DISABLE KEYS */;
INSERT INTO `post_tag` (`id`, `post_id`, `tag_id`, `updated_at`, `created_at`) VALUES
(1, 2, 1, '2015-07-24 00:53:18', '2015-07-24 00:53:21'),
(2, 2, 2, '2015-07-24 00:53:53', '2015-07-24 00:53:55');
/*!40000 ALTER TABLE `post_tag` ENABLE KEYS */;
-- Dumping structure for table lazada_app.tags
CREATE TABLE IF NOT EXISTS `tags` (
`id` int(11) NOT NULL AUTO_INCREMENT,
`name` varchar(50) DEFAULT NULL,
`updated_at` datetime DEFAULT NULL,
`created_at` datetime DEFAULT NULL,
PRIMARY KEY (`id`)
) ENGINE=MyISAM AUTO_INCREMENT=5 DEFAULT CHARSET=latin1;
-- Dumping data for table lazada_app.tags: 4 rows
/*!40000 ALTER TABLE `tags` DISABLE KEYS */;
INSERT INTO `tags` (`id`, `name`, `updated_at`, `created_at`) VALUES
(1, 'red', '2015-07-23 19:14:58', '2015-07-23 19:14:58'),
(2, 'blue', '2015-07-23 19:15:12', '2015-07-23 19:15:12'),
(3, 'yellow', '2015-07-23 19:15:20', '2015-07-23 19:15:20'),
(4, 'green', '2015-07-23 19:15:26', '2015-07-23 19:15:26');
/*!40000 ALTER TABLE `tags` ENABLE KEYS */;
/*!40101 SET SQL_MODE=IFNULL(@OLD_SQL_MODE, '') */;
/*!40014 SET FOREIGN_KEY_CHECKS=IF(@OLD_FOREIGN_KEY_CHECKS IS NULL, 1, @OLD_FOREIGN_KEY_CHECKS) */;
/*!40101 SET CHARACTER_SET_CLIENT=@OLD_CHARACTER_SET_CLIENT */;
|
# The name of our algorithm
image=$1
MODEL=$2
DATASET=$3
if [ "$image" == "" ]
then
echo "Usage: $0 <image-name>"
exit 1
fi
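# Example invocation (the script filename and argument values below are hypothetical):
#   ./build_and_push.sh my-image my-model my-dataset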
cd container
cp -r ../deployement .
cp -r ../pyproject.toml .
cp -r ../poetry.lock .
cp -r ../train deployement
cp -r ../src deployement/
pip install poetry
poetry export -f requirements.txt -o requirements.txt --without-hashes
python render_docker.py
chmod +x deployement/train
chmod +x deployement/serve
account=$(aws sts get-caller-identity --query Account --output text)
# Get the region defined in the current configuration (default to us-west-2 if none defined)
region=$(aws configure get region)
# specifically setting to us-east-2 since during the pre-release period, we support only that region.
region=${region:-eu-west-1}
fullname="${account}.dkr.ecr.${region}.amazonaws.com/${image}:latest"
# If the repository doesn't exist in ECR, create it.
aws ecr describe-repositories --repository-names "${image}" > /dev/null 2>&1
if [ $? -ne 0 ]
then
aws ecr create-repository --repository-name "${image}" > /dev/null
fi
# Build the docker image locally with the image name and then push it to ECR
# with the full name.
docker build --build-arg MODEL=$MODEL --build-arg DATASET=$DATASET -t ${image} .
docker tag ${image}:latest ${fullname}
aws ecr get-login-password \
--region ${region} \
| docker login \
--username AWS \
--password-stdin ${account}.dkr.ecr.${region}.amazonaws.com
docker push ${fullname}
# Cleaning
rm -r deployement
rm Dockerfile
rm requirements.txt
rm pyproject.toml
rm poetry.lock
echo ${fullname}
|
package org.comroid.spiroid.chat;
import org.comroid.api.Specifiable;
public interface Notifier extends Specifiable<Notifier> {
void sendMessage(MessageLevel level, String message);
}
|
import Vue from 'vue';
import Vuex from 'vuex';
import axios from 'axios';
import { groupBy, map } from 'lodash-es';
Vue.use(Vuex);
const store = new Vuex.Store({
state: {
funds: 0,
offer: [],
ticket: {},
tickets: [],
bonus: 0
},
getters: {
getFunds(state) {
return state.funds;
},
getBonus(state) {
return state.bonus;
},
getOffer(state) {
return state.offer;
},
getTicket(state) {
return state.ticket;
},
getTickets(state) {
return state.tickets;
}
},
mutations: {
setBonus(state, bonus) {
state.bonus = bonus;
},
getFunds(state) {
return axios.get('/api/wallet/index')
.then(({ data }) => {
state.funds = data.funds;
});
},
setFunds(state, funds) {
state.funds = funds;
},
finishTicket(state, bet) {
if ((state.funds -= bet.stake) < 0) {
return console.log('Funds low!!');
}
// update ticket in db
const ticket = {
id: bet.ticket.id,
isBetted: true
};
return axios.put('/api/ticket/updateTicket', ticket)
.then(response => {
state.ticket = [];
console.log(response.data);
});
},
addOffer(state, offer) {
state.offer.push(offer);
},
addBetted(state, tickets) {
state.tickets = tickets;
},
resetTicket(state) {
state.ticket = {};
},
addGameToTicket(state, bet) {
const indexOnTicket = state.ticket.games.findIndex(e => e.gameId === bet.GameId);
if (indexOnTicket === -1) {
return axios.post('/api/ticket/add', bet)
.then(response => {
console.log(response.data);
})
.catch(err => console.log(err));
} else {
// update the pair in the ticket_game table
return axios.put('/api/ticket/updateGame', bet)
.then(res => {
console.log(res.data);
});
}
},
removeGameFromTicket(state, game) {
return axios.delete(`/api/ticket/delete/${game.TicketId}/${game.GameId}`)
.then(response => {
})
.catch(err => console.log(err));
},
addTicket(state, id) {
state.ticket.id = id;
},
addTicketGames(state, games) {
Vue.set(state.ticket, 'games', games);
},
findOrCreate(state, payload) {
return axios.get('/api/ticket/last')
.then(({ data }) => {
state.ticket.id = data.id;
if (data !== '') {
return axios.get(`/api/ticket/find?Id=${data.id}`)
.then((res) => {
state.ticket.games = res.data;
})
.catch(err => console.log(err));
}
const ticket = {
isBetted: false,
bonusId: 3
};
axios.post('/api/ticket/create', ticket)
.then(response => {
console.log(response);
});
});
}
},
actions: {
findOrCreateTicket({ commit }) {
// return commit('findOrCreate');
return axios.get('/api/ticket/last')
.then(({ data }) => {
commit('addTicket', data.id);
if (data !== '') {
return axios.get(`/api/ticket/find?Id=${data.id}`)
.then(res => {
// state.ticket.games = res.data;
commit('addTicketGames', res.data);
})
.catch(err => console.log(err));
}
const ticket = {
isBetted: false,
bonusId: 3
};
axios.post('/api/ticket/create', ticket)
.then(response => {
commit('addTicket', response.data);
});
});
},
updateFunds({ commit }, stake) {
return axios.put(`/api/wallet/updateFunds/${stake}`)
.then(response => {
// commit('getFunds');
return axios.get('/api/wallet/index')
.then(({ data }) => {
commit('setFunds', data.funds);
});
})
.catch(err => console.log(err));
},
placeBet({ dispatch, commit, getters }, bet) {
// commit('finishTicket', bet);
let funds = getters.getFunds;
if ((funds -= bet.stake) < 0) {
return console.log('Funds low!!');
}
// update ticket in db
const ticket = {
id: bet.ticket.id,
stake: bet.stake,
odd: bet.odd,
isBetted: true
};
return axios.put('/api/ticket/updateTicket', ticket)
.then(response => {
commit('resetTicket');
// commit('findOrCreate');
dispatch('findOrCreateTicket');
dispatch('updateFunds', bet.stake);
});
},
getBonus({ commit }, ticketId) {
return axios.get(`/api/ticket/getBonus?TicketId=${ticketId}`)
.then(response => {
commit('setBonus', response.data);
});
},
getOffer({ commit }, sport) {
// get offer from db
return axios.get('/api/offer/index')
.then(response => commit('addOffer', response.data));
},
getBetted({ commit }) {
return axios.get('/api/ticket/getBetted')
.then(response => {
const tickets = groupBy(response.data, e => e.ticketId);
const sortedTickets = [];
map(tickets, item => {
const ticket = {
games: item,
odd: item[0].ticket.odd,
stake: item[0].ticket.stake
};
sortedTickets.push(ticket);
});
commit('addBetted', sortedTickets);
});
},
addToTicket({ dispatch, commit, getters }, bet) {
const ticket = getters.getTicket;
const indexOnTicket = ticket.games.findIndex(e => e.gameId === bet.GameId);
if (indexOnTicket === -1) {
return axios.post('/api/ticket/add', bet)
.then(response => {
dispatch('findOrCreateTicket');
})
.catch(err => console.log(err));
} else {
// update the pair in the ticket_game table
return axios.put('/api/ticket/updateGame', bet)
.then(res => {
dispatch('findOrCreateTicket');
});
}
},
removeFromTicket({ dispatch, commit }, game) {
return axios.delete(`/api/ticket/delete/${game.TicketId}/${game.GameId}`)
.then(response => {
commit('findOrCreate');
dispatch('getBonus', game.TicketId);
})
.catch(err => console.log(err));
}
}
});
export default store;
|
package ru.qiwi.devops.mission.control.service.health.cluster
interface ClusterHealthMonitorSource {
fun getMonitor(clusterName: String): ClusterHealthMonitor
} |
<?php
use Norm\Schema\String;
use Norm\Schema\Date;
return array(
'schema'=> array(
'tanggal'=> Date::create('tanggal')
),
); |
#!/bin/bash
if [ $# != 1 ]; then
exit 1
fi
cd $(dirname $0)
mkdir -p tests
node ~/local/bin/TestGetter.js -t $(pwd)"/tests" -u $1
echo -e "cmake_minimum_required(VERSION 3.0.0)
project($(basename $(pwd)) VERSION 0.1.0)
include(CTest)
enable_testing()
add_executable($(basename $(pwd)) main.cpp)
set(CPACK_PROJECT_NAME \${PROJECT_NAME})
set(CPACK_PROJECT_VERSION \${PROJECT_VERSION})
include(CPack)
" > CMakeLists.txt
|
class SendNewCycleHasStartedEmailToCandidate
def self.call(application_form:)
return if already_sent_to?(application_form)
CandidateMailer.new_cycle_has_started(application_form).deliver_later
ChaserSent.create!(chased: application_form, chaser_type: :new_cycle_has_started)
end
def self.already_sent_to?(application_form)
application_form.chasers_sent.where(
chaser_type: :new_cycle_has_started,
).where(
'created_at > ?',
CycleTimetable.apply_opens,
).present?
end
end
|
#!/usr/bin/perl
##########
#The MIT License (MIT)
#
# Copyright (c) 2015 Aiden Lab
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
##########
# Perl script to calculate diploid reads on the infile.
# The infile should be in the merged_nodups form: no duplicates, >= 14 fields,
# laid out as:
#
# str1 chr1 pos1 frag1 str2 chr2 pos2 frag2 mapq1 cigar1 seq1 mapq2 cigar2 seq2
#
# The script also requires two versions of a SNP file:
# - The "chr_pos" site file lists on each line the sorted locations of the SNPs
# - The "paternal_maternal" file lists each SNP as chr:pos paternal_SNP maternal_SNP
#
# These files can be created using the script vcftotxt.awk from a phased VCF file
#
# Usage: diploid.pl -s [chr_pos site file] -o [paternal_maternal SNP file] [infile or stream]
# Juicer version 1.5
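#
# Hypothetical example lines, for illustration only (all values are made up):
#   merged_nodups: 0 1 10000 1 16 2 20000 2 60 50M ACGT...ACGT 60 50M TTGC...AAGT
#   chr_pos site file: 1 10005 10250 11000
#   paternal_maternal file: 1:10005 A G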
use File::Basename;
use POSIX;
use List::Util qw[min max];
use Getopt::Std;
use vars qw/ $opt_s $opt_l $opt_d $opt_o $opt_h /;
# Check arguments
getopts('s:o:hl');
my $site_file;
my $phased_file;
my $star=0;
if ($opt_h) {
print STDERR "Usage: diploid.pl <infile>\n";
print STDERR " <infile>: file in intermediate format to calculate statistics on, can be stream\n";
exit;
}
if ($opt_s) {
$site_file = $opt_s;
}
if ($opt_o) {
$phased_file = $opt_o;
}
if ($opt_l) {
$star=1;
}
if (scalar(@ARGV)==0) {
print STDERR "No input file specified, reading from input stream\n";
}
# Global variables for calculating statistics
my %chromosomes;
my %maternal_snps;
my %paternal_snps;
# read in SNP site file and store as multidimensional array
open FILE, $site_file or die $!;
while (<FILE>) {
my @locs = split;
my $key = shift(@locs);
my $ref = \@locs;
$chromosomes{$key} = $ref;
}
close FILE;
# read in SNP definition file and store in hashtable
open FILE, $phased_file or die $!;
while (<FILE>) {
my @locs = split;
my $key = $locs[0];
$paternal_snps{$key} = $locs[1];
$maternal_snps{$key} = $locs[2];
}
close FILE;
# read in infile and find SNPs
my $checkedfile=0;
while (<>) {
my @record = split;
my $oldrecord = join(' ',@record);
# holds the read assignments
my @read1 = ();
my @read2 = ();
# holds the SNP assignments (nucleotides)
my @snp1 = ();
my @snp2 = ();
# holds the SNP positions
my @snppos1 = ();
my @snppos2 = ();
my $orgpos1 = -100;
my $orgpos2 = -100;
if ($checkedfile == 0) {
# check that the file has the format expected
# right now just check the sequence strings are in proper place
if ($record[10] !~ /\A[acgtn]+\z/i || $record[13] !~ /\A[acgtn]+\z/i) {
print STDERR "Expected DNA strings in fields 11 and 14, instead see " . $record[10] . " and " . $record[13] . "\n";
print STDERR "Exiting.";
exit(1);
}
else {
$checkedfile = 1;
}
}
# set "original" position for both reads. then we can process cigar/sequence string forward.
# ignore mitochrondria
# First read:
# count Ms,Ds,Ns,Xs,=s for sequence length
my $seqlength1=0;
my $currstr=$record[9];
my $where = $currstr =~ /[0-9]+[M|D|N|X|=|S|H|N]/ ? scalar($RLENGTH = length($&), $RSTART = length($`)+1) : 0;
while ($where > 0) {
$seqlength1 += substr($currstr, ($where)-1, $RLENGTH - 1) + 0;
$currstr = substr($currstr, ($where + $RLENGTH)-1);
$where = $currstr =~ /[0-9]+[M|D|N|X|=|S|H|N]/ ? scalar($RLENGTH = length($&), $RSTART = length($`)+1) : 0;
}
if (($record[0] == 0 && $record[1] ne "MT") || $star == 1) {
$orgpos1 = $record[2];
}
elsif ($record[1] ne "MT") {
# reverse strand, find original position
$orgpos1 = $record[2] - $seqlength1 + 1;
}
# count Ms,Ds,Ns,Xs,=s for sequence length
my $seqlength2=0;
my $currstr=$record[12];
my $where = $currstr =~ /[0-9]+[M|D|N|X|=|S|H|N]/ ? scalar($RLENGTH = length($&), $RSTART = length($`)+1) : 0;
while ($where > 0) {
$seqlength2 += substr($currstr, ($where)-1, $RLENGTH - 1) + 0;
$currstr = substr($currstr, ($where + $RLENGTH)-1);
$where = $currstr =~ /[0-9]+[M|D|N|X|=|S|H|N]/ ? scalar($RLENGTH = length($&), $RSTART = length($`)+1) : 0;
}
# Second read:
if (($record[4] == 0 && $record[5] ne "MT") || $star == 1) {
$orgpos2 = $record[6];
}
elsif ($record[5] ne "MT") {
# reverse strand, find original position
$orgpos2 = $record[6] - $seqlength2 + 1;
}
# find first read position in the SNP site array
my $ind1 = &bsearch($orgpos1,$chromosomes{$record[1]});
my $orgpos11 = $orgpos1;
my $orgpos22 = $orgpos2;
# first read might land on SNP: if on forward strand, position + length overlaps, if on
# reverse strand, position - length overlaps.
# ind1 is first hit; keep incrementing it until no longer on read.
# shouldn't matter reverse or forward strand at this point, just use orgpos and cigar and
# count forward
while ($orgpos11 < $chromosomes{$record[1]}->[$ind1] && $orgpos11+$seqlength1 >= $chromosomes{$record[1]}->[$ind1]) {
# need to count forward from orgpos1.
my $currstr=$record[9];
my $where = $currstr =~ /[0-9]+[M|D|S|I|H|N]/ ? scalar($RLENGTH = length($&), $RSTART = length($`)+1) : 0;
my $seqstr=$record[10];
$orgpos1=$orgpos11;
while ($where > 0) {
my $char = substr($currstr, ($where)-1 + $RLENGTH - 1,1);
my $len = substr($currstr, ($where)-1, $RLENGTH - 1) + 0;
my $ind;
# match. check if match spans SNP, if so, take it. if not, advance cigar string
# and sequence string and keep parsing cigar
if ($char eq "M") {
if ($orgpos1 + $len > $chromosomes{$record[1]}->[$ind1]) {
# matching spans SNP
$ind = $chromosomes{$record[1]}->[$ind1] - $orgpos1;
push @snp1,substr $seqstr, $ind, 1;
push @snppos1,$chromosomes{$record[1]}->[$ind1];
$where = 0;
}
else {
# advance cigar and sequence and keep going.
$seqstr = substr($seqstr, $len);
$orgpos1 = $orgpos1 + $len;
$currstr = substr($currstr, ($where + $RLENGTH)-1);
$where = $currstr =~ /[0-9]+[M|D|S|I|H|N]/ ? scalar($RLENGTH = length($&), $RSTART = length($`)+1) : 0;
}
}
elsif ($char eq "D" || $char eq "N") {
# delete. if deletion spans SNP, we don't have it in our read.
if ($orgpos1 + $len > $chromosomes{$record[1]}->[$ind1]) {
$where = 0;
}
else {
# doesn't span SNP, update genomic position and continue.
$orgpos1 = $orgpos1 + $len;
$currstr = substr($currstr, ($where + $RLENGTH)-1);
$where = $currstr =~ /[0-9]+[M|D|S|I|H|N]/ ? scalar($RLENGTH = length($&), $RSTART = length($`)+1) : 0;
}
}
elsif ($char eq "I") {
# insertion, advance sequence string, doesn't affect genomic position
$currstr = substr($currstr, ($where + $RLENGTH)-1);
$where = $currstr =~ /[0-9]+[M|D|S|I|H|N]/ ? scalar($RLENGTH = length($&), $RSTART = length($`)+1) : 0;
$seqstr = substr($seqstr, $len);
}
elsif ($char eq "S" || $char eq "H") {
if ($orgpos1 + $len > $chromosomes{$record[1]}->[$ind1]) {
# skip spans SNP
$where = 0;
}
else {
# skip does not span SNP, advance cigar and sequence and keep going.
# hard clipped bases do not appear in seqstr
if ($char eq "S") {
$seqstr = substr($seqstr, $len);
}
$orgpos1 = $orgpos1 + $len;
$currstr = substr($currstr, ($where + $RLENGTH)-1);
$where = $currstr =~ /[0-9]+[M|D|S|I|H|N]/ ? scalar($RLENGTH = length($&), $RSTART = length($`)+1) : 0;
}
}
}
$ind1++;
}
if (scalar @snp1 > 0) {
for my $i (0 .. $#snp1) {
my $key = $record[1] . ":" . $snppos1[$i];
if ($snp1[$i] eq $paternal_snps{$key}) {
$read1[$i] = "paternal";
}
elsif ($snp1[$i] eq $maternal_snps{$key}) {
$read1[$i] = "maternal";
}
else {
$read1[$i] = "mismatch";
}
}
}
# find read 2 position in SNP array
my $ind1 = &bsearch($orgpos2,$chromosomes{$record[5]});
# check that SNP falls in read.
while ($orgpos22 < $chromosomes{$record[5]}->[$ind1] && $orgpos22+$seqlength2 >= $chromosomes{$record[5]}->[$ind1]) {
# need to count forward from orgpos2.
my $currstr=$record[12];
my $where = $currstr =~ /[0-9]+[M|D|S|I|H|N]/ ? scalar($RLENGTH = length($&), $RSTART = length($`)+1) : 0;
my $seqstr=$record[13];
$orgpos2=$orgpos22;
while ($where > 0) {
my $char = substr($currstr, ($where)-1 + $RLENGTH - 1,1);
my $len = substr($currstr, ($where)-1, $RLENGTH - 1) + 0;
my $ind;
# match. check if match spans SNP, if so, take it. if not, advance cigar string
# and sequence string and keep parsing cigar
if ($char eq "M") {
if ($orgpos2 + $len > $chromosomes{$record[5]}->[$ind1]) {
# matching spans SNP
$ind = $chromosomes{$record[5]}->[$ind1] - $orgpos2;
push @snp2,substr $seqstr, $ind, 1;
push @snppos2,$chromosomes{$record[5]}->[$ind1];
$where = 0;
}
else {
# matching does not span SNP, advance cigar and sequence and keep going.
$seqstr = substr($seqstr, $len);
$orgpos2 = $orgpos2 + $len;
$currstr = substr($currstr, ($where + $RLENGTH)-1);
$where = $currstr =~ /[0-9]+[M|D|S|I|H|N]/ ? scalar($RLENGTH = length($&), $RSTART = length($`)+1) : 0;
}
}
elsif ($char eq "D" || $char eq "N") {
# delete. if deletion spans SNP, we don't have it in our read.
if ($orgpos2 + $len > $chromosomes{$record[5]}->[$ind1]) {
$where = 0;
}
else {
# doesn't span SNP, update genomic position and continue.
$orgpos2 = $orgpos2 + $len;
$currstr = substr($currstr, ($where + $RLENGTH)-1);
$where = $currstr =~ /[0-9]+[M|D|S|I|H|N]/ ? scalar($RLENGTH = length($&), $RSTART = length($`)+1) : 0;
}
}
elsif ($char eq "I") {
# insertion, advance sequence string, doesn't affect genomic position
$currstr = substr($currstr, ($where + $RLENGTH)-1);
$where = $currstr =~ /[0-9]+[M|D|S|I|H|N]/ ? scalar($RLENGTH = length($&), $RSTART = length($`)+1) : 0;
$seqstr = substr($seqstr, $len);
}
elsif ($char eq "S" || $char eq "H") {
if ($orgpos2 + $len > $chromosomes{$record[5]}->[$ind1]) {
# skip spans SNP
$where = 0;
}
else {
# skip does not span SNP, advance cigar and sequence and keep going.
# hard clipped bases do not appear in seqstr
if ($char eq "S") {
$seqstr = substr($seqstr, $len);
}
$orgpos2 = $orgpos2 + $len;
$currstr = substr($currstr, ($where + $RLENGTH)-1);
$where = $currstr =~ /[0-9]+[M|D|S|I|H|N]/ ? scalar($RLENGTH = length($&), $RSTART = length($`)+1) : 0;
}
}
}
$ind1++;
}
if (scalar @snp2 > 0) {
for my $i (0 .. $#snp2) {
my $key = $record[5] . ":" . $snppos2[$i];
if ($snp2[$i] eq $paternal_snps{$key}) {
$read2[$i] = "paternal";
}
elsif ($snp2[$i] eq $maternal_snps{$key}) {
$read2[$i] = "maternal";
}
else {
$read2[$i] = "mismatch";
}
}
}
if (scalar @read1 > 0 || scalar @read2 > 0) {
print STDOUT $record[0] . " " . $record[1] . " " . $record[2] . " " . $record[3] . " " . $record[4] . " " . $record[5] . " " . $record[6] . " " . $record[7] . " " . $record[10] . " " . $record[13] . " " . $record[14] . " " . "SNP1";
for my $i (0 .. $#snp1) {
print STDOUT ":" . $snp1[$i] . ":" . $snppos1[$i] . ":" . $read1[$i];
}
print STDOUT " SNP2";
for my $i (0 .. $#snp2) {
print STDOUT ":" . $snp2[$i] . ":" . $snppos2[$i] . ":" . $read2[$i];
}
print STDOUT "\n";
}
}
# Binary search, array passed by reference
# search array of integers a for given integer x
# return index where found or upper index if not found
sub bsearch {
my ($x, $a) = @_; # search for x in array a
my ($l, $u) = (0, @$a - 1); # lower, upper end of search interval
my $i; # index of probe
while ($l <= $u) {
$i = int(($l + $u)/2);
if ($a->[$i] < $x) {
$l = $i+1;
}
elsif ($a->[$i] > $x) {
$u = $i-1;
}
else {
return $i; # found
}
}
return $l; # not found, return upper
}
|
# Changelog
## [0.2.1] - 2021-07-04
- Fix `genbu.usage`.
- `Genbu` now raises `UnsupportedCallback` if callback has no
signature.
- Added: `"..."` in the `params` list tells Genbu to infer `Param`s
from the callback's signature and to use the other `Param`s in the
list to override the default `Param`s.
## [0.2] - 2021-07-01
- Rename `CLInterface` to `Genbu`.
+ `name` and `description` are now optional arguments.
+ Change `Genbu.params` behavior.
* `params` are now inferred from the callback signature if not
specified explicitly.
* If multiple `Param`s in the `params` list have the same `dest`,
the one closer to the end of the list is used.
+ The CLI is now invoked by calling `Genbu.run`.
+ `Genbu.run` now reads inputs from `sys.argv[1:]` by default.
- Change `Param` attributes.
+ Rename `Param.name` to `Param.dest`
+ Rename `Param.parse` to `Param.parser`
+ Replace `Param.resolve` accumulator with `Param.aggregator`.
- Add `infer_params` function.
## [0.1] - 2021-06-23
- Initial release
[0.2.1]: https://github.com/lggruspe/genbu/releases/tag/v0.2.1
[0.2]: https://github.com/lggruspe/genbu/releases/tag/v0.2
[0.1]: https://github.com/lggruspe/genbu/releases/tag/v0.1
|
using System.Collections.ObjectModel;
using OpenQA.Selenium;
using Selenium.WebDriver.Equip;
namespace TestWebPages.UIFramework.Pages
{
public class AjaxyControlPage : BasePage , IPage
{
public static string Url = "http://rickcasady.com/SeleniumExtentions/v1.0/TestWebPages/AjaxyControl.html";
#region Static By Selectors
public static By ByNewLableText = By.Name("typer");
public static By ByRedRadio = By.Id("red");
public static By ByGreenRadio = By.Id("green");
public static By BySubmitButton = By.Name("submit");
public static By ByLabelsDiv = By.ClassName("label");
#endregion
#region IWebElement properties
public IWebElement NewLabelText
{ get { return Driver.FindElement(ByNewLableText); } }
public IWebElement RedRadio
{ get { return Driver.FindElement(ByRedRadio); } }
public IWebElement GreenRadio
{ get { return Driver.FindElement(ByGreenRadio); } }
public IWebElement SubmitButton
{ get { return Driver.FindElement(BySubmitButton); } }
public ReadOnlyCollection<IWebElement> Labels
{ get { return Driver.FindElements(ByLabelsDiv); } }
#endregion
#region constructors
public AjaxyControlPage(IWebDriver driver)
: base(driver)
{
}
public AjaxyControlPage()
{
}
#endregion
#region public methods
public void AddGreenLabel(string label)
{
GreenRadio.Click();
NewLabelText.SendKeys(label);
SubmitButton.Click();
}
public void AddRedLabel(string label)
{
RedRadio.Click();
NewLabelText.SendKeys(label);
SubmitButton.Click();
}
public bool IsPageLoaded()
{
return Driver.WaitUntilExists(BySubmitButton);
}
#endregion
#region private methods
#endregion
}
}
|
---
title: Spreadsheet
description:
repourl: "/repo/spreadsheet"
---
|
from collections import Counter
from typing import List
import re
import random
class NgramLM(object):
"""N-Gram Language Model
:param n: Size of the n-gram, e.g., n=2 bigrams
:type n: int
:param nlp: Tokenizer, default spacy.lang.en.English
"""
def __init__(self, n: int=2, nlp: object=None) -> None:
from spacy.lang.en import English
self._tokens = Counter()
self._n_grams = Counter()
self.n = n
        if nlp is None:
            self.nlp = English()
        else:
            self.nlp = nlp
def process_paragraphs(self, para: List[str]):
"""
>>> ngram = NgramLM()
>>> ngram.process_paragraphs(["xxx xyx xxy", "xxy aaa baa"])
>>> len(ngram._tokens)
7
>>> sum(ngram._tokens.values())
10
>>> len(ngram._n_grams)
8
"""
for p in para:
_ = self.tokenize(p)
self._tokens.update(_)
_ = self.n_grams(_)
self._n_grams.update(_)
def process_file(self, fname: str):
txt = open(fname).read()
para = [x for x in re.finditer(r"\n\n", txt)]
index = [0] + [x.end(0) for x in para]
para = [txt[i:j] for i, j in zip(index, index[1:])]
self.process_paragraphs([x for x in para if len(x) > 2])
def tokenize(self, txt: str, markers: bool=True) -> List[str]:
"""Tokenize a text
:param txt: Text
:type txt: str
:param markers: include starting and ending markers
:type markers: bool
>>> ngram = NgramLM(n=2)
>>> ngram.tokenize("Good morning!")
['<P>', 'good', 'morning', '!', '</P>']
>>> ngram.tokenize("Good morning!", markers=False)
['good', 'morning', '!']
"""
_ = [x.norm_.strip() for x in self.nlp(txt)]
_ = [x for x in _ if len(x)]
if len(_) == 0:
return _
if markers:
_.insert(0, "<P>")
_.append("</P>")
return _
def n_grams(self, tokens: list, n: int=None):
"""Create n-grams from a list of tokens
:param tokens: List of tokens
:param n: Size of the n-gram
:type n: int
>>> ngram = NgramLM(n=3)
>>> tokens = ngram.tokenize("Good morning!")
>>> ngram.n_grams(tokens)
['<P>~good~morning', 'good~morning~!', 'morning~!~</P>']
"""
n = self.n if n is None else n
ww = [tokens[i:] for i in range(n)]
_ = ["~".join(x) for x in zip(*ww)]
return _
def inv_n_grams(self, txt: str):
"""Inverse of n_grams, from the string representation
of an n-gram computes the tokens
:param txt: string representation of n-gram
>>> ngram = NgramLM()
>>> ngram.inv_n_grams('good~morning')
['good', 'morning']
"""
return txt.split("~")
def prob(self, n_gram: str) -> float:
"""Probability P(w_n | w_{1:n-1}) where the string
is represented as n-grams
:param n_gram: string representation of an n-gram
>>> ngram = NgramLM()
>>> ngram.process_paragraphs(["xxx xyx xxy", "xxy aaa baa"])
>>> ngram.prob("xxy~aaa")
0.5
>>> ngram.prob("<P>~aaa")
0.0
"""
c_bi = self._n_grams[n_gram]
a, _ = self.inv_n_grams(n_gram)
c_token = self._tokens[a]
return c_bi / c_token
def sentence_prob(self, txt: str, markers: bool=False) -> float:
"""Probability of a sentence P(w_1, w_2, ..., w_n)
:param txt: text
:param markers: include starting and ending markers
:type markers: bool
>>> ngram = NgramLM()
>>> ngram.process_paragraphs(["xxx xyx xxy", "xyx aaa xxx"])
>>> ngram.sentence_prob("xxx xyx aaa")
0.25
"""
tokens = self.tokenize(txt, markers=markers)
ngrams = self.n_grams(tokens)
p = 1
for x in ngrams:
_ = self.prob(x)
p = p * _
return p
    def log_sentence_prob(self, txt: str, markers: bool=True) -> float:
        """Log version of sentence_prob, i.e., the sum of log P(w_n | w_{1:n-1})"""
        from math import log
        tokens = self.tokenize(txt, markers=markers)
        return sum(log(self.prob(x)) for x in self.n_grams(tokens))
def generate_sentence(self, prev: str=None, n_tokens: int=10, random_size: int=5) -> List[str]:
"""
Generate a sentence starting the text (prev)
:param prev: start of the sentence as a string
:type prev: str
:param n_tokens: Number of tokens to generate
:type n_tokens: int
>>> ngram = NgramLM()
>>> ngram.process_paragraphs(["xxx xyx xxy", "xyx aaa xxx"])
>>> ngram.generate_sentence(prev="aaa", n_tokens=1)
['aaa', 'xxx']
"""
if prev is None:
ll = self.tokenize("-")
prev = ll[:len(ll) // 2]
else:
prev = self.tokenize(prev, markers=False)
n = self.n
for _ in range(n_tokens):
_ = prev[-(self.n - 1):]
ngrams = [self.n_grams(_ + [x])[0] for x in self._tokens.keys()]
count_ngrams = [[x, self._n_grams.get(x, 0)] for x in ngrams]
count_ngrams = [x for x in count_ngrams if x[1] > 0]
if len(count_ngrams) == 0:
break
count_ngrams.sort(key=lambda x: x[1], reverse=True)
cnt = min(len(count_ngrams), random_size)
x = count_ngrams[random.randrange(cnt)][0]
prev.append(self.inv_n_grams(x)[-1])
        return prev
|
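A short usage sketch for the NgramLM class above (illustrative only; the training strings are made up, and generate_sentence is randomized):

lm = NgramLM(n=2)
lm.process_paragraphs(["good morning everyone", "good evening everyone"])
print(lm.sentence_prob("good morning"))               # P(morning | good) = 0.5
print(lm.generate_sentence(prev="good", n_tokens=3))  # e.g. ['good', 'morning', 'everyone', '</P>']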
---
"@zioroboco/phyla-task": minor
---
Clone the task generator's own package.json
|
using System;
using System.Collections.Generic;
using System.Linq;
using Microsoft.VisualStudio.TestTools.UnitTesting;
namespace Nucleo.EventArguments
{
[TestClass]
public class ObjectPropertyChangedEventArgsTest
{
#region " Tests "
[TestMethod]
public void CreatingEventArgsAssignsValuesCorrectly()
{
var obj = new EventArgs();
var e = new ObjectPropertyChangedEventArgs(obj, "Test", 1, 2);
Assert.AreEqual(obj, e.Instance);
Assert.AreEqual("Test", e.PropertyName);
Assert.AreEqual(1, e.OldValue);
Assert.AreEqual(2, e.NewValue);
}
#endregion
}
}
|
import javax.swing.{JFrame, JButton, JMenuItem, JMenuBar, JMenu}
import java.awt.event.{ActionListener, ActionEvent}
class Application{
def saveDocument(file: String, data: String) =
println(s"Save document to file $file")
def openFile(file: String) =
println(s"Open file = $file")
def printDocument() =
println("Printing document ...")
def exit() = {
println("Shutdown systems ...")
System.exit(0)
}
}
trait ICommand extends ActionListener{
// Abstract method
def execute(): Unit
// Concrete method
def actionPerformed(evt: ActionEvent) =
execute()
}
class MainGUI extends JFrame{
private val btSave = new JButton("Save")
private val btOpen = new JButton("Open")
private val btClose = new JButton("Close")
private val menuSave = new JMenuItem("Save")
private val menuOpen = new JMenuItem("Open")
private val menuClose = new JMenuItem("Close")
init()
def init(){
setLayout(new java.awt.FlowLayout())
setTitle("FP - Command Design Pattern for GUIs")
// Can be without this, but explicit is better than implicit!
    // this.setSize(300, 276)
setSize(300, 276)
// Add buttons
//==============
this.add(btSave)
this.add(btOpen)
this.add(btClose)
// Add menu bar
//================
val menu = new JMenu("Save")
menu.add(menuOpen)
menu.add(menuSave)
menu.add(menuClose)
val menuBar = new JMenuBar()
menuBar.add(menu)
setJMenuBar(menuBar)
}
def setSaveCommand(cmd: ICommand) = {
btSave.addActionListener(cmd)
menuSave.addActionListener(cmd)
this // return this for method chaining
}
def setOpenCommand(cmd: ICommand) = {
btOpen.addActionListener(cmd)
menuOpen.addActionListener(cmd)
this
}
def setExitCommand(cmd: ICommand) = {
btClose.addActionListener(cmd)
menuClose.addActionListener(cmd)
this
}
} //--- End of class MainGUI ---- //
// Approach 1: The function generates anonymous
// classes implementing the interface.
//
def makeCommand(action: => Unit) =
new ICommand{
def execute() = action
}
def makeOpenCommand(app: Application, file: String) =
new ICommand{
def execute() = app.openFile(file)
}
val app = new Application()
val gui = new MainGUI()
gui.setVisible(true)
val cmdSave = makeCommand{ app.saveDocument("file1.txt", "some data") }
val cmdOpen = makeOpenCommand(app, "/data/fileTest.csv")
val cmdExit = makeCommand{ println("Fake command. DO NOT EXIT during test.")}
gui.setSaveCommand(cmdSave)
gui.setOpenCommand(cmdOpen)
gui.setExitCommand(cmdExit)
|
import os
import subprocess
import tempfile
import pandas as pd
from dsc import dsc_io
import rpy2.robjects as robj
import rpy2.robjects.vectors as rvec
from rpy2.robjects.packages import importr
from rpy2.robjects.conversion import localconverter
from rpy2.robjects import numpy2ri
numpy2ri.activate()
from rpy2.robjects import pandas2ri
pandas2ri.activate()
'''
A Python wrapper for the dscquery in R
Brute force method which saves a temporary RDS file
and loads it in Python.
See below for an rpy2 implementation,
which does not work with pkl files.
'''
def dscquery(dsc_outdir, targets,
conditions = None,
groups = None,
verbose = True,
sep = "::",
dolr = "##"
):
    os_handle, \
    rds_file = tempfile.mkstemp(suffix = ".rds")
    os.close(os_handle)  # only the path is needed; the Rscript writes the RDS file
rscript_file = os.path.join(os.path.dirname(os.path.realpath(__file__)), "dscquery.R")
def list_to_string(xlist):
x = sep.join(xlist)
x = x.replace("$", dolr)
return x
cmd = ["Rscript", rscript_file]
cmd += ["--outdir", dsc_outdir]
cmd += ["--rdsfile", rds_file]
cmd += ["--separator", sep]
cmd += ["--cmarker", dolr]
cmd += ["--targets", list_to_string(targets)]
if conditions is not None:
cmd += ["--conditions", list_to_string(conditions)]
if groups is not None:
cmd += ["--groups", list_to_string(groups)]
process = subprocess.Popen(cmd,
stdout = subprocess.PIPE,
stderr = subprocess.PIPE
)
res = process.communicate()
#print("OUTPUT ==>")
print(res[0].decode('utf-8'))
if len(res[1].decode('utf-8')) > 0:
print("")
print("ERROR ==>")
print(res[1].decode('utf-8'))
retcode = process.returncode
dscout = pd.DataFrame(dsc_io.load_rds(rds_file)) if retcode == 0 else None
if os.path.exists(rds_file): os.remove(rds_file)
return dscout
'''
A Python wrapper for the dscquery in R using rpy2.
DEPRECATED.
** reticulate does not work in the R subprocess called from Python. **
Hence, it cannot load pkl files and throws error.
'''
def _dscquery(dsc_output, targets,
conditions = None,
verbose = True
):
dscrutils = importr('dscrutils')
r_targets = rvec.StrVector(targets) if targets is not None else robj.NULL
r_conditions = rvec.StrVector(conditions) if conditions is not None else robj.NULL
dscoutr = dscrutils.dscquery(dsc_output, r_targets,
conditions = r_conditions,
verbose = verbose)
with localconverter(robj.default_converter + pandas2ri.converter):
dscout = robj.conversion.rpy2py(dscoutr)
return dscout
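# Hypothetical usage sketch for dscquery defined above; the output directory
# and target names are placeholders, not taken from a real DSC pipeline.
if __name__ == "__main__":
    out = dscquery("dsc_result",
                   targets=["simulate", "analyze.mse"],
                   conditions=None,
                   verbose=True)
    if out is not None:
        print(out.head())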
|
/**
* A mocked version of Onedata RPC service.
* For properties description see non-mocked `services/onedata-rpc`
*
 * @module services/mocks/onedata-rpc
* @author Michal Borzecki
* @copyright (C) 2018 ACK CYFRONET AGH
* @license This software is released under the MIT license cited in 'LICENSE.txt'.
*/
import OnedataRpc from 'onedata-gui-websocket-client/services/mocks/onedata-rpc';
import authorizers from 'onezone-gui/utils/authorizers-mock';
import { Promise } from 'rsvp';
export default OnedataRpc.extend({
__handle_getSupportedIdPs() {
return Promise.resolve({
idps: authorizers,
});
},
});
|
class WeeklyIterationRecommender
def initialize(user:, sorted_recommendable_videos:)
@user = user
@sorted_recommendable_videos = sorted_recommendable_videos
end
def recommend
suggestor.
next_up.
present do |video|
create_recommendation(video)
enqueue_email_for(video)
end.
blank { log_no_further_recommendations(user) }
end
private
attr_reader :user, :sorted_recommendable_videos
def create_recommendation(video)
ContentRecommendation.create!(
user: user,
recommendable: video,
)
end
def enqueue_email_for(video)
WeeklyIterationMailerJob.perform_later(user.id, video.id)
end
def suggestor
ContentSuggestor.new(
user: user,
recommendables: sorted_recommendable_videos,
recommended: previously_recommended,
)
end
def previously_recommended
ContentRecommendation.
where(user: user).
map(&:recommendable)
end
def log_no_further_recommendations(user)
Rails.logger.warn(
"No further recommendable videos for user: #{user.id} <#{user.email}>",
)
end
end
|
// Copyright © 2018 MG <[email protected]>
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
// THE SOFTWARE.
package router
import (
"errors"
"time"
"github.com/golang/protobuf/proto"
"github.com/master-g/gouno/api/pb"
"github.com/master-g/gouno/internal/sessions"
"go.uber.org/zap"
)
var (
handlerMap = make(map[pb.Cmd]*Handler)
// ErrorHandlerNotFound indicates there is no such handler for request cmd
ErrorHandlerNotFound = errors.New("no corresponding handler")
// ErrorUnauthed indicates session is not authed yet
ErrorUnauthed = errors.New("session unauthenticated")
// ErrorStreamNotOpen indicates error while opening gRPC stream
ErrorStreamNotOpen = errors.New("stream not opened yet")
)
// Register add request handler to router map
func Register(handlers []*Handler) {
for _, h := range handlers {
log.Info("register handler", zap.String("handler", h.String()))
handlerMap[h.ReqCmd] = h
}
}
// Route routes the incoming packet
func Route(s *sessions.Session, pkg []byte) (resp []byte) {
// mark start time
start := time.Now()
// decrypt if needed
if s.IsFlagEncryptedSet() {
s.Decoder.XORKeyStream(pkg, pkg)
}
// unmarshal header
header := &pb.C2SHeader{}
err := proto.Unmarshal(pkg, header)
if err != nil {
log.Error("invalid header", zap.Error(err))
s.SetFlagKicked()
return nil
}
// ensure legal sequence number
if header.Seq != s.ClientSeq {
log.Info("illegal sequence number", zap.String("sess", s.String()))
s.SetFlagKicked()
return nil
}
s.ClientSeq++
// read CMD
cmdValue := header.Cmd
cmd := pb.Cmd(cmdValue)
// route message to different service by command code
var result []byte
var status int32
var h *Handler
var ok bool
if cmd > pb.Cmd_CMD_COMMON_END {
// forward message
if err = forward(s, cmdValue, header.Body); err != nil {
// error while forwarding
log.Error("error while forwarding cmd", zap.String("cmd", cmd.String()), zap.Error(err))
s.SetFlagKicked()
return nil
}
} else {
// route
if h, ok = handlerMap[cmd]; ok {
// check for authentication
if !h.AuthFree && !s.IsFlagAuthSet() {
// need authenticate first
status = int32(pb.StatusCode_STATUS_UNAUTH)
err = ErrorUnauthed
} else {
result, status, err = h.Handler(s, header)
}
} else {
status = int32(pb.StatusCode_STATUS_UNKNOWN_CMD)
err = ErrorHandlerNotFound
}
// make response and check for errors
if status != int32(pb.StatusCode_STATUS_OK) && h != nil {
if status == int32(pb.StatusCode_STATUS_INTERNAL_ERROR) {
log.Warn("error while handling cmd", zap.String("cmd", h.ReqCmd.String()), zap.Error(err))
} else {
log.Info("unable to handle cmd", zap.String("cmd", h.ReqCmd.String()), zap.Error(err))
}
msg := ""
if err != nil {
msg = err.Error()
}
resp = s.ErrorResponse(int32(h.RespCmd), status, msg)
} else if status != int32(pb.StatusCode_STATUS_OK) && h == nil {
msg := ""
if err != nil {
msg = err.Error()
}
resp = s.ErrorResponse(int32(pb.Cmd_KICK_NOTIFY), status, msg)
} else {
resp = s.Response(int32(h.RespCmd), result)
}
}
// profiling
	elapsed := time.Since(start)
log.Info("REQ processed", zap.String("cmd", cmd.String()), zap.Duration("cost", elapsed))
return resp
}
|
/*****************************************************************************
* Copyright (C) Queen's University Belfast, ECIT, 2016 *
* *
* This file is part of libsafecrypto. *
* *
* This file is subject to the terms and conditions defined in the file *
* 'LICENSE', which is part of this source code package. *
*****************************************************************************/
#include <stdlib.h>
#include <time.h>   /* struct timespec, nanosleep */
#include <check.h>
#include "safecrypto.h"
#include "safecrypto_private.h"
#include "safecrypto_version.h"
//#include "utils/threading/threading.c"
#include "utils/threading/threadpool.c"
START_TEST(test_threadpool_create_min)
{
sc_threadpool_t* sc_threadpool = NULL;
sc_threadpool = threadpool_create(0, 0);
ck_assert_ptr_eq(sc_threadpool, NULL);
sc_threadpool = threadpool_create(1, 0);
ck_assert_ptr_eq(sc_threadpool, NULL);
sc_threadpool = threadpool_create(0, 1);
ck_assert_ptr_eq(sc_threadpool, NULL);
}
END_TEST
START_TEST(test_threadpool_create_max)
{
sc_threadpool_t* sc_threadpool = NULL;
sc_threadpool = threadpool_create(MAX_THREADS+1, MAX_QUEUE+1);
ck_assert_ptr_eq(sc_threadpool, NULL);
sc_threadpool = threadpool_create(MAX_THREADS+1, MAX_QUEUE);
ck_assert_ptr_eq(sc_threadpool, NULL);
sc_threadpool = threadpool_create(MAX_THREADS, MAX_QUEUE+1);
ck_assert_ptr_eq(sc_threadpool, NULL);
}
END_TEST
START_TEST(test_threadpool_create)
{
int32_t retcode;
sc_threadpool_t* sc_threadpool = NULL;
sc_threadpool = threadpool_create(MAX_THREADS, MAX_QUEUE);
ck_assert_ptr_ne(sc_threadpool, NULL);
retcode = threadpool_destroy(sc_threadpool, THREADPOOL_GRACEFUL_EXIT);
ck_assert_int_eq(retcode, SC_OK);
}
END_TEST
START_TEST(test_threadpool_destroy)
{
int32_t retcode;
sc_threadpool_t* sc_threadpool = NULL;
retcode = threadpool_destroy(sc_threadpool, THREADPOOL_GRACEFUL_EXIT);
ck_assert_int_eq(retcode, SC_NULL_POINTER);
sc_threadpool = threadpool_create(MAX_THREADS, MAX_QUEUE);
ck_assert_ptr_ne(sc_threadpool, NULL);
retcode = threadpool_destroy(sc_threadpool, THREADPOOL_GRACEFUL_EXIT);
ck_assert_int_eq(retcode, SC_OK);
}
END_TEST
#define ARRAY_TEST_SIZE 16384
static int32_t array[ARRAY_TEST_SIZE] = {0};
typedef struct _add_args
{
sc_mutex_t *lock;
int32_t *data;
int32_t start;
int32_t end;
} add_args_t;
void * add_function(void *args)
{
int32_t i;
add_args_t *add_args = (add_args_t *) args;
utils_threading()->mtx_lock(add_args->lock);
for (i=add_args->start; i<add_args->end; i++) {
add_args->data[i] = i;
}
utils_threading()->mtx_unlock(add_args->lock);
struct timespec delay = {0, 100000000};
nanosleep(&delay, NULL);
return NULL;
}
START_TEST(test_threadpool_sum)
{
int32_t i;
sc_threadpool_t* sc_threadpool = NULL;
sc_threadpool = threadpool_create(MAX_THREADS, MAX_QUEUE);
ck_assert_ptr_ne(sc_threadpool, NULL);
sc_mutex_t *func_lock = utils_threading()->mtx_create();
add_args_t args[MAX_THREADS];
for (i=0; i<MAX_THREADS; i++) {
args[i].lock = func_lock;
args[i].data = array;
args[i].start = i * (ARRAY_TEST_SIZE / MAX_THREADS);
args[i].end = (i + 1) * (ARRAY_TEST_SIZE / MAX_THREADS);
int32_t retcode = threadpool_add(sc_threadpool, add_function, &args[i]);
ck_assert_int_eq(retcode, SC_OK);
}
struct timespec delay = {1, 0};
while (SC_OK != threadpool_destroy(sc_threadpool, THREADPOOL_GRACEFUL_EXIT)) {
// Wait for 1 second before destroying
nanosleep(&delay, NULL);
}
utils_threading()->mtx_lock(func_lock);
utils_threading()->mtx_destroy(&func_lock);
// Check the contents of the array to ensure that the worker threads have executed correctly
for (i=0; i<ARRAY_TEST_SIZE; i++) {
ck_assert_int_eq(array[i], i);
}
}
END_TEST
Suite *threadpool_suite(void)
{
Suite *s;
TCase *tc_core, *tc_operate;
s = suite_create("threadpool");
/* Test cases */
tc_core = tcase_create("CORE");
tcase_add_test(tc_core, test_threadpool_create_min);
tcase_add_test(tc_core, test_threadpool_create_max);
tcase_add_test(tc_core, test_threadpool_create);
tcase_add_test(tc_core, test_threadpool_destroy);
suite_add_tcase(s, tc_core);
tc_operate = tcase_create("OPERATION");
tcase_add_test(tc_operate, test_threadpool_sum);
suite_add_tcase(s, tc_operate);
return s;
}
int main(void)
{
int number_failed;
Suite *s;
SRunner *sr;
s = threadpool_suite();
sr = srunner_create(s);
srunner_run_all(sr, CK_NORMAL);
number_failed = srunner_ntests_failed(sr);
srunner_free(sr);
return (number_failed == 0) ? EXIT_SUCCESS : EXIT_FAILURE;
}
|
#!/usr/bin/env bash
pushd .
mkdir -p ~/dev
pushd ~/dev
if [ ! -d or-tools ]; then
wget https://github.com/google/or-tools/archive/v6.7.1.zip --quiet -Omaster.zip \
&& unzip master.zip -d . \
&& rm master.zip \
&& mv or-tools-6.7.1 or-tools
fi
pushd or-tools \
&& export LD_LIBRARY_PATH=$LD_LIBRARY_PATH:/opt/gurobi751/linux64/lib \
&& make third_party \
&& make cc UNIX_GLPK_DIR=/usr/local UNIX_GUROBI_DIR=/opt/gurobi751 GUROBI_LIB_VERSION=75 GUROBI_PLATFORM=linux64
popd
popd
popd
|