text
stringlengths 27
775k
|
---|
#!/bin/bash
# CLONE PHASE
# Fetch libav and pin it to a fixed commit so the build is reproducible.
git clone https://github.com/libav/libav.git libav
pushd libav
git checkout -f df744e3
git submodule update --init --recursive
popd
# BUILD PHASE
# Configure into "$pfx" (assumed to be exported by the caller — TODO confirm),
# then build with one job per CPU and install into that prefix.
pushd "libav"
./configure --prefix="$pfx" --enable-static --enable-shared
make -j "$(nproc)"
make install
popd
|
import { gl } from "./Context";
import { TPException } from "./error/TPException";
export class Shader {
  /** Linked GL program; assigned only after a successful create(). */
  program: WebGLProgram;
  /** Lazy cache of uniform name -> location. */
  private uniformLocMap: { [key: string]: WebGLUniformLocation } = {};
  /** Lazy cache of attribute name -> location. */
  private attributeLocMap: { [key: string]: number } = {};

  /** @param name Human-readable shader name, used in error messages. */
  constructor(public name: string) {}

  /**
   * Compiles the provided stage sources, links them into a program and
   * stores it in `this.program`. Pass null for any stage you don't need.
   * Shaders are detached and deleted once the program is linked (or failed).
   * @throws TPException when a stage fails to compile or linking fails.
   */
  create(
    vertexSource: string,
    fragmentSource: string,
    computeShader: string = null
  ): void {
    let vertex = null;
    let fragment = null;
    let compute = null;
    const program = gl.createProgram();
    if (vertexSource != null) {
      vertex = this.createShader(gl.VERTEX_SHADER, vertexSource);
      gl.attachShader(program, vertex);
    }
    if (fragmentSource != null) {
      fragment = this.createShader(gl.FRAGMENT_SHADER, fragmentSource);
      gl.attachShader(program, fragment);
    }
    if (computeShader != null) {
      compute = this.createShader(gl.COMPUTE_SHADER, computeShader);
      gl.attachShader(program, compute);
    }
    gl.linkProgram(program);
    if (gl.getError() !== gl.NO_ERROR) {
      // BUG FIX: the original deleted this.program (the *previous* program,
      // possibly undefined) instead of the program that just errored.
      gl.deleteProgram(program);
    }
    if (!gl.getProgramParameter(program, gl.LINK_STATUS)) {
      const errorLog = gl.getProgramInfoLog(program);
      // Clean up the attached shaders and the failed program before throwing
      // (the original leaked all of them on this path).
      for (const shader of [vertex, fragment, compute]) {
        if (shader != null) {
          gl.detachShader(program, shader);
          gl.deleteShader(shader);
        }
      }
      gl.deleteProgram(program);
      // BUG FIX: the message interpolated errorLog twice; the bracketed slot
      // should carry the shader's name.
      throw new TPException(`Failed to link Shader [${this.name}]: ${errorLog}`);
    }
    if (vertex != null) {
      gl.detachShader(program, vertex);
      gl.deleteShader(vertex);
    }
    if (fragment != null) {
      gl.detachShader(program, fragment);
      gl.deleteShader(fragment);
    }
    if (compute != null) {
      gl.detachShader(program, compute);
      gl.deleteShader(compute);
    }
    this.program = program;
  }

  /** Frees the GL program object. */
  delete(): void {
    gl.deleteProgram(this.program);
  }

  /** Binds this program for subsequent draw calls. */
  use(): void {
    gl.useProgram(this.program);
  }

  /** Unbinds any program. */
  unuse(): void {
    gl.useProgram(null);
  }

  /**
   * Returns the attribute location, caching successful lookups.
   * Returns -1 (GL convention) when the attribute does not exist.
   */
  getAttribLocation(attribute: string): number {
    // BUG FIX: the original used a truthiness check, so location 0 (a valid
    // attribute location) was re-queried from GL on every call.
    if (attribute in this.attributeLocMap) {
      return this.attributeLocMap[attribute];
    }
    const loc = gl.getAttribLocation(this.program, attribute);
    if (loc !== -1) {
      this.attributeLocMap[attribute] = loc;
    }
    return loc;
  }

  /**
   * Returns the uniform location, caching successful lookups.
   * Returns null (GL convention) when the uniform does not exist.
   */
  getUniformLocation(uniform: string): WebGLUniformLocation {
    if (uniform in this.uniformLocMap) {
      return this.uniformLocMap[uniform];
    }
    const loc = gl.getUniformLocation(this.program, uniform);
    if (loc !== null) {
      this.uniformLocMap[uniform] = loc;
    }
    return loc;
  }

  /** Maps a GL shader-type enum to a readable stage name for error messages. */
  private shaderTypeToString(type: GLenum): string {
    switch (type) {
      case gl.VERTEX_SHADER:
        return "Vertex";
      case gl.FRAGMENT_SHADER:
        return "Fragment";
      case gl.COMPUTE_SHADER:
        return "Compute";
      default:
        return "Unknown";
    }
  }

  /**
   * Compiles one shader stage.
   * Error or Warning - We do not tolerate either: any non-empty info log
   * is treated as a failure.
   * @throws TPException with the shader name, stage and GL info log.
   */
  private createShader(type: GLenum, source: string): WebGLShader {
    const shader = gl.createShader(type);
    gl.shaderSource(shader, source);
    gl.compileShader(shader);
    const errorLog = gl.getShaderInfoLog(shader);
    // BUG FIX: getShaderInfoLog may return null; guard before reading length.
    if (errorLog != null && errorLog.length > 0) {
      gl.deleteShader(shader); // don't leak the failed shader object
      const typeString = this.shaderTypeToString(type);
      throw new TPException(
        `Failed to compile ${this.name}[${typeString}]: ${errorLog}`
      );
    }
    return shader;
  }
}
|
OctoDroid
=========
Main features
-------------
### Repository
* List repositories
* Watch/unwatch repository
* View branches/tags
* View pull requests
* View contributors
* View watchers/networks
* View issues
### User
* View basic information
* Activity feeds
* Follow/unfollow user
* View public/watched repositories
* View followers/following
* View organizations (if type is user)
* View members (if type is organization)
### Issue
* List issues
* Filter by label, assignee or milestone
* Create/edit/close/reopen issue
* Comment on issue
* Manage labels
* Manage milestones
### Commit
* View commit (shows files changed/added/deleted)
* Diff viewer with colorized HTML
* View commit history on each file
### Tree/File browser
* Browse source code
* View code with syntax highlighting
### Gist
* List public gists
* View gist content
### Explore GitHub
* Public timeline
* Trending repos (today, week, month, forever)
* GitHub blog
*...and many more*
How to Build Octodroid
----------------------
- Ensure Android SDK platform version 19 and build-tools version 19.1.0 are installed
- Build using Gradle
```bash
./gradlew assembleDebug
```
- To get a full list of available tasks
```bash
./gradlew tasks
```
Open Source Libraries
---------------------
* [android-gif-drawable](https://github.com/koral--/android-gif-drawable)
* [AndroidSVG](https://github.com/BigBadaboom/androidsvg)
* [GitHub Java API](https://github.com/maniac103/egit-github/tree/master/org.eclipse.egit.github.core)
* [HoloColorPicker](https://github.com/LarsWerkman/HoloColorPicker)
* [Material Design Icons](https://github.com/google/material-design-icons)
* [Nine Old Androids](https://github.com/JakeWharton/NineOldAndroids)
* [ProgressFragment](https://github.com/johnkil/Android-ProgressFragment)
* [SmoothProgressBar](https://github.com/castorflex/SmoothProgressBar)
|
<?php
namespace WPME\App;
/**
 * Resolves the plugin's root filesystem path and public URL once, at
 * construction time, via the WordPress plugin_dir_* helpers.
 *
 * dirname(__FILE__, 5) walks five directory levels up from this file to
 * reach the plugin root — NOTE(review): re-verify the depth if this file
 * is ever moved within the plugin tree.
 */
class PluginPath
{
// Absolute filesystem path of the plugin root, with trailing slash.
public $plugin_path;
// Public URL of the plugin root, with trailing slash.
public $plugin_url;
public function __construct()
{
$this->plugin_path = plugin_dir_path(dirname(__FILE__, 5));
$this->plugin_url = plugin_dir_url(dirname(__FILE__, 5));
}
}
package com.kylecorry.kravtrainer.domain.punches
/**
 * The punch categories the trainer can recognise and prescribe.
 */
enum class PunchType {
Straight,
Hook,
Liver,
Uppercut,
Hammer
}
package cn.zhaosunny.soap
/**
 * Observer hook for SOAP traffic.
 *
 * @author zhaoyang 2021/11/30
 */
interface ISoapInterceptor {
// Receives the outgoing request and the raw response body, e.g. for logging.
fun log(soapRequest: SoapRequest, response: String)
}
package services.muretail
import com.google.inject.Inject
import play.api.libs.json.Json
import services.database.{ItemRecord, ItemsRepo}
import scala.concurrent.Future
import scala.concurrent.ExecutionContext.Implicits.global
// Transfer DTO for one item row; the companion supplies the implicit
// play-json Format used for (de)serialisation.
case class Item(firstName: String, lastName: String, petId: Int)
object Item {
// NOTE(review): public implicits are usually given an explicit type
// (OFormat[Item]) to keep incremental compilation stable — confirm convention.
implicit val itemFormat = Json.format[Item]
}
// Thin CRUD facade over ItemsRepo (Slick). Imports the repo's own driver API
// and runs on the global ExecutionContext imported above.
class ItemService @Inject()(itemsRepo: ItemsRepo) {
import itemsRepo.dbConfig.driver.api._
// Inserts the item; 0 is the placeholder for the auto-generated id.
def addItem(item: Item): Future[Unit] =
itemsRepo.db.run(itemsRepo.items += ItemRecord(0, item.firstName, item.lastName, item.petId)).map(_=>())
// Reads every row and maps each record back to the Item DTO.
def returnAllItems(): Future[List[Item]] =
itemsRepo.db.run(itemsRepo.items.result).map(_.map(item => Item(item.firstName, item.lastName, item.petId)).toList)
}
|
-- Dummy Sequence module
-- A Sequence is any Functor that is also a MonadPlus and names an explicit
-- empty container; lists are the canonical instance.
-- NOTE(review): `import Monad` is the pre-hierarchical (Haskell 98) module
-- name; modern code would import Control.Monad — confirm the toolchain.
module Sequence where
import Monad
class (Functor s, MonadPlus s) => Sequence s where
empty :: s a
instance Sequence [] where
empty = []
|
import React from 'react'
import Layout from '../components/layout'
import SEO from '../components/seo'
import TypewriterText from '../components/typewriter_text'
// Landing page: renders a typewriter greeting inside the shared layout.
// NOTE(review): `data` (filled by pageQuery below) is never used — the
// allWpPost results are fetched but not rendered; confirm this is intentional.
const IndexPage = ({ data }) => (
<Layout>
<SEO title="Home" />
<section className="section_container">
<TypewriterText text={'Hi!'} />
</section>
</Layout>
)
export default IndexPage
// Gatsby page query: all WordPress posts sorted by date.
// NOTE(review): `graphql` is not imported in this file — Gatsby needs
// `import { graphql } from 'gatsby'` for tagged page queries; confirm it
// is supplied elsewhere or this will fail at build time.
export const pageQuery = graphql`
  query {
    allWpPost(sort: { fields: [date] }) {
      nodes {
        title
        excerpt
        slug
      }
    }
  }
`
|
/// All regions with their nested delivery locations.
String locationQuery = """
query LocationQuery {
regions {
id
name
locations {
id
name
}
}
}
""";
/// Looks up a customer by phone number, including routing info
/// (routeID and the region's hubID) needed for order dispatch.
String userByPhone = """
query customerByPhone(\$phone: String) {
customerByPhone(phone: \$phone) {
id
firstName
lastName
phone
location {
routeID
region {
hubID
}
}
}
}
""";
/// Product catalogue for a given hub.
String products = """
query Products(\$hubID: ID) {
hub(id: \$hubID) {
products {
id
name
price
photoURL
}
}
}
""";
/// Wallet ledger entries for a customer in a date range.
String transactionByDate = """
query transactionByDate(\$customerID: String, \$startDate: Date, \$endDate: Date) {
transactionsByCustomerIDAndDate(customerID: \$customerID, startDate: \$startDate, endDate: \$endDate) {
isDebit
subTotal
comment
date
}
}
""";
/// Home-screen payload: today's orders plus active subscriptions.
String customerHome = """
query customerHome(\$id: ID){
customer(id: \$id) {
ordersForToday {
id
items
status
deliveryDate
}
subscriptions{
id
items
nextDeliveryDate
frequency
endDate
status
}
}
}
""";
/// One-time (non-subscription) orders for a customer in a date range;
/// callers use it for the trailing-30-days view.
String past30DaysOneTimeOrders = """
query past30DaysOneTimeOrders(\$customerID: String, \$startDate: Date, \$endDate: Date) {
OneTimeOrdersByCustomerIDAndDate(customerID: \$customerID, startDate: \$startDate, endDate: \$endDate) {
id
items
deliveryDate
status
}
}
""";
/// Contact details for a customer's delivery executive and regional hub.
String contactQuery = """
query contactQuery(\$customerID: ID) {
customer(id: \$customerID) {
location {
route {
executive {
firstName
lastName
photoURL
phone
}
}
region {
hub {
hubName
address
mobileNo
email
}
}
}
}
}
""";
/// Saved addresses for a customer.
String addressQuery = """
query addressQuery(\$id: ID){
customer(id: \$id) {
addresses {
id
name
}
}
}
""";
/// Wallet balance for a customer.
String walletById = """
query customer(\$id: ID) {
customer(id: \$id) {
wallet
}
}
""";
|
# Release 0.8.7
### Added
- Support Isolating users in namespace
- Support --label option
- Support annotations/nodeSelector/tolerations in tensorflow serving jobs
### Fixed
- Fix a bug where `arena top node` failed to report allocated GPUs
|
! Smoke test for OpenMP target offload of a derived type's allocatable
! components: foo is copied to the device, bar is device-allocated, written
! inside a target region, copied back, and validated on the host.
program main
implicit none
type foobar
real(8),allocatable,dimension(:) :: foo, bar
end type
type(foobar) :: this
integer, parameter :: n = 1024
allocate(this%foo(n), this%bar(n))
this%foo = 1d0
! Map foo with its data to the device; allocate bar there uninitialised.
!$omp target enter data map(to:this%foo) map(alloc:this%bar)
!$omp target
this%bar = 3d0
!$omp end target
! Copy the device-written bar back into the host array.
!$omp target update from(this%bar)
! foo stayed 1.0 on the host; bar must now be 3.0 everywhere, so every
! element comparison must hold or the offload round-trip failed.
if (all(this%foo < this%bar)) then
print *,"Success!"
else
stop 1
endif
end program main
|
import fs from 'fs';
import path from 'path';
import cheerio from 'cheerio';
import axios, { Axios, AxiosInstance } from 'axios';
export class Scraper {
  /** Root of the wiki being scraped. */
  baseUrl: string;
  /** Dedicated Axios instance so any defaults stay local to this scraper. */
  axiosInst: AxiosInstance;

  constructor() {
    this.baseUrl = 'https://cookierunkingdom.fandom.com'; // URL we're scraping
    this.axiosInst = axios.create(); // Create a new Axios Instance
  }

  /**
   * Fetches the "List of Cookies" page and returns the relative wiki URL of
   * every character. Image-only anchors are excluded by the selector.
   */
  async getCharactersUrls(): Promise<string[]> {
    const $ = cheerio.load((await this.axiosInst.get(`${this.baseUrl}/wiki/List_of_Cookies`)).data);
    const allCharacersUrls: string[] = [];
    $('.wikitable > tbody th > a:not(.image)').each((_, row) => {
      const url = $(row).attr('href');
      if (url) { allCharacersUrls.push(url); }
    });
    return allCharacersUrls;
  }

  /**
   * Fetches one character page (relative `url`) and extracts its infobox
   * into a Character record. Fields missing from the page fall back to ''
   * or are cast through the union types, matching inconsistent wiki pages.
   */
  async getCharacter(url: string): Promise<Types.Character> {
    const $ = cheerio.load((await this.axiosInst.get(`${this.baseUrl}${url}`)).data);
    const character: Types.Character = {
      name: $(".page-header__title#firstHeading").text().replace(/\t|\n/g, ''),
      type: $("[data-source='role']").children().last().text() as Types.CharacterType,
      // Strip the /revision/... suffix to get the canonical image URL.
      imagePath: $(".pi-image-thumbnail").attr('src')?.replace(/\/revision\/.*/, "") || '',
      rarity: $("[data-source='rarity'] img").attr('alt')?.replace(/"/g, '') as Types.CharacterRarity,
      position: $("td[data-source='position']").text() as Types.CharacterPos
    };
    return character;
  }

  /**
   * Scrapes the Gacha page's last rate table into per-rarity draw chances
   * (percentages for cookie and soulstone pulls). The header row (i == 0)
   * is skipped.
   */
  async getRarityChances(): Promise<Array<Types.RarityChances>> {
    const rarities: Array<Types.RarityChances> = [];
    const $ = cheerio.load((await this.axiosInst.get(`${this.baseUrl}/wiki/Gacha`)).data);
    const table = $('.mw-parser-output > .wikitable').last();
    $(table).find("tr").each((i, e) => {
      if (i != 0) {
        const element = $(e);
        const children = element.children().toArray();
        rarities.push({
          rarity: $($(children[0]).find('a')).attr('title')?.replace(" Cookie", "") as Types.CharacterRarity,
          cookie: Number($(children[1]).text().replace("%", "")),
          soulstone: Number($(children[2]).text().replace("%", ""))
        } as Types.RarityChances);
      }
    });
    return rarities;
  }

  /**
   * Streams `url` to `filepath` on disk.
   * @returns true once the write stream has flushed; rejects on stream error.
   */
  async download(url: string, filepath: string): Promise<boolean> {
    const writer = fs.createWriteStream(path.resolve(filepath));
    const response = await axios({
      url,
      method: 'GET',
      responseType: 'stream'
    });
    response.data.pipe(writer);
    return new Promise((resolve, reject) => {
      // BUG FIX: the method is typed Promise<boolean> but the original
      // resolved with the 'finish' event's undefined payload.
      writer.on('finish', () => resolve(true));
      writer.on('error', reject);
    });
  }
}
export namespace Types {
export interface Character {
name: string;
type: CharacterType | null;
imagePath: string;
rarity: CharacterRarity | null;
position: CharacterPos | null;
}
export type CharacterPos = "Rear" | "Middle" | "Front";
export type CharacterType =
| "Ambush"
| "Bomber"
| "Charge"
| "Defense"
| "Healing"
| "Magic"
| "Ranged"
| "Support";
// enum CharacterType {
// Ambush,
// Bomber,
// Charge,
// Defense,
// Healing,
// Magic,
// Ranged,
// Support
// }
export type CharacterRarity =
| "Special"
| "Common"
| "Rare"
| "Epic"
| "Legendary"
| "Ancient";
export function getProbabilityFromRarity(rarity: CharacterRarity): number {
switch (rarity) {
default:
case 'Special':
return 0;
break;
case 'Common':
return 0.4197;
break;
case 'Rare':
return 0.3766;
break;
case 'Epic':
return 0.1929;
break;
case 'Legendary':
return 0.0036;
break;
case 'Ancient':
return 0.0072;
break;
}
}
export interface RarityChances {
rarity: CharacterRarity | null;
cookie: number;
soulstone: number;
}
// export enum CharacterRarity {
// Special = 0.00,
// Common = 0.4197,
// Rare = 0.3766,
// Epic = 0.1929,
// Legendary = 0.0036,
// Ancient = 0.0072
// }
} |
## Images
Note:
- huge % of typical webpage
- Vox Media brands love images
- an area of passion for me
- so I got to work optimizing images |
require 'rails-perfmon/request_collector'
# Rails engine hook: after the app is fully initialised, start the request
# metrics collector — but only when both a service URL and an API key have
# been configured, so unconfigured apps pay no cost.
class RailsPerfmon::Railtie < Rails::Railtie
config.after_initialize do
if RailsPerfmon.configuration.service_url && RailsPerfmon.configuration.api_key
RailsPerfmon::RequestCollector.new
end
end
end
|
<?php
use PHPUnit\Framework\TestCase;
class BucketTest extends TestCase {
    /** @var \LeakyBucketRateLimiter\Bucket Bucket under test, rebuilt per test. */
    private $bucket;

    /**
     * PHPUnit fixture. Declared with the `: void` return type required by
     * PHPUnit 8+; the property is declared above to avoid PHP 8.2's
     * dynamic-property deprecation.
     */
    public function setUp(): void {
        $this->bucket = new LeakyBucketRateLimiter\Bucket();
    }

    public function testGetCapacity() {
        $this->bucket->setCapacity(20);
        $cap = $this->bucket->getCapacity();
        // BUG FIX: assertEquals takes (expected, actual); the original had
        // them reversed, which garbles failure messages.
        $this->assertEquals(20, $cap);
    }

    public function testGetDrips() {
        // A fresh bucket starts empty.
        $this->assertEquals(0, $this->bucket->getDrips());
    }

    public function testSetDrips() {
        $this->bucket->setDrips(45);
        $this->assertEquals(45, $this->bucket->getDrips());
    }

    public function testSetTime() {
        $time = microtime(true);
        $this->bucket->setTime($time);
        $this->assertEquals($time, $this->bucket->getTime());
    }

    public function testFill() {
        // fill() with no argument adds one drip; fill(n) adds n.
        $drips = $this->bucket->getDrips();
        $this->bucket->fill();
        $drips++;
        $this->assertEquals($drips, $this->bucket->getDrips());
        $this->bucket->fill(15);
        $this->assertEquals($drips += 15, $this->bucket->getDrips());
    }

    public function testLeakRate() {
        $this->bucket->setLeakRate(1.33);
        $this->assertEquals(1.33, $this->bucket->getLeakRate());
    }

    public function testLeak() {
        // TODO: Write test method for leak
    }
}
|
敦煌曲词笔记
id: 4d762530b8c8417eb085f418e05daab4
created_time: 2021-05-22T09:02:10.206Z
updated_time: 2021-06-27T04:58:11.244Z
user_created_time: 2021-05-22T09:02:10.206Z
user_updated_time: 2021-05-22T09:02:10.206Z
encryption_cipher_text:
encryption_applied: 0
parent_id: 2bfd769f5d9e4a18b0b710f2bf10b818
is_shared: 0
share_id:
type_: 2 |
package cn.qumiandan.saleman.impl;
import java.math.BigDecimal;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;
import java.util.Objects;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Component;
import org.springframework.transaction.annotation.Transactional;
import org.springframework.util.CollectionUtils;
import com.alibaba.dubbo.config.annotation.Reference;
import com.alibaba.dubbo.config.annotation.Service;
import com.baomidou.mybatisplus.core.conditions.query.QueryWrapper;
import com.github.pagehelper.PageHelper;
import com.github.pagehelper.PageInfo;
import com.google.common.collect.Lists;
import cn.qumiandan.address.api.IAddressService;
import cn.qumiandan.address.enums.AddressLevelEnum;
import cn.qumiandan.address.vo.AddressVO;
import cn.qumiandan.bankinfo.api.IBankInfoService;
import cn.qumiandan.bankinfo.vo.BankInfoVO;
import cn.qumiandan.common.exception.QmdException;
import cn.qumiandan.constant.ParentDataEnum;
import cn.qumiandan.constant.StatusEnum;
import cn.qumiandan.payaccount.api.IBankCardService;
import cn.qumiandan.payaccount.api.IPayAccountService;
import cn.qumiandan.payaccount.enums.AccountTypeEnum;
import cn.qumiandan.payaccount.enums.WithdrawEnum;
import cn.qumiandan.payaccount.vo.BankCardVO;
import cn.qumiandan.payaccount.vo.PayAccountVO;
import cn.qumiandan.role.api.IRoleService;
import cn.qumiandan.role.api.IUserRoleService;
import cn.qumiandan.role.vo.AddUserRoleVO;
import cn.qumiandan.role.vo.RoleVO;
import cn.qumiandan.saleman.api.ISalemanService;
import cn.qumiandan.saleman.entity.Saleman;
import cn.qumiandan.saleman.enums.SalemanTypeEnums;
import cn.qumiandan.saleman.mapper.SalemanMapper;
import cn.qumiandan.saleman.vo.AddSalmanVO;
import cn.qumiandan.saleman.vo.SalemanAndRoleVO;
import cn.qumiandan.saleman.vo.SalemanAndUserParamVO;
import cn.qumiandan.saleman.vo.SalemanAndUserVO;
import cn.qumiandan.saleman.vo.SalemanVO;
import cn.qumiandan.saleman.vo.ShopAgentVO;
import cn.qumiandan.saleman.vo.UpdateSalmanVO;
import cn.qumiandan.shop.api.IShopService;
import cn.qumiandan.shop.vo.ShopBasicVO;
import cn.qumiandan.system.api.ISysPropertiesService;
import cn.qumiandan.system.enums.SysPropertiresEnums;
import cn.qumiandan.system.vo.SysPropertiesVO;
import cn.qumiandan.ticket.api.IQualificationTicketService;
import cn.qumiandan.ticket.enums.TicketStatusEnums;
import cn.qumiandan.ticket.vo.TicketNumVO;
import cn.qumiandan.user.api.IUserService;
import cn.qumiandan.user.vo.UserVO;
import cn.qumiandan.utils.AssertUtil;
import cn.qumiandan.utils.CopyBeanUtil;
import cn.qumiandan.utils.ObjectUtils;
import lombok.extern.slf4j.Slf4j;
/**
 * Salesman service implementation: manages platform salesmen and their
 * province / city / district ("country") agents, granting roles and creating
 * payment accounts. Exposed over Dubbo as {@link ISalemanService}.
 *
 * @author lrj
 *
 */
@Slf4j
@Component
@Service(interfaceClass = ISalemanService.class)
public class SalemanServiceImpl implements ISalemanService {
// MyBatis-Plus mapper for the saleman table.
@Autowired
private SalemanMapper salemanMapper;
// Local services (same deployable).
@Autowired
private IUserRoleService userRoleService;
@Autowired
private IUserService userService;
// Remote Dubbo references.
@Reference
private IAddressService addressService;
@Reference
private ISysPropertiesService sysPropertiesService;
@Reference
private IPayAccountService payAccountService;
@Reference
private IBankInfoService bankInfoService;
@Reference
private IBankCardService bankCardService;
@Autowired
private IQualificationTicketService ticketService;
@Autowired
private IRoleService roleService;
@Reference
private IShopService shopService;
/**
 * Adds a salesman, district ("country") agent, city agent or province agent.
 * The target user must already be a registered platform user and must not
 * already hold an active salesman/agent record; on success a pay account
 * and bank card are created as well.
 */
@Override
@Transactional(rollbackFor = QmdException.class)
public SalemanVO addSaleman(AddSalmanVO addSalmanVO) {
UserVO userVO = userService.getUserByUsername(addSalmanVO.getUserName());
if (userVO == null) {
log.error("添加业务员、市代理、省代理 -->该用户不为平台用户,请先注册为平台用户 operator:" + addSalmanVO.getCreateId());
throw new QmdException("该用户不为平台用户,请先注册为平台用户");
}
// Reject users who already have an active salesman/agent record.
Saleman salemanByUserId = salemanMapper.selectOne(new QueryWrapper<Saleman>()
.eq("user_id", userVO.getId()).eq("status", StatusEnum.normal.getCode()));
if (salemanByUserId != null) {
log.error("添加业务员、市代理、省代理 -->该用户已为平台业务员或市代理或省代理 operator:" + addSalmanVO.getCreateId());
throw new QmdException("该用户已为平台业务员或市代理或省代理 ");
}
addSalmanVO.setUserId(userVO.getId());
addSalmanVO.setCreateDate(new Date());
SalemanVO salemanVO = new SalemanVO();
// Dispatch on requested type: 1 = salesman, 2 = district agent,
// 3 = city agent, 4 = province agent.
switch (addSalmanVO.getType()) {
case 1:
addSalmanVO.setAccountType(AccountTypeEnum.Saleman.getCode());
salemanVO = addSalemanInfo(addSalmanVO);
break;
case 2:
addSalmanVO.setAccountType(AccountTypeEnum.CountryAgent.getCode());
salemanVO = addCountryAgent(addSalmanVO);
break;
case 3:
addSalmanVO.setAccountType(AccountTypeEnum.CityAgent.getCode());
salemanVO = addCityAgent(addSalmanVO);
break;
case 4:
addSalmanVO.setAccountType(AccountTypeEnum.ProvinceAgent.getCode());
salemanVO = addProAgent(addSalmanVO);
break;
default:
log.error("添加业务员 、省代理、市代理-->添加类型参数错误:" + addSalmanVO.getType());
throw new QmdException("添加类型参数错误");
}
// Create the pay account (and bank card) for the new salesman/agent.
addPayAccount(addSalmanVO);
return salemanVO;
}
/**
 * Creates a zero-balance pay account for the new salesman/agent and
 * registers the bank card supplied in the request.
 * NOTE(review): @Transactional on a private method is not intercepted by
 * Spring's proxy; it only runs transactionally because the public caller
 * (addSaleman) is itself transactional — confirm that is intended.
 */
@Transactional(rollbackFor = QmdException.class)
private void addPayAccount(AddSalmanVO addSalmanVO) {
Date now = new Date();
// Create the account with every balance initialised to zero and
// withdrawals enabled.
BigDecimal zero = new BigDecimal(0);
PayAccountVO account = new PayAccountVO();
account.setName(addSalmanVO.getUserName());
account.setType(addSalmanVO.getAccountType());
account.setUserId(addSalmanVO.getUserId());
account.setBalance(zero);
account.setSettBalance(zero);
account.setUnbalance(zero);
account.setSecurityMoney(zero);
account.setWithdrawStatus(WithdrawEnum.ABLE.getCode());
account.setStatus(StatusEnum.normal.getCode());
account.setCreateDate(now);
account.setCreateId(addSalmanVO.getCreateId());
PayAccountVO addPayAccount = payAccountService.addPayAccount(account);
// Register the bank card against the freshly created account.
BankInfoVO bankInfoVO = bankInfoService.getBankInfoById(addSalmanVO.getBankId());
BankCardVO bankCardVO = new BankCardVO();
bankCardVO.setAccountId(addPayAccount.getId());
bankCardVO.setBankName(bankInfoVO.getSubBankName());
bankCardVO.setBankCardNo(addSalmanVO.getBankCardNo());
bankCardVO.setBankCardHolder(addSalmanVO.getBankCardHolder());
bankCardVO.setBankMobile(addSalmanVO.getBankMobile());
bankCardVO.setCardType(addSalmanVO.getCardType());
bankCardVO.setUseTimes(1);
bankCardVO.setSort(1);
bankCardVO.setStatus(StatusEnum.normal.getCode());
bankCardVO.setCreateDate(now);
bankCardVO.setCreateId(addSalmanVO.getCreateId());
bankCardService.addBankCard(bankCardVO);
}
/**
 * Adds a city-level agent. At most one active agent is allowed per city
 * code; the city-agent role id is resolved from system properties, the
 * role is granted, and the saleman row is inserted.
 */
@Transactional(rollbackFor = QmdException.class)
private SalemanVO addCityAgent(AddSalmanVO addSalmanVO) {
// Ensure no active city agent already exists for this city code.
Saleman cityAgent = salemanMapper
.selectOne(new QueryWrapper<Saleman>().eq("city_code", addSalmanVO.getCityCode())
.eq("status", StatusEnum.normal.getCode()).eq("type", SalemanTypeEnums.CityAgent.getCode()));
if (cityAgent != null) {
log.error("该市代理已存在:" + addSalmanVO.getCityCode());
throw new QmdException("该市代理已存在");
}
// Grant the city-agent role to the user.
SysPropertiesVO sysPropertiesInfoById = sysPropertiesService
.getSysPropertiesInfoById(SysPropertiresEnums.CityAgent.getId());
AddUserRoleVO userRoleVO = new AddUserRoleVO();
userRoleVO.setSysUserId(addSalmanVO.getUserId());
userRoleVO.setSysRoleId(Long.parseLong(sysPropertiesInfoById.getValue()));
userRoleVO.setStatus(StatusEnum.normal.getCode());
AddUserRoleVO addUserRoleVO = userRoleService.addUserRole(userRoleVO);
if (addUserRoleVO == null) {
log.error("添加市代理角色--> 用户添加市代理角色失败 operator:" + addSalmanVO.getCreateId());
throw new QmdException("给用户添加市代理角色失败");
}
// NOTE(review): id is read from the request object, not from the returned
// addUserRoleVO — confirm addUserRole populates userRoleVO's id in place.
addSalmanVO.setUserRoleId(userRoleVO.getId());
// (disabled) Look up the province agent and propagate its userId:
/*
 * Saleman proAgent = salemanMapper.selectOne(new
 * QueryWrapper<Saleman>().eq("pro_code", addSalmanVO.getProCode())
 * .eq("status", StatusEnum.normal.getCode()).eq("type",
 * SalemanTypeEnums.ProAgent.getCode())); if (proAgent != null) {
 * addSalmanVO.setProUserId(proAgent.getUserId()); }
 */
// Insert the city-agent row.
Saleman saleman = CopyBeanUtil.copyBean(addSalmanVO, Saleman.class);
if (!checkCUD(salemanMapper.insert(saleman))) {
log.error("添加市代理 -->添加市代理失败,受影响行数不为1:" + addSalmanVO.getCreateId());
throw new QmdException("添加市代理失败");
}
// (disabled) Re-point existing salesmen's city_user_id at the new agent:
// UpdateWrapper<Saleman> updateWrapper = new UpdateWrapper<Saleman>();
// updateWrapper.set("city_user_id", addSalmanVO.getUserId());
// (disabled) Also propagate the province agent's user id where present:
/*
 * if (proAgent != null) { updateWrapper.set("pro_user_id",
 * proAgent.getUserId());
 *
 * }
 */
/*
 * updateWrapper.eq("pro_code", addSalmanVO.getProCode()).eq("type",
 * SalemanTypeEnums.Saleman.getCode()); salemanMapper.update(new Saleman(),
 * updateWrapper);
 */
return CopyBeanUtil.copyBean(saleman, SalemanVO.class);
}
/**
 * Adds a province-level agent. At most one active agent is allowed per
 * province code; grants the province-agent role and inserts the row.
 */
@Transactional(rollbackFor = QmdException.class)
private SalemanVO addProAgent(AddSalmanVO addSalmanVO) {
// Ensure no active province agent already exists for this province code.
Saleman proAgent = salemanMapper.selectOne(new QueryWrapper<Saleman>().eq("pro_code", addSalmanVO.getProCode())
.eq("status", StatusEnum.normal.getCode()).eq("type", SalemanTypeEnums.ProAgent.getCode()));
if (proAgent != null) {
log.error("该省代理已存在:" + addSalmanVO.getProCode());
throw new QmdException("该省代理已存在");
}
// Grant the province-agent role to the user.
SysPropertiesVO sysPropertiesInfoById = sysPropertiesService
.getSysPropertiesInfoById(SysPropertiresEnums.ProvinceAgent.getId());
AddUserRoleVO userRoleVO = new AddUserRoleVO();
userRoleVO.setSysUserId(addSalmanVO.getUserId());
userRoleVO.setSysRoleId(Long.parseLong(sysPropertiesInfoById.getValue()));
userRoleVO.setStatus(StatusEnum.normal.getCode());
AddUserRoleVO addUserRoleVO = userRoleService.addUserRole(userRoleVO);
if (addUserRoleVO == null) {
log.error("添加省代理 --> 用户添加省代理员角色失败 operator:" + addSalmanVO.getCreateId());
throw new QmdException("给用户添加省代理角色失败");
}
addSalmanVO.setUserRoleId(userRoleVO.getId());
// Insert the province-agent row.
Saleman saleman = CopyBeanUtil.copyBean(addSalmanVO, Saleman.class);
if (!checkCUD(salemanMapper.insert(saleman))) {
log.error("添加省代理 -->添加省代理失败,受影响行数不为1:" + addSalmanVO.getCreateId());
throw new QmdException("添加省代理失败");
}
// (disabled) Re-point this province's city agents' and salesmen's
// pro_user_id at the new province agent:
/*
 * salemanMapper.update(new Saleman(), new
 * UpdateWrapper<Saleman>().set("pro_user_id", addSalmanVO.getUserId())
 * .eq("pro_code", addSalmanVO.getProCode()).in("type",
 * Lists.newArrayList(SalemanTypeEnums.CityAgent.getCode(),
 * SalemanTypeEnums.Saleman.getCode())));
 */
return CopyBeanUtil.copyBean(saleman, SalemanVO.class);
}
/**
 * Adds a plain salesman. City and district ("country") codes are mandatory
 * because the web UI funnels every agent type through one endpoint.
 *
 * @param addSalmanVO request payload (userId already resolved by caller)
 * @return the persisted record as a VO
 */
@Transactional(rollbackFor = QmdException.class)
private SalemanVO addSalemanInfo(AddSalmanVO addSalmanVO) {
// (disabled) Look up the province agent and propagate its userId:
/*
 * Saleman proAgent = salemanMapper.selectOne(new
 * QueryWrapper<Saleman>().eq("pro_code", addSalmanVO.getProCode())
 * .eq("status", StatusEnum.normal.getCode()).eq("type",
 * SalemanTypeEnums.ProAgent.getCode())); if (proAgent != null) {
 * addSalmanVO.setProUserId(proAgent.getUserId()); }
 */
// The shared web endpoint cannot enforce this per-type, so validate here:
// city code is required for a salesman.
if (addSalmanVO.getCityCode() == null || addSalmanVO.getCityCode().equals("")) {
log.error("添加业务员 -->城市code不能为空:" + addSalmanVO.getCreateId());
throw new QmdException("城市code不能为空");
}
// District ("country") code is likewise required.
if (addSalmanVO.getCountryCode() == null || addSalmanVO.getCountryCode().equals("")) {
log.error("添加业务员 -->区县code不能为空:" + addSalmanVO.getCreateId());
throw new QmdException("区县code不能为空");
}
// (disabled) Look up the city agent and propagate its userId:
/*
 * Saleman cityAgent = salemanMapper .selectOne(new
 * QueryWrapper<Saleman>().eq("city_code", addSalmanVO.getCityCode())
 * .eq("status", StatusEnum.normal.getCode()).eq("type",
 * SalemanTypeEnums.CityAgent.getCode())); if (proAgent != null) {
 * addSalmanVO.setProUserId(cityAgent.getUserId()); }
 */
// Grant the salesman role to the user.
SysPropertiesVO sysPropertiesInfoById = sysPropertiesService
.getSysPropertiesInfoById(SysPropertiresEnums.Saleman.getId());
AddUserRoleVO userRoleVO = new AddUserRoleVO();
userRoleVO.setSysUserId(addSalmanVO.getUserId());
userRoleVO.setSysRoleId(Long.parseLong(sysPropertiesInfoById.getValue()));
userRoleVO.setStatus(StatusEnum.normal.getCode());
AddUserRoleVO addUserRoleVO = userRoleService.addUserRole(userRoleVO);
if (addUserRoleVO == null) {
log.error("添加业务员 --> 用户添加业务员角色失败 operator:" + addSalmanVO.getCreateId());
throw new QmdException("给用户添加业务员角色失败");
}
addSalmanVO.setUserRoleId(userRoleVO.getId());
// Insert the salesman row, defaulting parentId to the root sentinel.
Saleman saleman = CopyBeanUtil.copyBean(addSalmanVO, Saleman.class);
if (saleman.getParentId() == null) {
saleman.setParentId(ParentDataEnum.RootId.getCode());
}
if (!checkCUD(salemanMapper.insert(saleman))) {
log.error("添加业务员 -->添加业务员失败,受影响行数不为1:" + addSalmanVO.getCreateId());
throw new QmdException("添加业务员失败");
}
SalemanVO salemanVO = CopyBeanUtil.copyBean(saleman, SalemanVO.class);
return salemanVO;
}
/**
 * Adds a district-level ("country") agent. City and district codes are
 * mandatory because the web UI funnels every agent type through one
 * endpoint; grants the district-agent role and inserts the row.
 *
 * @param addSalmanVO request payload (userId already resolved by caller)
 * @return the persisted record as a VO
 */
@Transactional(rollbackFor = QmdException.class)
private SalemanVO addCountryAgent(AddSalmanVO addSalmanVO) {
// City code is required (shared endpoint cannot enforce it per type).
if (addSalmanVO.getCityCode() == null || addSalmanVO.getCityCode().equals("")) {
log.error("添加区代理 -->城市code不能为空:" + addSalmanVO.getCreateId());
throw new QmdException("城市code不能为空");
}
// District ("country") code is likewise required.
if (addSalmanVO.getCountryCode() == null || addSalmanVO.getCountryCode().equals("")) {
log.error("添加区代理 -->区县code不能为空:" + addSalmanVO.getCreateId());
throw new QmdException("区县code不能为空");
}
// Grant the district-agent role to the user.
SysPropertiesVO sysPropertiesInfoById = sysPropertiesService
.getSysPropertiesInfoById(SysPropertiresEnums.CountryAgent.getId());
AddUserRoleVO userRoleVO = new AddUserRoleVO();
userRoleVO.setSysUserId(addSalmanVO.getUserId());
userRoleVO.setSysRoleId(Long.parseLong(sysPropertiesInfoById.getValue()));
userRoleVO.setStatus(StatusEnum.normal.getCode());
AddUserRoleVO addUserRoleVO = userRoleService.addUserRole(userRoleVO);
if (addUserRoleVO == null) {
log.error("添加区代理 --> 用户添加区代理角色失败 operator:" + addSalmanVO.getCreateId());
throw new QmdException("给用户添加区代理角色失败");
}
addSalmanVO.setUserRoleId(userRoleVO.getId());
// Insert the agent row, defaulting parentId to the root sentinel.
Saleman saleman = CopyBeanUtil.copyBean(addSalmanVO, Saleman.class);
if (saleman.getParentId() == null) {
saleman.setParentId(ParentDataEnum.RootId.getCode());
}
if (!checkCUD(salemanMapper.insert(saleman))) {
log.error("添加区代理 -->添加区代理失败,受影响行数不为1:" + addSalmanVO.getCreateId());
throw new QmdException("添加区代理失败");
}
SalemanVO salemanVO = CopyBeanUtil.copyBean(saleman, SalemanVO.class);
return salemanVO;
}
/**
 * Updates an existing salesman/agent record by id.
 *
 * @param managerVO new field values; the id selects the row
 * @return affected row count (always 1 on success)
 */
@Override
public int updateSaleman(SalemanVO managerVO) {
Saleman manager = salemanMapper.selectById(managerVO.getId());
if (manager == null) {
throw new QmdException("该人员信息不存在");
}
Saleman updateManager = CopyBeanUtil.copyBean(managerVO, Saleman.class);
int i = salemanMapper.updateById(updateManager);
if (i != 1) {
throw new QmdException("修改人员信息失败");
}
return i;
}
/**
 * Deletes a salesman/agent record by id (hard delete via the mapper).
 *
 * @param id primary key of the record
 * @return affected row count (always 1 on success)
 */
@Override
public int deleteSaleman(Long id) {
int i = salemanMapper.deleteById(id);
if (i != 1) {
throw new QmdException("删除人员信息失败");
}
return i;
}
/**
 * Looks up a salesman/agent record by primary key.
 *
 * @param id primary key of the record
 * @return the record as a VO, or null when it does not exist
 */
@Override
public SalemanVO getSalemanById(Long id) {
Saleman manager = salemanMapper.selectById(id);
if (manager == null) {
return null;
}
SalemanVO managerVO = CopyBeanUtil.copyBean(manager, SalemanVO.class);
return managerVO;
}
// Looks up the active (status = normal) salesman/agent record for a user id;
// returns null when the user holds no such record.
@Override
public SalemanVO getSalemanByUserId(Long userId) {
AssertUtil.isNull(userId, "ManagerServiceImpl|getManagerByUserId|传入参数userId为空");
Saleman manager = salemanMapper.selectOne(new QueryWrapper<Saleman>().eq("user_id", userId)
.eq("status", StatusEnum.normal.getCode()));
if (Objects.nonNull(manager)) {
return CopyBeanUtil.copyBean(manager, SalemanVO.class);
}
return null;
}
/**
 * Looks up the active salesman/agent record by the user's username
 * (presumably the phone number, per the original comment — TODO confirm).
 * Returns null when the user has no active record; throws when the
 * username matches no platform user at all.
 */
@Override
public SalemanVO getSalemanByUserName(String userName) {
UserVO userVO = userService.getUserByUsername(userName);
if (userVO == null) {
throw new QmdException("用户不存在");
}
Saleman manager = salemanMapper.selectOne(new QueryWrapper<Saleman>().eq("user_id", userVO.getId())
.eq("status", StatusEnum.normal.getCode()));
if (Objects.nonNull(manager)) {
return CopyBeanUtil.copyBean(manager, SalemanVO.class);
}
return null;
}
/**
 * Back-office paged listing of salesmen/agents joined with user data, with
 * address codes resolved to province/city/district names and each user's
 * available (unused) qualification-ticket count attached.
 *
 * @param params filter values plus page number/size
 * @return the populated page, or null when the page is empty
 */
@Override
public PageInfo<SalemanAndUserVO> querySalemanAndUser(SalemanAndUserParamVO params) {
PageHelper.startPage(params.getPageNum(), params.getPageSize());
List<SalemanAndUserVO> list = salemanMapper.querySalemanAndUser(params);
if (ObjectUtils.isEmpty(list)) {
return null;
}
// First pass: collect address codes and user ids for batch lookups.
List<Integer> proCodeList = new ArrayList<Integer>();
List<Integer> cityCodeList = new ArrayList<Integer>();
List<Integer> countryCodeList = new ArrayList<Integer>();
List<Long> userIdList = new ArrayList<>();
for (SalemanAndUserVO andUserVO : list) {
// Province agents carry a province code.
if (andUserVO.getType().equals(SalemanTypeEnums.ProAgent.getCode()) && andUserVO.getProCode() != null) {
proCodeList.add(Integer.parseInt(andUserVO.getProCode()));
}
// City agents carry a city code.
if (andUserVO.getType().equals(SalemanTypeEnums.CityAgent.getCode()) && andUserVO.getCityCode() != null) {
cityCodeList.add(Integer.parseInt(andUserVO.getCityCode()));
}
// Salesmen and district agents carry a district ("country") code.
if ((andUserVO.getType().equals(SalemanTypeEnums.Saleman.getCode())
|| andUserVO.getType().equals(SalemanTypeEnums.CountryAgent.getCode()))
&& andUserVO.getCountryCode() != null) {
countryCodeList.add(Integer.parseInt(andUserVO.getCountryCode()));
}
userIdList.add(andUserVO.getUserId());
}
// Batch-resolve province names.
List<AddressVO> addressByProCodeList = new ArrayList<>();
if (!ObjectUtils.isEmpty(proCodeList)) {
addressByProCodeList = addressService.getAddressByProCodeList(proCodeList);
}
// Batch-resolve city names.
List<AddressVO> addressByCityCodeList = new ArrayList<>();
if (!ObjectUtils.isEmpty(cityCodeList)) {
addressByCityCodeList = addressService.getAddressByCityCodeList(cityCodeList);
}
// Batch-resolve district names.
List<AddressVO> addressByDistrictCodeList = new ArrayList<>();
if (!ObjectUtils.isEmpty(countryCodeList)) {
addressByDistrictCodeList = addressService.getAddressByDistrictCodeList(countryCodeList);
}
// Batch-load unused qualification-ticket counts for all listed users.
List<TicketNumVO> tiketNumByUserIdList = ticketService.getTiketNumByUserIdList(userIdList,
TicketStatusEnums.UNUSE.getCode());
// Second pass: stitch the resolved names and counts back onto each row.
for (SalemanAndUserVO andUserVO : list) {
// Province agents: fill in the province name.
if (!ObjectUtils.isEmpty(addressByProCodeList)) {
for (AddressVO addressVO : addressByProCodeList) {
if (andUserVO.getProCode() != null && addressVO.getProvinceCode() != null
&& andUserVO.getProCode().equals(addressVO.getProvinceCode().toString())) {
andUserVO.setProName(addressVO.getProvinceName());
}
}
}
// City agents: fill in province + city names.
if (!ObjectUtils.isEmpty(addressByCityCodeList)) {
for (AddressVO addressVO : addressByCityCodeList) {
if (andUserVO.getCityCode() != null && addressVO.getCityCode() != null
&& andUserVO.getCityCode().equals(addressVO.getCityCode().toString())) {
andUserVO.setProName(addressVO.getProvinceName());
andUserVO.setCityName(addressVO.getCityName());
}
}
}
// Salesmen / district agents: fill in province + city + district names.
if (!ObjectUtils.isEmpty(addressByDistrictCodeList)) {
for (AddressVO addressVO : addressByDistrictCodeList) {
if (andUserVO.getCountryCode() != null && addressVO.getDistrictCode() != null
&& andUserVO.getCountryCode().equals(addressVO.getDistrictCode().toString())) {
andUserVO.setProName(addressVO.getProvinceName());
andUserVO.setCityName(addressVO.getCityName());
andUserVO.setCountryName(addressVO.getDistrictName());
}
}
}
// Attach the user's available (unused) ticket count.
if (!ObjectUtils.isEmpty(tiketNumByUserIdList)) {
for (TicketNumVO numVO : tiketNumByUserIdList) {
if (numVO.getUserId().equals(andUserVO.getUserId())) {
andUserVO.setTicketNum(numVO.getTicketNum());
}
}
}
}
PageInfo<SalemanAndUserVO> pageInfo = new PageInfo<>(list);
return pageInfo;
}
/**
 * Resolves the province/city/district agents and the salesman bound to a shop,
 * grouped into one {@link ShopAgentVO} by agent type.
 *
 * @param shopId the shop id
 * @return the grouped agents, or null when the shop or all agents are missing
 */
@Override
public ShopAgentVO getAgentAndSalemanByShopId(Long shopId) {
    // Without the shop we cannot derive its region codes.
    ShopBasicVO shop = shopService.getShopBasicById(shopId);
    if (shop == null) {
        return null;
    }
    // One query returns agents of all levels plus the assigned salesman.
    List<SalemanVO> candidates = salemanMapper.getAgentAndSalemanByCode(
            shop.getProCode(), shop.getCityCode(), shop.getCountyCode(), shop.getSalemanId());
    if (ObjectUtils.isEmpty(candidates)) {
        return null;
    }
    // Slot each row into the VO according to its type discriminator;
    // rows with a null type are ignored.
    ShopAgentVO result = new ShopAgentVO();
    for (SalemanVO candidate : candidates) {
        Byte type = candidate.getType();
        if (type == null) {
            continue;
        }
        if (type.equals(SalemanTypeEnums.ProAgent.getCode())) {
            result.setProAgent(candidate);
        } else if (type.equals(SalemanTypeEnums.CityAgent.getCode())) {
            result.setCityAgent(candidate);
        } else if (type.equals(SalemanTypeEnums.CountryAgent.getCode())) {
            result.setCountryAgent(candidate);
        } else if (type.equals(SalemanTypeEnums.Saleman.getCode())) {
            result.setSaleman(candidate);
        }
    }
    return result;
}
/**
 * Looks up the active agent bound to a region code of the given type.
 * Types follow the switch below: 4 = province agent, 3 = city agent,
 * 2 = district agent; any other type yields null.
 *
 * @param code region code (province/city/district, depending on type)
 * @param type agent type discriminator; must not be null (switch unboxes it)
 * @return the matching agent as a VO, or null when none exists or the type is
 *         unsupported
 */
@Override
public SalemanVO getAgentByCodeAndType(String code, Byte type) {
    // Bug fix: previously initialized to `new Saleman()`, so an unsupported
    // type skipped the null check and returned a blank copied VO instead of null.
    Saleman saleman = null;
    switch (type) {
    case 4:
        saleman = salemanMapper.selectOne(
                new QueryWrapper<Saleman>().eq("pro_code", code).eq("type", SalemanTypeEnums.ProAgent.getCode())
                        .eq("status", StatusEnum.normal.getCode()));
        break;
    case 3:
        saleman = salemanMapper.selectOne(
                new QueryWrapper<Saleman>().eq("city_code", code).eq("type", SalemanTypeEnums.CityAgent.getCode())
                        .eq("status", StatusEnum.normal.getCode()));
        break;
    case 2:
        saleman = salemanMapper.selectOne(new QueryWrapper<Saleman>().eq("country_code", code)
                .eq("type", SalemanTypeEnums.CountryAgent.getCode())
                .eq("status", StatusEnum.normal.getCode()));
        break;
    default:
        break;
    }
    if (saleman == null) {
        return null;
    }
    return CopyBeanUtil.copyBean(saleman, SalemanVO.class);
}
/**
 * Returns the raw list of agents/salesman tied to a shop's region codes.
 *
 * @param shopId the shop id; must not be null
 * @return the mapper's result list, or null when the shop does not exist
 */
@Override
public List<SalemanVO> getAgentSalemenByShopId(Long shopId) {
    AssertUtil.isNull(shopId, "ISalemanService|getAgentSalemenByShopId|传入参数shopId为空");
    ShopBasicVO shop = shopService.getShopBasicById(shopId);
    if (shop == null) {
        return null;
    }
    return salemanMapper.getAgentAndSalemanByCode(shop.getProCode(), shop.getCityCode(),
            shop.getCountyCode(), shop.getSalemanId());
}
/**
 * Queries an agent/salesman together with their role assignments.
 *
 * @param userId the user id
 * @return saleman (with region names filled in) plus role list, or null when
 *         no active saleman record exists for the user
 */
@Override
public SalemanAndRoleVO getSalemanAndRoleByUserId(Long userId) {
    Saleman entity = salemanMapper.selectOne(new QueryWrapper<Saleman>()
            .eq("user_id", userId)
            .eq("status", StatusEnum.normal.getCode()));
    if (entity == null) {
        return null;
    }
    // Decorate the copy with province/city/district names before returning.
    SalemanAndUserVO salemanWithAddress = CopyBeanUtil.copyBean(entity, SalemanAndUserVO.class);
    getSalemanAddress(salemanWithAddress);
    SalemanAndRoleVO result = new SalemanAndRoleVO();
    result.setSalemanVO(salemanWithAddress);
    result.setRoleList(roleService.getRoleListByUserId(userId));
    return result;
}
/**
 * Queries an agent/salesman by user id, merging in basic user profile fields.
 *
 * @param userId the user id
 * @return the combined VO with region names filled in, or null when no active
 *         saleman record exists for the user
 */
@Override
public SalemanAndUserVO getSalemanAndUserByUserId(Long userId) {
    Saleman entity = salemanMapper.selectOne(new QueryWrapper<Saleman>()
            .eq("user_id", userId)
            .eq("status", StatusEnum.normal.getCode()));
    if (entity == null) {
        return null;
    }
    SalemanAndUserVO result = CopyBeanUtil.copyBean(entity, SalemanAndUserVO.class);
    // Fill province/city/district names based on the saleman's type.
    getSalemanAddress(result);
    // Overlay user profile fields when the user record is available.
    UserVO user = userService.getUserById(userId);
    if (user != null) {
        result.setUserName(user.getUserName());
        result.setNickName(user.getNickName());
        result.setName(user.getName());
        result.setSex(user.getSex());
    }
    return result;
}
/**
 * Fills in the province/city/district display names on a saleman VO, keyed by
 * the agent type: province agents get only the province name, city agents get
 * province + city, district agents and salesmen get all three.
 *
 * @param salemanVO the VO to decorate in place; its type and the matching
 *                  region code are expected to be non-null
 */
private void getSalemanAddress(SalemanAndUserVO salemanVO) {
    Byte type = salemanVO.getType();
    if (type.equals(SalemanTypeEnums.ProAgent.getCode())) {
        // Province agent: resolve by province code.
        AddressVO address = addressService.getAddressByCode(
                Integer.parseInt(salemanVO.getProCode()), AddressLevelEnum.Province.getCode());
        if (address != null) {
            salemanVO.setProName(address.getProvinceName());
        }
    } else if (type.equals(SalemanTypeEnums.CityAgent.getCode())) {
        // City agent: resolve by city code.
        AddressVO address = addressService.getAddressByCode(
                Integer.parseInt(salemanVO.getCityCode()), AddressLevelEnum.City.getCode());
        if (address != null) {
            salemanVO.setProName(address.getProvinceName());
            salemanVO.setCityName(address.getCityName());
        }
    } else if (type.equals(SalemanTypeEnums.CountryAgent.getCode())
            || type.equals(SalemanTypeEnums.Saleman.getCode())) {
        // District agent or salesman: resolve by district code.
        AddressVO address = addressService.getAddressByCode(
                Integer.parseInt(salemanVO.getCountryCode()), AddressLevelEnum.District.getCode());
        if (address != null) {
            salemanVO.setProName(address.getProvinceName());
            salemanVO.setCityName(address.getCityName());
            salemanVO.setCountryName(address.getDistrictName());
        }
    }
}
/**
 * Determines whether one user is the other's downline.
 * Agent levels follow the switch below: 4 = province agent, 3 = city agent,
 * 2 = district agent. The upline must be strictly higher-leveled and share
 * the region code corresponding to its own level with the downline.
 *
 * @param agentUseId
 *            upline user id
 * @param OfflineUserId
 *            downline user id
 * @return true when the pair forms an upline/downline relationship, else false
 */
@Override
public Boolean isOffline(Long agentUseId, Long OfflineUserId) {
    // Upline agent record (active rows only).
    Saleman agent = salemanMapper.selectOne(new QueryWrapper<Saleman>().eq("user_id", agentUseId)
            .eq("status", StatusEnum.normal.getCode()));
    // Downline agent/salesman record (active rows only).
    Saleman offlineSaleman = salemanMapper.selectOne(new QueryWrapper<Saleman>().eq("user_id", OfflineUserId)
            .eq("status", StatusEnum.normal.getCode()));
    // Either side missing -> not a relationship.
    if (agent == null || offlineSaleman == null) {
        return false;
    }
    // The upline's level must be strictly greater than the downline's.
    if (agent.getType() <= offlineSaleman.getType()) {
        return false;
    }
    switch (agent.getType()) {
    case 2:
        // District agent upline: downline must be in the same district.
        if (!agent.getCountryCode().equals(offlineSaleman.getCountryCode())) {
            return false;
        }
        break;
    case 3:
        // City agent upline: downline must be in the same city.
        if (!agent.getCityCode().equals(offlineSaleman.getCityCode())) {
            return false;
        }
        break;
    case 4:
        // Province agent upline: downline must be in the same province.
        if (!agent.getProCode().equals(offlineSaleman.getProCode())) {
            return false;
        }
        break;
    default:
        return false;
    }
    return true;
}
/**
 * Updates a salesman/agent record and keeps the associated bank card in sync:
 * an existing card is updated in place, otherwise a new card is created under
 * the user's pay account. Runs in a transaction so a failing step rolls back.
 *
 * @param vo the update payload; must not be null and must reference an
 *           existing saleman id
 * @throws QmdException when the saleman does not exist, the row update fails,
 *                      or the user has no pay account
 */
@Override
@Transactional(rollbackFor = {QmdException.class, Exception.class})
public void updateSalemanInfo(UpdateSalmanVO vo) {
    AssertUtil.isNull(vo, "SalemanServiceImpl|updateSalemanInfo|传入参数vo为空");
    SalemanVO old = getSalemanById(vo.getId());
    if (Objects.isNull(old)) {
        throw new QmdException("未找到更新的业务员信息");
    }
    Saleman entity = CopyBeanUtil.copyBean(vo, Saleman.class);
    if (!checkCUD(salemanMapper.updateById(entity))) {
        throw new QmdException("更新 业务员信息失败");
    }
    PayAccountVO payAccount = payAccountService.getPayAccountByUserId(old.getUserId());
    if (Objects.isNull(payAccount)) {
        throw new QmdException("该业务员或代理没有账户, 请及时联系系统管理员.");
    }
    BankCardVO bankCardVO = bankCardService.getBankCardByAccountId(payAccount.getId());
    // NOTE(review): bankInfoVO may be null when vo.getBankId() is stale —
    // confirm upstream validation before dereferencing getSubBankName().
    BankInfoVO bankInfoVO = bankInfoService.getBankInfoById(vo.getBankId());
    if (Objects.nonNull(bankCardVO)) {
        // Bug fix: previously set accountId to vo.getId() (the saleman id);
        // the card belongs to the pay account, matching the create branch below.
        bankCardVO.setAccountId(payAccount.getId());
        bankCardVO.setBankName(bankInfoVO.getSubBankName());
        bankCardVO.setBankCardNo(vo.getBankCardNo());
        bankCardVO.setBankCardHolder(vo.getBankCardHolder());
        bankCardVO.setBankMobile(vo.getBankMobile());
        bankCardVO.setCardType(vo.getCardType());
        bankCardVO.setStatus(StatusEnum.normal.getCode());
        bankCardVO.setUpdateDate(vo.getUpdateDate());
        bankCardVO.setUpdateId(vo.getUpdateId());
        bankCardService.updateBankCard(bankCardVO);
    } else {
        // No card yet: create one under the pay account.
        bankCardVO = new BankCardVO();
        bankCardVO.setAccountId(payAccount.getId());
        bankCardVO.setBankName(bankInfoVO.getSubBankName());
        bankCardVO.setBankCardNo(vo.getBankCardNo());
        bankCardVO.setBankCardHolder(vo.getBankCardHolder());
        bankCardVO.setBankMobile(vo.getBankMobile());
        bankCardVO.setCardType(vo.getCardType());
        bankCardVO.setUseTimes(1);
        bankCardVO.setSort(1);
        bankCardVO.setStatus(StatusEnum.normal.getCode());
        bankCardVO.setCreateDate(vo.getUpdateDate());
        bankCardVO.setCreateId(vo.getUpdateId());
        bankCardService.addBankCard(bankCardVO);
    }
}
/**
 * Soft-deletes a salesman/agent record and removes its role binding.
 *
 * @param id       the saleman row id; must not be null
 * @param operator id of the user performing the unbind (audit fields)
 * @throws QmdException when the record does not exist or the update fails
 */
@Override
@Transactional(rollbackFor = {QmdException.class, Exception.class})
public void unBindSaleman(Long id, Long operator) {
    AssertUtil.isNull(id, "SalemanServiceImpl|unBindSaleman|传入id为空");
    Saleman target = salemanMapper.selectById(id);
    if (Objects.isNull(target)) {
        throw new QmdException("未找到需要解绑的业务员信息或代理信息");
    }
    // Soft delete: flip status and stamp the operator/time.
    target.setUpdateId(operator);
    target.setUpdateDate(new Date());
    target.setStatus(StatusEnum.deleted.getCode());
    if (!checkCUD(salemanMapper.updateById(target))) {
        throw new QmdException("删除业务员或代理信息失败");
    }
    // Also drop the associated user-role binding.
    userRoleService.deleteUserRoleById(target.getUserRoleId());
}
/**
 * Checks whether the user has an active saleman record, optionally filtered
 * to one or more agent types.
 *
 * @param userId the user id; must not be null
 * @param type   optional type filter; when present, only these types count
 * @return true when at least one matching active record exists
 */
@Override
public Boolean existSaleman(Long userId, Byte... type) {
    AssertUtil.isNull(userId, "SalemanServiceImpl|existSaleman|传入参数userId为空");
    QueryWrapper<Saleman> wrapper = new QueryWrapper<Saleman>()
            .eq("user_id", userId)
            .eq("status", StatusEnum.normal.getCode());
    // Apply the type filter only when callers actually passed types.
    if (Objects.nonNull(type) && type.length > 0) {
        wrapper.in("type", Lists.newArrayList(type));
    }
    return !CollectionUtils.isEmpty(salemanMapper.selectList(wrapper));
}
}
|
---
layout: post
title: LeetCode 0053 题解
description: "最大子序和"
keywords: test
category: LeetCode
tags: [solving LeetCode]
---
### 题目描述
[最大子序和](https://leetcode-cn.com/problems/maximum-subarray/)
### 思路
在线处理:若前面的序列使和大于0,则加入`nums[i]`中,否则抛弃之,只保留当前项`nums[i]`,并更新最大和。
### 题解
```java
class Solution {
public int maxSubArray(int[] nums) {
int thissum = 0, maxsum = nums[0];
for(int i = 0; i < nums.length; i++){
if(thissum > 0)
thissum += nums[i];
else
thissum = nums[i];
maxsum = Math.max(thissum, maxsum);
}
return maxsum;
}
}
```
* 法二:分而治之
```java
class Solution {
public static int maxSubArray(int[] nums) {
return maxSubArray(nums, 0, nums.length - 1);
}
public static int max3(int a, int b, int c) {
if(a >= b && a >= c) return a;
if(b >= a && b >= c) return b;
return c;
}
public static int maxSubArray(int[] nums, int lo, int hi) {
if(lo == hi) return nums[lo];
int mid = lo + (hi - lo) / 2;
// 左子列最大和
int maxleft = maxSubArray(nums, lo, mid);
// 右子列最大和
int maxright = maxSubArray(nums, mid+1, hi);
// 跨越边界的最大和
int sumleft = nums[mid], sumright = nums[mid+1];
int thissum = sumleft;
for (int i = mid-1; i >= lo; i--) {
thissum += nums[i];
if(thissum > sumleft)
sumleft = thissum;
}
thissum = sumright;
for (int i = mid+2; i <= hi; i++) {
thissum += nums[i];
if(thissum > sumright)
sumright = thissum;
}
int maxmid = sumleft + sumright;
return max3(maxleft, maxright, maxmid);
}
}
```
### 思考
在线处理是一个很好的方法,其时间与$N$成正比,实际上是动态规划的问题;而分治法则是典型的递归,与mergesort有相似之处。 |
# [grafanads] section
[Grafanads](../services/grafanads.md) service configuration
## db_threads
| | |
| -------------- | -------------------------- |
| Default value | `10` |
| YAML Path | `grafanads.db_threads` |
| Key-Value Path | `grafanads/db_threads` |
| Environment | `NOC_GRAFANADS_DB_THREADS` |
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.servicecomb.foundation.common.event;
import java.lang.reflect.Method;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import org.apache.commons.lang3.reflect.MethodUtils;
import org.apache.servicecomb.foundation.common.concurrent.ConcurrentHashMapEx;
import com.google.common.eventbus.EventBus;
import com.google.common.eventbus.Subscribe;
/**
* for register/unregister rarely
*/
/**
 * A lightweight {@link EventBus} specialization for workloads where
 * register/unregister happen rarely but post happens often: the per-event-class
 * subscriber list is cached, and the whole cache is discarded whenever the
 * registration set changes.
 */
public class SimpleEventBus extends EventBus {
  private final Map<Object, List<SimpleSubscriber>> subscribersMap = new ConcurrentHashMapEx<>();

  // key is event class; replaced wholesale whenever registrations change
  private Map<Class<?>, List<SimpleSubscriber>> subscribersCache = new ConcurrentHashMapEx<>();

  /**
   * Wraps every method of {@code instance} annotated with {@link Subscribe}
   * (searching superclasses and non-public methods too, per the two boolean
   * flags) as a {@link SimpleSubscriber}.
   */
  private List<SimpleSubscriber> collectSubscribers(Object instance) {
    List<SimpleSubscriber> subscribers = new ArrayList<>();
    Method[] methods = MethodUtils.getMethodsWithAnnotation(instance.getClass(), Subscribe.class, true, true);
    for (Method method : methods) {
      SimpleSubscriber subscriber = new SimpleSubscriber(instance, method);
      subscribers.add(subscriber);
    }
    return subscribers;
  }

  @Override
  public void register(Object instance) {
    subscribersMap.computeIfAbsent(instance, this::collectSubscribers);
    // even if ignored because of a duplicate register,
    // still reset the cache — this keeps the logic simpler
    subscribersCache = new ConcurrentHashMapEx<>();
  }

  @Override
  public void unregister(Object instance) {
    if (subscribersMap.remove(instance) != null) {
      subscribersCache = new ConcurrentHashMapEx<>();
    }
  }

  // Consistency fix: @Override was missing here although register/unregister
  // carry it; EventBus declares a public post(Object).
  @Override
  public void post(Object event) {
    // The cache is always reset after register/unregister, so it always
    // matches the latest subscribersMap eventually; the worst case is
    // invoking collectSubscriberForEvent multiple times, which is harmless.
    List<SimpleSubscriber> subscribers = subscribersCache
        .computeIfAbsent(event.getClass(), this::collectSubscriberForEvent);
    for (SimpleSubscriber subscriber : subscribers) {
      subscriber.dispatchEvent(event);
    }
  }

  /**
   * subscribersMap is almost stable, so the cost of this full scan is
   * acceptable and not optimized.
   *
   * @param eventClass concrete class of the posted event
   * @return subscribers whose single parameter type accepts {@code eventClass}
   */
  private List<SimpleSubscriber> collectSubscriberForEvent(Class<?> eventClass) {
    List<SimpleSubscriber> subscribersForEvent = new ArrayList<>();
    for (List<SimpleSubscriber> subscribers : subscribersMap.values()) {
      for (SimpleSubscriber subscriber : subscribers) {
        if (subscriber.getMethod().getParameterTypes()[0].isAssignableFrom(eventClass)) {
          subscribersForEvent.add(subscriber);
        }
      }
    }
    return subscribersForEvent;
  }
}
|
# `bustd` Pacman Hooks
`bustd` is designed and expected to operate well in harsh environments that
may be resource-constrained or memory-starved. Therefore, extra care is taken with this
package to ensure that the latest (and greatest) version is always running.

## Why are Pacman Hooks Used
`pacman` is Arch's system package manager; therefore, any changes to our executable's
binary will be caused by this program, whether that be an installation, upgrade, or
un-installation. So, from the moment we install `bustd`, the most recent version available
is running on our system. After an update is pushed to the *AUR*, `pacman` will be able
to fetch and install it. At that moment there is a newer version of the binary locally
available, but there is no guarantee that the old version will be shut down and the new
version started. Pacman hooks solve this problem by forcing a restart of `bustd`
whenever the executable is modified.
---
## Hooks Used
- [Install](#hooks-used-install)
- [Remove](#hooks-used-remove)
- [Upgrade](#hooks-used-upgrade)
### Install<a name="hooks-used-install"></a>
This hook is triggered on the event that the package **bustd** is freshly installed
locally. Following this install, the program `bustd` will be immediately kicked-off via
a systemd service unit, which will also begin the program on system startup.
### Remove<a name="hooks-used-remove"></a>
This hook is triggered on the event that the package **bustd** is uninstalled from the
system. Once the uninstallation process is begun, before any files are removed, this hook
will halt the currently running instance and not begin another one, allowing for a smooth
uninstallation.
### Upgrade<a name="hooks-used-upgrade"></a>
This hook is triggered on the event that a new version of the package **bustd** becomes
available on the *AUR*. Once the executable has been upgraded, the currently running
instance is halted and an instance of the new version is started.
## Installation
Install this package before, at the same time as, or after the *bustd* package.
> `$ yay -S bustd-pacman-hooks`
Once installed, the hooks will simply be set up but will not be triggered until the
appropriate event takes place. Therefore, once the package bustd is installed, the binary
will be registered with systemd and started. Any updates to this package will cause
a restart in the binary, and a removal of the program also halts the executable. Should
any of these events not trigger the associated hook, it will simply keep its current
instance running and not restart to the fresh version until a reboot.
|
# Copyright (C) The Arvados Authors. All rights reserved.
#
# SPDX-License-Identifier: Apache-2.0
require "arvados/collection"
require "minitest/autorun"
require "sdk_fixtures"
class CollectionTest < Minitest::Test
include SDKFixtures
TWO_BY_TWO_BLOCKS = SDKFixtures.random_blocks(2, 9)
TWO_BY_TWO_MANIFEST_A =
[". #{TWO_BY_TWO_BLOCKS.first} 0:5:f1 5:4:f2\n",
"./s1 #{TWO_BY_TWO_BLOCKS.last} 0:5:f1 5:4:f3\n"]
TWO_BY_TWO_MANIFEST_S = TWO_BY_TWO_MANIFEST_A.join("")
# Returns five distinct stub block locators, each claiming a 9-byte block.
def abcde_blocks
  ["aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa+9", "bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb+9", "cccccccccccccccccccccccccccccccc+9", "dddddddddddddddddddddddddddddddd+9", "eeeeeeeeeeeeeeeeeeeeeeeeeeeeeeee+9"]
end
### .new
# Constructing with no manifest yields an empty manifest.
def test_empty_construction
  coll = Arv::Collection.new
  assert_equal("", coll.manifest_text)
end
# Construction from fixture manifests round-trips their text unchanged.
def test_successful_construction
  [:SIMPLEST_MANIFEST, :MULTIBLOCK_FILE_MANIFEST, :MULTILEVEL_MANIFEST].
    each do |manifest_name|
    manifest_text = SDKFixtures.const_get(manifest_name)
    coll = Arv::Collection.new(manifest_text)
    assert_equal(manifest_text, coll.manifest_text,
                 "did not get same manifest back out from #{manifest_name}")
  end
end
# Zero-length file ranges normalize cleanly; ranges beyond a block's extent
# raise RangeError during normalize.
def test_range_edge_cases
  [
    ". d41d8cd98f00b204e9800998ecf8427e+0 0:0:file1\n",
    ". d41d8cd98f00b204e9800998ecf8427e+0 0:0:file1 0:0:file2\n",
    ". d41d8cd98f00b204e9800998ecf8427e+0 0:0:file1 0:0:file1\n",
    ". d41d8cd98f00b204e9800998ecf8427e+0 0:0:file1 0:0:file2 0:0:file1\n",
    ". 0cc175b9c0f1b6a831c399e269772661+1 0:0:file1 1:0:file2 1:0:file1\n",
  ].each do |txt|
    coll = Arv::Collection.new(txt)
    coll.normalize
    assert_match(/ 0:0:file1/, coll.manifest_text)
  end
  [
    ". d41d8cd98f00b204e9800998ecf8427e+0 1:0:file1\n",
    ". 0cc175b9c0f1b6a831c399e269772661+1 0:0:file1 2:0:file2 1:0:file1\n",
  ].each do |txt|
    assert_raises(RangeError) do
      coll = Arv::Collection.new(txt)
      coll.normalize
    end
  end
end
# Text that is not a valid manifest is rejected at construction time.
def test_non_manifest_construction_error
  ["word", ". abc def", ". #{random_block} 0:", ". / !"].each do |m_text|
    assert_raises(ArgumentError,
                  "built collection from manifest #{m_text.inspect}") do
      Arv::Collection.new(m_text)
    end
  end
end
# A manifest naming the same path as both a file and a directory is rejected.
def test_file_directory_conflict_construction_error
  assert_raises(ArgumentError) do
    Arv::Collection.new(NAME_CONFLICT_MANIFEST)
  end
end
# Construction alone must not normalize the manifest.
def test_no_implicit_normalization
  coll = Arv::Collection.new(NONNORMALIZED_MANIFEST)
  assert_equal(NONNORMALIZED_MANIFEST, coll.manifest_text)
end
### .normalize
# "." and ".." are treated as ordinary names in manifests, not path parts.
def test_non_posix_path_handling
  m_text = "./.. #{random_block(9)} 0:5:. 5:4:..\n"
  coll = Arv::Collection.new(m_text.dup)
  coll.normalize
  assert_equal(m_text, coll.manifest_text)
end
# Normalization may escape a lone backslash in a file spec but changes nothing else.
def test_escaping_through_normalization
  coll = Arv::Collection.new(MANY_ESCAPES_MANIFEST)
  coll.normalize
  # The result should simply duplicate the file spec.
  # The source file spec has an unescaped backslash in it.
  # It's OK for the Collection class to properly escape that.
  expect_text = MANY_ESCAPES_MANIFEST.sub(/ \d+:\d+:\S+/) do |file_spec|
    file_spec.gsub(/([^\\])(\\[^\\\d])/, '\1\\\\\2')
  end
  assert_equal(expect_text, coll.manifest_text)
end
# Streams sharing locators merge; the second file's offset shifts with the
# overlap start index (2 bytes per preceding block).
def test_concatenation_with_locator_overlap(over_index=0)
  blocks = random_blocks(4, 2)
  blocks_s = blocks.join(" ")
  coll = Arv::Collection.new(". %s 0:8:file\n. %s 0:4:file\n" %
                             [blocks_s, blocks[over_index, 2].join(" ")])
  coll.normalize
  assert_equal(". #{blocks_s} 0:8:file #{over_index * 2}:4:file\n",
               coll.manifest_text)
end
# Overlap beginning at the middle block.
def test_concatenation_with_middle_locator_overlap
  test_concatenation_with_locator_overlap(1)
end
# Overlap beginning at the final block pair.
def test_concatenation_with_end_locator_overlap
  test_concatenation_with_locator_overlap(2)
end
# Partially overlapping block runs still concatenate into one stream.
def test_concatenation_with_partial_locator_overlap
  blocks = random_blocks(3, 3)
  coll = Arv::Collection
    .new(". %s 0:6:overlap\n. %s 0:6:overlap\n" %
         [blocks[0, 2].join(" "), blocks[1, 2].join(" ")])
  coll.normalize
  assert_equal(". #{blocks.join(' ')} 0:6:overlap 3:6:overlap\n",
               coll.manifest_text)
end
# normalize sorts file specs by file name.
def test_normalize
  block = random_block
  coll = Arv::Collection.new(". #{block} 0:0:f2 0:0:f1\n")
  coll.normalize
  assert_equal(". #{block} 0:0:f1 0:0:f2\n", coll.manifest_text)
end
# Parameterized helper: the given file specs over num_blocks 5-byte blocks
# must survive normalization unchanged.
def test_normalization_file_spans_two_whole_blocks(file_specs="0:10:f1",
                                                   num_blocks=2)
  blocks = random_blocks(num_blocks, 5)
  m_text = ". #{blocks.join(' ')} #{file_specs}\n"
  coll = Arv::Collection.new(m_text.dup)
  coll.normalize
  assert_equal(m_text, coll.manifest_text)
end
# File occupying only the start of the block run.
def test_normalization_file_fits_beginning_block
  test_normalization_file_spans_two_whole_blocks("0:7:f1")
end
# File running to the end of the block run.
def test_normalization_file_fits_end_block
  test_normalization_file_spans_two_whole_blocks("3:7:f1")
end
# File straddling the middle of both blocks.
def test_normalization_file_spans_middle
  test_normalization_file_spans_two_whole_blocks("3:5:f1")
end
# File spanning three whole blocks.
def test_normalization_file_spans_three_whole_blocks
  test_normalization_file_spans_two_whole_blocks("0:15:f1", 3)
end
# File specs that skip over bytes.
def test_normalization_file_skips_bytes
  test_normalization_file_spans_two_whole_blocks("0:3:f1 5:5:f1")
end
# File specs that jump back to earlier bytes.
def test_normalization_file_inserts_bytes
  test_normalization_file_spans_two_whole_blocks("0:3:f1 5:3:f1 3:2:f1")
end
# File specs that read the same byte range twice.
def test_normalization_file_duplicates_bytes
  test_normalization_file_spans_two_whole_blocks("2:3:f1 2:3:f1", 1)
end
# A locator listed twice normalizes to one copy plus a duplicate reference.
def test_normalization_handles_duplicate_locator
  blocks = random_blocks(2, 5)
  coll = Arv::Collection.new(". %s %s 1:8:f1 11:8:f1\n" %
                             [blocks.join(" "), blocks.reverse.join(" ")])
  coll.normalize
  assert_equal(". #{blocks.join(' ')} #{blocks[0]} 1:8:f1 6:8:f1\n",
               coll.manifest_text)
end
### .cp_r
# Copying a file within the root stream duplicates its file spec.
def test_simple_file_copy
  coll = Arv::Collection.new(SIMPLEST_MANIFEST)
  coll.cp_r("./simple.txt", "./new")
  assert_equal(SIMPLEST_MANIFEST.sub(" 0:9:", " 0:9:new 0:9:"),
               coll.manifest_text)
end
# Copying a file into another stream appends its block and file spec there.
def test_copy_file_into_other_stream(target="./s1/f2", basename="f2")
  coll = Arv::Collection.new(TWO_BY_TWO_MANIFEST_S)
  coll.cp_r("./f2", target)
  expected = "%s./s1 %s 0:5:f1 14:4:%s 5:4:f3\n" %
             [TWO_BY_TWO_MANIFEST_A.first,
              TWO_BY_TWO_BLOCKS.reverse.join(" "), basename]
  assert_equal(expected, coll.manifest_text)
end
# A stream-only target keeps the source basename.
def test_implicit_copy_file_into_other_stream
  test_copy_file_into_other_stream("./s1")
end
# An explicit target filename renames the copy.
def test_copy_file_into_other_stream_with_new_name
  test_copy_file_into_other_stream("./s1/f2a", "f2a")
end
# Copying over an existing same-named file in another stream overwrites it.
def test_copy_file_over_in_other_stream(target="./s1/f1")
  coll = Arv::Collection.new(TWO_BY_TWO_MANIFEST_S)
  coll.cp_r("./f1", target)
  expected = "%s./s1 %s 0:5:f1 14:4:f3\n" %
             [TWO_BY_TWO_MANIFEST_A.first, TWO_BY_TWO_BLOCKS.join(" ")]
  assert_equal(expected, coll.manifest_text)
end
# A stream-only target also overwrites the same-named file there.
def test_implicit_copy_file_over_in_other_stream
  test_copy_file_over_in_other_stream("./s1")
end
# Copying a whole stream adds it under the new name.
def test_simple_stream_copy
  coll = Arv::Collection.new(TWO_BY_TWO_MANIFEST_S)
  coll.cp_r("./s1", "./sNew")
  new_line = TWO_BY_TWO_MANIFEST_A.last.sub("./s1 ", "./sNew ")
  assert_equal(TWO_BY_TWO_MANIFEST_S + new_line, coll.manifest_text)
end
# Copying a substream into another stream nests it there.
def test_copy_stream_into_other_stream(target="./dir2/subdir",
                                       basename="subdir")
  coll = Arv::Collection.new(MULTILEVEL_MANIFEST)
  coll.cp_r("./dir1/subdir", target)
  new_line = MULTILEVEL_MANIFEST.lines[4].sub("./dir1/subdir ",
                                              "./dir2/#{basename} ")
  assert_equal(MULTILEVEL_MANIFEST + new_line, coll.manifest_text)
end
# A stream-only target keeps the substream's own name.
def test_implicit_copy_stream_into_other_stream
  test_copy_stream_into_other_stream("./dir2")
end
# An explicit target renames the copied substream.
def test_copy_stream_into_other_stream_with_new_name
  test_copy_stream_into_other_stream("./dir2/newsub", "newsub")
end
# Copying a stream over a previously emptied stream simply replaces it.
def test_copy_stream_over_empty_stream
  coll = Arv::Collection.new(MULTILEVEL_MANIFEST)
  (1..3).each do |file_num|
    coll.rm("./dir0/subdir/file#{file_num}")
  end
  coll.cp_r("./dir1/subdir", "./dir0")
  expected = MULTILEVEL_MANIFEST.lines
  expected[2] = expected[4].sub("./dir1/", "./dir0/")
  assert_equal(expected.join(""), coll.manifest_text)
end
# Copying a stream onto a file path fails with ENOTDIR.
def test_copy_stream_over_file_raises_ENOTDIR(source="./s1", target="./f2")
  coll = Arv::Collection.new(TWO_BY_TWO_MANIFEST_S)
  assert_raises(Errno::ENOTDIR) do
    coll.cp_r(source, target)
  end
end
# A file cannot serve as a parent directory either.
def test_copy_file_under_file_raises_ENOTDIR
  test_copy_stream_over_file_raises_ENOTDIR("./f1", "./f2/newfile")
end
# Copying onto a non-empty stream merges contents, overwriting name clashes.
def test_copy_stream_over_nonempty_stream_merges_and_overwrites
  blocks = random_blocks(3, 9)
  manifest_a =
    ["./subdir #{blocks[0]} 0:1:s1 1:2:zero\n",
     "./zdir #{blocks[1]} 0:9:zfile\n",
     "./zdir/subdir #{blocks[2]} 0:1:s2 1:2:zero\n"]
  coll = Arv::Collection.new(manifest_a.join(""))
  coll.cp_r("./subdir", "./zdir")
  manifest_a[2] = "./zdir/subdir %s %s 0:1:s1 9:1:s2 1:2:zero\n" %
                  [blocks[0], blocks[2]]
  assert_equal(manifest_a.join(""), coll.manifest_text)
end
# Copying a stream into its own substream must not recurse into the copy.
def test_copy_stream_into_substream(source="./dir1",
                                    target="./dir1/subdir/dir1")
  coll = Arv::Collection.new(MULTILEVEL_MANIFEST)
  coll.cp_r(source, target)
  expected = MULTILEVEL_MANIFEST.lines.flat_map do |line|
    [line, line.gsub(/^#{Regexp.escape(source)}([\/ ])/, "#{target}\\1")].uniq
  end
  assert_equal(expected.sort.join(""), coll.manifest_text)
end
# The root stream itself can be copied under a new name.
def test_copy_root
  test_copy_stream_into_substream(".", "./root")
end
# The root stays writable after being copied; new files merge alongside existing ones.
def test_adding_to_root_after_copy
  coll = Arv::Collection.new(SIMPLEST_MANIFEST)
  coll.cp_r(".", "./root")
  src_coll = Arv::Collection.new(COLON_FILENAME_MANIFEST)
  coll.cp_r("./file:test.txt", ".", src_coll)
  got_lines = coll.manifest_text.lines
  assert_equal(2, got_lines.size)
  assert_match(/^\. \S{33,} \S{33,} 0:9:file:test\.txt 9:9:simple\.txt\n/,
               got_lines.first)
  assert_equal(SIMPLEST_MANIFEST.sub(". ", "./root "), got_lines.last)
end
# cp_r returns the collection, so calls can be chained.
def test_copy_chaining
  coll = Arv::Collection.new(SIMPLEST_MANIFEST)
  coll.cp_r("./simple.txt", "./a").cp_r("./a", "./b")
  assert_equal(SIMPLEST_MANIFEST.sub(" 0:9:", " 0:9:a 0:9:b 0:9:"),
               coll.manifest_text)
end
# Helper: builds a one-file source and destination collection on the given
# streams, returning [blocks, src_text, dst_text, src_coll, dst_coll].
def prep_two_collections_for_copy(src_stream, dst_stream)
  blocks = random_blocks(2, 8)
  src_text = "#{src_stream} #{blocks.first} 0:8:f1\n"
  dst_text = "#{dst_stream} #{blocks.last} 0:8:f2\n"
  return [blocks, src_text, dst_text,
          Arv::Collection.new(src_text.dup),
          Arv::Collection.new(dst_text.dup)]
end
# A cross-collection file copy adds the file without altering the source.
def test_copy_file_from_other_collection(src_stream=".", dst_stream="./s1")
  blocks, src_text, dst_text, src_coll, dst_coll =
    prep_two_collections_for_copy(src_stream, dst_stream)
  dst_coll.cp_r("#{src_stream}/f1", dst_stream, src_coll)
  assert_equal("#{dst_stream} #{blocks.join(' ')} 0:8:f1 8:8:f2\n",
               dst_coll.manifest_text)
  assert_equal(src_text, src_coll.manifest_text)
end
# Same cross-collection copy, but targeting the root stream.
def test_copy_file_from_other_collection_to_root
  test_copy_file_from_other_collection("./s1", ".")
end
# A cross-collection stream copy nests the source stream; source untouched.
def test_copy_stream_from_other_collection
  blocks, src_text, dst_text, src_coll, dst_coll =
    prep_two_collections_for_copy("./s2", "./s1")
  dst_coll.cp_r("./s2", "./s1", src_coll)
  assert_equal(dst_text + src_text.sub("./s2 ", "./s1/s2 "),
               dst_coll.manifest_text)
  assert_equal(src_text, src_coll.manifest_text)
end
# A cross-collection stream copy into root appends the stream verbatim.
def test_copy_stream_from_other_collection_to_root
  blocks, src_text, dst_text, src_coll, dst_coll =
    prep_two_collections_for_copy("./s1", ".")
  dst_coll.cp_r("./s1", ".", src_coll)
  assert_equal(dst_text + src_text, dst_coll.manifest_text)
  assert_equal(src_text, src_coll.manifest_text)
end
# A trailing slash copies the stream's contents rather than the stream itself.
def test_copy_stream_contents
  coll = Arv::Collection.new(MULTILEVEL_MANIFEST)
  coll.cp_r("./dir0/subdir/", "./dir1/subdir")
  expect_lines = MULTILEVEL_MANIFEST.lines
  expect_lines[4] = expect_lines[2].sub("./dir0/", "./dir1/")
  assert_equal(expect_lines.join(""), coll.manifest_text)
end
# A trailing slash on the target creates a new stream keeping the source filename.
def test_copy_file_into_new_stream_with_implicit_filename
  coll = Arv::Collection.new(SIMPLEST_MANIFEST)
  coll.cp_r("./simple.txt", "./new/")
  assert_equal(SIMPLEST_MANIFEST + SIMPLEST_MANIFEST.sub(". ", "./new "),
               coll.manifest_text)
end
# A full target path inside a new stream renames the copied file.
def test_copy_file_into_new_stream_with_explicit_filename
  coll = Arv::Collection.new(SIMPLEST_MANIFEST)
  coll.cp_r("./simple.txt", "./new/newfile.txt")
  new_line = SIMPLEST_MANIFEST.sub(". ", "./new ").sub(":simple", ":newfile")
  assert_equal(SIMPLEST_MANIFEST + new_line, coll.manifest_text)
end
# Stream contents copied into root merge with the root's own files.
def test_copy_stream_contents_into_root
  coll = Arv::Collection.new(TWO_BY_TWO_MANIFEST_S)
  coll.cp_r("./s1/", ".")
  assert_equal(". %s 0:5:f1 14:4:f2 5:4:f3\n%s" %
               [TWO_BY_TWO_BLOCKS.reverse.join(" "),
                TWO_BY_TWO_MANIFEST_A.last],
               coll.manifest_text)
end
# Copying the root's contents into one of its own substreams.
def test_copy_root_contents_into_stream
  # This is especially fun, because we're copying a parent into its child.
  # Make sure that happens depth-first.
  coll = Arv::Collection.new(TWO_BY_TWO_MANIFEST_S)
  coll.cp_r("./", "./s1")
  assert_equal("%s./s1 %s 0:5:f1 5:4:f2 14:4:f3\n%s" %
               [TWO_BY_TWO_MANIFEST_A.first, TWO_BY_TWO_BLOCKS.join(" "),
                TWO_BY_TWO_MANIFEST_A.last.sub("./s1 ", "./s1/s1 ")],
               coll.manifest_text)
end
# Stream-contents copy also works across collections.
def test_copy_stream_contents_across_collections
  block = random_block(8)
  src_coll = Arv::Collection.new("./s1 #{block} 0:8:f1\n")
  dst_coll = Arv::Collection.new(TWO_BY_TWO_MANIFEST_S)
  dst_coll.cp_r("./s1/", "./s1", src_coll)
  assert_equal("%s./s1 %s %s 0:8:f1 13:4:f3\n" %
               [TWO_BY_TWO_MANIFEST_A.first, block, TWO_BY_TWO_BLOCKS.last],
               dst_coll.manifest_text)
end
# Root-contents copy across collections merges into the destination root.
def test_copy_root_contents_across_collections
  block = random_block(8)
  src_coll = Arv::Collection.new(". #{block} 0:8:f1\n")
  dst_coll = Arv::Collection.new(TWO_BY_TWO_MANIFEST_S)
  dst_coll.cp_r("./", ".", src_coll)
  assert_equal(". %s %s 0:8:f1 13:4:f2\n%s" %
               [block, TWO_BY_TWO_BLOCKS.first, TWO_BY_TWO_MANIFEST_A.last],
               dst_coll.manifest_text)
end
# Copying a root into an empty collection reproduces the source manifest.
def test_copy_root_into_empty_collection
  block = random_block(8)
  src_coll = Arv::Collection.new(". #{block} 0:8:f1\n")
  dst_coll = Arv::Collection.new()
  dst_coll.cp_r("./", ".", src_coll)
  assert_equal(". %s 0:8:f1\n" %
               [block],
               dst_coll.manifest_text)
end
# Only the blocks a copied file actually spans are carried over, even when the
# source manifest repeats blocks.
def test_copy_with_repeated_blocks
  blocks = abcde_blocks
  src_coll = Arv::Collection.new(". #{blocks[0]} #{blocks[1]} #{blocks[2]} #{blocks[0]} #{blocks[1]} #{blocks[2]} #{blocks[3]} #{blocks[4]} 27:27:f1\n")
  dst_coll = Arv::Collection.new()
  dst_coll.cp_r("f1", "./", src_coll)
  assert_equal(". #{blocks[0]} #{blocks[1]} #{blocks[2]} 0:27:f1\n", dst_coll.manifest_text, "mangled by cp_r")
end
def test_copy_with_repeated_split_blocks
blocks = abcde_blocks
src_coll = Arv::Collection.new(". #{blocks[0]} #{blocks[1]} #{blocks[2]} #{blocks[0]} #{blocks[1]} #{blocks[2]} #{blocks[3]} #{blocks[4]} 20:27:f1\n")
dst_coll = Arv::Collection.new()
src_coll.normalize
assert_equal(". #{blocks[2]} #{blocks[0]} #{blocks[1]} #{blocks[2]} 2:27:f1\n", src_coll.manifest_text, "mangled by normalize()")
dst_coll.cp_r("f1", "./", src_coll)
assert_equal(". #{blocks[2]} #{blocks[0]} #{blocks[1]} #{blocks[2]} 2:27:f1\n", dst_coll.manifest_text, "mangled by cp_r")
end
def test_copy_empty_source_path_raises_ArgumentError(src="", dst="./s1")
coll = Arv::Collection.new(SIMPLEST_MANIFEST)
assert_raises(ArgumentError) do
coll.cp_r(src, dst)
end
end
def test_copy_empty_destination_path_raises_ArgumentError
test_copy_empty_source_path_raises_ArgumentError(".", "")
end
### .each_file_path

# Parameterized: with no block given, exercises the block form of
# each_file_path; the caller below passes a block to exercise the
# enumerator form instead.
def test_each_file_path
  coll = Arv::Collection.new(TWO_BY_TWO_MANIFEST_S)
  if block_given?
    result = yield(coll)
  else
    result = []
    coll.each_file_path { |path| result << path }
  end
  assert_equal(["./f1", "./f2", "./s1/f1", "./s1/f3"], result.sort)
end

def test_each_file_path_without_block
  test_each_file_path { |coll| coll.each_file_path.to_a }
end

def test_each_file_path_empty_collection
  assert_empty(Arv::Collection.new.each_file_path.to_a)
end

def test_each_file_path_after_collection_emptied
  coll = Arv::Collection.new(SIMPLEST_MANIFEST)
  coll.rm("simple.txt")
  assert_empty(coll.each_file_path.to_a)
end

# A file listed more than once in the manifest should be yielded once.
def test_each_file_path_deduplicates_manifest_listings
  coll = Arv::Collection.new(MULTIBLOCK_FILE_MANIFEST)
  assert_equal(["./repfile", "./s1/repfile", "./s1/uniqfile",
                "./uniqfile", "./uniqfile2"],
               coll.each_file_path.to_a.sort)
end
### .exist?

# Parameterized: test_method is :assert or :refute, applied to the
# result of exist?(path).
def test_exist(test_method=:assert, path="f2")
  coll = Arv::Collection.new(TWO_BY_TWO_MANIFEST_S)
  send(test_method, coll.exist?(path))
end

def test_file_not_exist
  test_exist(:refute, "f3")
end

def test_stream_exist
  test_exist(:assert, "s1")
end

def test_file_inside_stream_exist
  test_exist(:assert, "s1/f1")
end

def test_path_inside_stream_not_exist
  test_exist(:refute, "s1/f2")
end

# A path that descends "through" a regular file must not exist.
def test_path_under_file_not_exist
  test_exist(:refute, "f2/nonexistent")
end

def test_deep_substreams_not_exist
  test_exist(:refute, "a/b/c/d/e/f/g")
end
### .rename

def test_simple_file_rename
  coll = Arv::Collection.new(SIMPLEST_MANIFEST)
  coll.rename("./simple.txt", "./new")
  assert_equal(SIMPLEST_MANIFEST.sub(":simple.txt", ":new"),
               coll.manifest_text)
end

# Parameterized: target may be an explicit file path or (below) just the
# destination stream, in which case the basename is kept.
def test_rename_file_into_other_stream(target="./s1/f2", basename="f2")
  coll = Arv::Collection.new(TWO_BY_TWO_MANIFEST_S)
  coll.rename("./f2", target)
  expected = ". %s 0:5:f1\n./s1 %s 0:5:f1 14:4:%s 5:4:f3\n" %
    [TWO_BY_TWO_BLOCKS.first,
     TWO_BY_TWO_BLOCKS.reverse.join(" "), basename]
  assert_equal(expected, coll.manifest_text)
end

def test_implicit_rename_file_into_other_stream
  test_rename_file_into_other_stream("./s1")
end

def test_rename_file_into_other_stream_with_new_name
  test_rename_file_into_other_stream("./s1/f2a", "f2a")
end

# Renaming over an existing file replaces it.
def test_rename_file_over_in_other_stream(target="./s1/f1")
  coll = Arv::Collection.new(TWO_BY_TWO_MANIFEST_S)
  coll.rename("./f1", target)
  expected = ". %s 5:4:f2\n./s1 %s 0:5:f1 14:4:f3\n" %
    [TWO_BY_TWO_BLOCKS.first, TWO_BY_TWO_BLOCKS.join(" ")]
  assert_equal(expected, coll.manifest_text)
end

def test_implicit_rename_file_over_in_other_stream
  test_rename_file_over_in_other_stream("./s1")
end

def test_simple_stream_rename
  coll = Arv::Collection.new(TWO_BY_TWO_MANIFEST_S)
  coll.rename("./s1", "./newS")
  assert_equal(TWO_BY_TWO_MANIFEST_S.sub("\n./s1 ", "\n./newS "),
               coll.manifest_text)
end

def test_rename_stream_into_other_stream(target="./dir2/subdir",
                                         basename="subdir")
  coll = Arv::Collection.new(MULTILEVEL_MANIFEST)
  coll.rename("./dir1/subdir", target)
  expected = MULTILEVEL_MANIFEST.lines
  replaced_line = expected.delete_at(4)
  expected << replaced_line.sub("./dir1/subdir ", "./dir2/#{basename} ")
  assert_equal(expected.join(""), coll.manifest_text)
end

def test_implicit_rename_stream_into_other_stream
  test_rename_stream_into_other_stream("./dir2")
end

def test_rename_stream_into_other_stream_with_new_name
  test_rename_stream_into_other_stream("./dir2/newsub", "newsub")
end

# A stream may replace another stream only when the target is empty.
def test_rename_stream_over_empty_stream
  coll = Arv::Collection.new(MULTILEVEL_MANIFEST)
  (1..3).each do |file_num|
    coll.rm("./dir0/subdir/file#{file_num}")
  end
  coll.rename("./dir1/subdir", "./dir0")
  expected = MULTILEVEL_MANIFEST.lines
  expected[2] = expected.delete_at(4).sub("./dir1/", "./dir0/")
  assert_equal(expected.sort.join(""), coll.manifest_text)
end

def test_rename_stream_over_file_raises_ENOTDIR
  coll = Arv::Collection.new(TWO_BY_TWO_MANIFEST_S)
  assert_raises(Errno::ENOTDIR) do
    coll.rename("./s1", "./f2")
  end
end

def test_rename_stream_over_nonempty_stream_raises_ENOTEMPTY
  coll = Arv::Collection.new(MULTILEVEL_MANIFEST)
  assert_raises(Errno::ENOTEMPTY) do
    coll.rename("./dir1/subdir", "./dir0")
  end
end

# Moving a stream underneath its own subtree must still terminate and
# rewrite every affected stream name.
def test_rename_stream_into_substream(source="./dir1",
                                      target="./dir1/subdir/dir1")
  coll = Arv::Collection.new(MULTILEVEL_MANIFEST)
  coll.rename(source, target)
  assert_equal(MULTILEVEL_MANIFEST.gsub(/^#{Regexp.escape(source)}([\/ ])/m,
                                        "#{target}\\1"),
               coll.manifest_text)
end

def test_rename_root
  test_rename_stream_into_substream(".", "./root")
end

# After the root is renamed away, a fresh root stream can be populated.
def test_adding_to_root_after_rename
  coll = Arv::Collection.new(SIMPLEST_MANIFEST)
  coll.rename(".", "./root")
  src_coll = Arv::Collection.new(SIMPLEST_MANIFEST)
  coll.cp_r("./simple.txt", ".", src_coll)
  assert_equal(SIMPLEST_MANIFEST + SIMPLEST_MANIFEST.sub(". ", "./root "),
               coll.manifest_text)
end

# rename returns the collection, so calls can be chained.
def test_rename_chaining
  coll = Arv::Collection.new(SIMPLEST_MANIFEST)
  coll.rename("./simple.txt", "./x").rename("./x", "./simple.txt")
  assert_equal(SIMPLEST_MANIFEST, coll.manifest_text)
end
### .rm

def test_simple_remove
  coll = Arv::Collection.new(TWO_BY_TWO_MANIFEST_S.dup)
  coll.rm("./f2")
  assert_equal(TWO_BY_TWO_MANIFEST_S.sub(" 5:4:f2", ""), coll.manifest_text)
end

# Helper: yields a fresh two-by-two collection to the block, then asserts
# the manifest matches TWO_BY_TWO_MANIFEST_A[expect_index].
def empty_stream_and_assert(expect_index=0)
  coll = Arv::Collection.new(TWO_BY_TWO_MANIFEST_S)
  yield coll
  assert_equal(TWO_BY_TWO_MANIFEST_A[expect_index], coll.manifest_text)
end

def test_remove_all_files_in_substream
  empty_stream_and_assert do |coll|
    coll.rm("./s1/f1")
    coll.rm("./s1/f3")
  end
end

def test_remove_all_files_in_root_stream
  empty_stream_and_assert(1) do |coll|
    coll.rm("./f1")
    coll.rm("./f2")
  end
end

# rm returns the collection, so calls can be chained.
def test_chaining_removes
  empty_stream_and_assert do |coll|
    coll.rm("./s1/f1").rm("./s1/f3")
  end
end

def test_remove_last_file
  coll = Arv::Collection.new(SIMPLEST_MANIFEST)
  coll.rm("./simple.txt")
  assert_equal("", coll.manifest_text)
end

# Parameterized; also reused by the rm_r tests below via `method`.
def test_remove_nonexistent_file_raises_ENOENT(path="./NoSuchFile",
                                               method=:rm)
  coll = Arv::Collection.new(SIMPLEST_MANIFEST)
  assert_raises(Errno::ENOENT) do
    coll.send(method, path)
  end
end

def test_remove_from_nonexistent_stream_raises_ENOENT
  test_remove_nonexistent_file_raises_ENOENT("./NoSuchStream/simple.txt")
end

# Plain rm refuses to remove streams (use rm_r for that).
def test_remove_stream_raises_EISDIR(path="./s1")
  coll = Arv::Collection.new(TWO_BY_TWO_MANIFEST_S)
  assert_raises(Errno::EISDIR) do
    coll.rm(path)
  end
end

def test_remove_root_raises_EISDIR
  test_remove_stream_raises_EISDIR(".")
end

def test_remove_empty_string_raises_ArgumentError
  coll = Arv::Collection.new(SIMPLEST_MANIFEST)
  assert_raises(ArgumentError) do
    coll.rm("")
  end
end
### rm_r

# rm_r removes whole streams recursively.
def test_recursive_remove
  empty_stream_and_assert do |coll|
    coll.rm_r("./s1")
  end
end

# rm_r also works on plain files, like rm.
def test_recursive_remove_on_files
  empty_stream_and_assert do |coll|
    coll.rm_r("./s1/f1")
    coll.rm_r("./s1/f3")
  end
end

def test_recursive_remove_root
  coll = Arv::Collection.new(MULTILEVEL_MANIFEST)
  coll.rm_r(".")
  assert_equal("", coll.manifest_text)
end

def test_rm_r_nonexistent_file_raises_ENOENT(path="./NoSuchFile")
  test_remove_nonexistent_file_raises_ENOENT("./NoSuchFile", :rm_r)
end

def test_rm_r_from_nonexistent_stream_raises_ENOENT
  test_remove_nonexistent_file_raises_ENOENT("./NoSuchStream/file", :rm_r)
end

def test_rm_r_empty_string_raises_ArgumentError
  coll = Arv::Collection.new(SIMPLEST_MANIFEST)
  assert_raises(ArgumentError) do
    coll.rm_r("")
  end
end
### .modified?

# Parameterized: builds a collection from *args, optionally mutates it in
# the block, then expects modified? to be false.
def test_new_collection_unmodified(*args)
  coll = Arv::Collection.new(*args)
  yield coll if block_given?
  refute(coll.modified?)
end

def test_collection_unmodified_after_instantiation
  test_new_collection_unmodified(SIMPLEST_MANIFEST)
end

# Calling unmodified clears the modified flag after a mutation.
def test_collection_unmodified_after_mark
  test_new_collection_unmodified(SIMPLEST_MANIFEST) do |coll|
    coll.cp_r("./simple.txt", "./copy")
    coll.unmodified
  end
end

# Helper: yields a fresh collection and asserts it reports modified.
def check_collection_modified
  coll = Arv::Collection.new(SIMPLEST_MANIFEST)
  yield coll
  assert(coll.modified?)
end

def test_collection_modified_after_copy
  check_collection_modified do |coll|
    coll.cp_r("./simple.txt", "./copy")
  end
end

def test_collection_modified_after_remove
  check_collection_modified do |coll|
    coll.rm("./simple.txt")
  end
end

def test_collection_modified_after_rename
  check_collection_modified do |coll|
    coll.rename("./simple.txt", "./newname")
  end
end
end
|
#!/usr/bin/env bash
# Dotfiles installer/updater: symlinks config files from this repository
# into $HOME and installs assorted tools (see the usage text at the end).
set -e
DIRNAME="$(dirname "$0")"
DIR="$(cd "$DIRNAME" && pwd)"   # absolute path to this repository

# Print all arguments to stderr.
echoerr() {
  echo "$@" 1>&2
}

# Initialize and update this repository's git submodules.
init_submodules() {
  (cd "$DIR" && git submodule init)
  (cd "$DIR" && git submodule update)
}

# git_clone <url> <path-relative-to-HOME>: clone unless the path exists.
git_clone() {
  if [ ! -e "$HOME/$2" ]; then
    echo "Cloning '$1'..."
    git clone "$1" "$HOME/$2"
  else
    # shellcheck disable=SC2088
    echoerr "~/$2 already exists."
  fi
}
# rename_with_backup <src> <dst>: move src to dst. If dst already exists,
# first move the old dst aside to "dst.~N~" using the lowest unused N.
# Returns 0 on success, 1 on any failure.
rename_with_backup() {
  if [ ! -e "$2" ]; then
    if mv "$1" "$2"; then
      return 0
    fi
  else
    local num
    num=1
    # Find the first free numbered-backup name.
    while [ -e "$2.~$num~" ]; do
      (( num++ ))
    done
    if mv "$2" "$2.~$num~" && mv "$1" "$2"; then
      return 0
    fi
  fi
  return 1
}
# replace_file <name> [dest]: symlink $DIR/<name> (or $HOME/<name> as a
# fallback source) to $HOME/<dest>, defaulting dest to ".<name>".
# An existing symlink is replaced; an existing regular file is renamed
# to "<dest>.old" first (with numbered backups via rename_with_backup).
replace_file() {
  DEST=${2:-.$1}
  if [ -e "$DIR/$1" ]; then
    SRC="$DIR/$1"
  else
    SRC="$HOME/$1"
    if [ ! -e "$SRC" ]; then
      echoerr "Failed to find $1"
      return
    fi
  fi
  # http://www.tldp.org/LDP/Bash-Beginners-Guide/html/sect_07_01.html
  # File exists and is a directory.
  [ ! -d "$(dirname "$HOME/$DEST")" ] && mkdir -p "$(dirname "$HOME/$DEST")"
  # FILE exists and is a symbolic link.
  if [ -h "$HOME/$DEST" ]; then
    if rm "$HOME/$DEST" && ln -s "$SRC" "$HOME/$DEST"; then
      echo "Updated ~/$DEST"
    else
      echoerr "Failed to update ~/$DEST"
    fi
  # FILE exists.
  elif [ -e "$HOME/$DEST" ]; then
    if rename_with_backup "$HOME/$DEST" "$HOME/$DEST.old"; then
      echo "Renamed ~/$DEST to ~/$DEST.old"
      if ln -s "$SRC" "$HOME/$DEST"; then
        echo "Created ~/$DEST"
      else
        echoerr "Failed to create ~/$DEST"
      fi
    else
      echoerr "Failed to rename ~/$DEST to ~/$DEST.old"
    fi
  else
    if ln -s "$SRC" "$HOME/$DEST"; then
      echo "Created ~/$DEST"
    else
      echoerr "Failed to create ~/$DEST"
    fi
  fi
}
# Symlink every tracked dotfile into $HOME, plus a few files that live at
# non-default destinations (pip, tmux plugins, neovim, ~/bin scripts).
install_link() {
  init_submodules
  for FILENAME in \
    'aliases' \
    'bashrc' \
    'ctags' \
    'gemrc' \
    'git-templates' \
    'gitattributes_global' \
    'gitconfig' \
    'gitconfig.user' \
    'gitignore_global' \
    'ideavimrc' \
    'inputrc' \
    'irbrc' \
    'minttyrc' \
    'npmrc' \
    'p10k.zsh' \
    'profile' \
    'screenrc' \
    'tigrc' \
    'tmux.conf' \
    'vimrc' \
    'vintrc.yaml' \
    'zprofile' \
    'zshrc'
  do
    replace_file "$FILENAME"
  done
  replace_file 'pip.conf' '.pip/pip.conf'
  replace_file 'tpm' '.tmux/plugins/tpm'
  [ ! -d "$HOME/.vim" ] && mkdir "$HOME/.vim"
  # Share the vim configuration with neovim.
  replace_file '.vim' '.config/nvim'
  replace_file 'vimrc' '.config/nvim/init.vim'
  # Helper scripts linked under ~/bin without a leading dot.
  for FILENAME in \
    'diff-highlight' \
    'diff-hunk-list' \
    'pyg' \
    'server'
  do
    replace_file "bin/$FILENAME" "bin/$FILENAME"
  done
  echo 'Done.'
}
# Dispatch on the first CLI argument; unknown commands print usage.
case "$1" in
  update)
    if [[ "$(uname)" != 'Darwin' ]]; then
      # update package list
      sudo apt-get update
      sudo apt-get -y dist-upgrade
    else
      brew update
      brew upgrade
    fi
    # dotfiles update
    # Fix: the original used '2</dev/null', which redirects *stdin* from
    # /dev/null and leaves the error visible; use '2>/dev/null'. Also add
    # '|| true' so that an already-configured remote does not abort the
    # script under 'set -e', making this command idempotent.
    git remote add downstream https://github.com/0xdkay/dotfiles.git 2>/dev/null || true
    git pull downstream master
    # vim update
    vim +PlugUpgrade +PlugClean\! +PlugUpdate +qall\!
    ;;
  base)
    # change archive from us to kr
    sudo sed -i 's/us.archive/kr.archive/g' /etc/apt/sources.list
    # update package list
    sudo apt-get update
    # upgrade before doing
    sudo apt-get -y dist-upgrade
    # install softwares
    sudo apt-get install -y vim zsh tmux git
    #sudo apt-get install build-essential python-dev python-pip
    # sudo apt-get install -y exuberant-ctags
    ;;
  github)
    # setup github: generate a key, load it into the agent, verify access.
    echo "Type your github account: "
    read -r GITHUB_ACCOUNT
    ssh-keygen -t rsa -C "$GITHUB_ACCOUNT"
    # Quote the command substitution so the agent's output is evaluated
    # as one shell snippet (shellcheck SC2046).
    eval "$(ssh-agent)"
    ssh-add "$HOME/.ssh/id_rsa"
    echo "need to add below public key to github"
    cat "$HOME/.ssh/id_rsa.pub"
    echo -n "press enter when you done..."
    read -r t
    ssh -T [email protected]
    ;;
  link)
    install_link
    ;;
  ycm)
    # Build YouCompleteMe's native component.
    sudo apt-get install -y build-essential cmake
    sudo apt-get install -y python-dev python3-dev
    cd "$HOME/.vim/plugged/YouCompleteMe"
    ./install.py --clang-completer
    ;;
  antibody)
    curl -sL https://git.io/antibody | bash -s
    ;;
  brew)
    /bin/bash -c "$(curl -fsSL https://raw.githubusercontent.com/Homebrew/install/master/install.sh)"
    ;;
  chruby)
    if [ "$(uname)" = 'Darwin' ]; then
      brew install chruby
    else
      wget -O chruby-0.3.9.tar.gz https://github.com/postmodern/chruby/archive/v0.3.9.tar.gz
      tar -xzvf chruby-0.3.9.tar.gz
      cd chruby-0.3.9/
      sudo make install
    fi
    ;;
  formulae)
    brew bundle --file="${DIR}/Brewfile" --no-lock
    ;;
  pwndbg)
    init_submodules
    cd "${DIR}/pwndbg"
    ./setup.sh
    ;;
  pyenv)
    if [ "$(uname)" = 'Darwin' ]; then
      brew install pyenv
      brew install pyenv-virtualenv
    else
      curl -L https://raw.githubusercontent.com/pyenv/pyenv-installer/master/bin/pyenv-installer | bash
    fi
    ;;
  rbenv)
    if [ "$(uname)" = 'Darwin' ]; then
      brew install rbenv
    else
      git_clone https://github.com/rbenv/rbenv.git .rbenv
      git_clone https://github.com/rbenv/ruby-build.git .rbenv/plugins/ruby-build
    fi
    echo 'Done.'
    ;;
  ruby-install)
    if [ "$(uname)" = 'Darwin' ]; then
      brew install ruby-install
    else
      wget -O ruby-install-0.7.0.tar.gz https://github.com/postmodern/ruby-install/archive/v0.7.0.tar.gz
      tar -xzvf ruby-install-0.7.0.tar.gz
      cd ruby-install-0.7.0/
      sudo make install
    fi
    ;;
  rustup)
    curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh
    ;;
  rvm)
    command curl -sSL https://get.rvm.io | bash -s stable
    ;;
  weechat)
    replace_file 'weechat'
    ;;
  *)
    echo "usage: $(basename "$0") <command>"
    echo ''
    echo 'Available commands:'
    echo ' update Update installed packages'
    echo ' base Install basic packages'
    echo ' link Install symbolic links'
    echo ' antibody Install Antibody'
    echo ' pwndbg Install pwndbg'
    echo ' github Install github account'
    echo ' brew Install Homebrew on macOS (or Linux)'
    echo ' formulae Install Homebrew formulae using Brewfile'
    echo ' pyenv Install pyenv with pyenv-virtualenv'
    echo ' rustup Install rustup'
    echo ' ruby-install Install ruby-install'
    echo ' chruby Install chruby'
    echo ' rbenv Install rbenv'
    echo ' rvm Install RVM'
    echo ' weechat Install WeeChat configuration'
    ;;
esac
|
#include <iostream>
#include <fstream>
#include <vector>
#include <tr1/unordered_map>
#include <algorithm>
// Maximum number of nodes (problem bound plus slack).
#define DN 100005
using namespace std;
using namespace tr1;
typedef vector<int>::iterator it;
// hs[k]: color -> occurrence count, for the merged count-map with handle k.
unordered_map<int, int> hs[DN];
// n: node count; k: number of allocated maps; poz[v]: handle of the map
// holding v's subtree counts; r[v]: most frequent color in v's subtree
// (smallest on ties); cont[v]: its count; c[v]: color of node v.
int n,k,poz[DN],r[DN],cont[DN],c[DN];
// gi: undirected adjacency lists; gf: children lists after rooting at 1.
vector<int> gi[DN],gf[DN];
// Post-order DFS from node s with parent t. For every node it computes the
// color appearing most often in its subtree (ties broken by smallest color
// value). Child count-maps are merged into the first child's map, and that
// map's handle is reused for s, so internal nodes allocate no new map.
// NOTE(review): children are merged without a size heuristic (not strict
// small-to-large); confirm the input limits keep this fast enough.
void dfs(int s, int t) {
  // Root the tree on the fly: record children, recurse first.
  for(it i=gi[s].begin(); i!=gi[s].end(); ++i) if( *i != t ) {
    gf[s].push_back(*i);
    dfs(*i, s);
  }
  if(!gf[s].size()) {
    // Leaf: its own color with count 1 in a freshly allocated map.
    cont[s]=1;
    r[s]=c[s];
    poz[s]=++k;
    hs[k][c[s]]=1;
    return;
  }
  int fr=gf[s].front();
  // Seed the answer with the first child's result; fr then becomes the
  // handle of that child's map (the merge target).
  r[s]=r[fr]; cont[s]=cont[fr]; poz[s]=poz[fr]; fr=poz[fr];
  it i=gf[s].begin();
  // Merge every remaining child's map into hs[fr], updating the running
  // best (higher count wins; on a tie, the smaller color wins).
  for(++i;i!=gf[s].end(); ++i) {
    int f=poz[*i],v;
    for(unordered_map<int,int>::iterator j=hs[f].begin(); j!=hs[f].end(); ++j)
      if((v=(hs[fr][j->first]+=j->second))>cont[s]) {
        cont[s]=v;
        r[s]=j->first;
      }else if(v==cont[s] && j->first<r[s])
        r[s]=j->first;
  }
  // Finally account for s's own color.
  if(++hs[fr][c[s]]>cont[s] || (hs[fr][c[s]]==cont[s] && c[s]<r[s])) {
    cont[s]=hs[fr][c[s]];
    r[s]=c[s];
  }
}
// Reads n, the n-1 tree edges, and each node's color from "egal.in";
// writes, for every node, its dominant subtree color and that color's
// count to "egal.out".
int main()
{
  ifstream f("egal.in");
  ofstream g("egal.out");
  f>>n;
  for(int i=1; i<n; ++i) {
    int a,b;
    f>>a>>b;
    gi[a].push_back(b);
    gi[b].push_back(a);
  }
  for(int i=1; i<=n; ++i) f>>c[i];
  dfs(1, 0);  // root the tree at node 1
  for(int i=1; i<=n; ++i) g<<r[i]<<' '<<cont[i] <<'\n';
  return 0;
}
|
// Integration tests for the product routes: supertest drives the Express
// app against a dedicated local MongoDB test database.
const app= require("../index");
const supertest = require("supertest");
const Category =require("../models/Category")
const Product =require("../models/Product")
const request = supertest(app);
const mongoose = require("mongoose");
const databaseName = "testuserroute";
const path =require("path")

// Connect once before the whole suite.
beforeAll(async () => {
  const url = `mongodb://127.0.0.1/${databaseName}`
  await mongoose.connect(url, { useNewUrlParser: true });
});

afterAll(async () => {
  //disconnect mongoose
  await mongoose.connection.close()
});
// GET /api/v1/products and /:id — seeds one category and one product
// before each case; both collections are wiped afterwards.
describe("GET Products /:id /GET", ()=>{
  beforeEach(async ()=>{
    const categorycreated = {
      _id: "628cd6d47eb286cf58a2e363",
      name:"phone",
      icon:"dev-9665",
      color:"blue",
    }
    const product = {
      _id: "6234a9c563734dda3c74e62b",
      description: "testing product",
      richDescription: "maybe working perfect",
      image: "img/98-a-63es",
      images:[
      ],
      brand: "apple",
      price: 96352,
      category: "628cd6d47eb286cf58a2e363",
      countInStock : 96,
      rating: 4,
      numReviews: 65436,
      isFeatured: false
    }
    await Product(product).save();
    await Category(categorycreated).save();
  });
  afterEach(async()=>{
    await Category.deleteMany();
    await Product.deleteMany();
  })
  it("Get product - success ", async()=>{
    const res = await request.get("/api/v1/products/6234a9c563734dda3c74e62b")
    expect(res.statusCode).toBe(200);
    expect(res.body.message).toEqual("Product found")
    expect(res.body.data).toBeDefined();
  })
  // Well-formed but unknown ObjectId -> 404 with an error envelope.
  it("Get product - failure - Product doesn't exist", async()=>{
    const res = await request.get("/api/v1/products/6234aa30dbe66dfc1633323c");
    expect(res.statusCode).toBe(404);
    expect(res.body).toEqual({
      success: false,
      message : "Product you requested doesn't exist"
    })
  })
  it("Get all products -success", async()=>{
    const res =await request.get("/api/v1/products");
    expect(res.body).toBeTruthy();
    expect(res.body.message).toEqual("All products fetching success");
    expect(res.statusCode).toBe(200);
  });
});

// DELETE /api/v1/products/:id — fixture created once for the block.
// NOTE(review): afterAll clears only Category; the product document is
// removed by the success case itself.
describe("Delete Products /DELETE", ()=>{
  beforeAll(async()=>{
    const categorycreated = {
      _id: "628cd6d47eb286cf58a2e363",
      name:"phone",
      icon:"dev-9665",
      color:"blue",
    }
    const product = {
      _id: "6234a9c563734dda3c74e62b",
      description: "testing product",
      richDescription: "maybe working perfect",
      image: "img/98-a-63es",
      images:[
      ],
      brand: "apple",
      price: 96352,
      category: "628cd6d47eb286cf58a2e363",
      countInStock : 96,
      rating: 4,
      numReviews: 65436,
      isFeatured: false
    }
    await Product(product).save();
    await Category(categorycreated).save();
  });
  afterAll(async()=>{
    await Category.deleteMany();
  });
  it("Delete Product - Success - Deleted", async()=>{
    const res = await request.delete("/api/v1/products/6234a9c563734dda3c74e62b");
    expect(res.statusCode).toBe(200);
    expect(res.body).toEqual({
      success: true,
      message : "Product successfully deleted"
    });
  })
  it("Delete Product - failure - Doesn't exist", async()=>{
    const res = await request.delete("/api/v1/products/6234aa30dbe66dfc1633323c");
    expect(res.statusCode).toBe(404);
    expect(res.body).toEqual({
      success: false,
      message : "Product doesn't exist"
    })
  })
});
// GET /api/v1/products/get/count with data present.
describe("Aggregrate Products /GET/SUCCESS", ()=>{
  beforeAll(async()=>{
    const categorycreated = {
      _id: "628cd6d47eb286cf58a2e363",
      name:"phone",
      icon:"dev-9665",
      color:"blue",
    }
    const product = {
      _id: "6234a9c563734dda3c74e62b",
      description: "testing product",
      richDescription: "maybe working perfect",
      image: "img/98-a-63es",
      images:[
      ],
      brand: "apple",
      price: 96352,
      category: "628cd6d47eb286cf58a2e363",
      countInStock : 96,
      rating: 4,
      numReviews: 65436,
      isFeatured: false
    }
    await Product(product).save();
    await Category(categorycreated).save();
  });
  afterAll(async()=>{
    await Category.deleteMany();
    await Product.deleteMany();
  });
  it("Get Products count /DB", async()=>{
    const res = await request.get("/api/v1/products/get/count")
    expect(res.statusCode).toBe(200);
    expect(res.body).toBeDefined();
  })
});

// Same endpoint with an empty database: expects the route's 400 error.
describe("Aggregrate Products /GET/FAILURE", ()=>{
  it("Get Products /NULL", async()=>{
    const res = await request.get("/api/v1/products/get/count")
    expect(res.statusCode).toBe(400);
    expect(res.body).toEqual({
      success: false,
      message: "Count error!"
    })
  })
});

// GET /api/v1/products/get/featured/:count with a featured product seeded.
describe("Featured Products /FEATURED/COUNT/SUCCESS", ()=>{
  beforeAll(async()=>{
    const categorycreated = {
      _id: "628cd6d47eb286cf58a2e363",
      name:"phone",
      icon:"dev-9665",
      color:"blue",
    }
    const product =
    {
      _id: "6234a9c563734dda3c74e62b",
      description: "testing product",
      richDescription: "maybe working perfect",
      image: "img/98-a-63es",
      images:[
      ],
      brand: "apple",
      price: 96352,
      category: "628cd6d47eb286cf58a2e363",
      countInStock : 96,
      rating: 4,
      numReviews: 65436,
      isFeatured: true
    }
    await Product(product).save();
    await Category(categorycreated).save();
  });
  afterAll(async()=>{
    await Category.deleteMany();
    await Product.deleteMany();
  });
  it("Get Featured /GET/ SUCCESS", async()=>{
    const res =await request.get("/api/v1/products/get/featured/0");
    expect(res.statusCode).toBe(200);
    expect(res.body.success).toBeTruthy();
  });
});

// Same endpoint when the only product is NOT featured: empty result list.
describe("Featured Products /FEATURED/COUNT/ FAILURE", ()=>{
  beforeAll(async()=>{
    const categorycreated = {
      _id: "628cd6d47eb286cf58a2e363",
      name:"phone",
      icon:"dev-9665",
      color:"blue",
    }
    const product =
    {
      _id: "6234a9c563734dda3c74e62b",
      description: "testing product",
      richDescription: "maybe working perfect",
      image: "img/98-a-63es",
      images:[
      ],
      brand: "apple",
      price: 96352,
      category: "628cd6d47eb286cf58a2e363",
      countInStock : 96,
      rating: 4,
      numReviews: 65436,
      isFeatured: false
    }
    await Product(product).save();
    await Category(categorycreated).save();
  });
  afterAll(async()=>{
    await Category.deleteMany();
    await Product.deleteMany();
  });
  it("Get Featured /GET/ FAILURE", async()=>{
    const res =await request.get("/api/v1/products/get/featured/0");
    expect(res.body.data).toEqual([]);
  });
});
// POST /api/v1/products — only the referenced category is pre-seeded.
describe("POST Product /POST", ()=>{
  beforeEach(async()=>{
    const categorycreated = {
      _id: "628cd6d47eb286cf58a2e363",
      name:"phone",
      icon:"dev-9665",
      color:"blue",
    }
    await Category(categorycreated).save();
  });
  afterEach(async()=>{
    await Category.deleteMany();
    await Product.deleteMany();
  });
  it("Product post - success-statuscode/201", async ()=>{
    // NOTE(review): `file` is sent as a path string in the JSON body, not
    // as a multipart upload (.attach) — confirm the route expects this.
    const file = path.resolve(__dirname, `./file.jpg`);
    const res =await request.post("/api/v1/products").send({
      _id: "6234a9c563734dda3c74e62b",
      name:"phje",
      description: "testing product",
      richDescription: "maybe working perfect",
      image: "img/98-a-63es",
      images:[
      ],
      brand: "apple",
      price: 96352,
      category: "628cd6d47eb286cf58a2e363",
      countInStock : 96,
      rating: 4,
      numReviews: 65436,
      isFeatured: false,
      file: file
    })
    expect(res.statusCode).toBe(201);
  });
  it("Content Type Json In headers", async()=>{
    const file = path.resolve(__dirname, `./file.jpg`);
    const res = await request.post("/api/v1/products").send({
      _id: "6234a9c563734dda3c74e62b",
      name:"phje",
      description: "testing product",
      richDescription: "maybe working perfect",
      image: "img/98-a-63es",
      images:[
      ],
      brand: "apple",
      price: 96352,
      category: "628cd6d47eb286cf58a2e363",
      countInStock : 96,
      rating: 4,
      numReviews: 65436,
      isFeatured: false,
      file: file
    })
    expect(res.headers['content-type']).toEqual(expect.stringContaining("json"));
  });
  it("Response Body- Valid", async()=>{
    const file = path.resolve(__dirname, `./file.jpg`);
    const res = await request.post("/api/v1/products").send({
      _id: "6234a9c563734dda3c74e62b",
      description: "testing product",
      name:"phje",
      richDescription: "maybe working perfect",
      image: "img/98-a-63es",
      images:[
      ],
      brand: "apple",
      price: 96352,
      category: "628cd6d47eb286cf58a2e363",
      countInStock : 96,
      rating: 4,
      numReviews: 65436,
      isFeatured: false,
      file: file
    })
    console.log(file);
    expect(res.body).toBeTruthy();
    expect(res.body._id).toBeDefined();
  });
  // Empty required field (description) should make the route fail.
  it('Input field missing/invalid - failure 500', async()=>{
    const file = path.resolve(__dirname, `./file.jpg`);
    const res = await request.post("/api/v1/products").send({
      _id: "6234a9c563734dda3c74e62b",
      name:"phje",
      description: "",
      richDescription: "maybe working perfect",
      image: "img/98-a-63es",
      images:[
      ],
      brand: "apple",
      price: 96352,
      category: "628cd6d47eb286cf58a2e363",
      countInStock : 96,
      rating: 4,
      numReviews: 65436,
      isFeatured: false,
      file: file
    })
    expect(res.statusCode).toBe(500);
  });
});

// PUT /api/v1/products/:id — updates against a pre-seeded product.
describe("Product Update /PUT", ()=>{
  beforeEach(async()=>{
    const product = {
      _id: "6234a9c563734dda3c74e62b",
      name:"phel",
      description: "testing product",
      richDescription: "maybe working perfect",
      image: "img/98-a-63es",
      images:[
      ],
      brand: "apple",
      price: 96352,
      category: "628cd6d47eb286cf58a2e363",
      countInStock : 96,
      rating: 4,
      numReviews: 65436,
      isFeatured: false
    }
    const categorycreated = {
      _id: "628cd6d47eb286cf58a2e363",
      name:"phone",
      icon:"dev-9665",
      color:"blue",
    }
    await Category(categorycreated).save();
    await Product(product).save();
  });
  afterEach(async()=>{
    await Category.deleteMany();
    await Product.deleteMany();
  });
  // The id below differs from the seeded one by a single hex digit.
  it("Product update - Failure - Not updated", async()=>{
    const res = await request.put("/api/v1/products/6234a9c563714dda3c74e62b").send({
      name: "updated",
      description:"updated one"
    });
    expect(res.statusCode).toBe(400);
    expect(res.body).toEqual({
      status: false,
      message: "Product doesnt exist"
    })
  })
  it("Product update - Success - Successfully updated", async()=>{
    const res = await request.put("/api/v1/products/6234a9c563734dda3c74e62b").send({
      name: "updated",
      description:"updated one"
    });
    expect(res.statusCode).toBe(200);
    expect(res.body.success).toBeTruthy();
    expect(res.body.message).toEqual("Successfully updated")
  })
})
|
# get_n_words splits a line into at most n whitespace-separated fields,
# keeping everything past the (n-1)th split inside the last field.
@testset "get_n_words!" begin
    line = "#=GF AC PF00571"
    @test get_n_words(line, 1) == String[line]
    @test get_n_words(line, 2) == String["#=GF", "AC PF00571"]
    @test get_n_words(line, 3) == String["#=GF", "AC", "PF00571"]
    # Asking for more words than exist just returns what is available.
    @test get_n_words(line, 4) == String["#=GF", "AC", "PF00571"]
    @test get_n_words("\n",1) == String["\n"]
    @test get_n_words("#", 1) == String["#"]
    # ASCII
    str = "#=GR O31698/18-71 SS CCCHHHHHHHHHHHHHHHEEEEEEEEEEEEEEEEHHH"
    @test get_n_words(str, 3) ==
        String["#=GR", "O31698/18-71", "SS CCCHHHHHHHHHHHHHHHEEEEEEEEEEEEEEEEHHH"]
    # UTF-8: multi-byte characters must not break field boundaries.
    str = "#=GF CC (Römling U. and Galperin M.Y. “Bacterial cellulose"
    @test get_n_words(str, 3) ==
        String["#=GF", "CC", "(Römling U. and Galperin M.Y. “Bacterial cellulose"]
    str = "#=GF CC not present in all SecA2–SecY2 systems. This family of Asp5 is"
    @test get_n_words(str, 3) ==
        String["#=GF", "CC", "not present in all SecA2–SecY2 systems. This family of Asp5 is"]
end

# hascoordinates recognizes "ID/start-end" style sequence identifiers.
@testset "hascoordinates" begin
    @test hascoordinates("O83071/192-246")
    @test !hascoordinates("O83071")
end

# select_element returns the single element; an empty input is an error.
@testset "select_element" begin
    @test select_element([1]) == 1
    @test_throws ErrorException select_element([])
end
@testset "Matrices to and from lists" begin

    # matrix2list extracts the upper (default) or lower triangular part,
    # optionally including the diagonal.
    @testset "matrix2list" begin
        mat = [ 1 2 3
                4 5 6
                7 8 9 ]
        @test matrix2list(mat) == [2, 3, 6]
        @test matrix2list(mat, diagonal=true) == [1, 2, 3, 5, 6, 9]
        @test matrix2list(mat, part="lower") == [4, 7, 8]
        @test matrix2list(mat, part="lower", diagonal=true) == [1, 4, 7, 5, 8, 9]
    end

    # list2matrix rebuilds a symmetric matrix from a triangle list.
    @testset "list2matrix" begin
        mat = [ 1 2 3
                2 5 6
                3 6 9 ]
        @test triu(list2matrix([2, 3, 6], 3), 1) == triu(mat, 1)
        @test list2matrix([1, 2, 3, 5, 6, 9], 3, diagonal=true) == mat
    end
end
@testset "General IO" begin

    # lineiterator yields lines without trailing newlines, matching eachline.
    @testset "lineiterator" begin
        # Julia 0.6: eachline return lines without line endings by default
        ppap = "pen\npineapple\napple\npen\n"
        @test collect(lineiterator(ppap)) == collect(eachline(IOBuffer(ppap)))
        @test collect(lineiterator("Hola")) == ["Hola"]
        @test collect(lineiterator("Hola\n")) == ["Hola"]
        @test collect(lineiterator("\n")) == [""]
        @test collect(lineiterator("Hola\nMundo")) == ["Hola", "Mundo"]
        @test collect(lineiterator("Hola\nMundo\n")) == ["Hola", "Mundo"]
        @test collect(lineiterator("Hola\nMundo\n\n")) == ["Hola", "Mundo", ""]
    end

    @testset "File checking" begin
        file_path = joinpath(DATA, "simple.fasta")
        @test isnotemptyfile(file_path)
        @test !isnotemptyfile(joinpath(DATA, "emptyfile"))
        # check_file returns the path it validated.
        @test check_file(file_path) == file_path
        @test_throws ErrorException check_file("nonexistentfile")
    end

    # NOTE(review): this testset hits the network (uniprot.org); it fails
    # offline or when the remote service is unavailable.
    @testset "Download file" begin
        try
            @test ".tmp" == download_file("http://www.uniprot.org/uniprot/P69905.fasta",
                                          ".tmp",
                                          headers = Dict("User-Agent" => "Mozilla/5.0 (compatible; MSIE 7.01; Windows NT 5.0)"),
                                          redirect =true)
        finally
            # Always clean up the temporary download.
            if isfile(".tmp")
                rm(".tmp")
            end
        end
    end
end
|
module Pickle.Types where
import Control.Applicative
import Control.Monad
import Control.Monad.IO.Class
import Control.Monad.Trans.Class
import Control.Monad.Trans.Either
import Control.Monad.Trans.Reader
import Data.Bifunctor
import Data.List
import Data.Maybe
import Data.Either
import System.Directory
import System.FilePath.Posix
import Text.Pandoc
import Text.Pandoc.Error
import qualified Data.Text.Lazy as T
import qualified Pickle.Config as Config
--------------------------------------------------------------------------------
-- * Pickle monad

-- | The main monad: site configuration in a reader, over IO with
-- 'PickleError' as the failure channel.
type Pickle = ReaderT Config.SiteConfig (EitherT PickleError IO)
-- | Run a 'Pickle' action under the given site configuration, turning
-- any 'PickleError' into a runtime 'error'.
runPickle :: Config.SiteConfig -> Pickle a -> IO a
runPickle cfg action =
  runEitherT (runReaderT action cfg) >>= either (error . show) return
-- | Lift an IO predicate into 'Pickle': succeed when it holds, otherwise
-- fail with 'FileNotFound'.
pickleAssertIO :: (a -> IO Bool) -> a -> Pickle ()
pickleAssertIO check x = lift $
  liftIO (check x) >>= \ok -> if ok then right () else left FileNotFound
-- | Pure variant of 'pickleAssertIO'.
-- FIXME: 'FileNotFound' is not always the right error for a failed check.
pickleAssert :: (a -> Bool) -> a -> Pickle ()
pickleAssert check x
  | check x   = lift (right ())
  | otherwise = lift (left FileNotFound)
--------------------------------------------------------------------------------
-- * Errors

-- | Failure modes surfaced through the 'Pickle' monad.
data PickleError
  = UnknownError
  | FileNotFound
  | UnknownPropertyError
  | PicklePandocError PandocError
  deriving (Show)

-- Since GHC 8.4 'Semigroup' is a superclass of 'Monoid', so defining
-- 'mappend' directly on the Monoid instance no longer compiles; the
-- left-biased combine lives in '(<>)' and 'mappend' defaults to it.
instance Semigroup PickleError where
  a <> _ = a

instance Monoid PickleError where
  mempty = UnknownError
--------------------------------------------------------------------------------
-- * Posts

-- | A parsed post together with bookkeeping about where it came from and
-- where it should be written.
data Post = Post
  { postContent :: Pandoc
    -- ^ the pandoc
  , postName :: FilePath
    -- ^ source file name
  , postOutName :: Maybe FilePath
    -- ^ out file name, if different
  , postSrcBundle :: Maybe (FilePath, [FilePath])
    -- ^ if it is in a folder, save the folder name for copying assets
    -- TODO: maybe use the 'MediaBag' from Pandoc?
  , postDate :: String
    -- ^ publication date, stored as a raw string
  -- , postMeta :: PostMeta
  } deriving Show

-- | Metadata for posts
data PostMeta = PostMeta
  { postMetaFilename :: FilePath
    -- ^ full location in output
  , postMetaDstPath :: FilePath
    -- ^ calculated from metadata (relative to generated site)
  } deriving Show
|
/*
Copyright 1995-2017 Esri
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
For additional information, contact:
Environmental Systems Research Institute, Inc.
Attn: Contracts Dept
380 New York Street
Redlands, California, USA 92373
email: [email protected]
*/
package com.esri.core.geometry;
import junit.framework.TestCase;
import org.junit.Test;
/**
 * Unit tests for OperatorGeneralize: simplifying polygons and polylines
 * with a maximum-deviation tolerance. The boolean argument passed to
 * execute controls handling of degenerate results (presumably
 * bRemoveDegenerateParts — confirm against the Operator API): with true,
 * a result that collapses is dropped entirely; with false, the collapsed
 * geometry is kept.
 */
public class TestGeneralize extends TestCase {
    @Override
    protected void setUp() throws Exception {
        super.setUp();
    }

    @Override
    protected void tearDown() throws Exception {
        super.tearDown();
    }

    // Polygon: a tolerance of 2 smooths the zig-zag bottom edge into a
    // rectangle; a tolerance of 5 collapses it (empty with removal=true,
    // a degenerate 3-point ring with removal=false).
    @Test
    public static void test1() {
        OperatorFactoryLocal engine = OperatorFactoryLocal.getInstance();
        OperatorGeneralize op = (OperatorGeneralize) engine
                .getOperator(Operator.Type.Generalize);
        Polygon poly = new Polygon();
        poly.startPath(0, 0);
        poly.lineTo(1, 1);
        poly.lineTo(2, 0);
        poly.lineTo(3, 2);
        poly.lineTo(4, 1);
        poly.lineTo(5, 0);
        poly.lineTo(5, 10);
        poly.lineTo(0, 10);
        Geometry geom = op.execute(poly, 2, true, null);
        Polygon p = (Polygon) geom;
        Point2D[] points = p.getCoordinates2D();
        assertTrue(points.length == 4);
        assertTrue(points[0].x == 0 && points[0].y == 0);
        assertTrue(points[1].x == 5 && points[1].y == 0);
        assertTrue(points[2].x == 5 && points[2].y == 10);
        assertTrue(points[3].x == 0 && points[3].y == 10);
        Geometry geom1 = op.execute(geom, 5, false, null);
        p = (Polygon) geom1;
        points = p.getCoordinates2D();
        assertTrue(points.length == 3);
        assertTrue(points[0].x == 0 && points[0].y == 0);
        assertTrue(points[1].x == 5 && points[1].y == 10);
        assertTrue(points[2].x == 5 && points[2].y == 10);
        geom1 = op.execute(geom, 5, true, null);
        p = (Polygon) geom1;
        points = p.getCoordinates2D();
        assertTrue(points.length == 0);
    }

    // Polyline: same vertex sequence as test1; a polyline does not close,
    // so a tolerance of 5 keeps the two endpoints either way.
    @Test
    public static void test2() {
        OperatorFactoryLocal engine = OperatorFactoryLocal.getInstance();
        OperatorGeneralize op = (OperatorGeneralize) engine
                .getOperator(Operator.Type.Generalize);
        Polyline polyline = new Polyline();
        polyline.startPath(0, 0);
        polyline.lineTo(1, 1);
        polyline.lineTo(2, 0);
        polyline.lineTo(3, 2);
        polyline.lineTo(4, 1);
        polyline.lineTo(5, 0);
        polyline.lineTo(5, 10);
        polyline.lineTo(0, 10);
        Geometry geom = op.execute(polyline, 2, true, null);
        Polyline p = (Polyline) geom;
        Point2D[] points = p.getCoordinates2D();
        assertTrue(points.length == 4);
        assertTrue(points[0].x == 0 && points[0].y == 0);
        assertTrue(points[1].x == 5 && points[1].y == 0);
        assertTrue(points[2].x == 5 && points[2].y == 10);
        assertTrue(points[3].x == 0 && points[3].y == 10);
        Geometry geom1 = op.execute(geom, 5, false, null);
        p = (Polyline) geom1;
        points = p.getCoordinates2D();
        assertTrue(points.length == 2);
        assertTrue(points[0].x == 0 && points[0].y == 0);
        assertTrue(points[1].x == 0 && points[1].y == 10);
        geom1 = op.execute(geom, 5, true, null);
        p = (Polyline) geom1;
        points = p.getCoordinates2D();
        assertTrue(points.length == 2);
        assertTrue(points[0].x == 0 && points[0].y == 0);
        assertTrue(points[1].x == 0 && points[1].y == 10);
    }

    // A densified rectangle generalizes back to its 4 corners; a deviation
    // larger than the shape collapses it (0 points with removal=true,
    // a degenerate 3-point remnant with removal=false).
    @Test
    public static void testLargeDeviation() {
        {
            Polygon input_polygon = new Polygon();
            input_polygon
                    .addEnvelope(Envelope2D.construct(0, 0, 20, 10), false);
            Geometry densified_geom = OperatorDensifyByLength.local().execute(
                    input_polygon, 1, null);
            Geometry geom = OperatorGeneralize.local().execute(densified_geom,
                    1, true, null);
            int pc = ((MultiPath) geom).getPointCount();
            assertTrue(pc == 4);
            Geometry large_dev1 = OperatorGeneralize.local().execute(
                    densified_geom, 40, true, null);
            int pc1 = ((MultiPath) large_dev1).getPointCount();
            assertTrue(pc1 == 0);
            Geometry large_dev2 = OperatorGeneralize.local().execute(
                    densified_geom, 40, false, null);
            int pc2 = ((MultiPath) large_dev2).getPointCount();
            assertTrue(pc2 == 3);
        }
    }
}
|
// Copyright 2019 Ross Light
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// https://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
// SPDX-License-Identifier: Apache-2.0
package graphql_test
import (
"context"
"fmt"
"log"
"zombiezen.com/go/graphql-server/graphql"
)
// Query is the GraphQL object read from the server.
// Exported fields and methods are matched (case-insensitively) against the
// fields declared on the schema's Query type.
type Query struct {
	// GenericGreeting is a no-arguments field that is read directly.
	GenericGreeting string
}
// Greet is a field that takes arguments. It resolves Query.greet by
// formatting a greeting for the requested subject.
func (q *Query) Greet(args *GreetArgs) (string, error) {
	return fmt.Sprintf("Hello, %s!", args.Subject), nil
}
// GreetArgs are arguments passed to the Query.greet field. The arguments are
// validated through GraphQL's type system and converted into this struct before
// the Greet method is called.
type GreetArgs struct {
	// Subject corresponds to the non-null `subject: String!` argument.
	Subject string
}
// Example demonstrates the full round trip: parse a schema, bind it to a Go
// value, execute a query with variables, and read scalar results.
// (The trailing "// Output:" comment is load-bearing: `go test` compares it
// against stdout.)
func Example() {
	// Parse the GraphQL schema to establish type information. The schema
	// is usually a string constant in your Go server or loaded from your
	// server's filesystem.
	schema, err := graphql.ParseSchema(`
		type Query {
			genericGreeting: String!
			greet(subject: String!): String!
		}
	`, nil)
	if err != nil {
		log.Fatal(err)
	}
	// A *graphql.Server binds a schema to a Go value. The structure of
	// the Go type should reflect the GraphQL query type.
	queryObject := &Query{GenericGreeting: "Hiya!"}
	server, err := graphql.NewServer(schema, queryObject, nil)
	if err != nil {
		log.Fatal(err)
	}
	// Once created, a *graphql.Server can execute requests.
	response := server.Execute(context.Background(), graphql.Request{
		Query: `
			query($subject: String!) {
				genericGreeting
				greet(subject: $subject)
			}
		`,
		Variables: map[string]graphql.Input{
			"subject": graphql.ScalarInput("World"),
		},
	})
	// GraphQL responses can be serialized however you want. Typically,
	// you would use JSON, but this example displays the results directly.
	if len(response.Errors) > 0 {
		log.Fatal(response.Errors)
	}
	fmt.Println(response.Data.ValueFor("genericGreeting").Scalar())
	fmt.Println(response.Data.ValueFor("greet").Scalar())
	// Output:
	// Hiya!
	// Hello, World!
}
|
import React from 'react';
import { Link } from 'react-router-dom';
import styles from './Pagination.module.css'
const Pagination = ({current, total, fetch, currentQuery}) => {
const prevPath = () => {
if (currentQuery) {
return `/gallery?q=${currentQuery}&page=${parseInt(current - 1)}`
} else {
return `/gallery?page=${parseInt(current - 1)}`
}
}
const nextPath = () => {
if (currentQuery) {
return `/gallery?q=${currentQuery}&page=${parseInt(current + 1)}`
} else {
return `/gallery?page=${parseInt(current + 1)}`
}
}
return (
<div className={styles.container}>
{current !== 1 &&
<Link to={prevPath()} className={styles.pageLink} onClick={() => fetch(currentQuery, parseInt(current - 1))}>
Previous
</Link>
}
{current !== total &&
<Link to={nextPath()} className={styles.pageLink} onClick={() => fetch(currentQuery, parseInt(current + 1))}>
Next
</Link>
}
</div>
)
}
export default Pagination |
---
title: Dynamic Programming
tags: algorithm ds
key: page-dp
cover: /assets/cover/algorithm.png
mathjax: true
mathjax_autoNumber: true
---
## 문제풀이 요령
* 재귀로 여러번 써야되는 것을 memoization 기법으로 계산 수를 줄이는데 효과적이다. 따라서 중복 계산이 많은 문제(sub array)에서 쓰면 좋다.
* 이 유형은 가장 흔한 유형이기 때문에 한 가지 패턴을 정해두고 항상 같은 형태로 구현해버리면 작성도 쉽고 버그 찾는 것도 쉬워지니 자신만의 패턴을 만드는 것이 좋다.
* continuous sub array 문제라면 일단 kadane - 지금까지 축적해온 합을 이용한 알고리즘을 이용해서 풀어본다.
|
<?php
use App\Models\Management\Origin;
use Illuminate\Database\Seeder;
class OriginSeeder extends Seeder
{
    /**
     * Run the database seeds.
     *
     * Creates one Origin record per known document origin. The nine
     * copy-pasted create/save stanzas were replaced with a data-driven
     * loop; records are still saved in the same order.
     *
     * @return void
     */
    public function run()
    {
        // Every origin name the application should be seeded with.
        $names = [
            'Cmdo. Btl',
            'Subcmdo. Btl',
            'E.M. 1º BPM',
            'CI',
            'ARI',
            'ALI',
            'MP',
            'Judiciário',
            'GAECO',
        ];

        foreach ($names as $name) {
            $origin = new Origin();
            $origin->name = $name;
            $origin->save();
        }
    }
}
|
import axios from 'axios';
import { config } from './config';
/**
 * HTTP Client - Axios Instance with predefined base API url.
 *
 * All requests made through this client resolve relative to the Bringg API
 * url taken from the application config (`config.bringg.apiUrl`).
 */
export const httpClient = axios.create({
  baseURL: config.bringg.apiUrl,
});
|
module Coroutine
  # This module is an acts_as extension that teaches an ActiveRecord model how to provide a reference
  # to the instance owned by the current thread through a class method.
  #
  # The module includes class methods and instance methods that simplify the process of storing the
  # current reference in a thread safe manner.
  #
  # Because acts_as_current relies on the Thread.current hash, it should probably be used sparingly.
  #
  module ActsAsCurrent
    # This module provides the base functionality for the acts_as_current extension. It declares the
    # class method acts_as_current and handles including all necessary sub modules when the class
    # method is invoked.
    #
    module Base
      # Ruby include hook: when Base is included in an ActiveRecord class,
      # define the opt-in `acts_as_current` class method on it.
      def self.included(klass) #:nodoc:
        klass.class_eval do
          # This class method extends an ActiveRecord class with behavior appropriate for providing a
          # current instance to the request.
          #
          # Including this method in a model definition adds two public class methods and two public
          # instance methods to the model. See modules below for method defintions. Here's a simple
          # skeleton that demonstrates the resulting interface.
          #
          #
          #   class MyClass < ActiveRecord::Base
          #
          #     def self.current
          #     end
          #
          #     def self.current=(instance)
          #     end
          #
          #     def current?
          #     end
          #
          #     def current!
          #     end
          #
          #   end
          #
          def self.acts_as_current
            # mixin methods (ClassMethods/InstanceMethods are defined elsewhere
            # in this library)
            extend Coroutine::ActsAsCurrent::ClassMethods
            include Coroutine::ActsAsCurrent::InstanceMethods
          end
        end
      end
    end
  end
end
package log
import "testing"
// TestLogInfo is a smoke test: it only checks that Info does not panic.
func TestLogInfo(t *testing.T) {
	Info("hello info")
}
// TestLogDebug is a smoke test: it only checks that Debug does not panic.
func TestLogDebug(t *testing.T) {
	Debug("hello debug")
}
// TestLogWaring is a smoke test for the package's Waring function.
// NOTE(review): "Waring" mirrors the package API's spelling — presumably a
// typo for "Warning"; fixing it would require renaming the exported function
// in the log package itself.
func TestLogWaring(t *testing.T) {
	Waring("hello Waring")
}
// TestLogError is a smoke test: Error accepts variadic arguments here.
func TestLogError(t *testing.T) {
	Error("hello", "Error")
}
|
// UMD-style wrapper: defines the `_u` utility object and exports it for
// CommonJS, AMD, and browser-global environments.
(function(root) {
  // Method declarations
  var _u = {
    /**
     * Conditionally throw an error.
     * @param condition truthy value that triggers the throw
     * @param message   optional error text (defaults to 'Error')
     */
    throwIf: function(condition, message) {
      message = message === undefined ? 'Error' : message;
      if (condition) {
        throw new Error(message);
      }
    }
  };
  // Aliases
  _u.raiseIf = _u.throwIf;
  // Exporting
  // CommonJS export: replace module.exports when available; the
  // `exports._u` assignment below runs unconditionally either way
  // (the `if` guards only the single following statement).
  if (typeof exports !== 'undefined') {
    if (typeof module !== 'undefined' && module.exports)
      module.exports = _u;
    exports._u = _u;
  }
  // AMD export
  if (typeof define === 'function' && define.amd) {
    define('underscore.utils', [], function(){ return _u; });
  }
  // Browser global: exposed as both `_.utils` and `_.u`.
  root._ = root._ || {};
  root._.utils = root._.u = _u;
}(this));
|
{-# Language MagicHash #-}

-- | Core 'MkStream' machinery for the @PointL@ index type: contexts,
-- running indices, and Inside/Outside/Complement instances for both the
-- single-tape and multi-tape cases. The unboxed @Int#@ arithmetic and the
-- placement of guards are fusion-critical — see the NOTE comments below.
module ADPfusion.PointL.Core where

import GHC.Generics (Generic, Generic1)
import Control.DeepSeq
import Data.Proxy
import Data.Vector.Fusion.Stream.Monadic (singleton,map,filter,Step(..))
import Debug.Trace
import Prelude hiding (map,filter)
import GHC.Exts
import GHC.TypeLits
import Data.PrimitiveArray hiding (map)
import ADPfusion.Core.Classes
import ADPfusion.Core.Multi

-- * Contexts, and running indices.

type instance InitialContext (PointL I) = IStatic 0

type instance InitialContext (PointL O) = OStatic 0

type instance InitialContext (PointL C) = Complement

-- Running index for Inside: a single position.
newtype instance RunningIndex (PointL I) = RiPlI Int
  deriving Generic
  deriving newtype NFData

-- Running index for Outside: a pair of positions.
data instance RunningIndex (PointL O) = RiPlO !Int !Int
  deriving (Generic)

newtype instance RunningIndex (PointL C) = RiPlC Int
  deriving (Generic)

-- * Inside

-- ** Single-tape
--
-- TODO should IStatic do these additional control of @I <=# d@? cf. Epsilon Local.

-- | Static single-tape start: emit exactly one element when the index is in
-- @[0, min d u]@, otherwise the guard makes the stream empty.
instance
  ( Monad m
  , KnownNat d
  )
  ⇒ MkStream m (IStatic d) S (PointL I) where
  mkStream Proxy S grd (LtPointL (I# u)) (PointL (I# i))
    = staticCheck# ( grd `andI#` (i >=# 0#) `andI#` (i <=# d) `andI#` (i <=# u) )
    . singleton . ElmS $ RiPlI 0
    where (I# d) = fromIntegral $ natVal (Proxy ∷ Proxy d)
  {-# Inline mkStream #-}

-- | Variable single-tape start: like 'IStatic' but without the @i <= d@ bound.
instance
  ( Monad m
  , KnownNat d
  )
  ⇒ MkStream m (IVariable d) S (PointL I) where
  mkStream Proxy S grd (LtPointL (I# u)) (PointL (I# i))
    = staticCheck# (grd `andI#` (i >=# 0#) `andI#` (i <=# u) )
    . singleton . ElmS $ RiPlI 0
  {-# Inline mkStream #-}

-- ** Multi-tape

-- | Multi-tape static case: fold this tape's guard into @grd@ and recurse on
-- the remaining tapes, appending a zero running index for this tape.
instance
  ( Monad m
  , MkStream m ps S is
  , KnownNat d
  ) ⇒ MkStream m (ps:.IStatic d) S (is:.PointL I) where
  mkStream Proxy S grd (lus:..LtPointL (I# u)) (is:.PointL (I# i))
    = map (\(ElmS e) -> ElmS $ e :.: RiPlI 0)
    $ mkStream (Proxy ∷ Proxy ps) S (grd `andI#` (i >=# 0#) `andI#` (i <=# d) `andI#` (i <=# u)) lus is
    -- $ mkStream (Proxy ∷ Proxy ps) S (grd `andI#` (i >=# 0#)) lus is
    -- NOTE we should optimize which parameters are actually required, the gain is about 10% on the
    -- NeedlemanWunsch algorithm
    where (I# d) = fromIntegral $ natVal (Proxy ∷ Proxy d)
  {-# Inline mkStream #-}

instance
  ( Monad m
  , MkStream m ps S is
  , KnownNat d
  ) ⇒ MkStream m (ps:.IVariable d) S (is:.PointL I) where
  mkStream Proxy S grd (lus:..LtPointL (I# u)) (is:.PointL (I# i))
    = map (\(ElmS e) -> ElmS $ e :.: RiPlI 0)
    $ mkStream (Proxy ∷ Proxy ps) S (grd `andI#` (i >=# 0#) `andI#` (i <=# u)) lus is
    -- $ mkStream (Proxy ∷ Proxy ps) S (grd `andI#` (i >=# 0#)) lus is
  {-# Inline mkStream #-}

-- * Outside

-- ** Single-tape

-- | Outside static case: requires @i + d == u@; the running index carries
-- both boundaries of the outside region.
instance
  ( Monad m
  , KnownNat d
  ) ⇒ MkStream m (OStatic d) S (PointL O) where
  mkStream Proxy S grd (LtPointL (I# u)) (PointL (I# i))
    = staticCheck# (grd `andI#` (i >=# 0#) `andI#` (i +# d ==# u))
    -- ??? `andI#` (u ==# i)
    . singleton . ElmS $ RiPlO (I# i) (I# (i +# d))
    where (I# d) = fromIntegral $ natVal (Proxy ∷ Proxy d)
  {-# Inline mkStream #-}

-- NOTE(review): unlike the other instances, this one binds the stream
-- constructor argument as a variable @s@ rather than matching @S@ —
-- presumably equivalent here, but worth confirming against the other cores.
instance
  ( Monad m
  , KnownNat d
  ) ⇒ MkStream m (OFirstLeft d) S (PointL O) where
  mkStream Proxy s grd (LtPointL (I# u)) (PointL (I# i))
    = staticCheck# (grd `andI#` (i >=# 0#) `andI#` (i +# d <=# u))
    . singleton . ElmS $ RiPlO (I# i) (I# (i +# d))
    where (I# d) = fromIntegral $ natVal (Proxy ∷ Proxy d)
  {-# Inline mkStream #-}

-- ** Multi-tape

instance
  ( Monad m
  , MkStream m ps S is
  , KnownNat d
  ) ⇒ MkStream m (ps:.OStatic d) S (is:.PointL O) where
  mkStream Proxy S grd (lus:..LtPointL (I# u)) (is:.PointL (I# i))
    = map (\(ElmS zi) -> ElmS $ zi :.: RiPlO (I# i) (I# (i +# d)))
    -- ??? `andI#` (u ==# i)
    $ mkStream (Proxy ∷ Proxy ps) S (grd `andI#` (i >=# 0#) `andI#` (i +# d ==# u)) lus is
    where (I# d) = fromIntegral $ natVal (Proxy ∷ Proxy d)
  {-# Inline mkStream #-}

instance
  ( Monad m
  , MkStream m ps S is
  , KnownNat d
  ) ⇒ MkStream m (ps:.OFirstLeft d) S (is:.PointL O) where
  mkStream Proxy S grd (lus:..LtPointL (I# u)) (is:.PointL (I# i))
    = map (\(ElmS zi) -> ElmS $ zi :.: RiPlO (I# i) (I# (i +# d)))
    $ mkStream (Proxy ∷ Proxy ps) S (grd `andI#` (i >=# 0#) `andI#` (i +# d <=# u)) lus is
    where (I# d) = fromIntegral $ natVal (Proxy ∷ Proxy d)
  {-# Inline mkStream #-}

-- * Complemented

-- ** Single-tape

-- Complement instances are unimplemented placeholders.
instance
  ( Monad m
  ) ⇒ MkStream m Complement S (PointL C) where
  mkStream Proxy S grd (LtPointL (I# u)) (PointL (I# i))
    = error "write me" -- staticCheck# (grd `andI#` (i >=# 0#) `andI#` (i <=# u)) . singleton . ElmS $ RiPlC (I# i)
  {-# Inline mkStream #-}

-- ** Multi-tape

instance
  ( Monad m
  , MkStream m ps S is
  ) ⇒ MkStream m (ps:.Complement) S (is:.PointL C) where
  mkStream Proxy S grd (lus:..LtPointL (I# u)) (is:.PointL (I# i))
    = error "write me"
    -- -- = map (\(ElmS zi) → ElmS $ zi :.: RiPlC (I# i))
    -- -- $ mkStream (Proxy ∷ Proxy ps) S (grd `andI#` (i >=# 0#) `andI#` (i <=# u)) lus is
  {-# Inline mkStream #-}

-- * Table index modification

instance (MinSize minSize) ⇒ TableStaticVar pos minSize u (PointL I) where
  -- NOTE this code used to destroy fusion. If we inline tableStreamIndex
  -- very late (after 'mkStream', probably) then everything works out.
  tableStreamIndex Proxy minSz _upperBound (PointL j) = PointL $ j - minSize minSz
  {-# INLINE [0] tableStreamIndex #-}

instance (MinSize minSize) ⇒ TableStaticVar pos minSize u (PointL O) where
  tableStreamIndex Proxy minSz _upperBound (PointL j) = PointL $ j - minSize minSz
  {-# INLINE [0] tableStreamIndex #-}

instance (MinSize minSize) ⇒ TableStaticVar pos minSize u (PointL C) where
  tableStreamIndex Proxy minSz _upperBound (PointL k) = PointL $ k - minSize minSz
  {-# INLINE [0] tableStreamIndex #-}
|
#!/usr/bin/env puma
# Puma configuration for the try.redis deployment.

# Environment is taken from RACK_ENV (set by the init system / deploy tooling).
stage = ENV['RACK_ENV']

# Runtime files (pid, sockets, state) live under the capistrano-style shared dir.
shared_path = '/home/tryredis/try.redis/shared'
puma_pid = "#{shared_path}/pids/puma.pid"
puma_sock = "unix://#{shared_path}/sockets/puma.sock"
puma_control = "unix://#{shared_path}/sockets/pumactl.sock"
puma_state = "#{shared_path}/sockets/puma.state"

directory '/home/tryredis/try.redis/current'
rackup 'config.ru'
environment stage
# Foreground process: daemonization is left to the service manager.
daemonize false
pidfile puma_pid
state_path puma_state
# 2-4 threads per worker.
threads 2, 4
# Serve over a unix socket (fronted by the web server) and expose pumactl.
bind puma_sock
activate_control_app puma_control
|
/*
* Copyright 2021 Peter Kenji Yamanaka
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.pyamsoft.tickertape.alert.workmanager.worker
import android.content.Context
import androidx.annotation.CheckResult
import androidx.work.CoroutineWorker
import androidx.work.Data
import androidx.work.WorkerParameters
import com.pyamsoft.tickertape.alert.inject.BaseInjector
import com.pyamsoft.tickertape.alert.params.BaseParameters
import com.pyamsoft.tickertape.alert.runner.WorkResult
import kotlinx.coroutines.Dispatchers
import kotlinx.coroutines.withContext
import timber.log.Timber
/**
 * Base class for WorkManager workers in the alert subsystem.
 *
 * Subclasses only supply dependency injection ([getInjector]) and parameter
 * decoding ([getParams]); the execution flow and result mapping are handled
 * here once, on [Dispatchers.Default].
 */
internal abstract class BaseWorker<P : BaseParameters>
protected constructor(context: Context, params: WorkerParameters) :
    CoroutineWorker(context.applicationContext, params) {

    // Maps the injector's WorkResult onto WorkManager's Result.
    final override suspend fun doWork(): Result =
        withContext(context = Dispatchers.Default) {
            val injector = getInjector(applicationContext)
            return@withContext when (val result =
                injector.execute(id, tags.toSet(), getParams(inputData))) {
                is WorkResult.Success -> {
                    Timber.d("Work succeeded ${result.id}")
                    Result.success()
                }
                is WorkResult.Cancel -> {
                    Timber.w("Work cancelled: ${result.id}")
                    // Return success so that the work chain continues, even though the work was cancelled
                    Result.success()
                }
                is WorkResult.Failure -> {
                    Timber.e("Work failed: ${result.id}")
                    Result.failure()
                }
            }
        }

    /** Supplies the injector that actually performs the work. */
    @CheckResult protected abstract fun getInjector(context: Context): BaseInjector<P>

    /** Decodes the typed work parameters from WorkManager's input [Data]. */
    @CheckResult protected abstract fun getParams(data: Data): P
}
|
<?php
namespace App;
use Illuminate\Database\Eloquent\Model;
class Profile extends Model
{
    /**
     * No attributes are guarded against mass assignment; input is
     * validated in the controller before it reaches the model.
     */
    protected $guarded = [];

    /**
     * The user that owns this profile, available as $profile->user.
     */
    public function user()
    {
        return $this->belongsTo(User::class);
    }

    /**
     * Users associated with this profile through the follow relation.
     */
    public function following()
    {
        return $this->belongsToMany(User::class);
    }

    /**
     * Path to the profile image, falling back to the bundled
     * placeholder when no image has been uploaded.
     */
    public function profileImage()
    {
        if ($this->image) {
            return '/storage/' . $this->image;
        }

        return "/images/profile_pic.png";
    }
}
|
/* Allow chai assertions which don't end in a function call, e.g. expect(thing).to.be.undefined */
/* tslint:disable:no-unused-expression */
import { expect } from 'chai'
import { Validator } from '@hmcts/class-validator'
import { ExpertEvidence, ValidationErrors } from 'directions-questionnaire/forms/models/expertEvidence'
import { expectValidationError } from 'test/app/forms/models/validationUtils'
import { ValidationErrors as GlobalValidationErrors } from 'forms/validation/validationErrors'
import { YesNoOption } from 'models/yesNoOption'
import { ExceptionalCircumstances } from 'directions-questionnaire/forms/models/exceptionalCircumstances'
describe('ExpertEvidence', () => {
  // Validation rules: an answer is mandatory; a "yes" answer additionally
  // requires the whatToExamine free-text reason.
  describe('validation', () => {
    const validator: Validator = new Validator()
    it('should reject when null', () => {
      const errors = validator.validateSync(new ExpertEvidence(null, null))
      expect(errors).to.not.be.empty
      expectValidationError(errors, GlobalValidationErrors.YES_NO_REQUIRED)
    })
    it('should reject with no expert evidence option', () => {
      const errors = validator.validateSync(new ExpertEvidence())
      expect(errors).to.not.be.empty
      expectValidationError(errors, GlobalValidationErrors.YES_NO_REQUIRED)
    })
    it('should reject expert evidence with yes option and no whatToExamine', () => {
      const errors = validator.validateSync(new ExpertEvidence(YesNoOption.YES))
      expect(errors).to.not.be.empty
      expectValidationError(errors, ValidationErrors.REASON_REQUIRED)
    })
    it('should accept expert evidence with option and what to expect reason', () => {
      const errors = validator.validateSync(new ExpertEvidence(YesNoOption.YES, 'bank statements'))
      expect(errors).to.be.empty
    })
  })
  // deserialize() round-trips plain objects back into ExpertEvidence instances.
  describe('deserialize', () => {
    it('should return an instance initialised with defaults for undefined', () => {
      expect(new ExpertEvidence().deserialize(undefined)).to.deep.equal(new ExpertEvidence())
    })
    it('should deserialize expert evidence to return instance of expert evidence', () => {
      const expertEvidence: ExpertEvidence = new ExpertEvidence(YesNoOption.YES, 'bank statements')
      expect(expertEvidence.deserialize(expertEvidence)).to.be.instanceOf(ExpertEvidence)
    })
  })
  // fromObject() builds instances from raw form payloads.
  describe('from object', () => {
    it('should return instance of expert evidence when passed ExpertEvidence object - Yes', () => {
      const yes: YesNoOption = YesNoOption.YES
      const whatToExamine: string = 'bank statements'
      expect(ExpertEvidence.fromObject({ yes, whatToExamine })).to.be.instanceOf(ExpertEvidence)
    })
    it('should return instance of expert evidence when passed ExpertEvidence object - No', () => {
      const no: YesNoOption = YesNoOption.NO
      expect(ExpertEvidence.fromObject({ no })).to.be.instanceOf(ExpertEvidence)
    })
  })
describe('isCompleted', () => {
it('should be marked not completed when no option is present', () => {
const expertEvidence: ExpertEvidence = new ExpertEvidence(undefined)
expect(expertEvidence.isCompleted()).to.be.false
})
it('should be marked complete when no option is selected', () => {
const expertEvidence: ExpertEvidence = new ExpertEvidence(YesNoOption.NO)
expect(expertEvidence.isCompleted()).to.be.true
})
it('Should be marked not complete when the yes option is selected and no reason is entered', () => {
const expertEvidence: ExpertEvidence = new ExpertEvidence(YesNoOption.YES)
expect(expertEvidence.isCompleted()).to.be.false
})
it('Should be marked complete when the yes option is selected and what to examine is present', () => {
const exceptionalCircumstances: ExceptionalCircumstances =
new ExceptionalCircumstances(YesNoOption.YES, 'bank statements')
expect(exceptionalCircumstances.isDefendantCompleted()).to.be.true
})
})
})
|
import fs from 'fs';
import zlib from 'zlib';
import readline from 'readline';
import inchiwasm from './lib/inchi-wasm.js';
import inchidylib from './lib/inchi-dylib.js';
// Select the InChI implementation under benchmark from the CLI argument.
let runinchi;
if (process.argv[2] == 'wasm') {
  console.log('running wasm');
  runinchi = inchiwasm;
} else if (process.argv[2] == 'dylib') {
  console.log('running dylib');
  runinchi = inchidylib;
} else {
  throw Error('arg must be one of \'wasm\' or \'dylib\'');
}

console.log('building records...');
const path = './data/SureChEMBL_20210101_27.sdf.gz';
// Stream the gzipped SDF file line by line.
const reader = readline.createInterface({
  input: fs.createReadStream(path).pipe(zlib.createGunzip())
});

// Each record pairs a molfile with its expected InChI (`target`).
const records = [ ];
let molfile = '';
let target = '';
let captureInchi = false;
reader.on('line', line => {
  if (captureInchi) {
    // The line following "> <InChI>" holds the reference InChI string.
    target = line;
    captureInchi = false;
  } else if (line === '$$$$') {
    // SDF record terminator: commit the accumulated record and reset.
    records.push({ molfile, target });
    molfile = '';
    target = '';
  } else if (line == '> <InChI>') {
    captureInchi = true;
  } else if (!molfile.endsWith('M END')) {
    // Accumulate molfile lines; stop after the "M END" marker so that
    // property lines between "M END" and the InChI tag are skipped.
    // (The final "M END" is intentionally appended without a newline.)
    molfile += line;
    if (line !== 'M END') {
      molfile += '\n';
    }
  }
});

reader.on('close', async () => {
  console.log('benchmarking...');
  const instance = await runinchi();
  let misses = [ ];   // indices whose computed InChI differs from the reference
  let errors = [ ];   // indices whose conversion threw
  let count = 0;
  const start = Date.now();
  for (const [ i, { molfile, target } ] of records.entries()) {
    try {
      const inchi = instance.molfileToInChI(molfile);
      if (inchi !== target) {
        misses.push(i);
      }
    } catch (e) {
      errors.push(i);
    }
    count += 1;
  }
  console.log('time', `${Date.now() - start} ms`);
  console.log('count:', count);
  console.log('misses:', `[${misses.join(', ')}]`);
  console.log('errors:', `[${errors.join(', ')}]`);
});
|
package com.nokia.library.nokiainnovativeproject.entities;
import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
import lombok.*;
import org.springframework.data.jpa.domain.support.AuditingEntityListener;
import javax.persistence.*;
import javax.validation.constraints.Email;
import javax.validation.constraints.NotBlank;
import javax.validation.constraints.NotNull;
import javax.validation.constraints.Size;
import java.io.Serializable;
import java.util.List;
@Entity
// Quoted because "User" is a reserved word in several SQL dialects.
@Table(name = "\"User\"")
@Data
// Equality/hash is based solely on the @EqualsAndHashCode.Include'd id field.
@EqualsAndHashCode(onlyExplicitlyIncluded = true)
@EntityListeners(AuditingEntityListener.class)
// Keeps Hibernate lazy-loading proxies out of JSON serialization.
@JsonIgnoreProperties({"hibernateLazyInitializer", "handler"})
public class User implements Serializable {

    // Surrogate primary key; no setter so it can only be assigned by JPA.
    @Id
    @Setter(AccessLevel.NONE)
    @GeneratedValue(strategy = GenerationType.IDENTITY)
    @EqualsAndHashCode.Include
    private Long id;

    @Size(min = 3, max = 30, message = "User's name must be 3-30 characters long")
    @NotBlank(message = "User's name can't be null and can't contain whitespace")
    private String firstName;

    @Size(min = 3, max = 30, message = "User's surname must be 3-30 characters long")
    @NotBlank(message = "User's surname can't be null and can't contain whitespace")
    private String lastName;

    @Email(message = "Email should be valid")
    @NotBlank(message = "Email can't be empty")
    @Size(max = 255, message = "The maximum size of the email is 255")
    private String email;

    // No Lombok getter: the (hashed) password must not leak via accessors/JSON.
    @Getter(AccessLevel.NONE)
    @NotNull(message = "Password can't be null")
    @Size(min = 7, max = 255, message = "Password must be 7-255 characters long")
    private String password;

    // Owned address; REMOVE is deliberately excluded from the cascade set.
    @OneToOne(cascade = {
            CascadeType.MERGE,
            CascadeType.PERSIST},
            fetch = FetchType.LAZY)
    @JoinColumn(name = "address_id")
    private Address address;

    // Books currently associated with (e.g. borrowed by) this user.
    @OneToMany(cascade = {
            CascadeType.MERGE,
            CascadeType.PERSIST,},
            fetch = FetchType.LAZY)
    @JoinColumn(name = "user_books")
    private List<Book> books;

    // Role assignments through the user_roles join table.
    @ManyToMany(fetch = FetchType.LAZY,
            cascade = {CascadeType.PERSIST,
                    CascadeType.MERGE})
    @JoinTable(name = "user_roles",
            joinColumns = @JoinColumn(name = "user_id"),
            inverseJoinColumns = @JoinColumn(name = "roles_id"))
    private List<Role> roles;

    // Account activation flag; nullable Boolean — presumably null means
    // "not yet decided", TODO confirm against the service layer.
    private Boolean isAccountActive;
}
import { IStringKeyedCollection } from "./i-string-keyed-collection";
export class StringKeyedCollection<TValue> implements IStringKeyedCollection<TValue> {
private _items: { [index: string]: TValue } = {};
private _count: number = 0;
public containsKey(key: string): boolean {
return this._items.hasOwnProperty(key);
}
public count(): number {
return this._count;
}
public add(key: string, value: TValue): void {
if (!this.containsKey(key)) {
this._count++;
}
this._items[key] = value;
}
public addOrUpdate(
key: string,
value: TValue,
updateFunc: (k: string, v: TValue) => TValue): TValue {
if (this.containsKey(key)) {
this._items[key] = updateFunc(key, this.item(key));
} else {
this.add(key, value);
}
return this.item(key);
}
public remove(key: string): TValue {
let val: TValue = this._items[key];
if (this.containsKey(key)) {
delete this._items[key];
this._count--;
}
return val;
}
public item(key: string): TValue {
return this._items[key];
}
public keys(): string[] {
let keys: string[] = [];
for (let property in this._items) {
if (this._items.hasOwnProperty(property)) {
keys.push(property);
}
}
return keys;
}
public values(): TValue[] {
let values: TValue[] = [];
for (let property in this._items) {
if (this._items.hasOwnProperty(property)) {
values.push(this._items[property]);
}
}
return values;
}
} |
package day6.exercise12;
import java.util.Calendar;
public class Exercise12 {
    /**
     * Prints the current date/time and the actual maximum values of several
     * Calendar fields (year, month, week-of-year, day-of-month) for the
     * current locale and instant.
     */
    public static void main(String[] args) {
        Calendar calendar = Calendar.getInstance();
        System.out.println();
        System.out.println("\nCurrent Date and Time:" + calendar.getTime());
        // Query each field's actual maximum directly at the point of printing.
        System.out.println("Actual Maximum Year: " + calendar.getActualMaximum(Calendar.YEAR));
        System.out.println("Actual Maximum Month: " + calendar.getActualMaximum(Calendar.MONTH));
        System.out.println("Actual Maximum Week of Year: " + calendar.getActualMaximum(Calendar.WEEK_OF_YEAR));
        System.out.println("Actual Maximum Date: " + calendar.getActualMaximum(Calendar.DATE) + "\n");
        System.out.println();
    }
}
|
package agh.queueFreeShop.exception;
/**
* Used when explicitly throwing 422 exceptions.
*/
public class UnprocessableEntityException extends RuntimeException {
    /**
     * @param message human-readable reason returned with the 422 response
     */
    public UnprocessableEntityException(String message) {
        super(message);
    }
}
|
/*
* Copyright 2020-2021 Dynatrace LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.dynatrace.dynahist.demo;
import static org.junit.Assert.assertEquals;
import com.dynatrace.dynahist.Histogram;
import com.dynatrace.dynahist.layout.Layout;
import com.dynatrace.dynahist.layout.LogLinearLayout;
import com.dynatrace.dynahist.layout.LogQuadraticLayout;
import com.dynatrace.dynahist.util.PrintUtil;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.DataInput;
import java.io.DataInputStream;
import java.io.DataOutput;
import java.io.DataOutputStream;
import java.io.IOException;
import org.junit.Test;
/** Simple example of using DynaHist */
public class HistogramUsage {
/**
* The {@link Layout} defines the bins for a {@link Histogram} and maps a given value to a
* histogram bin index. {@link LogLinearLayout#create(double, double, double, double)} creates a
* {@link Layout} Choose {@link LogLinearLayout}, if speed is more important than memory
* efficiency. {@link LogQuadraticLayout#create(double, double, double, double)} creates a {@link
* Layout} Choose {@link LogQuadraticLayout}, if memory efficiency is more important than speed.
* LogLinearLayout and LogQuadraticLayout guarantee that the bins cover a given interval and that
* the bin widths either satisfy an absolute bin width limit or a relative bin width limit.
*
* <p>{@link Histogram#createDynamic(Layout)} creates a dynamic {@link Histogram} {@link
* Histogram#createStatic(Layout)} creates a static {@link Histogram}.
*/
    @Test
    public void createHistogram() {
        // Bins: absolute error <= 1e-5 or relative error <= 1e-2 over [-1e6, 1e6].
        Layout layout = LogQuadraticLayout.create(1e-5, 1e-2, -1e6, 1e6);
        Histogram histogram = Histogram.createDynamic(layout);
        // A fresh histogram is empty: no counts, min/max at their identities.
        assertEquals(
                histogram.getClass().getSimpleName()
                        + " [layout="
                        + layout
                        + ", underFlowCount=0, overFlowCount=0, totalCount=0, min=Infinity, max=-Infinity, counts={}]",
                histogram.toString());
    }
/** Add values using {@link Histogram#addValue(double)} adds a given value to the histogram. */
@Test
public void addSingleValue() {
Layout layout = LogQuadraticLayout.create(1e-5, 1e-2, -1e6, 1e6);
Histogram histogram = Histogram.createDynamic(layout);
histogram.addValue(-5.5);
assertEquals(
PrintUtil.print(histogram),
"-5.50000000000000000E+00 - -5.50000000000000000E+00 : 1\n");
}
/**
* Add values with multiplicity using {@link Histogram#addValue(double, long) } adds a given value
* to the histogram with a given multiplicity.
*/
@Test
public void addValueWithMultiplicity() {
Layout layout = LogQuadraticLayout.create(1e-5, 1e-2, -1e6, 1e6);
Histogram histogram = Histogram.createDynamic(layout);
histogram.addValue(-5.5, 5);
assertEquals(
PrintUtil.print(histogram),
"-5.50000000000000000E+00 - -5.50000000000000000E+00 : 5\n");
}
    /**
     * Get quantile values using {@link Histogram#getQuantile(double)} returns an estimate for the
     * quantile value. p = 0.5 returns median.
     */
    @Test
    public void getMedianSingleValue() {
        Layout layout = LogQuadraticLayout.create(1e-5, 1e-2, -1e6, 1e6);
        Histogram histogram = Histogram.createDynamic(layout);
        histogram.addValue(5.5);
        double median = histogram.getQuantile(0.5);
        // Tolerance mirrors the layout's absolute/relative bin-width limits.
        assertEquals(5.5, median, Math.max(1e-5, 5.5 * 1e-2));
    }
    @Test
    public void getMedianMultipleValues() {
        Layout layout = LogQuadraticLayout.create(1e-5, 1e-2, -1e6, 1e6);
        Histogram histogram = Histogram.createDynamic(layout);
        // 0..100 inclusive, each with multiplicity 5 -> median is 50.
        for (int i = 0; i <= 100; i++) {
            histogram.addValue(i, 5);
        }
        assertEquals(50, histogram.getQuantile(0.5), Math.max(1e-5, 50 * 1e-2));
    }
    /**
     * Merge histograms using {@link Histogram#addHistogram(Histogram)}. If the given histograms have
     * a different layout, this operation may lead to an unwanted loss of precision.
     */
    @Test
    public void mergeHistogram() {
        Layout layout1 = LogQuadraticLayout.create(1e-5, 1e-2, -1e6, 1e6);
        Layout layout2 = LogQuadraticLayout.create(1e-5, 1e-2, -1e6, 1e6);
        Histogram histogram1 = Histogram.createDynamic(layout1);
        Histogram histogram2 = Histogram.createDynamic(layout2);
        Histogram histogramTotal = Histogram.createDynamic(layout1);
        histogram1.addValue(-55.5);
        histogram1.addValue(100);
        histogram2.addValue(5);
        histogram2.addValue(-7.5);
        // histogramTotal receives all four values directly and serves as the
        // expected result of merging histogram2 into histogram1.
        histogramTotal.addValue(-55.5);
        histogramTotal.addValue(100);
        histogramTotal.addValue(5);
        histogramTotal.addValue(-7.5);
        histogram1.addHistogram(histogram2);
        assertEquals(histogramTotal, histogram1);
        assertEquals(histogramTotal.hashCode(), histogram1.hashCode());
    }
/**
* Write the histograms to to a given {@link DataOutput} using {@link
* Histogram#write(DataOutput)}. The {@link Layout} information will not be written. Therefore, it
* is necessary to provide the layout when reading using {@link Histogram#readAsDynamic(Layout,
* DataInput)} or {@link Histogram#readAsStatic(Layout, DataInput)}.
*/
@Test
public void serializeAndDeserializeHistogram() {
Layout layout = LogQuadraticLayout.create(1e-5, 1e-2, -1e6, 1e6);
Histogram histogram = Histogram.createDynamic(layout);
histogram.addValue(-5.5);
// serialization
byte[] serializedHistogram = null;
try (ByteArrayOutputStream byteArrayOutputStream = new ByteArrayOutputStream();
DataOutputStream dataOutputStream = new DataOutputStream(byteArrayOutputStream)) {
histogram.write(dataOutputStream);
serializedHistogram = byteArrayOutputStream.toByteArray();
} catch (IOException e) {
e.printStackTrace();
}
// deserialization
Histogram deserializedHistogram = null;
try (ByteArrayInputStream byteArrayInputStream = new ByteArrayInputStream(serializedHistogram);
DataInputStream dataInputStream = new DataInputStream(byteArrayInputStream)) {
deserializedHistogram = Histogram.readAsDynamic(layout, dataInputStream);
} catch (IOException e) {
e.printStackTrace();
}
assertEquals(histogram, deserializedHistogram);
assertEquals(histogram.hashCode(), deserializedHistogram.hashCode());
}
}
|
# encoding: utf-8
require 'spec_helper'
describe MiyauchiScheduler do
it 'has a version number' do
expect(MiyauchiScheduler::VERSION).not_to be nil
end
it 'does generate a calendar' do
expect(subject.generate_calendar.class).to eq(MiyauchiCalendar)
end
it 'each days should have two workers by default' do
cal = subject.generate_calendar
cal.days.each do |day, workers|
expect(workers.size).to eq(2)
end
end
it 'should be able to set a list of workers with different maximum working days' do
subject.add_worker "Hito 1", 22
subject.add_worker "Hito 2", 28
subject.add_worker "Hito 3", 16
subject.add_worker "Hito 4", 13
expect(subject.workers).to eq(["Hito 1", "Hito 2", "Hito 3", "Hito 4"])
expect(subject.max_days_for("Hito 1")).to eq 22
expect(subject.max_days_for("Hito 2")).to eq 28
expect(subject.max_days_for("Hito 3")).to eq 16
expect(subject.max_days_for("Hito 4")).to eq 13
@work_cal = subject.generate_calendar
expect(subject.workers).to eq(["Hito 1", "Hito 2", "Hito 3", "Hito 4"])
end
context 'when setting workers with different amount of working days' do
100.times do
before do
subject.add_worker "Hito 1", 20
subject.add_worker "Hito 2", 21
subject.add_worker "Hito 3", 15
subject.add_worker "Hito 4", 15
@work_cal = subject.generate_calendar
end
it 'should not schedule them for more than expected (100 times)' do
expect(@work_cal.days_for("Hito 1").size).to be <= 20
expect(@work_cal.days_for("Hito 2").size).to be <= 21
expect(@work_cal.days_for("Hito 3").size).to be <= 15
expect(@work_cal.days_for("Hito 4").size).to be <= 15
subject.print if ENV['DEBUG']
end
it 'each days should contains valid workers' do
# trap no nils
subject.print if ENV['DEBUG']
subject.working_schedule.days.each do |d, worker_names|
#debugger unless (worker_names - subject.workers).empty?
expect(worker_names - subject.workers).to be_empty
end
end
end
end
it 'should be able to return a list of workers' do
expect(subject.workers).to eq(["worker 1", "worker 2", "worker 3", "worker 4"])
end
it 'each day should have two different workers' do
cal = subject.generate_calendar
cal.days.each do |day, workers|
expect(workers.sort.uniq.size).to eq(2)
end
end
100.times do
it 'each worker should have at least 8 days off (by default) (100 times)' do
work_cal = subject.generate_calendar
off_cal = subject.days_off
subject.workers.each do |worker|
expect(off_cal.days_for(worker).size).to be >= 8
end
end
end
it 'each worker should not work twice on the same day' do
work_cal = subject.generate_calendar
subject.workers do |worker|
days = work_cal.days_for(worker)
expect(days.size).to eq(days.sort.uniq.size)
end
end
1.times do
it 'each worker should not work more than 5 days in a row (100 times)' do
# ok these tests should be somewhere else.
expect(Math.sum_up_to(5)).to eq(15)
expect(Math.sum_up_to(4)).to eq(10)
expect(Math.sum_up_to(3)).to eq(6)
max_days = 5
cal = subject.generate_calendar
subject.workers.each do |worker|
days = cal.days_for(worker)
(days.size - max_days).times do |i|
# x0 .. x4 + 1,2,3,4 = sum of continuous numbers.
continuous_sum = days[i] * max_days + Math.sum_up_to(max_days-1)
actual_sum = days[i..(i+max_days)].inject(0) { |t, x| t += x }
# if they are sorted and there is no duplicates, it can't possibly be the same for a non-continuous series.
expect(actual_sum).not_to eq continuous_sum
end
end
end
end
100.times do
it 'each worker should not work more than (31 - 8) days (100 times)' do
work_cal = subject.generate_calendar
subject.workers.each do |worker|
days = work_cal.days_for(worker)
expect(days.size).to be <= (31-8)
end
end
end
it 'should be able to print the schedule' do
work_cal = subject.generate_calendar
expect{subject.print}.to output.to_stdout
end
it 'should support japanese' do
o = MiyauchiScheduler.new
o.set_workers ["宮内一美", "宮内琉嘉", "宮内美保子", "Mathieu Jobin"]
o.generate_calendar
o.print if ENV['DEBUG']
end
it 'should be able to find the first day of the month properly' do
o = MiyauchiScheduler.new(current_year: 1969, current_month: 1)
expect(o.send(:first_day_of_month)).to eq 3
o = MiyauchiScheduler.new(current_year: 1969, current_month: 2)
expect(o.send(:first_day_of_month)).to eq 6
o = MiyauchiScheduler.new(current_year: 1969, current_month: 3)
expect(o.send(:first_day_of_month)).to eq 6
o = MiyauchiScheduler.new(current_year: 1969, current_month: 4)
expect(o.send(:first_day_of_month)).to eq 2
o = MiyauchiScheduler.new(current_year: 1969, current_month: 5)
expect(o.send(:first_day_of_month)).to eq 4
o = MiyauchiScheduler.new(current_year: 1969, current_month: 6)
expect(o.send(:first_day_of_month)).to eq 0
o = MiyauchiScheduler.new(current_year: 1969, current_month: 7)
expect(o.send(:first_day_of_month)).to eq 2
end
it 'should be able to find how many days there is properly' do
o = MiyauchiScheduler.new(current_year: 1969, current_month: 1)
expect(o.send(:days)).to eq 31
o = MiyauchiScheduler.new(current_year: 1969, current_month: 3)
expect(o.send(:days)).to eq 31
o = MiyauchiScheduler.new(current_year: 1969, current_month: 4)
expect(o.send(:days)).to eq 30
o = MiyauchiScheduler.new(current_year: 1969, current_month: 5)
expect(o.send(:days)).to eq 31
o = MiyauchiScheduler.new(current_year: 1969, current_month: 6)
expect(o.send(:days)).to eq 30
o = MiyauchiScheduler.new(current_year: 1969, current_month: 7)
expect(o.send(:days)).to eq 31
o = MiyauchiScheduler.new(current_year: 1969, current_month: 8)
expect(o.send(:days)).to eq 31
end
it 'should be able to render it for any month' do
o = MiyauchiScheduler.new(current_year: 1969, current_month: 7)
o.set_workers ["宮内一美", "宮内琉嘉", "宮内美保子", "Mathieu Jobin"]
o.generate_calendar
expect(o.send(:days)).to eq 31
expect(o.send(:first_day_of_month)).to eq 2
o.print if ENV['DEBUG']
end
end
|
# Elasticsearch
## Backend
Using the `Elasticsearch` backend class, you can query any metrics available in
Elasticsearch to create an SLO.
The following methods are available to compute SLOs with the `Elasticsearch`
backend:
* `good_bad_ratio` for computing good / bad metrics ratios.
### Good / bad ratio
The `good_bad_ratio` method is used to compute the ratio between two metrics:
- **Good events**, i.e events we consider as 'good' from the user perspective.
- **Bad or valid events**, i.e events we consider either as 'bad' from the user
perspective, or all events we consider as 'valid' for the computation of the
SLO.
This method is often used for availability SLOs, but can be used for other
purposes as well (see examples).
**Config example:**
```yaml
backend:
class: Elasticsearch
url: http://localhost:9200
method: good_bad_ratio
measurement:
index: test_data
date_field: last_updated
query_good: {}
query_bad:
must:
term:
name: JAgOZE8
```
Optional fields:
* `date_field`: Alternative field to filter time on. Has to be an ELK `date`
field. Defaults to `@timestamp` which is the Logstash-generated one.
**→ [Full SLO config](../samples/elasticsearch/slo_elk_test_ratio.yaml)**
You can also use the `query_bad` field which identifies bad events instead of
the `query_valid` field which identifies all valid events.
The Lucene query entered in either the `query_good`, `query_bad` or
`query_valid` fields will be combined (using the `bool` operator) into a larger
query that filters results on the `window` specified in your Error Budget Policy
steps.
You can specify a different field to filter error budget policy windows on,
using the `date_field` field.
The full `ElasticSearch` query body for the `query_bad` above will therefore
look like:
```json
{
"query": {
"bool": {
"must": {
"term": {
"name": "JAgOZE8"
}
},
"filter": {
"range": {
"@timestamp": {
"gte": "now-<window>s/s",
"lt": "now/s"
}
}
}
}
},
"track_total_hits": true
}
```
### Examples
Complete SLO samples using the `Elasticsearch` backend are available in
[samples/elasticsearch](../samples/elasticsearch). Check them out !
|
package com.manday.management.data.dao
import androidx.lifecycle.LiveData
import androidx.room.Dao
import androidx.room.Insert
import androidx.room.OnConflictStrategy
import androidx.room.Query
import com.manday.management.data.entities.TaskEntity
/** Room DAO for [TaskEntity] rows stored in the `tasks` table. */
@Dao
interface TaskDao {

    /** Bulk insert; REPLACE overwrites rows that collide on the primary key. */
    @Insert(onConflict = OnConflictStrategy.REPLACE)
    fun insert(tasks: List<TaskEntity>)

    /** Observable list of every task in the table. */
    @Query("SELECT * FROM tasks ")
    fun getAllTasks(): LiveData<List<TaskEntity>>

    /** Insert a single task; returns the new rowId. */
    @Insert(onConflict = OnConflictStrategy.REPLACE)
    fun addTask(task: TaskEntity): Long?

    /*
    @Query("SELECT * FROM employees WHERE name = :mName AND pass = :mPass")
    fun getEmployee(mName: String, mPass: String): LiveData<Employee>
    @Query("UPDATE employees SET listTask=:listNewTask WHERE id = :idEmployee")
    fun assignTaskToEmploye(idEmployee: Int, listNewTask: List<Int>)
    @Update
    suspend fun updateUsers(employee: Employee)
    @Delete
    suspend fun deleteUsers(employee: Employee)
    */
}
import React from "react";
import Nav from "../Navigation";
function Header(props) {
// const [categories] = useState([
// { name: "About me" },
// { name: "Porfolio" },
// {
// name: "Contact",
// },
// {
// name: "Resume",
// },
// ]);
// const [currentCategory, setCurrentCategory] = useState(categories[0]);
return (
<header>
<meta name="viewport" content="width=device-width, initial-scale=1, shrink-to-fit=no"></meta>
<h2>
<a href="/portfolio/" style={{ fontSize: "50px", margin: "20px" }}>
Max Aeon Parks
</a>
</h2>
<Nav categories= {props.categories}
page={props.page}
setPage={props.setPage}/>
</header>
);
}
export default Header;
|
using System.Collections;
using System.Collections.Generic;
using UnityEngine;
// FlockChild variant whose waypoint and spawn position are supplied
// externally instead of being chosen by the flock logic.
public class FlockChildForcedWaypoint : FlockChild
{
    // Waypoint to steer toward; expected to be assigned before use.
    public Vector3 WayPoint { get; set; }
    // Position this child is moved to during Start().
    public Vector3 SpawnPoint { get; set; }

    // Override: return the externally supplied waypoint instead of picking one.
    override public Vector3 findWaypoint()
    {
        return WayPoint; // just use the one set earlier
    }

    override public void Start()
    {
        base.Start();
        // Place the child at its forced spawn point after base initialization.
        _thisT.position = SpawnPoint;
    }
}
|
#ifndef ALIA_UI_BACKENDS_WX_HPP
#define ALIA_UI_BACKENDS_WX_HPP
#include <alia/ui/api.hpp>
#include <alia/ui/backends/interface.hpp>
#include <wx/wx.h>
#include <wx/glcanvas.h>
namespace alia {
struct style_tree;
// wx_opengl_window is a wxGLCanvas with an associated alia UI.
// It takes care of dispatching events received by the canvas to the UI.
struct wx_opengl_window : public wxGLCanvas
{
 public:
    // Construct the canvas; 'controller' drives the alia UI and
    // 'alia_style' supplies its style tree. Remaining parameters mirror
    // the wxGLCanvas constructor.
    wx_opengl_window(
        alia__shared_ptr<ui_controller> const& controller,
        alia__shared_ptr<style_tree> const& alia_style,
        wxWindow* parent,
        wxWindowID id = wxID_ANY,
        int const* attrib_list = 0,
        wxPoint const& pos = wxDefaultPosition,
        wxSize const& size = wxDefaultSize,
        long style = 0,
        wxString const& name = "alia_wx_gl_window",
        wxPalette const& palette = wxNullPalette);

    ~wx_opengl_window();

    // Access the alia UI system associated with this canvas.
    alia::ui_system& ui();

    void update();

    // wxWidgets event handlers; each forwards the event to the alia UI.
    void on_paint(wxPaintEvent& event);
    void on_erase_background(wxEraseEvent& event);
    void on_size(wxSizeEvent& event);
    void on_mouse(wxMouseEvent& event);
    void on_set_focus(wxFocusEvent& event);
    void on_kill_focus(wxFocusEvent& event);
    void on_idle(wxIdleEvent& event);
    void on_key_down(wxKeyEvent& event);
    void on_key_up(wxKeyEvent& event);
    void on_char(wxKeyEvent& event);
    void on_menu(wxCommandEvent& event);
    void on_sys_color_change(wxSysColourChangedEvent& event);

    struct impl_data;
 private:
    // Pimpl: implementation details live in the .cpp file.
    impl_data* impl_;
    DECLARE_EVENT_TABLE()
};
// wx_frame is a variant of wxFrame that implements the app_window interface.
struct wx_frame : public wxFrame, app_window
{
    // Construct the frame; 'controller' supplies the application window
    // behavior. Remaining parameters mirror the wxFrame constructor.
    wx_frame(
        alia__shared_ptr<app_window_controller> const& controller,
        wxWindow* parent,
        wxWindowID id,
        wxString const& title,
        wxPoint const& pos = wxDefaultPosition,
        wxSize const& size = wxDefaultSize,
        long style = wxDEFAULT_FRAME_STYLE,
        wxString const& name = "alia_wx_frame");

    ~wx_frame();

    // app_window interface.
    app_window_state state() const;
    bool is_full_screen() const;
    void set_full_screen(bool fs);
    void close();

    // wxWidgets event handlers.
    void on_menu(wxCommandEvent& event);
    void on_move(wxMoveEvent& event);
    void on_size(wxSizeEvent& event);
    void on_close(wxCloseEvent& event);

    // Rebuild the frame's wx menu bar from an alia menu description.
    void update_menu_bar(wxWindow* controller, menu_container const& menu_bar);

    struct impl_data;
 private:
    // Pimpl: implementation details live in the .cpp file.
    impl_data* impl_;
    DECLARE_EVENT_TABLE()
};
// Create a wx_frame with a wx_opengl_window inside, filling the frame.
// If you don't need the flexibility to create wxWidgets UI elements yourself,
// then this is the simplest way to get alia working with wxWidgets.
// Declared here; see the implementation for details. 'initial_state'
// presumably seeds the window geometry/state and 'gl_canvas_attribs' is
// forwarded to the embedded wxGLCanvas — confirm against the definition.
wx_frame*
create_wx_framed_window(
    string const& title,
    alia__shared_ptr<app_window_controller> const& controller,
    alia__shared_ptr<style_tree> const& style,
    app_window_state const& initial_state,
    int const* gl_canvas_attribs);
}
#endif
|
Live site: https://paul-kh.github.io/nature-tour-package_css-sass/
A website template about nature tour packages.
# Technologies Used:
- HTML5
- CSS3
- SASS/SCSS
- npm (node-sass)
|
#!/bin/bash
# Export configuration for a Solr performance run.
# Each variable honours a PERF_* environment override and otherwise
# falls back to the default after ':-'.
export DURATION=${PERF_DURATION:-150}
export TARGET_URL=${PERF_TARGET_URL:-/solr/collection1}
export COLLECTION=${PERF_COLLECTION:-collection1}
export SERVER=${PERF_SERVER:-localhost}
export PORT=${PERF_PORT:-9983}
|
import * as childProcess from 'child_process';
import * as https from 'https';
import { EOL } from 'os';
import * as Generator from 'yeoman-generator';
/**
 * Returns a proxy backed by process.env.GENERATOR_STATE, a JSON-encoded
 * bag of key/value state that survives across generator invocations in
 * the same process.
 *
 * NOTE(review): reads and writes are asymmetric for objects — `set` stores
 * JSON.stringify(value) as a *string*, but `get` never parses it back, so
 * an object-valued key reads back as its JSON text. Confirm callers rely
 * on this before changing it.
 */
export function createState() {
  return new Proxy<any>(
    {},
    {
      get(_, key) {
        try {
          // Re-parse the env var on every access so writes made through
          // other proxy instances are observed.
          const str = process.env.GENERATOR_STATE || '{}';
          return JSON.parse(str)[key];
        } catch {
          // Malformed state is treated as "no value".
          return undefined;
        }
      },
      set(_, key, value) {
        let serializedValue: any;
        switch (typeof value) {
          case 'boolean':
          case 'number':
          case 'string':
          case 'undefined':
            // JSON-representable primitives are stored as-is.
            serializedValue = value;
            break;
          case 'object':
            // Objects (including null) are stored as their JSON text.
            serializedValue = JSON.stringify(value);
            break;
          default:
            // functions, symbols, bigints cannot be persisted.
            throw new Error(`Cannot serialize type ${typeof value}`);
        }
        const existing = JSON.parse(process.env.GENERATOR_STATE || '{}');
        process.env.GENERATOR_STATE = JSON.stringify({
          ...existing,
          [key]: serializedValue,
        });
        // Proxy 'set' trap must return true to signal success.
        return true;
      },
    },
  );
}
/** Fetch `url` over HTTPS and parse the response body as JSON. */
export async function getJson<T = any>(url: string): Promise<T> {
  const body = await getText(url);
  return JSON.parse(body);
}
/**
 * Fetch `url` over HTTPS and resolve with the full response body as text.
 * Rejects on request errors; the HTTP status code is not inspected.
 */
export function getText(url: string): Promise<string> {
  return new Promise((resolve, reject) => {
    const request = https.get(url, (res) => {
      const chunks: string[] = [];
      res.on('data', (chunk) => chunks.push(chunk.toString()));
      res.on('end', () => resolve(chunks.join('')));
    });
    request.on('error', (err) => reject(err));
  });
}
/**
 * Promisified child_process.exec that resolves with the accumulated
 * stdout/stderr once the process terminates.
 *
 * NOTE(review): the `err` argument of the exec callback is ignored and
 * every terminal event resolves — a non-zero exit status never rejects,
 * so callers must inspect stderr themselves. Confirm this best-effort
 * behavior is intended.
 */
export async function exec(
  command: string,
  options?: childProcess.ExecOptions,
): Promise<{ stdout: string; stderr: string }> {
  return new Promise((resolve, reject) => {
    const result = {
      stdout: '',
      stderr: '',
    };
    const proc = childProcess.exec(
      command,
      options || {},
      (err, stdout, stderr) => {
        // Capture output regardless of exit status.
        result.stderr += stderr;
        result.stdout += stdout;
      },
    );
    // Resolve on whichever termination event fires first; subsequent
    // events are no-ops on an already-settled promise.
    proc.on('close', () => resolve(result));
    proc.on('disconnect', () => resolve(result));
    proc.on('exit', () => resolve(result));
  });
}
/**
 * Append `contents` to `filepath` in the mem-fs editor, joining the
 * existing content and the new content with `separator` (default: EOL).
 * Empty segments are dropped so no stray separators are written.
 */
export function append(
  fs: Generator.MemFsEditor,
  filepath: string,
  contents: string,
  separator: string = EOL,
): void {
  let existing = '';
  if (fs.exists(filepath)) {
    existing = fs.read(filepath);
  }
  const parts = [existing, contents].filter(Boolean);
  fs.write(filepath, parts.join(separator));
}
/**
 * Return the (deduplicated) dependencies that are NOT already listed in
 * pkg.dependencies. With no pkg.dependencies, the input list is returned
 * unchanged.
 */
export function filterDev(pkg: any, dependencies: string[]): string[] {
  const existing = pkg?.dependencies;
  if (!existing) return dependencies;
  const remaining = new Set(dependencies);
  Object.keys(existing).forEach((name) => remaining.delete(name));
  return [...remaining];
}
/**
 * Append each entry from `content` to the ignore file at `filepath`,
 * skipping entries already present. Comments ('#'...) and blank lines in
 * `content` are ignored; a trailing newline is kept on the written file.
 */
export function ignore(
  fs: Generator.MemFsEditor,
  filepath: string,
  content: string,
): void {
  const existing = fs.exists(filepath) ? fs.read(filepath) : '';
  const entries = new Set<string>();
  for (const rawLine of (content || '').split(EOL)) {
    const entry = rawLine.split('#')[0].trim();
    if (entry && !ignores(existing, entry)) {
      entries.add(entry);
    }
  }
  // Trailing empty entry yields a terminating separator on write.
  entries.add('');
  append(fs, filepath, Array.from(entries).join(EOL));
}
/**
 * Whether `item` already appears as an entry in the given ignore-file
 * contents (comments after '#' and surrounding whitespace are ignored).
 */
export function ignores(ignoreFile: string, item: string): boolean {
  for (const rawLine of ignoreFile.split(EOL)) {
    const entry = rawLine.split('#')[0].trim();
    if (entry === item) return true;
  }
  return false;
}
/**
 * Merge two string arrays: keep `original` order, then append the values
 * from `incomming` not already present (deduplicated, first occurrence
 * wins). Returns the non-empty side unchanged when the other is empty.
 */
export function mergeArray(
  original: string[] | undefined,
  incomming: string[] | undefined,
): string[] {
  if (!original?.length) return incomming || [];
  if (!incomming?.length) return original || [];
  const extras = new Set(incomming);
  for (const value of original) {
    extras.delete(value);
  }
  if (extras.size === 0) return original;
  return [...original, ...extras];
}
|
package io.netty.learn.netty.demo;
import io.netty.bootstrap.Bootstrap;
import io.netty.channel.ChannelFuture;
import io.netty.channel.ChannelInitializer;
import io.netty.channel.ChannelOption;
import io.netty.channel.EventLoopGroup;
import io.netty.channel.nio.NioEventLoopGroup;
import io.netty.channel.socket.nio.NioSocketChannel;
import io.netty.handler.codec.string.StringDecoder;
import io.netty.handler.codec.string.StringEncoder;
import io.netty.learn.netty.demo.handler.ClientHandler;
import io.netty.learn.netty.demo.handler.ClientHandlerWithException;
/**
* create by Jazzylol at 2017/9/10
* <p>
* Description:
*/
public class SimpleNettyClientWithException {

    public static void main(String[] args) throws InterruptedException {
        init();
    }

    /**
     * Bootstraps a NIO client, connects to localhost:8080, writes one string
     * through a String encoder/decoder pipeline, then shuts the group down.
     *
     * NOTE(review): shutdownGracefully() runs in the finally block right
     * after writeAndFlush() without awaiting the write or closeFuture()
     * (see the commented-out line below) — the message may race the
     * shutdown; presumably acceptable for this demo, confirm otherwise.
     */
    private static void init() throws InterruptedException {
        Bootstrap bootstrap = new Bootstrap();
        EventLoopGroup group = new NioEventLoopGroup();
        try {
            bootstrap.channel(NioSocketChannel.class);
            bootstrap.option(ChannelOption.SO_BACKLOG, 128);
            bootstrap.group(group);
            bootstrap.handler(new ChannelInitializer<NioSocketChannel>() {
                protected void initChannel(NioSocketChannel ch) throws Exception {
                    // Inbound: bytes -> String -> ClientHandlerWithException.
                    ch.pipeline().addLast(new StringDecoder());
                    ch.pipeline().addLast(new ClientHandlerWithException());
                    // Outbound: String -> bytes.
                    ch.pipeline().addLast(new StringEncoder());
                }
            });
            ChannelFuture future = bootstrap.connect("localhost", 8080).sync();
            String person = "张三";
            future.channel().writeAndFlush(person);
//            ByteBuf buf = PooledByteBufAllocator.DEFAULT.buffer();
//            buf.writeBytes(person.getBytes(Charset.defaultCharset()));
//            future.channel().writeAndFlush(buf);
//            future.channel().closeFuture();
        } catch (InterruptedException e) {
            // Redundant with the 'throws' clause, kept as-is; sync() above
            // is the only call that can raise it here.
            e.printStackTrace();
        } finally {
            group.shutdownGracefully();
        }
    }
}
|
#include <stdio.h>
#include <QHBoxLayout>
#include <QLineEdit>
#include <QVBoxLayout>
#include <igvc_rviz_plugins_old/time_panel.h>
namespace rviz_plugins
{
// Topic callback; the message payload itself is unused — each message just
// triggers a refresh of the uptime label.
void TimePanel::timeCallback(const std_msgs::UInt8& msg)
{
  char buf[80];
  struct tm tstruct;
  // Seconds elapsed since panel construction, reinterpreted as a time_t so
  // strftime can format it; the MM:SS output is only meaningful for spans
  // under one hour.
  time_t diff = (time(0) - start);
  tstruct = *localtime(&diff);
  strftime(buf, sizeof(buf), "%M:%S", &tstruct);
  output_topic_editor_->setText(buf);
}
// Builds the panel UI (an "Uptime:" caption plus a value label), records the
// construction time, and subscribes to /battery so incoming messages drive
// the uptime display via timeCallback.
TimePanel::TimePanel(QWidget* parent) : rviz::Panel(parent)
{
  start = time(0);
  QHBoxLayout* topic_layout = new QHBoxLayout;
  topic_layout->addWidget(new QLabel("Uptime:"));
  output_topic_editor_ = new QLabel("TEST");
  topic_layout->addWidget(output_topic_editor_);
  QVBoxLayout* layout = new QVBoxLayout;
  layout->addLayout(topic_layout);
  setLayout(layout);
  // Shown until the first /battery message arrives.
  output_topic_editor_->setText("No Signal");
  sub = nh_.subscribe("/battery", 1, &TimePanel::timeCallback, this);
  // connect( this, SIGNAL( changeText() ), output_topic_editor_, SLOT( setTextLabel() ));
}
} // namespace rviz_plugins
#include <pluginlib/class_list_macros.h>
PLUGINLIB_EXPORT_CLASS(rviz_plugins::TimePanel, rviz::Panel)
|
package co.wangming.dragonfly.agent.plugin.jdbc.mysql.v8;
import co.wangming.dragonfly.agent.transform.transformer.Transform;
import net.bytebuddy.description.method.MethodDescription;
import net.bytebuddy.description.type.TypeDescription;
import net.bytebuddy.matcher.ElementMatcher;
import static net.bytebuddy.matcher.ElementMatchers.named;
/**
 * Byte Buddy transformer that instruments the MySQL Connector/J v8
 * CallableStatement's execute/executeQuery/executeUpdate methods for tracing.
 */
@Transform
public class MysqlCallableStatementTrace extends MysqlV8TraceTransformer {

    /** Match only the driver's CallableStatement class (package from the base class). */
    @Override
    public ElementMatcher.Junction<TypeDescription> typeConstraints() {
        return named(packageName() + "CallableStatement");
    }

    /** Match the three statement-execution entry points. */
    @Override
    public ElementMatcher.Junction<MethodDescription> methodConstraints() {
        return named("execute")
                .or(named("executeQuery"))
                .or(named("executeUpdate"));
    }
}
|
#ifndef BBMACRO_VECTOR_H_
#define BBMACRO_VECTOR_H_
#ifndef NOINCLUDE
#define NOINCLUDE
#include <string.h>
#include <bbmacro/static.h>
#undef NOINCLUDE
#endif
/*
* Vector data structure.
* 1. A type-creating macro has a prefix:
* `B` (Backward) -- push elements into the back end;
* `F` (Forward) -- push elements into the front end;
* `Q` (Queue) -- push elements into the both ends.
* 2. Core procedures provide manual memory management only.
* 3. Structure members are a read-only part of the interface.
* You are to access the members, but do not modify them directly.
* 4. Note the vector itself is always the last argument of a procedure.
*/
/*
* How to call the macro?
* 1. Usually you need a specific type for just one source file:
* BVECTOR_CORE(myvector, int);
*
* 2. Often you need to separate interface and implementation:
* ... in a header file ...
* BVECTOR_CORE_INTERFACE(myvector, int, extern);
* ... in a source file ...
* BVECTOR_CORE_IMPLEMENTATION(myvector, int);
*
* 3. Sometimes you need recursive data structures:
* ... in a header file ...
* struct item;
* BVECTOR_STRUCT(myvector, struct item);
* struct item { struct myvector v; ... };
* BVECTOR_CORE_DECLARATIONS(myvector, struct item, extern);
* ... in a source file ...
* BVECTOR_CORE_IMPLEMENTATION(myvector, struct item);
*
* 4. For BVECTOR (with automatic memory management) everything is the same,
* except for the missing `_CORE` suffix and for the additional arguments
* necessary to implement memory management. You can find them in the file
* `memory.h`.
*/
/*
* How to use the vector?
* Consider we have used `BVECTOR_CORE_INTERFACE(myvector, int, extern);`.
* The macro expansion will be the following:
* struct myvector {
* size_t len, cap;
* int *at;
* };
* extern void myvector_init(int *array, size_t cap, struct myvector *bv);
* extern int *myvector_fini(struct myvector *bv);
* ...
* The detailed documentation is in progress.
*/
/****************************************
* Backward Vector interface.
****************************************/
/* Backward Vector structure. */
/* Expands to `struct BV`: data occupies at[0..len) and grows toward at[cap). */
#define BVECTOR_STRUCT(BV, ELEMENT) \
\
struct BV { \
	size_t len, cap;	/* Length and capacity. */ \
	ELEMENT *at;		/* Beginning of memory and data. */ \
}

/* Backward Vector core procedures. */
/* Declares the manual-memory API; PREFIX is the linkage (extern / static). */
#define BVECTOR_CORE_DECLARATIONS(BV, ELEMENT, PREFIX) \
\
PREFIX void BV##_init(ELEMENT *array, size_t cap, struct BV *bv); \
PREFIX ELEMENT *BV##_fini(struct BV *bv); \
PREFIX void BV##_clear(struct BV *bv); \
PREFIX int BV##_full(struct BV *bv); \
PREFIX ELEMENT *BV##_neg(size_t num, struct BV *bv); \
\
PREFIX ELEMENT *BV##_reserveback(ELEMENT *array, size_t cap, struct BV *bv); \
PREFIX void BV##_pushback(ELEMENT value, struct BV *bv); \
PREFIX ELEMENT *BV##_growback(size_t num, struct BV *bv); \
PREFIX void BV##_popback(struct BV *bv); \
PREFIX void BV##_cutback(size_t num, struct BV *bv); \
PREFIX void BV##_resizeback(size_t len, struct BV *bv); \
PREFIX ELEMENT *BV##_back(struct BV *bv); \
bbstatic_semicolon

/* Backward Vector automatic memory management appendix. */
/* Declares the `_a`-prefixed variants that allocate/free internally. */
#define BVECTOR_AUTO_DECLARATIONS(BV, ELEMENT, PREFIX) \
\
PREFIX void BV##_ainit(size_t cap, struct BV *bv); \
PREFIX void BV##_afini(struct BV *bv); \
PREFIX void BV##_aclear(struct BV *bv); \
\
PREFIX void BV##_areserveback(size_t cap, struct BV *bv); \
PREFIX void BV##_apushback(ELEMENT value, struct BV *bv); \
PREFIX ELEMENT *BV##_agrowback(size_t num, struct BV *bv); \
PREFIX void BV##_aresizeback(size_t len, struct BV *bv); \
bbstatic_semicolon

/* Backward Vector automatic memory management procedures. */
#define BVECTOR_DECLARATIONS(BV, ELEMENT, PREFIX) \
BVECTOR_CORE_DECLARATIONS(BV, ELEMENT, PREFIX); \
BVECTOR_AUTO_DECLARATIONS(BV, ELEMENT, PREFIX)

/* Backward Vector core interface. */
#define BVECTOR_CORE_INTERFACE(BV, ELEMENT, PREFIX) \
BVECTOR_STRUCT(BV, ELEMENT); \
BVECTOR_CORE_DECLARATIONS(BV, ELEMENT, PREFIX)

/* Backward Vector automatic memory management interface. */
#define BVECTOR_INTERFACE(BV, ELEMENT, PREFIX) \
BVECTOR_STRUCT(BV, ELEMENT); \
BVECTOR_DECLARATIONS(BV, ELEMENT, PREFIX)
/****************************************
* Forward Vector interface.
****************************************/
/* Forward Vector structure. */
/* Expands to `struct FV`: `neg` points one past the storage end; data
 * occupies neg[-len..0) and grows toward neg[-cap). */
#define FVECTOR_STRUCT(FV, ELEMENT) \
\
struct FV { \
	size_t len, cap;	/* Length and capacity. */ \
	ELEMENT *neg;		/* End of memory and data. */ \
}

/* Forward Vector core procedures. */
/* Declares the manual-memory API; PREFIX is the linkage (extern / static). */
#define FVECTOR_CORE_DECLARATIONS(FV, ELEMENT, PREFIX) \
\
PREFIX void FV##_init(ELEMENT *array, size_t cap, struct FV *fv); \
PREFIX ELEMENT *FV##_fini(struct FV *fv); \
PREFIX void FV##_clear(struct FV *fv); \
PREFIX int FV##_full(struct FV *fv); \
PREFIX ELEMENT *FV##_at(size_t num, struct FV *fv); \
\
PREFIX ELEMENT *FV##_reservefront(ELEMENT *array, size_t cap, struct FV *fv); \
PREFIX void FV##_pushfront(ELEMENT value, struct FV *fv); \
PREFIX ELEMENT *FV##_growfront(size_t num, struct FV *fv); \
PREFIX void FV##_popfront(struct FV *fv); \
PREFIX void FV##_cutfront(size_t num, struct FV *fv); \
PREFIX void FV##_resizefront(size_t len, struct FV *fv); \
PREFIX ELEMENT *FV##_front(struct FV *fv); \
bbstatic_semicolon

/* Forward Vector automatic memory management appendix. */
/* Declares the `_a`-prefixed variants that allocate/free internally. */
#define FVECTOR_AUTO_DECLARATIONS(FV, ELEMENT, PREFIX) \
\
PREFIX void FV##_ainit(size_t cap, struct FV *fv); \
PREFIX void FV##_afini(struct FV *fv); \
PREFIX void FV##_aclear(struct FV *fv); \
\
PREFIX void FV##_areservefront(size_t cap, struct FV *fv); \
PREFIX void FV##_apushfront(ELEMENT value, struct FV *fv); \
PREFIX ELEMENT *FV##_agrowfront(size_t num, struct FV *fv); \
PREFIX void FV##_aresizefront(size_t len, struct FV *fv); \
bbstatic_semicolon

/* Forward Vector automatic memory management procedures. */
#define FVECTOR_DECLARATIONS(FV, ELEMENT, PREFIX) \
FVECTOR_CORE_DECLARATIONS(FV, ELEMENT, PREFIX); \
FVECTOR_AUTO_DECLARATIONS(FV, ELEMENT, PREFIX)

/* Forward Vector core interface. */
#define FVECTOR_CORE_INTERFACE(FV, ELEMENT, PREFIX) \
FVECTOR_STRUCT(FV, ELEMENT); \
FVECTOR_CORE_DECLARATIONS(FV, ELEMENT, PREFIX)

/* Forward Vector automatic memory management interface. */
#define FVECTOR_INTERFACE(FV, ELEMENT, PREFIX) \
FVECTOR_STRUCT(FV, ELEMENT); \
FVECTOR_DECLARATIONS(FV, ELEMENT, PREFIX)
/****************************************
* Backward Vector implementation.
****************************************/
/* Backward Vector core procedures. */
/* Manual-memory implementation: no allocation happens here; callers own the
 * backing array. Bounds are never checked — capacity is the caller's duty. */
#define BVECTOR_CORE_IMPLEMENTATION(BV, ELEMENT) \
\
void BV##_init(ELEMENT *array, size_t cap, struct BV *bv) \
{	bv->at = array; \
	bv->len = 0; \
	bv->cap = cap; \
} \
ELEMENT *BV##_fini(struct BV *bv) \
{	return bv->at; \
} \
void BV##_clear(struct BV *bv) \
{	bv->len = 0; \
} \
int BV##_full(struct BV *bv) \
{	return bv->len == bv->cap; \
} \
ELEMENT *BV##_neg(size_t num, struct BV *bv) \
{	return &bv->at[bv->len - num]; \
} \
ELEMENT *BV##_reserveback(ELEMENT *array, size_t cap, struct BV *bv) \
{	ELEMENT *old = bv->at; \
	memcpy(array, old, sizeof(ELEMENT) * bv->len); \
	bv->at = array; \
	bv->cap = cap; \
	return old; \
} \
void BV##_pushback(ELEMENT value, struct BV *bv) \
{	bv->at[bv->len++] = value; \
} \
ELEMENT *BV##_growback(size_t num, struct BV *bv) \
{	ELEMENT *old = &bv->at[bv->len]; \
	bv->len += num; \
	return old; \
} \
void BV##_popback(struct BV *bv) \
{	bv->len--; \
} \
void BV##_cutback(size_t num, struct BV *bv) \
{	bv->len -= num; \
} \
void BV##_resizeback(size_t len, struct BV *bv) \
{	bv->len = len; \
} \
ELEMENT *BV##_back(struct BV *bv) \
{	return &bv->at[bv->len - 1]; \
} \
bbstatic_semicolon

/* Backward Vector automatic memory management appendix. */
/* ALLOC(count, size) / FREE(ptr) supply the allocator; NEXT_CAP(cap) gives
 * the growth policy. _aclear frees the storage (unlike _clear). */
#define BVECTOR_AUTO_IMPLEMENTATION(BV, ELEMENT, ALLOC, FREE, NEXT_CAP) \
\
void BV##_ainit(size_t cap, struct BV *bv) \
{	ELEMENT *ptr = ALLOC(cap, sizeof(ELEMENT)); \
	BV##_init(ptr, cap, bv); \
} \
void BV##_afini(struct BV *bv) \
{	FREE(BV##_fini(bv)); \
} \
void BV##_aclear(struct BV *bv) \
{	FREE(bv->at); \
	bv->at = NULL; \
	bv->len = 0; \
	bv->cap = 0; \
} \
void BV##_areserveback(size_t cap, struct BV *bv) \
{	ELEMENT *ptr = ALLOC(cap, sizeof(ELEMENT)); \
	FREE(BV##_reserveback(ptr, cap, bv)); \
} \
void BV##_apushback(ELEMENT value, struct BV *bv) \
{	if (BV##_full(bv)) \
		BV##_areserveback(NEXT_CAP(bv->cap), bv); \
	BV##_pushback(value, bv); \
} \
ELEMENT *BV##_agrowback(size_t num, struct BV *bv) \
{	size_t len = bv->len + num; \
	size_t cap = bv->cap; \
	if (len > cap) { \
		cap = NEXT_CAP(cap); \
		if (len > cap) \
			cap = len; \
		BV##_areserveback(cap, bv); \
	} \
	return BV##_growback(num, bv); \
} \
void BV##_aresizeback(size_t len, struct BV *bv) \
{	size_t cap = bv->cap; \
	if (len > cap) { \
		cap = NEXT_CAP(cap); \
		if (len > cap) \
			cap = len; \
		BV##_areserveback(cap, bv); \
	} \
	bv->len = len; \
} \
bbstatic_semicolon

/* Backward Vector automatic memory management procedures. */
#define BVECTOR_IMPLEMENTATION(BV, ELEMENT, ALLOC, FREE, NEXT_CAP) \
BVECTOR_CORE_IMPLEMENTATION(BV, ELEMENT); \
BVECTOR_AUTO_IMPLEMENTATION(BV, ELEMENT, ALLOC, FREE, NEXT_CAP)

/* Backward Vector core full. */
#define BVECTOR_CORE(BV, ELEMENT) \
BVECTOR_CORE_INTERFACE(BV, ELEMENT, static BBUNUSED); \
BVECTOR_CORE_IMPLEMENTATION(BV, ELEMENT)

/* Backward Vector automatic memory management full. */
#define BVECTOR(BV, ELEMENT, ALLOC, FREE, NEXT_CAP) \
BVECTOR_INTERFACE(BV, ELEMENT, static BBUNUSED); \
BVECTOR_IMPLEMENTATION(BV, ELEMENT, ALLOC, FREE, NEXT_CAP)
/****************************************
* Forward Vector implementation.
****************************************/
/* Forward Vector core procedures. */
#define FVECTOR_CORE_IMPLEMENTATION(FV, ELEMENT) \
\
void FV##_init(ELEMENT *array, size_t cap, struct FV *fv) \
{ fv->neg = array + cap; \
fv->len = 0; \
fv->cap = cap; \
} \
ELEMENT *FV##_fini(struct FV *fv) \
{ return fv->neg - fv->cap; \
} \
void FV##_clear(struct FV *fv) \
{ fv->len = 0; \
} \
int FV##_full(struct FV *fv) \
{ return fv->len == fv->cap; \
} \
ELEMENT *FV##_at(size_t num, struct FV *fv) \
{ return &(fv->neg - fv->len)[num]; \
} \
ELEMENT *FV##_reservefront(ELEMENT *array, size_t cap, struct FV *fv) \
{ ELEMENT *old = fv->neg - fv->cap; \
ELEMENT *end = array + cap; \
size_t len = fv->len; \
memcpy(end - len, fv->neg - len, sizeof(ELEMENT) * len); \
fv->neg = end; \
fv->cap = cap; \
return old; \
} \
void FV##_pushfront(ELEMENT value, struct FV *fv) \
{ *(fv->neg - (++fv->len)) = value; \
} \
ELEMENT *FV##_growfront(size_t num, struct FV *fv) \
{ fv->len += num; \
return fv->neg - fv->len; \
} \
void FV##_popfront(struct FV *fv) \
{ fv->len--; \
} \
void FV##_cutfront(size_t num, struct FV *fv) \
{ fv->len -= num; \
} \
void FV##_resizefront(size_t len, struct FV *fv) \
{ fv->len = len; \
} \
ELEMENT *FV##_front(struct FV *fv) \
{ return fv->neg - fv->len; \
} \
bbstatic_semicolon
/* Forward Vector automatic memory management appendix. */
#define FVECTOR_AUTO_IMPLEMENTATION(FV, ELEMENT, ALLOC, FREE, NEXT_CAP) \
\
void FV##_ainit(size_t cap, struct FV *fv) \
{ ELEMENT *ptr = ALLOC(cap, sizeof(ELEMENT)); \
FV##_init(ptr, cap, fv); \
} \
void FV##_afini(struct FV *fv) \
{ FREE(FV##_fini(fv)); \
} \
void FV##_aclear(struct FV *fv) \
{ FREE(fv->neg - fv->cap); \
fv->neg = NULL; \
fv->len = 0; \
fv->cap = 0; \
} \
void FV##_areservefront(size_t cap, struct FV *fv) \
{ ELEMENT *ptr = ALLOC(cap, sizeof(ELEMENT)); \
FREE(FV##_reservefront(ptr, cap, fv)); \
} \
void FV##_apushfront(ELEMENT value, struct FV *fv) \
{ if (FV##_full(fv)) \
FV##_areservefront(NEXT_CAP(fv->cap), fv); \
FV##_pushfront(value, fv); \
} \
ELEMENT *FV##_agrowfront(size_t num, struct FV *fv) \
{ size_t len = fv->len + num; \
size_t cap = fv->cap; \
if (len > cap) { \
cap = NEXT_CAP(cap); \
if (len > cap) \
cap = len; \
FV##_areservefront(cap, fv); \
} \
return FV##_growfront(num, fv); \
} \
void FV##_aresizefront(size_t len, struct FV *fv) \
{ size_t cap = fv->cap; \
if (len > cap) { \
cap = NEXT_CAP(cap); \
if (len > cap) \
cap = len; \
FV##_areservefront(cap, fv); \
} \
fv->len = len; \
} \
bbstatic_semicolon
/* Forward Vector: full implementation — core procedures plus the
 * automatic (ALLOC/FREE-backed) memory management appendix. */
#define FVECTOR_IMPLEMENTATION(FV, ELEMENT, ALLOC, FREE, NEXT_CAP) \
    FVECTOR_CORE_IMPLEMENTATION(FV, ELEMENT); \
    FVECTOR_AUTO_IMPLEMENTATION(FV, ELEMENT, ALLOC, FREE, NEXT_CAP)
/* Forward Vector: core interface + implementation, emitted static. */
#define FVECTOR_CORE(FV, ELEMENT) \
    FVECTOR_CORE_INTERFACE(FV, ELEMENT, static BBUNUSED); \
    FVECTOR_CORE_IMPLEMENTATION(FV, ELEMENT)
/* Forward Vector: full interface + implementation, emitted static. */
#define FVECTOR(FV, ELEMENT, ALLOC, FREE, NEXT_CAP) \
    FVECTOR_INTERFACE(FV, ELEMENT, static BBUNUSED); \
    FVECTOR_IMPLEMENTATION(FV, ELEMENT, ALLOC, FREE, NEXT_CAP)
/****************************************
 * Legacy.
 ****************************************/
/* Backward-compatible aliases mapping the historical BB-prefixed names
 * onto the current macro names (M* = manual/core storage, A* = automatic). */
#define BBDECLARE_MBVECTOR BVECTOR_CORE_INTERFACE
#define BBDECLARE_MFVECTOR FVECTOR_CORE_INTERFACE
#define BBDEFINE_MBVECTOR BVECTOR_CORE_IMPLEMENTATION
#define BBDEFINE_MFVECTOR FVECTOR_CORE_IMPLEMENTATION
#define BBDECLARE_ABVECTOR BVECTOR_AUTO_DECLARATIONS
#define BBDECLARE_AFVECTOR FVECTOR_AUTO_DECLARATIONS
#define BBDEFINE_ABVECTOR BVECTOR_AUTO_IMPLEMENTATION
#define BBDEFINE_AFVECTOR FVECTOR_AUTO_IMPLEMENTATION
#define BBDECLARE_BVECTOR BVECTOR_INTERFACE
#define BBDECLARE_FVECTOR FVECTOR_INTERFACE
#define BBDEFINE_BVECTOR BVECTOR_IMPLEMENTATION
#define BBDEFINE_FVECTOR FVECTOR_IMPLEMENTATION
#endif
|
# async-update-props
[](https://greenkeeper.io/)
[![NPM version][npm-image]][npm-url]
[![Build Status][travis-image]][travis-url]
[![Test coverage][coveralls-image]][coveralls-url]
[![Dependency Status][david_img]][david_site]
> A higher-order component (HoC) that updates a React component's props asynchronously
## Install
```
$ npm install async-update-props
```
## Usage
```js
import { compose } from 'redux';
import { connect } from 'react-redux';
import asyncUpdateProps from 'async-update-props';
import Page from '../components/Page';
import { fetchDataRequest } from '../actions';
const mapStateToProps = (state) => ({
...
});
const updater = (props) => {
props.fetchDataRequest();
};
const shouldUpdateWhenReceiveProps = () => false;
export default compose(
connect(mapStateToProps, {
fetchDataRequest,
}),
asyncUpdateProps(updater, shouldUpdateWhenReceiveProps)
)(Page);
```
If the updater returns a promise, `setState` will be called with the resolved value automatically:
```js
import asyncUpdateProps from 'async-update-props';
import Page from '../components/Page';
import * as apis from '../apis';
const updater = () => {
return apis.fetchDataRequest();
};
const shouldUpdateWhenReceiveProps = () => false;
export default asyncUpdateProps(updater, shouldUpdateWhenReceiveProps)(Page);
```
## API
### asyncUpdateProps(updater, shouldUpdateWhenReceiveProps)
#### updater
*Required*
Type: `func`
#### shouldUpdateWhenReceiveProps
*Optional*
Type: `func`
Default: `() => true`
## License
MIT © [Yoctol](https://github.com/Yoctol/async-update-props)
[npm-image]: https://badge.fury.io/js/async-update-props.svg
[npm-url]: https://npmjs.org/package/async-update-props
[travis-image]: https://travis-ci.org/Yoctol/async-update-props.svg
[travis-url]: https://travis-ci.org/Yoctol/async-update-props
[coveralls-image]: https://coveralls.io/repos/Yoctol/async-update-props/badge.svg?branch=master&service=github
[coveralls-url]: https://coveralls.io/r/Yoctol/async-update-props?branch=master
[david_img]: https://david-dm.org/Yoctol/async-update-props.svg
[david_site]: https://david-dm.org/Yoctol/async-update-props
|
#!/usr/bin/env bash
# Sync the repository README into the MkDocs tree, then serve the docs
# locally.  Abort immediately if the copy fails (e.g. docs/ is missing)
# instead of serving stale content.
set -euo pipefail

cat README.md > docs/index.md
mkdocs serve
|
using NLog;
using NLog.Config;
using NLog.Targets;
namespace NLogEvents {

    /// <summary>
    /// NLog target that forwards every written log entry to
    /// `NLogEvents.Events.OnLog`.
    /// </summary>
    [Target("OnLogEvent")]
    class OnLogEvent : TargetWithLayout {

        /// <summary>
        /// Value passed as the first argument of `NLogEvents.Events.OnLog`.
        /// The default is `null`.
        /// </summary>
        public string Event { get; set; }

        /// <summary>
        /// Fires the OnLog event for a single log entry.
        /// </summary>
        protected override void Write(LogEventInfo info) {
            Events.FireOnLog(Event, info);
        }
    }
}
|
using System.Collections;
using System.Collections.Generic;
using UnityEngine;
[System.Serializable]
public class Room
{
    // Gate objects surrounding this room; closed when the room is cleared.
    public GameObject[] gates;
    // Enemy objects populating this room.
    public GameObject[] enemies;
    // True once the room has been cleared (enemies hidden, gates closed).
    public bool isEmpty = false;

    /// <summary>
    /// Deactivates every enemy and closes every gate.
    /// Does nothing when the room is already empty.
    /// </summary>
    public void EmptyRoom()
    {
        if (isEmpty)
        {
            return;
        }

        foreach (GameObject enemy in enemies)
        {
            enemy.SetActive(false);
        }

        foreach (GameObject gate in gates)
        {
            gate.GetComponent<Gate>().CloseGate();
        }

        isEmpty = true;
    }

    /// <summary>
    /// Reactivates and resets every enemy.
    /// Does nothing unless the room is currently empty.
    /// </summary>
    public void RefillRoom()
    {
        if (!isEmpty)
        {
            return;
        }

        foreach (GameObject enemy in enemies)
        {
            enemy.SetActive(true);
            enemy.GetComponent<Enemy>().ResetEnemy();
        }

        isEmpty = false;
    }
}
|
using System.IO;
namespace BinaryMapper.Windows.Minidump
{
    /// <summary>
    /// Parses Windows minidump images from a stream.
    /// </summary>
    public interface IMinidumpMapper
    {
        /// <summary>
        /// Reads the given stream and maps it into a
        /// <see cref="Minidump"/> structure.
        /// </summary>
        Minidump ReadMinidump(Stream stream);
    }
}
using System;
using System.Diagnostics.Contracts;
using System.Runtime.Serialization;
namespace IsabelDb
{
/// <summary>
///     A two-dimensional point with double-precision coordinates.
/// </summary>
[DataContract]
public struct Point2D
    : IEquatable<Point2D>
{
    /// <summary>
    ///     The horizontal coordinate.
    /// </summary>
    [DataMember] public double X;

    /// <summary>
    ///     The vertical coordinate.
    /// </summary>
    [DataMember] public double Y;

    /// <summary>
    ///     The origin, (0, 0).
    /// </summary>
    public static readonly Point2D Zero;

    static Point2D()
    {
        Zero = new Point2D();
    }

    /// <summary>
    ///     Creates a point at the given coordinates.
    /// </summary>
    /// <param name="x"></param>
    /// <param name="y"></param>
    public Point2D(double x, double y)
    {
        X = x;
        Y = y;
    }

    #region Equality members

    /// <inheritdoc />
    public bool Equals(Point2D other) => X.Equals(other.X) && Y.Equals(other.Y);

    /// <inheritdoc />
    public override bool Equals(object obj) => obj is Point2D other && Equals(other);

    /// <inheritdoc />
    public override int GetHashCode()
    {
        unchecked
        {
            return (X.GetHashCode() * 397) ^ Y.GetHashCode();
        }
    }

    /// <summary>
    ///     Value equality of the two points.
    /// </summary>
    /// <param name="left"></param>
    /// <param name="right"></param>
    /// <returns></returns>
    public static bool operator ==(Point2D left, Point2D right) => left.Equals(right);

    /// <summary>
    ///     Value inequality of the two points.
    /// </summary>
    /// <param name="left"></param>
    /// <param name="right"></param>
    /// <returns></returns>
    public static bool operator !=(Point2D left, Point2D right) => !left.Equals(right);

    #endregion

    /// <inheritdoc />
    public override string ToString() => string.Format("X: {0}, Y: {1}", X, Y);

    /// <summary>
    ///     The squared euclidean distance between the two points
    ///     (cheaper than <see cref="Distance"/>; no square root).
    /// </summary>
    /// <param name="lhs"></param>
    /// <param name="rhs"></param>
    /// <returns></returns>
    [Pure]
    public static double SquaredDistance(Point2D lhs, Point2D rhs)
    {
        var dx = lhs.X - rhs.X;
        var dy = lhs.Y - rhs.Y;
        return dx * dx + dy * dy;
    }

    /// <summary>
    ///     The euclidean distance between the two points.
    /// </summary>
    /// <param name="lhs"></param>
    /// <param name="rhs"></param>
    /// <returns></returns>
    [Pure]
    public static double Distance(Point2D lhs, Point2D rhs) => Math.Sqrt(SquaredDistance(lhs, rhs));
}
} |
package com.sfxcode.sapphire.jfoenix.demo.controller.base
import com.jfoenix.controls.JFXToolbar
import com.sfxcode.sapphire.javafx.controller.SFXViewController
import com.sfxcode.sapphire.javafx.scene.SFXContentManager
import com.sfxcode.sapphire.jfoenix.demo.sevices.LogService
import javafx.event.ActionEvent
import javafx.fxml.FXML
import javafx.scene.control.Button
import scalafx.Includes._
// Mixin for view controllers owning a JFXToolbar whose left-hand buttons
// switch the main content pane; keeps exactly one button highlighted.
trait ToolbarStyling {

  // Injected from FXML: the toolbar whose left items are styled.
  @FXML
  var toolbar: JFXToolbar = _

  // CSS style class applied to unselected toolbar buttons.
  def toolbarButtonStyleClass: String

  // Content manager used to swap the main view on a button click.
  def mainSFXContentManager: SFXContentManager

  // Moves the "-highlighted" variant of the style class onto the selected
  // button and restores the base class on all other buttons.  Relies on the
  // scalafx Includes._ implicits for filter/foreach over getLeftItems.
  def updateToolbarButtonStyles(selectedButton: Button): Unit =
    toolbar.getLeftItems.filter(node => node.isInstanceOf[Button]).foreach { button =>
      if (button == selectedButton) {
        button.getStyleClass.remove(toolbarButtonStyleClass)
        if (!button.getStyleClass.contains(toolbarButtonStyleClass + "-highlighted"))
          button.getStyleClass.add(toolbarButtonStyleClass + "-highlighted")
      }
      else {
        button.getStyleClass.remove(toolbarButtonStyleClass + "-highlighted")
        if (!button.getStyleClass.contains(toolbarButtonStyleClass))
          button.getStyleClass.add(toolbarButtonStyleClass)
      }
    }

  // Shows the given view in the main pane and restyles the toolbar so the
  // clicked button becomes the highlighted one.
  def toolbarButtonClicked(event: ActionEvent, viewController: SFXViewController): Unit = {
    mainSFXContentManager.updatePaneContent(viewController)
    updateToolbarButtonStyles(event.getSource.asInstanceOf[Button])
  }
}
|
module Cubicle
  module DateTime
    # Format used when persisting times: :iso8601 (string keys) or :native.
    # Defaults to :iso8601.
    def self.db_time_format
      @time_format ||= :iso8601 #or :native || :time || anything not :iso8601
    end

    def self.db_time_format=(time_format)
      raise "db_time_format must be :iso8601 or :native" unless [:iso8601,:native].include?(time_format)
      @time_format=time_format
    end

    def self.iso8601?
      self.db_time_format == :iso8601
    end

    # Instance-level convenience mirroring the module-level setting.
    def iso8601?
      Cubicle::DateTime.iso8601?
    end

    # Truncates/formats self to the given period boundary.  With :iso8601 a
    # string key is produced (e.g. "2010-Q2"); otherwise a time value at the
    # beginning of the period.  NOTE(review): `db_year`, `month` and the
    # `beginning_of_*` helpers are defined elsewhere (ActiveSupport-style) —
    # confirm availability on the including class.
    def to_cubicle(period = :date)
      case period
      when :year, :years then iso8601? ? self.strftime('%Y') : beginning_of_year
      when :quarter, :quarters then iso8601? ? "#{db_year}-Q#{(month+2) / 3}" : beginning_of_quarter
      when :month, :months then iso8601? ? self.strftime('%Y-%m') : beginning_of_month
      else iso8601? ? self.strftime('%Y-%m-%d') : self
      end
    end

    # Dispatches to beginning_of_year / beginning_of_quarter / ... based on
    # the (possibly plural) period name.
    def beginning_of(period)
      self.send "beginning_of_#{period.to_s.singularize}"
    end
  end
end
|
import * as React from 'react'
import { StyleSheet, View } from 'react-native'
import LottieBase from 'src/animate/LottieBase'
import profiles from 'src/community/lottie/all.json'
const styles = StyleSheet.create({
  root: {
    width: '100%',
    maxWidth: 850,
  },
})

// Plays the community contributors Lottie animation once (no looping).
function CeloContributors() {
  return (
    <View style={styles.root}>
      <LottieBase autoPlay={true} loop={false} data={profiles} />
    </View>
  )
}

export default React.memo(CeloContributors)
|
#!/bin/bash
# Upload the ping-test result marker files (*SUCCESS / *FAIL) to the
# Ansible host via scp, driving the password prompt with expect, then
# remove the local markers.
apt-get -y update
apt-get -y install expect

success_file=$(find . -name "*SUCCESS")
fail_file=$(find . -name "*FAIL")

# NOTE(review): the scp password is hardcoded below — move it to a secret
# store or switch to SSH key authentication.
#
# FIX: the original script only answered the host-key "yes/no" prompt for
# the first transfer; if that transfer was skipped or failed before the key
# was cached, the second expect block hung.  Both transfers now share one
# helper that handles both prompts.
upload() {
    expect << EOF
spawn scp $1 $ANSIBLE_HOST/pingtest/external
expect -re "(yes/no)" {
    send "yes\r"
    exp_continue
} -re "password:" {
    send "tmax@23\r"
}
expect eof
EOF
}

upload "$success_file"
upload "$fail_file"

rm -f $success_file
rm -f $fail_file
|
#!/bin/bash
#
# Collect basic hardware/OS information into /tmp/<hostname>-info.txt
# for inventory/support purposes.
HOSTNAME=$(hostname)
VERSION=$(cat /proc/version)
DATE=$(date)
OUT="/tmp/$HOSTNAME-info.txt"
#echo -n "Customer? "; read CUSTOMER
#echo -n "Manufacturer? "; read MANUFACTURER
#echo -n "Model? "; read MODEL
#echo -n "Serial #? "; read SERIAL
# CPU vendor/model/speed from /proc/cpuinfo (one line per core).
PMODEL=$(grep vendor_id /proc/cpuinfo | awk -F\: '{print $2}')
PNAME=$(grep model /proc/cpuinfo | awk -F\: '{print $2}')
PSPEED=$(grep MHz /proc/cpuinfo | awk -F\: '{print $2}' | awk -F\. '{print $1}')
# Fall back to bogomips on architectures that do not report "cpu MHz".
# FIX: "$PSPEED" must be quoted — on multi-core machines the value spans
# several lines and the unquoted test produced "too many arguments".
if [ -z "$PSPEED" ]
then
    PSPEED=$(grep mips /proc/cpuinfo | awk -F\: '{print $2}' | awk -F\. '{print $1}')
fi
RAM=$(grep MemTotal /proc/meminfo | awk -F\: '{print $2}' | awk '{print $1 " " $2}')
echo "System Information - $HOSTNAME" > "$OUT"
echo "$HOSTNAME" >> "$OUT"
echo "$DATE" >> "$OUT"
#echo "Hardware Manufacturer: $MANUFACTURER" >> $OUT
#echo "Machine Model........: $MODEL" >> $OUT
#echo "System Serial Number : $SERIAL" >> $OUT
echo "System Specifics.....: $PMODEL $PNAME, $PSPEED MHz" >> "$OUT"
echo "                       $RAM RAM" >> "$OUT"
echo "Operating System.....: $VERSION" >> "$OUT"
echo "I/O Ports" >> "$OUT"
cat /proc/ioports >> "$OUT"
echo "Interrupts" >> "$OUT"
cat /proc/interrupts >> "$OUT"
echo "PCI Devices" >> "$OUT"
# NOTE(review): /proc/pci was removed from modern kernels; lspci is the
# portable replacement — kept as-is to preserve existing output.
cat /proc/pci >> "$OUT"
echo "SCSI Devices" >> "$OUT"
cat /proc/scsi/scsi >> "$OUT"
if [ -e /proc/rd ]
then
    echo "RAID controller found (how cool!)" >> "$OUT"
    cat /proc/rd/c*/current_status >> "$OUT"
fi
|
from freezegun import freeze_time
from io import BytesIO
from onegov.gazette.models import GazetteNotice
from onegov.pdf.utils import extract_pdf_info
from tests.onegov.gazette.common import accept_notice
from tests.onegov.gazette.common import edit_notice
from tests.onegov.gazette.common import edit_notice_unrestricted
from tests.onegov.gazette.common import login_users
from tests.onegov.gazette.common import publish_issue
from tests.onegov.gazette.common import reject_notice
from tests.onegov.gazette.common import submit_notice
def test_view_notice(gazette_app):
    # Check if the details of the notice is displayed correctly in the
    # display view (that is: organization, owner, group etc).
    admin, editor_1, editor_2, editor_3, publisher = login_users(gazette_app)

    with freeze_time("2017-11-01 11:00"):
        # create a notice for each editor
        for count, user in enumerate((editor_1, editor_2, editor_3)):
            manage = user.get('/notices/drafted/new-notice')
            manage.form['title'] = 'Titel {}'.format(count + 1)
            manage.form['organization'] = '200'
            manage.form['category'] = '11'
            manage.form['at_cost'].select('yes')
            manage.form['billing_address'] = 'someone\nstreet\r\nplace'
            manage.form['issues'] = ['2017-44', '2017-45']
            manage.form['text'] = "1. Oktober 2017"
            manage.form['author_place'] = 'Govikon'
            manage.form['author_name'] = 'State Chancellerist'
            manage.form['author_date'] = '2019-01-01'
            manage.form.submit()

    # check if the notices are displayed correctly; the owner/group columns
    # differ per editor (the third editor has no group in the fixtures)
    for number, owner, group in (
        (1, 'First', True),
        (2, 'Second', True),
        (3, 'Third', False),
    ):
        for user in (editor_1, editor_2, editor_3, publisher):
            view = user.get(f'/notice/titel-{number}')
            assert f"Titel {number}" in view
            assert "1. Oktober 2017" in view
            assert "Govikon, 1. Januar 2019" in view
            assert "State Chancellerist" in view
            assert "Civic Community" in view
            assert "Education" in view
            assert "<dd>Ja</dd>" in view
            # newlines in the billing address are rendered as <br>
            assert "someone<br>street<br>place" in view
            assert f"{owner} Editor" in view
            assert f"+4141511227{number}" in view
            assert f"<br>editor{number}@example.org" in view
            if group:
                assert "TestGroup" in view
            else:
                assert "TestGroup" not in view
            assert "Nr. 44, 03.11.2017" in view
            assert "Nr. 45, 10.11.2017" in view
            assert "in Arbeit" in view
            assert "erstellt" in view

    # Check if the publication numbers are displayed
    submit_notice(editor_1, 'titel-1')
    submit_notice(editor_2, 'titel-2')
    submit_notice(editor_3, 'titel-3')
    accept_notice(publisher, 'titel-1')
    accept_notice(publisher, 'titel-2')
    accept_notice(publisher, 'titel-3')
    publish_issue(publisher, '2017-44')
    publish_issue(publisher, '2017-45')
    for number in range(1, 4):
        for user in (editor_1, editor_2, editor_3, publisher):
            view = user.get('/notice/titel-{}'.format(number))
            # publication numbers are assigned sequentially per issue
            assert "Nr. 44, 03.11.2017 / {}".format(number) in view
            assert "Nr. 45, 10.11.2017 / {}".format(number + 3) in view
def test_view_notice_actions(gazette_app):
    # Check if the actions are displayed correctly in the detail view,
    # for every combination of user role and notice state.
    admin, editor_1, editor_2, editor_3, publisher = login_users(gazette_app)

    with freeze_time("2017-11-01 11:00"):
        # create a notice for each editor
        for count, user in enumerate(
            (editor_1, editor_2, editor_3, publisher)
        ):
            manage = user.get('/notices/drafted/new-notice')
            manage.form['title'] = 'Titel {}'.format(count + 1)
            manage.form['organization'] = '200'
            manage.form['category'] = '11'
            manage.form['issues'] = ['2017-44']
            manage.form['text'] = "1. Oktober 2017"
            manage.form['author_place'] = 'Govikon'
            manage.form['author_name'] = 'State Chancellerist'
            manage.form['author_date'] = '2019-01-01'
            manage.form.submit()

    # check the actions
    # single-letter shorthand for the action CSS classes asserted below
    actions = {
        'p': 'action-preview',
        't': 'action-attachments',
        'c': 'action-copy',
        'e': 'action-edit',
        'd': 'action-delete',
        's': 'action-submit',
        'a': 'action-accept',
        'r': 'action-reject'
    }

    def check(values):
        # values: (user, notice slug, letters of the visible actions);
        # every action NOT listed must be absent from the view
        for user, slug, can in values:
            view = user.get('/notice/{}'.format(slug))
            cannot = [x for x in actions.keys() if x not in can]
            assert all((actions[action] in view for action in can))
            assert all((actions[action] not in view for action in cannot))

    # ... when drafted
    check((
        (admin, 'titel-1', 'pteds'),
        (admin, 'titel-2', 'pteds'),
        (admin, 'titel-3', 'pteds'),
        (admin, 'titel-4', 'pteds'),
        (publisher, 'titel-1', 'pteds'),
        (publisher, 'titel-2', 'pteds'),
        (publisher, 'titel-3', 'pteds'),
        (publisher, 'titel-4', 'pteds'),
        (editor_1, 'titel-1', 'peds'),
        (editor_1, 'titel-2', 'peds'),
        (editor_1, 'titel-3', 'p'),
        (editor_1, 'titel-4', 'p'),
        (editor_2, 'titel-1', 'peds'),
        (editor_2, 'titel-2', 'peds'),
        (editor_2, 'titel-3', 'p'),
        (editor_2, 'titel-4', 'p'),
        (editor_3, 'titel-1', 'p'),
        (editor_3, 'titel-2', 'p'),
        (editor_3, 'titel-3', 'peds'),
        (editor_3, 'titel-4', 'p'),
    ))

    # ... when submitted
    submit_notice(editor_1, 'titel-1')
    submit_notice(editor_2, 'titel-2')
    submit_notice(editor_3, 'titel-3')
    submit_notice(publisher, 'titel-4')
    check((
        (admin, 'titel-1', 'ptedar'),
        (admin, 'titel-2', 'ptedar'),
        (admin, 'titel-3', 'ptedar'),
        (admin, 'titel-4', 'ptedar'),
        (publisher, 'titel-1', 'ptear'),
        (publisher, 'titel-2', 'ptear'),
        (publisher, 'titel-3', 'ptear'),
        (publisher, 'titel-4', 'ptear'),
        (editor_1, 'titel-1', 'p'),
        (editor_1, 'titel-2', 'p'),
        (editor_1, 'titel-3', 'p'),
        (editor_1, 'titel-4', 'p'),
        (editor_2, 'titel-1', 'p'),
        (editor_2, 'titel-2', 'p'),
        (editor_2, 'titel-3', 'p'),
        (editor_2, 'titel-4', 'p'),
        (editor_3, 'titel-1', 'p'),
        (editor_3, 'titel-2', 'p'),
        (editor_3, 'titel-3', 'p'),
        (editor_3, 'titel-4', 'p'),
    ))

    # ... when rejected (editors regain edit/delete/submit on own notices)
    reject_notice(publisher, 'titel-1')
    reject_notice(publisher, 'titel-2')
    reject_notice(publisher, 'titel-3')
    reject_notice(publisher, 'titel-4')
    check((
        (admin, 'titel-1', 'pteds'),
        (admin, 'titel-2', 'pteds'),
        (admin, 'titel-3', 'pteds'),
        (admin, 'titel-4', 'pteds'),
        (publisher, 'titel-1', 'pteds'),
        (publisher, 'titel-2', 'pteds'),
        (publisher, 'titel-3', 'pteds'),
        (publisher, 'titel-4', 'pteds'),
        (editor_1, 'titel-1', 'peds'),
        (editor_1, 'titel-2', 'peds'),
        (editor_1, 'titel-3', 'p'),
        (editor_1, 'titel-4', 'p'),
        (editor_2, 'titel-1', 'peds'),
        (editor_2, 'titel-2', 'peds'),
        (editor_2, 'titel-3', 'p'),
        (editor_2, 'titel-4', 'p'),
        (editor_3, 'titel-1', 'p'),
        (editor_3, 'titel-2', 'p'),
        (editor_3, 'titel-3', 'peds'),
        (editor_3, 'titel-4', 'p'),
    ))

    # ... when accepted (copy becomes available to everyone)
    submit_notice(editor_1, 'titel-1')
    submit_notice(editor_2, 'titel-2')
    submit_notice(editor_3, 'titel-3')
    submit_notice(publisher, 'titel-4')
    accept_notice(publisher, 'titel-1')
    accept_notice(publisher, 'titel-2')
    accept_notice(publisher, 'titel-3')
    accept_notice(publisher, 'titel-4')
    check((
        (admin, 'titel-1', 'ptedc'),
        (admin, 'titel-2', 'ptedc'),
        (admin, 'titel-3', 'ptedc'),
        (admin, 'titel-4', 'ptedc'),
        (publisher, 'titel-1', 'pedc'),
        (publisher, 'titel-2', 'pedc'),
        (publisher, 'titel-3', 'pedc'),
        (publisher, 'titel-4', 'pedc'),
        (editor_1, 'titel-1', 'pc'),
        (editor_1, 'titel-2', 'pc'),
        (editor_1, 'titel-3', 'pc'),
        (editor_1, 'titel-4', 'pc'),
        (editor_2, 'titel-1', 'pc'),
        (editor_2, 'titel-2', 'pc'),
        (editor_2, 'titel-3', 'pc'),
        (editor_2, 'titel-4', 'pc'),
        (editor_3, 'titel-1', 'pc'),
        (editor_3, 'titel-2', 'pc'),
        (editor_3, 'titel-3', 'pc'),
        (editor_3, 'titel-4', 'pc'),
    ))

    # ... when published (delete disappears for everyone)
    publish_issue(publisher, '2017-44')
    check((
        (admin, 'titel-1', 'ptec'),
        (admin, 'titel-2', 'ptec'),
        (admin, 'titel-3', 'ptec'),
        (admin, 'titel-4', 'ptec'),
        (publisher, 'titel-1', 'pec'),
        (publisher, 'titel-2', 'pec'),
        (publisher, 'titel-3', 'pec'),
        (publisher, 'titel-4', 'pec'),
        (editor_1, 'titel-1', 'pc'),
        (editor_1, 'titel-2', 'pc'),
        (editor_1, 'titel-3', 'pc'),
        (editor_1, 'titel-4', 'pc'),
        (editor_2, 'titel-1', 'pc'),
        (editor_2, 'titel-2', 'pc'),
        (editor_2, 'titel-3', 'pc'),
        (editor_2, 'titel-4', 'pc'),
        (editor_3, 'titel-1', 'pc'),
        (editor_3, 'titel-2', 'pc'),
        (editor_3, 'titel-3', 'pc'),
        (editor_3, 'titel-4', 'pc'),
    ))

    # ... when imported (state forced directly in the database)
    session = gazette_app.session()
    notice = session.query(GazetteNotice).filter_by(name='titel-1').one()
    notice.user = None
    notice.group = None
    notice.source = 'source'
    notice.state = 'imported'
    session.flush()
    import transaction
    transaction.commit()
    check((
        (admin, 'titel-1', 'pda'),
        (publisher, 'titel-1', 'pad'),
        (editor_1, 'titel-1', 'p'),
    ))
def test_view_notice_preview(gazette_app):
    # The preview shows only the rendered notice itself — none of the meta
    # data (organization, category, group, issues or state) may appear.
    admin, editor_1, editor_2, editor_3, publisher = login_users(gazette_app)

    with freeze_time("2017-11-01 11:00"):
        manage = editor_1.get('/notices/drafted/new-notice')
        manage.form['title'] = 'Titel'
        manage.form['organization'] = '200'
        manage.form['category'] = '11'
        manage.form['issues'] = ['2017-44', '2017-45']
        manage.form['text'] = "1. Oktober 2017"
        manage.form['author_place'] = 'Govikon'
        manage.form['author_name'] = 'State Chancellerist'
        manage.form['author_date'] = '2019-01-01'
        manage.form.submit()

    view = editor_1.get('/notice/titel/preview')
    assert "Titel" in view
    assert "1. Oktober 2017" in view
    assert "Govikon, 1. Januar 2019" in view
    assert "State Chancellerist" in view
    assert "Civic Community" not in view
    assert "Education" not in view
    assert "TestGroup" not in view
    assert "Nr. 44, 03.11.2017" not in view
    assert "Nr. 45, 10.11.2017" not in view
    assert "in Arbeit" not in view
    assert "erstellt" not in view
def test_view_notice_pdf_preview(gazette_app):
    # The PDF preview is served inline; the copyright footer uses the
    # current (frozen) year, not the year of the issue.
    admin, editor_1, editor_2, editor_3, publisher = login_users(gazette_app)

    with freeze_time("2017-11-01 11:00"):
        manage = editor_1.get('/notices/drafted/new-notice')
        manage.form['title'] = 'Titel'
        manage.form['organization'] = '200'
        manage.form['category'] = '11'
        manage.form['issues'] = ['2017-44', '2017-45']
        manage.form['text'] = "1. Oktober 2017"
        manage.form['author_place'] = 'Govikon'
        manage.form['author_name'] = 'State Chancellerist'
        manage.form['author_date'] = '2019-01-01'
        manage.form.submit()

    with freeze_time("2018-01-01 12:00"):
        response = editor_1.get('/notice/titel/preview-pdf')
        assert response.headers['Content-Type'] == 'application/pdf'
        assert response.headers['Content-Disposition'] == \
            'inline; filename=amtsblatt-govikon-titel.pdf'
        # extract_pdf_info returns (page count, text); 'xxx' presumably
        # stands in for the not-yet-assigned publication number — TODO
        # confirm against the PDF template
        assert extract_pdf_info(BytesIO(response.body)) == (
            1,
            'xxx Titel\n'
            ' 1. Oktober 2017\n'
            ' Govikon, 1. Januar 2019\n'
            ' State Chancellerist\n'
            '© 2018 Govikon 1'
        )
def test_view_notice_delete(gazette_app):
    # Who may delete a notice depends on its state:
    #   drafted/rejected -> the owning editor and the publisher
    #   submitted        -> admin only (no delete form rendered for others)
    #   accepted         -> publisher/admin, with a warning
    #   published        -> nobody
    admin, editor_1, editor_2, editor_3, publisher = login_users(gazette_app)

    with freeze_time("2017-11-01 11:00"):

        # delete a drafted notice
        for user in (editor_1, publisher):
            manage = editor_1.get('/notices/drafted/new-notice')
            manage.form['title'] = "Erneuerungswahlen"
            manage.form['organization'] = '200'
            manage.form['category'] = '11'
            manage.form['issues'] = ['2017-44', '2017-45']
            manage.form['text'] = "1. Oktober 2017"
            manage.form['author_place'] = 'Govikon'
            manage.form['author_name'] = 'State Chancellerist'
            manage.form['author_date'] = '2019-01-01'
            manage.form.submit()

            manage = user.get('/notice/erneuerungswahlen/delete')
            manage = manage.form.submit().maybe_follow()
            assert "Meldung gelöscht." in manage

        # delete a submitted notice
        for user in (editor_1, publisher):
            manage = editor_1.get('/notices/drafted/new-notice')
            manage.form['title'] = "Erneuerungswahlen"
            manage.form['organization'] = '200'
            manage.form['category'] = '11'
            manage.form['issues'] = ['2017-44', '2017-45']
            manage.form['text'] = "1. Oktober 2017"
            manage.form['author_place'] = 'Govikon'
            manage.form['author_name'] = 'State Chancellerist'
            manage.form['author_date'] = '2019-01-01'
            manage.form.submit()
            submit_notice(user, 'erneuerungswahlen')

            # an empty forms dict means no delete form is offered
            manage = user.get('/notice/erneuerungswahlen/delete')
            assert manage.forms == {}

            # only the admin can delete a submitted notice
            manage = admin.get('/notice/erneuerungswahlen/delete')
            manage.form.submit().maybe_follow()

        # delete a rejected notice
        for user in (editor_1, publisher):
            manage = editor_1.get('/notices/drafted/new-notice')
            manage.form['title'] = "Erneuerungswahlen"
            manage.form['organization'] = '200'
            manage.form['category'] = '11'
            manage.form['issues'] = ['2017-44', '2017-45']
            manage.form['text'] = "1. Oktober 2017"
            manage.form['author_place'] = 'Govikon'
            manage.form['author_name'] = 'State Chancellerist'
            manage.form['author_date'] = '2019-01-01'
            manage.form.submit()
            submit_notice(user, 'erneuerungswahlen')
            reject_notice(publisher, 'erneuerungswahlen')

            manage = user.get('/notice/erneuerungswahlen/delete')
            manage = manage.form.submit().maybe_follow()
            assert "Meldung gelöscht." in manage

        # delete an accepted notice
        manage = editor_1.get('/notices/drafted/new-notice')
        manage.form['title'] = "Erneuerungswahlen"
        manage.form['organization'] = '200'
        manage.form['category'] = '11'
        manage.form['issues'] = ['2017-44', '2017-45']
        manage.form['text'] = "1. Oktober 2017"
        manage.form['author_place'] = 'Govikon'
        manage.form['author_name'] = 'State Chancellerist'
        manage.form['author_date'] = '2019-01-01'
        manage.form.submit()
        submit_notice(editor_1, 'erneuerungswahlen')
        accept_notice(publisher, 'erneuerungswahlen')

        manage = editor_1.get('/notice/erneuerungswahlen/delete')
        assert manage.forms == {}

        # the publisher gets a warning but may still delete
        manage = publisher.get('/notice/erneuerungswahlen/delete')
        assert "Diese Meldung wurde bereits angenommen!" in manage
        manage.form.submit().maybe_follow()

        # delete a published notice
        manage = editor_1.get('/notices/drafted/new-notice')
        manage.form['title'] = "Erneuerungswahlen"
        manage.form['organization'] = '200'
        manage.form['category'] = '11'
        manage.form['issues'] = ['2017-44', '2017-45']
        manage.form['text'] = "1. Oktober 2017"
        manage.form['author_place'] = 'Govikon'
        manage.form['author_name'] = 'State Chancellerist'
        manage.form['author_date'] = '2019-01-01'
        manage.form.submit()
        submit_notice(editor_1, 'erneuerungswahlen')
        accept_notice(publisher, 'erneuerungswahlen')
        publish_issue(publisher, '2017-44')

        for user in (admin, editor_1, publisher):
            manage = user.get('/notice/erneuerungswahlen/delete')
            assert manage.forms == {}
def test_view_notice_changelog(gazette_app):
    # The changelog table in the detail view must list every state
    # transition with its timestamp (rendered in local time, one hour ahead
    # of the frozen UTC times below), the acting user and their group.
    admin, editor_1, editor_2, editor_3, publisher = login_users(gazette_app)

    with freeze_time("2017-11-01 10:00"):
        manage = editor_1.get('/notices/drafted/new-notice')
        manage.form['title'] = "Erneuerungswahlen"
        manage.form['organization'] = '200'
        manage.form['category'] = '11'
        manage.form['issues'] = ['2017-44', '2017-45']
        manage.form['text'] = "1. Oktober 2017"
        manage.form['author_place'] = 'Govikon'
        manage.form['author_name'] = 'State Chancellerist'
        manage.form['author_date'] = '2019-01-01'
        manage.form.submit()

    with freeze_time("2017-11-01 11:02"):
        submit_notice(editor_1, 'erneuerungswahlen')

    with freeze_time("2017-11-01 11:30"):
        reject_notice(publisher, 'erneuerungswahlen')

    with freeze_time("2017-11-01 11:45"):
        edit_notice(editor_2, 'erneuerungswahlen', organization='300')

    with freeze_time("2017-11-01 11:48"):
        submit_notice(editor_2, 'erneuerungswahlen')

    with freeze_time("2017-11-01 15:00"):
        accept_notice(publisher, 'erneuerungswahlen')

    with freeze_time("2017-11-01 16:00"):
        publish_issue(publisher, '2017-44')

    view = editor_1.get('/notice/erneuerungswahlen')

    # flatten the <td> cells of the changelog table into plain strings
    changes = [
        ''.join(i.strip() for i in td.itertext())
        for td in view.pyquery('table.changes td')
    ]
    # group into rows of four columns (date, user, group, event) and sort
    changes = sorted([
        (
            changes[4 * i + 0],
            changes[4 * i + 1],
            changes[4 * i + 2],
            changes[4 * i + 3]
        )
        for i in range(len(changes) // 4)
    ])

    assert changes == [
        ('01.11.2017 11:00', 'First Editor', 'TestGroup', 'erstellt'),
        ('01.11.2017 12:02', 'First Editor', 'TestGroup',
         'eingereicht'),
        # fixed: the rejection label is plain 'zurückgewiesen' (a stray
        # 'XYZ' suffix had crept into the expected value)
        ('01.11.2017 12:30', 'Publisher', '', 'zurückgewiesen'),
        ('01.11.2017 12:45', 'Second Editor', 'TestGroup', 'bearbeitet'),
        ('01.11.2017 12:48', 'Second Editor', 'TestGroup',
         'eingereicht'),
        ('01.11.2017 16:00', 'Publisher', '', 'Druck beauftragt'),
        ('01.11.2017 16:00', 'Publisher', '', 'angenommen'),
        ('01.11.2017 17:00', 'Publisher', '', 'veröffentlicht')
    ]
def test_view_notice_copy(gazette_app):
    # Copying an accepted notice pre-fills the form with everything except
    # the issues; the unrestricted note must be carried over to the copies.
    admin, editor_1, editor_2, editor_3, publisher = login_users(gazette_app)

    with freeze_time("2017-10-01 12:00"):
        manage = editor_1.get('/notices/drafted/new-notice')
        manage.form['title'] = "Erneuerungswahlen"
        manage.form['organization'] = '200'
        manage.form['category'] = '11'
        manage.form['issues'] = ['2017-40']
        manage.form['text'] = "1. Oktober 2017"
        manage.form['author_place'] = 'Govikon'
        manage.form['author_name'] = 'State Chancellerist'
        manage.form['author_date'] = '2019-01-01'
        manage.form.submit()
        submit_notice(editor_1, 'erneuerungswahlen')
        accept_notice(publisher, 'erneuerungswahlen')

    with freeze_time("2017-10-01 12:00"):
        edit_notice_unrestricted(publisher, 'erneuerungswahlen', note='NOTE!')

    with freeze_time("2018-01-01 12:00"):
        for user in (editor_1, editor_2, editor_3, publisher):
            manage = user.get('/notice/erneuerungswahlen').click("Kopieren")
            assert manage.form['title'].value == "Erneuerungswahlen"
            assert manage.form['organization'].value == '200'
            assert manage.form['category'].value == '11'
            assert manage.form['text'].value == "1. Oktober 2017"
            # the issues are deliberately not copied (the old ones are past)
            assert manage.form['issues'].value is None

            manage.form['issues'] = ['2018-1']
            manage = manage.form.submit().maybe_follow()

            assert "Erneuerungswahlen" in user.get('/dashboard')
            assert "Erneuerungswahlen" in user.get('/notices/drafted')

    # fixed: these four checks were bare expressions with no effect — they
    # must assert that the note was carried over to each of the four copies
    assert "NOTE!" in publisher.get('/notice/erneuerungswahlen-1')
    assert "NOTE!" in publisher.get('/notice/erneuerungswahlen-2')
    assert "NOTE!" in publisher.get('/notice/erneuerungswahlen-3')
    assert "NOTE!" in publisher.get('/notice/erneuerungswahlen-4')
|
import { Injectable, OnModuleInit } from '@nestjs/common';
import { QueueService } from './queue.service';
import { Events } from '@/events/events';
/**
 * A one-way link from the queued player who marked the friendship
 * (source) to the player they marked (target).
 */
export interface Friendship {
  sourcePlayerId: string;
  targetPlayerId: string;
}
@Injectable()
export class FriendsService implements OnModuleInit {
  /** All currently active friendships (at most one per source player). */
  friendships: Friendship[] = [];

  constructor(private queueService: QueueService, private events: Events) {}

  onModuleInit() {
    // Drop stale friendships whenever the queue composition changes.
    this.events.queueSlotsChange.subscribe(() => this.cleanupFriendships());
  }

  /**
   * Marks `targetPlayerId` as a friend of `sourcePlayerId`, replacing any
   * previous friendship the source had. Passing `null` as the target only
   * removes the source's existing friendship.
   *
   * Throws when the queue is launching, when either player is not queued,
   * when the source is not a medic, when the target is the other medic, or
   * when the target is already claimed by a *different* player.
   *
   * @returns the updated list of friendships.
   */
  markFriend(sourcePlayerId: string, targetPlayerId: string) {
    if (this.queueService.state === 'launching') {
      throw new Error('cannot make friends at this stage');
    }

    if (targetPlayerId === null) {
      // only removing friendship
      this.friendships = this.friendships.filter(
        (f) => f.sourcePlayerId !== sourcePlayerId,
      );
    } else {
      const sourcePlayerSlot =
        this.queueService.findSlotByPlayerId(sourcePlayerId);
      const targetPlayerSlot =
        this.queueService.findSlotByPlayerId(targetPlayerId);
      if (!sourcePlayerSlot || !targetPlayerSlot) {
        throw new Error('player not in the queue');
      }

      if (sourcePlayerSlot.gameClass !== 'medic') {
        throw new Error('only medics can make friends');
      }

      if (targetPlayerSlot.gameClass === 'medic') {
        throw new Error('cannot make the other medic as a friend');
      }

      // BUG FIX: exclude the caller's own entry from the duplicate check —
      // previously a medic re-marking the same friend hit their own
      // friendship and was rejected as if another player had claimed it.
      const claimedByAnother = this.friendships.some(
        (f) =>
          f.targetPlayerId === targetPlayerId &&
          f.sourcePlayerId !== sourcePlayerId,
      );
      if (claimedByAnother) {
        throw new Error(
          'this player is already marked as a friend by another player',
        );
      }

      this.friendships = [
        ...this.friendships.filter((f) => f.sourcePlayerId !== sourcePlayerId),
        { sourcePlayerId, targetPlayerId },
      ];
    }

    this.events.queueFriendshipsChange.next({ friendships: this.friendships });
    return this.friendships;
  }

  /** Keeps only friendships whose source is still queued as a medic. */
  private cleanupFriendships() {
    this.friendships = this.friendships.filter(
      (f) =>
        this.queueService.findSlotByPlayerId(f.sourcePlayerId)?.gameClass ===
        'medic',
    );
    this.events.queueFriendshipsChange.next({ friendships: this.friendships });
  }
}
|
// Copyright (c) .NET Foundation. All rights reserved.
// Licensed under the Apache License, Version 2.0. See License.txt in the project root for license information.
using System.Collections.Generic;
using System.Linq;
using System.Runtime.Versioning;
using NuGet;
namespace Microsoft.Dnx.Runtime
{
    /// <summary>
    /// Builds <see cref="ProjectDescription"/> instances for project references,
    /// combining project.json dependencies, the implicit framework assemblies of
    /// desktop targets, and the dependency filter recorded in the lock file.
    /// </summary>
    public class ProjectDependencyProvider
    {
        /// <summary>
        /// Loads the project at <paramref name="path"/> and describes it for the
        /// lock file entry's target framework; returns a bare description when no
        /// project file can be loaded.
        /// </summary>
        public ProjectDescription GetDescription(string name, string path, LockFileTargetLibrary targetLibrary)
        {
            Project project;

            // Can't find a project file with the name so bail
            if (!Project.TryGetProject(path, out project))
            {
                return new ProjectDescription(name, path);
            }

            return GetDescription(targetLibrary.TargetFramework, project, targetLibrary);
        }

        /// <summary>
        /// Describes <paramref name="project"/> for <paramref name="targetFramework"/>,
        /// optionally filtering dependencies down to those in <paramref name="targetLibrary"/>.
        /// </summary>
        public ProjectDescription GetDescription(FrameworkName targetFramework, Project project, LockFileTargetLibrary targetLibrary)
        {
            // This never returns null
            var targetFrameworkInfo = project.GetTargetFramework(targetFramework);

            var targetFrameworkDependencies = new List<LibraryDependency>(targetFrameworkInfo.Dependencies);

            // Desktop (.NET Framework) targets implicitly reference core framework
            // assemblies, so add them to make the dependency graph explicit.
            if (targetFramework != null && VersionUtility.IsDesktop(targetFramework))
            {
                targetFrameworkDependencies.Add(new LibraryDependency
                {
                    LibraryRange = new LibraryRange("mscorlib", frameworkReference: true)
                });

                targetFrameworkDependencies.Add(new LibraryDependency
                {
                    LibraryRange = new LibraryRange("System", frameworkReference: true)
                });

                // System.Core is added from version 3.5 up, Microsoft.CSharp from 4.0 up.
                if (targetFramework.Version >= Constants.Version35)
                {
                    targetFrameworkDependencies.Add(new LibraryDependency
                    {
                        LibraryRange = new LibraryRange("System.Core", frameworkReference: true)
                    });

                    if (targetFramework.Version >= Constants.Version40)
                    {
                        targetFrameworkDependencies.Add(new LibraryDependency
                        {
                            LibraryRange = new LibraryRange("Microsoft.CSharp", frameworkReference: true)
                        });
                    }
                }
            }

            var dependencies = project.Dependencies.Concat(targetFrameworkDependencies).ToList();

            if (targetLibrary != null)
            {
                // The lock file entry might have a filtered set of dependencies
                var lockFileDependencies = targetLibrary.Dependencies.ToDictionary(d => d.Id);

                // Remove all non-framework dependencies that don't appear in the lock file entry
                dependencies.RemoveAll(m => !lockFileDependencies.ContainsKey(m.Name) && !m.LibraryRange.IsGacOrFrameworkReference);
            }

            var loadableAssemblies = new List<string>();

            if (project.IsLoadable)
            {
                loadableAssemblies.Add(project.Name);
            }

            // Mark the library as unresolved if there were specified frameworks
            // and none of them resolved
            bool unresolved = targetFrameworkInfo.FrameworkName == null;

            return new ProjectDescription(
                new LibraryRange(project.Name, frameworkReference: false),
                project,
                dependencies,
                loadableAssemblies,
                targetFrameworkInfo,
                !unresolved);
        }
    }
}
|
using System;
using System.Diagnostics.Contracts;
using System.Globalization;
namespace Solace.DotNet.Rtsp.Messages {
/// <summary>
/// Describe a couple of port used to transfer video and command.
/// </summary>
public class PortCouple {
/// <summary>
/// Gets or sets the first port number.
/// </summary>
/// <value>The first port.</value>
public int First { get; set; }
/// <summary>
/// Gets or sets the second port number.
/// </summary>
/// <remarks>If not present the value is 0</remarks>
/// <value>The second port.</value>
public int Second { get; set; }
/// <summary>
/// Initializes a new instance of the <see cref="PortCouple"/> class.
/// </summary>
public PortCouple ()
{
}
/// <summary>
/// Initializes a new instance of the <see cref="PortCouple"/> class.
/// </summary>
/// <param name="first">The first port.</param>
public PortCouple (int first)
{
First = first;
Second = 0;
}
/// <summary>
/// Initializes a new instance of the <see cref="PortCouple"/> class.
/// </summary>
/// <param name="first">The first port.</param>
/// <param name="second">The second port.</param>
public PortCouple (int first, int second)
{
First = first;
Second = second;
}
/// <summary>
/// Gets a value indicating whether this instance has second port.
/// </summary>
/// <value>
/// <c>true</c> if this instance has second port; otherwise, <c>false</c>.
/// </value>
public bool IsSecondPortPresent {
get { return Second != 0; }
}
/// <summary>
/// Parses the int values of port.
/// </summary>
/// <param name="stringValue">A string value.</param>
/// <returns>The port couple</returns>
public static PortCouple Parse (string stringValue)
{
if (stringValue == null)
throw new ArgumentNullException (nameof (stringValue));
Contract.Requires (!string.IsNullOrEmpty (stringValue));
string [] values = stringValue.Split ('-');
int.TryParse (values [0], out int tempValue);
PortCouple result = new PortCouple (tempValue);
tempValue = 0;
if (values.Length > 1)
int.TryParse (values [1], out tempValue);
result.Second = tempValue;
return result;
}
/// <summary>
/// Returns a <see cref="System.String"/> that represents this instance.
/// </summary>
/// <returns>
/// A <see cref="System.String"/> that represents this instance.
/// </returns>
public override string ToString ()
{
if (IsSecondPortPresent)
return First.ToString (CultureInfo.InvariantCulture) + "-" + Second.ToString (CultureInfo.InvariantCulture);
return First.ToString (CultureInfo.InvariantCulture);
}
}
}
|
(function () {
  'use strict';

  // Directive rendering a resume header: name, address line and contact line.
  window.angular.module("resume").directive('personalInfo', function () {
    var templateData = [
      '<h1>{{name}}</h1>',
      '<div class="text-info">',
      '{{address.city}},',
      '{{address.state}} • {{address.zip}}',
      '<br>',
      '{{email}} • {{phoneNumber}}',
      '</div>'
    ].join('');

    return {
      restrict: 'AE',
      // No DOM behaviour needed; the template does all the work.
      link: function (scope, element, attr) {
      },
      template: templateData,
      scope: {
        name: '=',
        address: '=',
        email: '=',
        phoneNumber: '='
      }
    };
  });
}).call();
package com.wpm.account.email;
/**
 * Checked exception for account e-mail operations.
 */
public class AccountEmailException extends Exception {
    /** Creates an exception with no detail message. */
    public AccountEmailException() {
        super();
    }

    /** Creates an exception with a detail message only (added for callers that have no cause to wrap). */
    public AccountEmailException(String msg) {
        super(msg);
    }

    /** Creates an exception with a detail message and the underlying cause. */
    public AccountEmailException(String msg, Exception e) {
        super(msg, e);
    }
}
|
module Puppler
class Command
# puppler command: convert existing Shallowfile to puppetfile
class Convert < Command
include Puppler::Utils
attr_reader :options
def run(shallowfile)
if File.exist?(options[:puppetfile])
log_fatal("The specified Puppetfile `#{options[:puppetfile]}' already exists, will not overwrite it.")
end
shallowfile_data = YAML.safe_load(File.read(shallowfile))
puppetfile_lines = []
shallowfile_data['projects'].each do |modname, data|
# FIXME: Add support for branch excludes?
url = data.is_a?(Hash) ? data['url'] : data
puppetfile_lines << "mod '#{modname}',\n" << " :git => '#{url}'\n\n"
end
write_puppetfile(puppetfile_lines)
end
private
def write_puppetfile(puppetfile_lines)
File.open(options[:puppetfile], 'w') do |file|
puppetfile_lines.each { |line| file.puts(line) }
end
log_info("Written Puppetfile to `#{options[:puppetfile]}'.")
end
end
end
end
|
module FoldExample where
import qualified Data.Foldable as F
import Data.Monoid
import Tree
-- | Foldable for the project's Tree type: an in-order traversal — fold the
-- left subtree, then the node's value, then the right subtree, combining
-- results with the target Monoid's mappend.
instance F.Foldable Tree where
    foldMap _ EmptyTree = mempty
    foldMap f (Node x l r) = F.foldMap f l `mappend`
                             f x `mappend`
                             F.foldMap f r
-- | Small sample tree used to exercise the fold.
testTree :: Tree Int
testTree = Node 5
             (Node 3
               (Node 1 EmptyTree EmptyTree)
               (Node 6 EmptyTree EmptyTree)
             )
             (Node 9
               (Node 8 EmptyTree EmptyTree)
               (Node 10 EmptyTree EmptyTree)
             )
|
package com.whisk.hulk.circe
import io.circe.Decoder
import scala.util.Try
/**
 * Accessors that decode a column's JSON payload into `T` using an implicit
 * circe [[io.circe.Decoder]]. Every accessor comes in two forms: lookup by
 * column `name` and lookup by positional `index`.
 *
 * NOTE(review): implementations live elsewhere — the exact failure semantics
 * (missing column vs. decode error) are defined by the implementing class.
 */
trait CirceRowOps {
  /** Decoded value of the named column as an `Option`. */
  def jsonOption[T](name: String)(implicit decoder: Decoder[T]): Option[T]
  /** Decoded value of the column at `index` as an `Option`. */
  def jsonOption[T](index: Int)(implicit decoder: Decoder[T]): Option[T]
  /** Decoded value of the named column. */
  def json[T](name: String)(implicit decoder: Decoder[T]): T
  /** Decoded value of the column at `index`. */
  def json[T](index: Int)(implicit decoder: Decoder[T]): T
  /** Decoded value of the named column, or `default` (evaluated lazily). */
  def jsonOrElse[T](name: String, default: => T)(implicit decoder: Decoder[T]): T
  /** Decoded value of the column at `index`, or `default` (evaluated lazily). */
  def jsonOrElse[T](index: Int, default: => T)(implicit decoder: Decoder[T]): T
  /** Decoded value of the named column wrapped in `Try`. */
  def jsonTry[T](name: String)(implicit decoder: Decoder[T]): Try[T]
  /** Decoded value of the column at `index` wrapped in `Try`. */
  def jsonTry[T](index: Int)(implicit decoder: Decoder[T]): Try[T]
}
|
# `SliceVec` API
* [ ] pop
* [ ] append
* [ ] truncate (drops)
* [ ] swap_remove
* [ ] resize_with
* [ ] split_off
* [ ] extend_from_slice
|
package org.teachingextensions.logo.tests;
import junit.framework.TestCase;
import org.teachingextensions.approvals.lite.Approvals;
import org.teachingextensions.approvals.lite.reporters.UseReporter;
import org.teachingextensions.approvals.lite.reporters.windows.TortoiseTextDiffReporter;
import org.teachingextensions.logo.utils.ColorUtils.Wheel;
public class WheelTest extends TestCase
{
  /**
   * Draw from a three-element wheel seven times — more draws than elements,
   * presumably cycling back around (behaviour is pinned by the approval file).
   */
  @UseReporter(TortoiseTextDiffReporter.class)
  public void testWheel() throws Exception
  {
    Wheel<Integer> numbers = new Wheel<Integer>();
    numbers.add(4);
    numbers.add(6);
    numbers.add(8);
    StringBuilder sequence = new StringBuilder();
    for (int draw = 0; draw < 7; draw++)
    {
      sequence.append(numbers.next()).append(" - ");
    }
    Approvals.verify(sequence.toString());
  }
}
|
# ---
# title: 923. 3Sum With Multiplicity
# id: problem923
# author: Tian Jun
# date: 2020-10-31
# difficulty: Medium
# categories: Two Pointers
# link: <https://leetcode.com/problems/3sum-with-multiplicity/description/>
# hidden: true
# ---
#
# Given an integer array `A`, and an integer `target`, return the number of
# tuples `i, j, k` such that `i < j < k` and `A[i] + A[j] + A[k] == target`.
#
# As the answer can be very large, return it **modulo** `10^9 + 7`.
#
#
#
# **Example 1:**
#
#
#
# Input: A = [1,1,2,2,3,3,4,4,5,5], target = 8
# Output: 20
# Explanation:
# Enumerating by the values (A[i], A[j], A[k]):
# (1, 2, 5) occurs 8 times;
# (1, 3, 4) occurs 8 times;
# (2, 2, 4) occurs 2 times;
# (2, 3, 3) occurs 2 times.
#
#
# **Example 2:**
#
#
#
# Input: A = [1,1,2,2,2,2], target = 5
# Output: 12
# Explanation:
# A[i] = 1, A[j] = A[k] = 2 occurs 12 times:
# We choose one 1 from [1,1] in 2 ways,
# and two 2s from [2,2,2,2] in 6 ways.
#
#
#
#
# **Constraints:**
#
# * `3 <= A.length <= 3000`
# * `0 <= A[i] <= 100`
# * `0 <= target <= 300`
#
#
## @lc code=start
using LeetCode

## Count index triples i < j < k with A[i] + A[j] + A[k] == target, modulo
## 10^9 + 7. Since 0 <= A[i] <= 100, tally each value once and enumerate
## value pairs (x <= y), deriving z = target - x - y and combining the
## multiplicities; requiring z >= y avoids double counting.
function three_sum_multi(A::Vector{Int}, target::Int)::Int
    MOD = 1_000_000_007
    counts = zeros(Int, 101)  # counts[v + 1] = occurrences of value v
    for a in A
        counts[a + 1] += 1
    end
    total = 0
    for x in 0:100, y in x:100
        z = target - x - y
        (z < y || z > 100) && continue
        cx, cy, cz = counts[x + 1], counts[y + 1], counts[z + 1]
        if x == y == z
            total += cx * (cx - 1) * (cx - 2) ÷ 6      # choose 3 of cx
        elseif x == y
            total += cx * (cx - 1) ÷ 2 * cz            # choose 2 of cx, 1 of cz
        elseif y == z
            total += cx * (cy * (cy - 1) ÷ 2)          # choose 1 of cx, 2 of cy
        else
            total += cx * cy * cz
        end
        total %= MOD
    end
    return total
end
## @lc code=end
|
(function() {
  'use strict';

  // Umbrella module: depending on 'mobile-angular-ui.migrate' pulls in every
  // individual migration shim listed below.
  var migrationModules = [
    'mobile-angular-ui.migrate.toggle',
    'mobile-angular-ui.migrate.forms',
    'mobile-angular-ui.migrate.panels',
    'mobile-angular-ui.migrate.disabled',
    'mobile-angular-ui.migrate.overlay',
    'mobile-angular-ui.migrate.carousel',
    'mobile-angular-ui.migrate.namespaceAliases',
    'mobile-angular-ui.migrate.switch'
  ];

  angular.module('mobile-angular-ui.migrate', migrationModules);
}());
Web Dev Lab
===========
All the formulas.
Project list
------------
---
© Kuntau 2014
|
# pchome-price-trace
pchome的價格追蹤,使用thingspeak.com來紀錄,當到達預計售價時用line notify來通知
|
// Examples of the Math sine family: sin/sinh take radians and coerce
// non-numeric arguments to NaN; asin's domain is [-1, 1] (values outside
// yield NaN, as the asin(2) example below shows); asinh accepts any real.
// console.log(Math.sin(Math.PI / 180 * 30)) // 0.49999999999999994
// console.log(Math.sin()) // NaN
// console.log(Math.sin('yancey')) // NaN
// console.log(Math.sin(Math.PI / 2)) // 1
// console.log(Math.sinh(0)) // 0
// console.log(Math.asin(2)) // NaN
// console.log(Math.asin(0)) // 0
// console.log(Math.asin(Math.PI / 4)) // 0.9033391107665127
console.log(Math.asinh(1)) // 0.881373587019543
<?php
namespace App\Http\Controllers;
use App\Officers;
use App\Surveys;
use App\Learners;
use Illuminate\Http\Request;
use App\Accounts;
use App\Http\Requests;
use Illuminate\Http\Response;
use JWTAuth;
use League\Flysystem\Exception;
/**
 * Authentication and user-management endpoints for the admin area.
 *
 * NOTE(review): Staff, Student and User are referenced below but no `use`
 * statement in this file imports them (only Officers, Surveys, Learners and
 * Accounts are imported) — confirm they resolve via the namespace before
 * relying on create()/update()/delete().
 */
class UserController extends Controller {

	/**
	 * Render the admin dashboard view.
	 */
	public function index() {
		return view( 'auth.admin' );
	}

	/**
	 * Authenticate username/password credentials and issue a JWT.
	 */
	public function login( Request $request ) {
		$credentials = $request->only( 'username', 'password' );
		if ( ! $token = JWTAuth::attempt( $credentials ) ) {
			return response()->json( [ 'code' => 401, 'data' => [ 'message' => 'Incorrect username or password' ] ], 401 );
		}
		$user = JWTAuth::toUser( $token );

		return response()->json( [ 'code' => 200, 'token' => $token, 'data' => $user ], 200 );
	}

	/**
	 * Log the user out and reset the session.
	 */
	public function logout( Request $request ) {
		$this->guard()->logout();
		$request->session()->flush();
		$request->session()->regenerate();

		return response()->json( [ 'code' => 200, 'data' => [ 'message' => 'Logout Success' ] ], 200 );
	}

	/**
	 * Create a user plus its person record: user_type 2 creates a Staff row,
	 * user_type 3 a Student row; other types create only the user.
	 */
	public function create( Request $request ) {
		$object = null;
		try {
			if ( intval( $request->user_type ) == 2 ) {
				$object = new Staff();
				$object->department_id = null;
				$object->save();
			}
			if ( intval( $request->user_type ) == 3 ) {
				$object = new Student();
				$object->student_code = null;
				$object->save();
			}
			$person_id = ( $object ) ? $object->id : null;
			$user      = User::create( [
				'name'      => $request->name,
				'email'     => $request->email,
				'password'  => bcrypt( $request->password ),
				'user_type' => intval( $request->user_type ),
				'person_id' => $person_id
			] );
		} catch ( Exception $e ) {
			// BUG FIX: response()->json() takes ($data, $status) — the original
			// passed the status code 404 as the payload and the message array
			// as the status.
			return response()->json( [ 'code' => 404, 'data' => [ 'message' => 'User already exists.' ] ], 404 );
		}
		$token   = JWTAuth::fromUser( $user );
		$message = array( 'message' => 'Create user successfully!' );

		return response()->json( [ 'code' => 200, 'data' => [ 'message' => $message ] ], 200 );
	}

	/**
	 * Fetch a single user by id.
	 */
	public function get( $id ) {
		$user = User::find( $id );
		if ( $user ) {
			return response()->json( [ 'code' => 200, 'data' => $user, 'message' => 'success' ], 200 );
		} else {
			return response()->json( [ 'code' => 400, 'message' => 'error' ], 400 );
		}
	}

	/**
	 * Update name, email and user_type of an existing user.
	 */
	public function update( $id, Request $request ) {
		$user = User::find( $id );
		// Robustness: User::find() returns null for unknown ids; fail cleanly
		// instead of a fatal "property on null" error.
		if ( ! $user ) {
			return response()->json( [ 'code' => 400, 'message' => 'error' ], 400 );
		}
		$user->name      = $request->name;
		$user->email     = $request->email;
		$user->user_type = $request->user_type;
		$user->save();

		return response()->json( [ 'code' => 200, 'data' => [ 'message' => 'success' ] ], 200 );
	}

	/**
	 * Delete a user and its linked Staff/Student record. Admin users
	 * (user_type 1) cannot be deleted.
	 */
	public function delete( $id ) {
		$user = User::find( $id );
		if ( $user->user_type == 1 ) {
			return response()->json( [ 'code' => 400, 'data' => [ 'message' => 'error' ] ], 200 );
		} elseif ( $user->user_type == 2 ) {
			// BUG FIX: the original tested $user->user, a property used nowhere
			// else; user_type is the discriminator (2 = Staff), matching create().
			Staff::destroy( $user->person_id );
		} else {
			Student::destroy( $user->person_id );
		}
		$user->destroy( $id );

		return response()->json( [ 'code' => 200, 'data' => [ 'message' => 'success' ] ], 200 );
	}

	/**
	 * List every user.
	 */
	public function users() {
		$users = User::get()->toArray();

		return response()->json( [ 'code' => 200, 'message' => 'successfully', 'data' => $users ], 200 );
	}

	/**
	 * Render survey templates (survey_type 1).
	 */
	public function templates() {
		$templates = Surveys::where( 'survey_type', 1 )->get()->toArray();

		return view( 'auth.templates', compact( 'templates' ) );
	}

	/**
	 * Render concrete surveys (survey_type 2).
	 */
	public function surveys() {
		$surveys = Surveys::where( 'survey_type', 2 )->get()->toArray();

		return view( 'auth.surveys', compact( 'surveys' ) );
	}

	/**
	 * Render the notifications page.
	 */
	public function notifications() {
		return view( 'auth.notifications' );
	}
}
|
#include "modules/drivers/rfid/rfid_component.h"
#include "modules/common/adapters/adapter_gflags.h"
namespace apollo {
namespace drivers {
namespace rfid {
// Component name reported to the framework.
std::string RfidComponent::Name() const { return "rfid"; }

RfidComponent::RfidComponent() {}

// Load the device proto config, open the serial device at 9600 8N1, create
// the RFID writer, and start the blocking read loop on a background task.
bool RfidComponent::Init() {
  if (!GetProtoConfig(&device_conf_)) {
    AERROR << "Unable to load rfid conf file: " << ConfigFilePath();
    return false;
  }
  ADEBUG << "Device conf:" << device_conf_.ShortDebugString();

  device_ = std::make_unique<Uart>(device_conf_.device_id().c_str());

  // Uart device set option
  device_->SetOpt(9600, 8, 'N', 1);

  // Publish rfid station data
  rfid_writer_ = node_->CreateWriter<RFID>(device_conf_.output_channel());

  // Async read
  async_action_ = cyber::Async(&RfidComponent::Action, this);
  return true;
}
// TODO()CHECK
// Intended checksum validation for a received RFID frame. The summing loop
// is commented out, so `value` and `check_value` both remain 0 and this
// currently always returns true — the frame is never actually validated.
bool RfidComponent::Check() {
  // sum check for rfid result
  int check_value = 0;  // buffer[11];
  // char new_hex;
  // auto transfered_size = stringToHex(&buffer + 1, &new_hex);
  // AINFO << "transfered size : " << transfered_size
  //       << " retrun transfered: " << new_hex;
  int value = 0;
  // for (int i = 0; i < 11; i + 2) {
  //   value | = new_hex[i];
  // }
  return value == check_value;
}
// Blocking serial read loop: accumulates bytes into `buffer` until 11 have
// been collected, then publishes a RFID message. A 0x02 byte (start-of-frame
// marker, presumably STX — confirm against the reader's protocol) restarts
// accumulation.
void RfidComponent::Action() {
  int count = 0;
  static char buffer[20];
  static char buf;
  while (!apollo::cyber::IsShutdown()) {
    count = 1;
    std::memset(buffer, 0, 20);
    while (1) {
      int ret = device_->Read(&buf, 1);
      AINFO << "RFID Device return: " << ret;
      if (ret == 1) {
        AINFO << "RFID Device buf: " << buf;
        if (buf == 0x02) {
          // Frame marker: break to the outer loop, which clears the buffer.
          count = 1;
          break;
        }
        buffer[count] = buf;
        count++;
      }
      // NOTE(review): `count` keeps growing past 11 until the next 0x02;
      // buffer[count] would write past the 20-byte buffer if no frame marker
      // arrives — verify the device always delimits frames.
      AINFO << "count: " << count;
      if (count == 11) {
        AINFO << "origin id from buffer[10]: " << buffer[10];
        // uint32_t station_id = buffer[10] - '0';
        // NOTE(review): station_id is taken from `buf` (the most recently
        // read byte) while the log above prints buffer[10] — confirm which
        // byte actually carries the station id.
        uint8_t station_id = buf;
        AINFO << "TRANSFER ID :" << station_id;

        apollo::drivers::RFID rfid;
        auto header = rfid.mutable_header();
        header->set_timestamp_sec(apollo::cyber::Time::Now().ToSecond());
        header->set_frame_id("rfid");
        rfid.set_id(station_id);

        rfid_writer_->Write(rfid);
      }
    }
  }
}

RfidComponent::~RfidComponent() { AINFO << "RfidComponent::~RfidComponent()"; }
} // namespace rfid
} // namespace drivers
} // namespace apollo
|
# Multiply each criteria column (columns 2..ncol) by its weight; column 1 is
# left untouched as it holds the path/alternative identifier.
aux_step03_Weight_of_Norm_matrix <- function(inMatrix, inWeight){
  weighted <- inMatrix
  colCount <- dim(inMatrix)[2]
  rowCount <- dim(inMatrix)[1]
  criteria <- matrix(inMatrix[, 2:colCount], nrow = rowCount, ncol = colCount - 1, byrow = FALSE)
  # Expand the weight vector into a matrix (one identical row per path) so a
  # plain element-wise product applies weight j to criteria column j.
  weightRows <- matrix(rep(inWeight, times = rowCount), ncol = ncol(criteria), byrow = TRUE)
  weighted[, 2:colCount] <- criteria * weightRows
  return(weighted)
}
#'
#' @param Matrices with Benefits and Costs,
#' @param Vectors with Benefits and Costs weights,
#' @return Score of Paths
#' @author Bruno Sousa
#' @note v1.0 no arguments validation
#' @title METH_runTOPSIS
#' @name METH_runTOPSIS
METH_runTOPSIS <- function(iMBen, iMCost, iVecBen, iVecCost){
  # TOPSIS ranking of paths. Column 1 of each criteria matrix is assumed to
  # hold the path identifier; the remaining columns hold criteria values.
  # Costs are converted to benefits, criteria are vector-normalized and
  # weighted, and paths are scored by closeness to the ideal solutions.

  # Important global constants
  MIN_COST_TOPSIS_TENDENCY <- 0.0001   # to avoid divisions by zero
  BETA_TOPSIS_TENDENCY_WEIGHTS <- 0.5  # benefit/cost blending factor
  MINSUM_TOPSIS <- 1e-99               # to avoid divisions by zero in normalization

  mBen_Criteria <- iMBen
  mCost_Criteria <- iMCost
  vBen_weight <- iVecBen
  vCost_weight <- iVecCost

  # Squared component difference used by the separation measures.
  # NOTE(review): separations below are sums of squared differences with no
  # final sqrt — classic TOPSIS takes the square root; preserved as-is.
  fAux_Euclidean_Dist <- function(i1, i2){
    adif <- (i1 - i2)^2
    return(adif)
  }

  # Convert cost criteria into benefit criteria (reciprocal transform);
  # zero costs are clamped to a small positive value first.
  aux_step01_Tendency_Cnv <- function(inCosts){
    nCol <- dim(inCosts)[2]
    aux <- matrix(inCosts[, 2:nCol], ncol = nCol - 1)
    aux[which(aux == 0)] <- MIN_COST_TOPSIS_TENDENCY
    aux <- 1 / aux
    return(aux)
  }

  # Blend benefit and cost weight vectors into one weight vector.
  aux_step01_Tendend_Weigths <- function(inWeBen, inWeCost){
    ret_Ben <- BETA_TOPSIS_TENDENCY_WEIGHTS * inWeBen
    ret_Cost <- (1 - BETA_TOPSIS_TENDENCY_WEIGHTS) * inWeCost
    ret_ <- c(ret_Ben, ret_Cost)
    return(ret_)
  }

  # Vector (L2) normalization of the criteria columns.
  # NOTE(review): iMcalc is rebuilt with byrow=TRUE from column-major data,
  # which reorders values whenever there is more than one criteria column;
  # other helpers in this file use byrow=FALSE. Preserved as-is — confirm
  # the intended orientation before changing it.
  aux_step02_Normalization <- function(inMatrix){
    # internal helper: sum of squares with a tiny offset against zero columns
    fSum <- function(i){
      sumrow <- MINSUM_TOPSIS
      aSu <- sum(i^2) + sumrow
      return(aSu)
    }
    outMat <- inMatrix
    ncols <- dim(inMatrix)[2]
    nrows <- dim(inMatrix)[1]
    iMcalc <- matrix(inMatrix[, 2:ncols], nrow = nrows, ncol = ncols - 1, byrow = TRUE)
    # Apply sum by column
    auxSum <- apply(iMcalc, 2, FUN = fSum)
    nC <- dim(iMcalc)[2]
    aMSum <- matrix(rep(auxSum, times = nrows), ncol = nC, byrow = TRUE)
    auxCalc <- iMcalc / sqrt(aMSum)
    outMat[, 2:ncols] <- auxCalc
    return(outMat)
  }

  # Positive ideal solution: per-criterion maximum.
  aux_step04_PIS <- function(inM){
    ncolu <- dim(inM)[2]
    nrows <- dim(inM)[1]
    iMcalc <- matrix(inM[, 2:ncolu], nrow = nrows, ncol = ncolu - 1, byrow = TRUE)
    auxRet <- apply(iMcalc, 2, max)
    return(auxRet)
  }

  # Negative ideal solution: per-criterion minimum.
  aux_step04_NIS <- function(inM){
    ncolu <- dim(inM)[2]
    nrows <- dim(inM)[1]
    iMcalc <- matrix(inM[, 2:ncolu], nrow = nrows, ncol = ncolu - 1, byrow = TRUE)
    auxRet <- apply(iMcalc, 2, min)
    return(auxRet)
  }

  # Separation of each path from an ideal point (sum of squared diffs);
  # returns a 2-column matrix: path id, separation.
  aux_step05_distance_to_ideal <- function(inApos, inMatrix){
    # Be careful with the idx of Ideal....
    ncols <- dim(inMatrix)[2]
    nrows <- dim(inMatrix)[1]
    auxMBenef <- matrix(ncol = 2, nrow = nrows)
    auxMBenef[, 1] <- inMatrix[, 1]   # keep path ids in column 1
    mCalc <- matrix(inMatrix[, 2:ncols], nrow = nrows, ncol = ncols - 1, byrow = FALSE)
    for (nR in seq(from = 1, to = nrows)){
      aSumBen <- 0
      for (nC in seq(from = 1, to = ncol(mCalc))){
        aSumBen <- aSumBen + fAux_Euclidean_Dist(mCalc[nR, nC], inApos[nC])
      }
      auxMBenef[nR, 2] <- aSumBen
    }
    ret_ <- auxMBenef
    return(ret_)
  }

  # Relative closeness C_i = S- / (S+ + S-), attached to the path ids.
  aux_step06_Ranking <- function(inSepPos, inSepNeg){
    nRow <- dim(inSepPos)[1]
    nCol <- dim(inSepPos)[2]
    auxCi <- matrix(ncol = 2, nrow = nRow)
    auxCi[, 1] <- inSepPos[, 1]
    mCalc <- matrix(c(inSepPos[, 2], inSepNeg[, 2]), ncol = 2, nrow = nRow, byrow = FALSE)
    fDist <- function(i){  # apply on pairwise columns
      ipos <- mCalc[i, 1]
      ineg <- mCalc[i, 2]
      auxCj <- ineg / (ipos + ineg)
      return(auxCj)
    }
    aDist <- sapply(1:nrow(mCalc), fDist)
    auxCi[, 2] <- aDist
    return(auxCi)
  }

  ncolMB <- dim(mBen_Criteria)[2]
  TPSTOPsisBenefits <- as.matrix(mBen_Criteria)
  ncolMC <- dim(mCost_Criteria)[2]
  TPSTOPsisCosts <- as.matrix(mCost_Criteria)
  TPSWeiBenTOP <- vBen_weight
  TPSWeiCostTOP <- vCost_weight

  # Step 01 - cost tendency conversion and weight blending
  TPSTOPsisCostsTended <- aux_step01_Tendency_Cnv(TPSTOPsisCosts)
  TPSWeiBenTOPall <- aux_step01_Tendend_Weigths(TPSWeiBenTOP, TPSWeiCostTOP)
  TPSTOPsisBenefits <- cbind(TPSTOPsisBenefits, TPSTOPsisCostsTended)
  TPSWeiBenTOP <- TPSWeiBenTOPall
  # BUG FIX: `x != NULL` yields logical(0), so the original
  # stopifnot(TPSTOPsisBenefits != NULL) always passed silently.
  stopifnot(!is.null(TPSTOPsisBenefits))

  # Step 02 - normalization
  TPSTOPsisBenefitsTOP <- aux_step02_Normalization(TPSTOPsisBenefits)

  # Step 03 - weighting (shared helper defined at file level)
  TPSTOPsisBenefitsTOP <- aux_step03_Weight_of_Norm_matrix(TPSTOPsisBenefitsTOP, TPSWeiBenTOP)

  # Step 04 - ideal solutions
  TPSApos_Dia <- aux_step04_PIS(TPSTOPsisBenefitsTOP)
  TPSAneg_Dia <- aux_step04_NIS(TPSTOPsisBenefitsTOP)

  # Step 05 - separation from the ideal points
  TPSDposBen_Dia <- aux_step05_distance_to_ideal(TPSApos_Dia, TPSTOPsisBenefitsTOP)
  TPSDnegBen_Dia <- aux_step05_distance_to_ideal(TPSAneg_Dia, TPSTOPsisBenefitsTOP)

  # Step 06 - closeness coefficient, returned ordered ascending by score
  TPStopsisRanking <- aux_step06_Ranking(TPSDposBen_Dia, TPSDnegBen_Dia)
  return(TPStopsisRanking[order(TPStopsisRanking[, 2]), ])
}
#'
#' @param Matrices with Benefits and Costs,
#' @param Vectors with Benefits and Costs weights,
#' @return Score of Paths
#' @author Bruno Sousa
#' @note v1.0 no arguments validation
#' @title METH_runDiA
#' @name METH_runDiA
METH_runDiA <- function(iMBen, iMCost, iVecBen, iVecCost){
  # DiA (Distance to ideal Alternative) ranking. Pre-processing matches
  # METH_runTOPSIS (cost tendency conversion, vector normalization,
  # weighting); separations use absolute differences and the final score is
  # the Euclidean distance to the point (min S+, max S-), ranked ascending.

  # Important global constants
  MIN_COST_TOPSIS_TENDENCY <- 0.0001   # to avoid divisions by zero
  BETA_TOPSIS_TENDENCY_WEIGHTS <- 0.5  # benefit/cost blending factor
  MINSUM_TOPSIS <- 1e-99               # to avoid divisions by zero in normalization

  mBen_Criteria <- iMBen
  mCost_Criteria <- iMCost
  vBen_weight <- iVecBen
  vCost_weight <- iVecCost

  # Convert cost criteria into benefit criteria (reciprocal transform);
  # zero costs are clamped to a small positive value first.
  aux_step01_Tendency_Cnv <- function(inCosts){
    nCol <- dim(inCosts)[2]
    aux <- matrix(inCosts[, 2:nCol], ncol = nCol - 1)
    aux[which(aux == 0)] <- MIN_COST_TOPSIS_TENDENCY
    aux <- 1 / aux
    return(aux)
  }

  # Blend benefit and cost weight vectors into one weight vector.
  aux_step01_Tendend_Weigths <- function(inWeBen, inWeCost){
    ret_Ben <- BETA_TOPSIS_TENDENCY_WEIGHTS * inWeBen
    ret_Cost <- (1 - BETA_TOPSIS_TENDENCY_WEIGHTS) * inWeCost
    ret_ <- c(ret_Ben, ret_Cost)
    return(ret_)
  }

  # Vector (L2) normalization of the criteria columns.
  # NOTE(review): iMcalc is rebuilt with byrow=TRUE from column-major data,
  # which reorders values whenever there is more than one criteria column;
  # preserved as-is — confirm the intended orientation.
  aux_step02_Normalization <- function(inMatrix){
    # internal helper: sum of squares with a tiny offset against zero columns
    fSum <- function(i){
      sumrow <- MINSUM_TOPSIS
      aSu <- sum(i^2) + sumrow
      return(aSu)
    }
    outMat <- inMatrix
    ncols <- dim(inMatrix)[2]
    nrows <- dim(inMatrix)[1]
    iMcalc <- matrix(inMatrix[, 2:ncols], nrow = nrows, ncol = ncols - 1, byrow = TRUE)
    # Apply sum by column
    auxSum <- apply(iMcalc, 2, FUN = fSum)
    nC <- dim(iMcalc)[2]
    aMSum <- matrix(rep(auxSum, times = nrows), ncol = nC, byrow = TRUE)
    auxCalc <- iMcalc / sqrt(aMSum)
    outMat[, 2:ncols] <- auxCalc
    return(outMat)
  }

  # Positive ideal solution: per-criterion maximum.
  aux_step04_PIS <- function(inM){
    ncolu <- dim(inM)[2]
    nrows <- dim(inM)[1]
    iMcalc <- matrix(inM[, 2:ncolu], nrow = nrows, ncol = ncolu - 1, byrow = TRUE)
    auxRet <- apply(iMcalc, 2, max)
    return(auxRet)
  }

  # Negative ideal solution: per-criterion minimum.
  aux_step04_NIS <- function(inM){
    ncolu <- dim(inM)[2]
    nrows <- dim(inM)[1]
    iMcalc <- matrix(inM[, 2:ncolu], nrow = nrows, ncol = ncolu - 1, byrow = TRUE)
    auxRet <- apply(iMcalc, 2, min)
    return(auxRet)
  }

  # Separation of each path from an ideal point: sum of absolute differences
  # (Manhattan-style, unlike the squared distances used by METH_runTOPSIS).
  aux_step05_distance_to_ideal <- function(inApos, inMatrix){
    # Be careful with the idx of Ideal....
    ncols <- dim(inMatrix)[2]
    nrows <- dim(inMatrix)[1]
    auxMBenef <- matrix(ncol = 2, nrow = nrows)
    auxMBenef[, 1] <- inMatrix[, 1]   # keep path ids in column 1
    mCalc <- matrix(inMatrix[, 2:ncols], nrow = nrows, ncol = ncols - 1, byrow = FALSE)
    # internal fx: absolute difference
    fAux_Positive_Dif <- function(i1, i2){
      adif <- (i1 - i2)
      if (adif < 0) {
        adif <- adif * (-1)
      }
      return(adif)
    }
    for (nR in seq(from = 1, to = nrows)){
      aSumBen <- 0
      for (nC in seq(from = 1, to = ncol(mCalc))){
        aSumBen <- aSumBen + fAux_Positive_Dif(mCalc[nR, nC], inApos[nC])
      }
      auxMBenef[nR, 2] <- aSumBen
    }
    ret_ <- auxMBenef
    return(ret_)
  }

  # R score: Euclidean distance of each (S+, S-) pair to the ideal point
  # (min S+, max S-); smaller is better.
  aux_step06_Rscore <- function(inSepPos, inSepNeg){
    nRow <- dim(inSepPos)[1]
    nCol <- dim(inSepPos)[2]
    auxCi <- matrix(ncol = 2, nrow = nRow)
    auxCi[, 1] <- inSepPos[, 1]
    mCalc <- matrix(c(inSepPos[, 2], inSepNeg[, 2]), ncol = 2, nrow = nRow, byrow = FALSE)
    # get ideal point
    PIA_pos <- min(inSepPos[, 2])
    PIA_neg <- max(inSepNeg[, 2])
    fDist <- function(i){  # apply on pairwise columns
      ipos <- mCalc[i, 1]
      ineg <- mCalc[i, 2]
      auxSqrt <- sqrt((ipos - PIA_pos)^2 + (ineg - PIA_neg)^2)
      return(auxSqrt)
    }
    aDist <- sapply(1:nrow(mCalc), fDist)
    auxCi[, 2] <- aDist
    return(auxCi)
  }

  ncolMB <- dim(mBen_Criteria)[2]
  DiATOPsisBenefits <- as.matrix(mBen_Criteria)
  ncolMC <- dim(mCost_Criteria)[2]
  DiATOPsisCosts <- as.matrix(mCost_Criteria)
  DiAWeiBenTOP <- vBen_weight
  DiAWeiCostTOP <- vCost_weight

  # Step 01 - cost tendency conversion and weight blending
  DiATOPsisCostsTended <- aux_step01_Tendency_Cnv(DiATOPsisCosts)
  DiAWeiBenTOPall <- aux_step01_Tendend_Weigths(DiAWeiBenTOP, DiAWeiCostTOP)
  DiATOPsisBenefits <- cbind(DiATOPsisBenefits, DiATOPsisCostsTended)
  DiAWeiBenTOP <- DiAWeiBenTOPall
  # BUG FIX: `x != NULL` yields logical(0), so the original
  # stopifnot(DiATOPsisBenefits != NULL) always passed silently.
  stopifnot(!is.null(DiATOPsisBenefits))

  # Step 02 - normalization
  DiATOPsisBenefitsTOP <- aux_step02_Normalization(DiATOPsisBenefits)
  DiATOPsisBenefitsTOPNorm <- DiATOPsisBenefitsTOP  # kept for parity with the original (unused below)

  # Step 03 - weighting (shared helper defined at file level)
  DiATOPsisBenefitsTOP <- aux_step03_Weight_of_Norm_matrix(DiATOPsisBenefitsTOP, DiAWeiBenTOP)

  # Step 04 - ideal solutions
  DiAApos_Dia <- aux_step04_PIS(DiATOPsisBenefitsTOP)
  DiAAneg_Dia <- aux_step04_NIS(DiATOPsisBenefitsTOP)

  # Step 05 - separation from the ideal points
  DiADposBen_Dia <- aux_step05_distance_to_ideal(DiAApos_Dia, DiATOPsisBenefitsTOP)
  DiADnegBen_Dia <- aux_step05_distance_to_ideal(DiAAneg_Dia, DiATOPsisBenefitsTOP)

  # Step 06 - R score, returned ordered ascending by score
  DiARdistBen_Dia <- aux_step06_Rscore(DiADposBen_Dia, DiADnegBen_Dia)
  return(DiARdistBen_Dia[order(DiARdistBen_Dia[, 2]), ])
}
#'
#' @param Matrices with Benefits and Costs,
#' @param Vectors with Benefits and Costs weights,
#' @return Score of Paths
#' @author Bruno Sousa
#' @note v1.0 no arguments validation
#' @title METH_runNMMD
#' @name METH_runNMMD
METH_runNMMD <- function(iMBen, iMCost, iVecBen,iVecCost ){
require("corpcor")
require("HDMD")
# Important Global Variables
MIN_COST_TOPSIS_TENDENCY <- 0.0001 # To avoid divisions by zero
BETA_TOPSIS_TENDENCY_WEIGHTS <- 0.5
MINSUM_TOPSIS <- 1e-99 # To avoid divisions by zero in normalization
#source("libNormalization.R")
mBen_Criteria <- iMBen
mCost_Criteria <- iMCost
vBen_weight <- iVecBen
vCost_weight <- iVecCost
# Function to check if Mahalanobis can be processed
# This is to avoid errors on singular matrices.
# TODO: need to understand why this can happen
#
# TODO2: Need a positive definite matrix... Probably due to the fact of having few data
# Some useful urls:
# http://www.r-bloggers.com/dealing-with-non-positive-definite-matrices-in-r/
# http://www2.gsu.edu/~mkteer/npdmatri.html
fAux_Do_Mahalanobis <- function(iM){
ret <- TRUE
#auxCov <- cov(iM)
auxCov2 <- cov.shrink(iM,verbose=FALSE)
#if (det(auxCov)==0){ ret <- FALSE }
if (!is.positive.definite(auxCov2)){
ret <- FALSE
}
return (ret)
}
#Mahalanobis Distance
fAux_Mahalanobis_Dist <- function(iM, me=NULL){
if (length(me)==0){
auxMean <- apply(iM, 2, mean)
}else{
auxMean <- me
}
#auxCov <- cov(iM)
auxCov <- cov.shrink(iM, verbose=FALSE)
# To avoid errors
if (fAux_Do_Mahalanobis(iM)){
#print(paste("doing Maha for" ))
#print(iM)
#print(auxCov)
#print(det(auxCov))
auxDistMaha <- mahalanobis(iM, auxMean, auxCov)
return (auxDistMaha)
}else{
return (-1)
}
}
myPairwise.mahalanobis <- function (x, grouping = NULL, cov = NULL, inverted = FALSE, digits = 5, ...)
{
x <- if (is.vector(x))
matrix(x, ncol = length(x))
else as.matrix(x)
if (!is.matrix(x))
stop("x could not be forced into a matrix")
if (length(grouping) == 0) {
grouping = t(x[1])
x = x[2:dim(x)[2]]
cat("assigning grouping\n")
print(grouping)
}
n <- nrow(x)
p <- ncol(x)
if (n != length(grouping)) {
cat(paste("n: ", n, "and groups: ", length(grouping),
"\n"))
stop("nrow(x) and length(grouping) are different")
}
g <- as.factor(grouping)
g
lev <- lev1 <- levels(g)
counts <- as.vector(table(g))
if (any(counts == 0)) {
empty <- lev[counts == 0]
warning(sprintf(ngettext(length(empty), "group %s is empty",
"groups %s are empty"), paste(empty, collapse = " ")),
domain = NA)
lev1 <- lev[counts > 0]
g <- factor(g, levels = lev1)
counts <- as.vector(table(g))
}
ng = length(lev1)
group.means <- tapply(x, list(rep(g, p), col(x)), mean)
if (missing(cov)) {
inverted = FALSE
cov = cor(x)
}
else {
if (dim(cov)[1] != p && dim(cov)[2] != p)
stop("cov matrix not of dim = (p,p)\n")
}
Distance = matrix(nrow = ng, ncol = ng)
dimnames(Distance) = list(names(group.means), names(group.means))
Means = round(group.means, digits)
Cov = round(cov, digits)
Distance = round(Distance, digits)
for (i in 1:ng) {
Distance[i, ] = mahalanobis(group.means, group.means[i,], cov, inverted)
}
result <- list(means = group.means, cov = cov, distance = Distance)
result
}
#Mahalanobis Distance
fAux_Pairwise.Mahalanobis_Dist <- function(iM, me=NULL){
if (length(me)==0){
auxMean <- apply(iM, 2, mean)
}else{
auxMean <- me
}
#auxCov <- cov(iM)
auxCov <- cov.shrink(iM, verbose=FALSE)
# To avoid errors
if (fAux_Do_Mahalanobis(iM)){
#print(paste("doing Maha for" ))
#print(iM)
#print(det(auxCov))
#print(ncol(iM))
#auxDistMaha <- pairwise.mahalanobis(iM, grouping=t(iM[,1]), digits=5, center=auxMean, cov=auxCov)
auxDistMaha <- myPairwise.mahalanobis(iM, grouping=t(iM[,1]), digits=5, center=auxMean, cov=auxCov)
return (auxDistMaha)
}else{
return (-1)
}
}
#
# Convert weight costs into Benefit costs
#
aux_step01_Tendency_Cnv <- function(inCosts){
nCol <- dim(inCosts)[2]
aux <- matrix(inCosts[,2:nCol], ncol=nCol-1)
aux[which(aux==0) ] <- MIN_COST_TOPSIS_TENDENCY
aux <- 1/aux
return (aux)
}
#
# Append Benefits and Cost weigths
#
aux_step01_Tendend_Weigths <- function(inWeBen, inWeCost){
ret_Ben <- BETA_TOPSIS_TENDENCY_WEIGHTS * inWeBen
ret_Cost <- (1 - BETA_TOPSIS_TENDENCY_WEIGHTS) * inWeCost
ret_ <- c(ret_Ben, ret_Cost)
return(ret_)
}
#
# Normalize matrix
# Using Vector normalization instead of min-max
#
aux_step02_Normalization <- function(inMatrix){
#internal function to help in normalization
fSum <- function(i){
sumrow <- MINSUM_TOPSIS
aSu <- sum(i^2) + sumrow
return(aSu)
}
outMat <- inMatrix
ncols<-dim(inMatrix)[2]
nrows<-dim(inMatrix)[1]
iMcalc <- matrix(inMatrix[,2:ncols], nrow=nrows, ncol=ncols-1, byrow=TRUE)
#Apply Sum by col
auxSum <- apply(iMcalc, 2, FUN=fSum)
nC <- dim(iMcalc)[2]
aMSum <- matrix(rep(auxSum, times=nrows), ncol=nC, byrow=TRUE)
auxCalc <- iMcalc / sqrt(aMSum)
outMat[,2:ncols]<-auxCalc
return(outMat)
}
#
# Weight the normalized matrix, by multiplying each value by its weight
#
aux_step03_Weight_of_Norm_matrix <-function(inMatrix, inWeight){
outMat <- inMatrix
ncols<-dim(inMatrix)[2]
nrows<-dim(inMatrix)[1]
iMcalc <- matrix(inMatrix[,2:ncols], nrow=nrows, ncol=ncols-1, byrow=TRUE)
nC <- dim(iMcalc)[2] # Convert weigths into a matrix in order to perform Mult
aMmul <- matrix(rep(inWeight, times=nrows), ncol=nC, byrow=TRUE)
auxCalc <- iMcalc * aMmul
outMat[,2:ncols]<-auxCalc
return(outMat)
}
#
# Determine PIS according to DiA Method
#
aux_step04_PIS <-function(inM){
ncolu <- dim(inM)[2]
nrows <- dim(inM)[1]
iMcalc <- matrix(inM[,2:ncolu], nrow=nrows, ncol=ncolu-1, byrow=TRUE)
auxRet <- apply(iMcalc, 2, max)
return (auxRet)
}
aux_step04_NIS<-function(inM){
ncolu <- dim(inM)[2]
nrows <- dim(inM)[1]
iMcalc <- matrix(inM[,2:ncolu], nrow=nrows, ncol=ncolu-1, byrow=TRUE)
auxRet <- apply(iMcalc, 2, min)
return (auxRet)
}
#
# Determine the distance to the ideal points (PIS and NIS)
#
aux_step05_distance_to_idealNMMD <- function(inMatrix){
#Be careful with the idx of Ideal....
ncols<- dim(inMatrix)[2]
nrows<- dim(inMatrix)[1]
#auxMBenef <- matrix(ncol=2,nrow=nrows)
#auxMBenef[,1] <- inMatrix[,1]
mCalc <- matrix(inMatrix[,2:ncols], nrow=nrows, ncol=ncols-1, byrow=FALSE)
auxMBenef <- fAux_Pairwise.Mahalanobis_Dist(mCalc, colMeans(mCalc))
ret_ <- auxMBenef
return (ret_)
}
#
# Determine R score
#
# R score: one row per path; column 1 holds the path id, column 2 the mean
# distance (row sum of inDist divided by its number of columns). Any extra
# columns of the allocated matrix stay NA, matching the original layout.
aux_step06_RankingNMMD<-function(inPathsIDs, inDist ){
  nRows <- dim(inDist)[1]
  nCols <- dim(inDist)[2]
  ranking <- matrix(ncol=nCols, nrow=nRows)
  ranking[,1] <- inPathsIDs
  for (r in seq_len(nRows)) {
    ranking[r, 2] <- sum(inDist[r, ]) / ncol(inDist)
  }
  ranking
}
# NMMD-TOPSIS driver: fold the cost criteria into the benefit matrix,
# normalize, weight, compute Mahalanobis distances and return the ranked
# paths (best first). Free variables (mBen_Criteria, mCost_Criteria,
# vBen_weight, vCost_weight) come from the enclosing function.
ncolMB <- dim(mBen_Criteria)[2]
NMMDTOPsisBenefits <- as.matrix(mBen_Criteria )
ncolMC <- dim(mCost_Criteria)[2]
NMMDTOPsisCosts <- as.matrix(mCost_Criteria )
NMMDWeiBenTOP <- vBen_weight
NMMDWeiCostTOP <- vCost_weight
#
# Step 01 - convert cost criteria to benefit tendency and merge the weights
#
#Keep Path Ids
NMMD_PATHIds <- mBen_Criteria[,1]
NMMDTOPsisCostsTended <- aux_step01_Tendency_Cnv(NMMDTOPsisCosts)
NMMDWeiBenTOPall <- aux_step01_Tendend_Weigths(NMMDWeiBenTOP, NMMDWeiCostTOP)
NMMDTOPsisBenefits <- cbind(NMMDTOPsisBenefits, NMMDTOPsisCostsTended)
NMMDWeiBenTOP <- NMMDWeiBenTOPall
# BUG FIX: `NMMDTOPsisBenefits != NULL` yields logical(0) in R, so the old
# stopifnot() could never fail; use is.null() for a real sanity check.
stopifnot(!is.null(NMMDTOPsisBenefits))
#
# Step 02 - vector normalization
#
#naCol <- dim(NMMDTOPsisBenefits)[2]
#print(NMMDTOPsisBenefits[,2:naCol])
#if (!fAux_Do_Mahalanobis(NMMDTOPsisBenefits[,2:naCol])){
# print("Doing Normalization")
NMMDTOPsisBenefitsTOP <- aux_step02_Normalization(NMMDTOPsisBenefits)
#}
#
# Step 03 - apply criterion weights
#
NMMDTOPsisBenefitsTOP <- aux_step03_Weight_of_Norm_matrix(NMMDTOPsisBenefitsTOP, NMMDWeiBenTOP)
# Step 04 - Ideal Solutions
# Step 05 - Distance to Ideal
NMMDDistBen_Dia <- aux_step05_distance_to_idealNMMD( NMMDTOPsisBenefitsTOP)
#Step 06 - mean distance per path becomes the ranking score
NMMDtopsisRanking <- aux_step06_RankingNMMD(NMMD_PATHIds, NMMDDistBen_Dia$distance)
#NMMDtopsisRanking
#print(NMMDtopsisRanking)
#print("NMMD")
#print(NMMDtopsisRanking[order(-NMMDtopsisRanking[,2])])
#print("\n")
# Sort descending by score (column 2) and return the ordered matrix.
return(NMMDtopsisRanking[order(-NMMDtopsisRanking[,2]),])
}
|
object JvErrorDialog: TJvErrorDialog
Left = 202
Top = 100
ActiveControl = OKBtn
BorderIcons = [biSystemMenu]
BorderStyle = bsDialog
ClientHeight = 252
ClientWidth = 380
Color = clBtnFace
Font.Charset = DEFAULT_CHARSET
Font.Color = clWindowText
Font.Height = -11
Font.Name = 'MS Sans Serif'
Font.Style = []
FormStyle = fsStayOnTop
OldCreateOrder = True
Position = poScreenCenter
OnCreate = FormCreate
OnDestroy = FormDestroy
OnKeyUp = FormKeyUp
OnShow = FormShow
PixelsPerInch = 96
TextHeight = 13
object BasicPanel: TPanel
Left = 0
Top = 0
Width = 288
Height = 108
Align = alClient
BevelOuter = bvNone
TabOrder = 0
object ErrorText: TLabel
Left = 53
Top = 10
Width = 227
Height = 88
Align = alClient
WordWrap = True
end
object IconPanel: TPanel
Left = 0
Top = 10
Width = 53
Height = 88
Align = alLeft
BevelOuter = bvNone
TabOrder = 0
object IconImage: TImage
Left = 8
Top = 1
Width = 34
Height = 34
end
end
object TopPanel: TPanel
Left = 0
Top = 0
Width = 288
Height = 10
Align = alTop
BevelOuter = bvNone
TabOrder = 1
end
object RightPanel: TPanel
Left = 280
Top = 10
Width = 8
Height = 88
Align = alRight
BevelOuter = bvNone
TabOrder = 2
end
object BottomPanel: TPanel
Left = 0
Top = 98
Width = 288
Height = 10
Align = alBottom
BevelOuter = bvNone
TabOrder = 3
end
end
object DetailsPanel: TPanel
Left = 0
Top = 108
Width = 380
Height = 144
Align = alBottom
BevelInner = bvLowered
BevelOuter = bvLowered
TabOrder = 2
object AddrLabel: TJvxLabel
Left = 53
Top = 11
Width = 121
Height = 13
Alignment = taRightJustify
AutoSize = False
Caption = 'Error address: '
end
object TypeLabel: TJvxLabel
Left = 53
Top = 30
Width = 121
Height = 13
Alignment = taRightJustify
AutoSize = False
Caption = 'Error Type: '
end
object MessageText: TMemo
Left = 7
Top = 53
Width = 366
Height = 84
TabStop = False
Color = clBtnFace
ReadOnly = True
TabOrder = 0
WantReturns = False
end
object ErrorAddress: TEdit
Left = 180
Top = 8
Width = 192
Height = 21
TabStop = False
ParentColor = True
ReadOnly = True
TabOrder = 1
end
object ErrorType: TEdit
Left = 180
Top = 27
Width = 192
Height = 21
TabStop = False
ParentColor = True
ReadOnly = True
TabOrder = 2
end
end
object ButtonPanel: TPanel
Left = 288
Top = 0
Width = 92
Height = 108
Align = alRight
BevelOuter = bvNone
TabOrder = 1
object DetailsBtn: TButton
Left = 7
Top = 65
Width = 79
Height = 25
TabOrder = 1
OnClick = DetailsBtnClick
end
object OKBtn: TButton
Left = 7
Top = 12
Width = 79
Height = 25
Cancel = True
Default = True
ModalResult = 1
TabOrder = 0
end
end
end
|
## 用户指南
<a href="/">访问首页</a>
### 设计思想
1. 根据pom三坐标去maven仓库获取jar包
2. 通过Java `ClassLoader`机制远程获取`Class`对象
3. 反射遍历对象,生成随机数据
### 使用方式
1. 通过http接口使用
2. java代码引入使用
### 服务端使用
<a href="/">访问首页</a>
1. 填入pom坐标
2. 选择要造数据的对象
3. 点击Mock接口按钮
最终会跳转到类似下面的接口地址
[/com.qccr.shprod/shprod-facade/3.9.9.9-SNAPSHOT/com.qccr.shprod.facade.entity.employee.EmployeeTeamRO]()
#### 可选配置
##### 一、strut
数据接口类型
1. `Result<RO>` 普通对象 bean
2. `Result<List<RO>>` 数组 list
3. `Result<Page<RO>>` 分页 page
##### 二、listSize
数组大小
##### 三、depth
递归深度
### 说明
1. 支持CORS跨域
2. 支持jsonp跨域
3. 支持对象递归
4. 支持 List,Set,数组、枚举类
### 程序代码使用
引入
```
<dependency>
<groupId>org.wing.mocker</groupId>
<artifactId>mocker-core</artifactId>
<version>2.0</version>
</dependency>
```
手动生成数据
``` java
//生成mock数据
new org.wing.mocker.core.MockData().mock(Class);
```
### 自定义Mock数据
pom引入注解jar包
```xml
<dependency>
<groupId>org.wing.mocker</groupId>
<artifactId>annotation</artifactId>
<version>1.0</version>
</dependency>
```
```java
public class ProductRO {
//测试数据将从@MockValue随机取值
@MockValue({"精细洗车","普通洗车"})
private String productName;
@MockValue({"2500"})
private Integer productPrice;
}
```
### 服务端安装
##### 1.修改maven仓库地址
修改 `mocker-http/src/main/resources/application.properties`
nexus.server
##### 2.启动
spring-boot 启动`org.wing.mocker.http.MockerHttpApplication#main`
### 操作界面

### 注意事项
1. java对象的字段命名要规范,例如:
   - boolean 类型字段不要以 is 开头
   - 字段首字母不要大写
2. facade不要相互依赖
3. facade不要依赖三方包,例如 apache-commons
|
package org.stepik.android.view.profile_activities.ui.fragment
import android.content.res.ColorStateList
import android.os.Bundle
import android.view.View
import androidx.annotation.ColorRes
import androidx.annotation.StringRes
import androidx.core.content.ContextCompat
import androidx.fragment.app.Fragment
import ru.nobird.android.view.base.ui.extension.argument
import androidx.lifecycle.ViewModelProvider
import androidx.lifecycle.ViewModelProviders
import kotlinx.android.synthetic.main.error_no_connection_with_button_small.*
import kotlinx.android.synthetic.main.fragment_profile_activities.*
import org.stepic.droid.R
import org.stepic.droid.base.App
import org.stepik.android.presentation.profile_activities.ProfileActivitiesPresenter
import org.stepik.android.presentation.profile_activities.ProfileActivitiesView
import org.stepik.android.view.ui.delegate.ViewStateDelegate
import javax.inject.Inject
/**
 * Shows a user's learning activity (current and max solving streak) on the
 * profile screen. State transitions come from [ProfileActivitiesPresenter]
 * and are rendered through a [ViewStateDelegate].
 */
class ProfileActivitiesFragment : Fragment(R.layout.fragment_profile_activities), ProfileActivitiesView {
    companion object {
        /** Creates an instance for the given user id (stored as a fragment argument). */
        fun newInstance(userId: Long): Fragment =
            ProfileActivitiesFragment()
                .apply {
                    this.userId = userId
                }
    }

    @Inject
    internal lateinit var viewModelFactory: ViewModelProvider.Factory

    private var userId by argument<Long>()

    private lateinit var profileActivitiesPresenter: ProfileActivitiesPresenter
    private lateinit var viewStateDelegate: ViewStateDelegate<ProfileActivitiesView.State>

    override fun onCreate(savedInstanceState: Bundle?) {
        super.onCreate(savedInstanceState)
        injectComponent()
        // Fix: ViewModelProviders.of(...) is deprecated; use the
        // ViewModelProvider constructor with this fragment as the owner.
        profileActivitiesPresenter = ViewModelProvider(this, viewModelFactory)
            .get(ProfileActivitiesPresenter::class.java)
    }

    private fun injectComponent() {
        App.componentManager()
            .profileComponent(userId)
            .inject(this)
    }

    override fun onViewCreated(view: View, savedInstanceState: Bundle?) {
        // Map each presenter state to the views that should be visible in it;
        // Idle/SilentLoading/Empty show nothing.
        viewStateDelegate = ViewStateDelegate()
        viewStateDelegate.addState<ProfileActivitiesView.State.Idle>()
        viewStateDelegate.addState<ProfileActivitiesView.State.SilentLoading>()
        viewStateDelegate.addState<ProfileActivitiesView.State.Empty>()
        viewStateDelegate.addState<ProfileActivitiesView.State.Loading>(view, streakLoadingPlaceholder)
        viewStateDelegate.addState<ProfileActivitiesView.State.Error>(view, streakLoadingError)
        viewStateDelegate.addState<ProfileActivitiesView.State.Content>(view, streakContainer)
        setDataToPresenter()
        tryAgain.setOnClickListener { setDataToPresenter(forceUpdate = true) }
    }

    private fun setDataToPresenter(forceUpdate: Boolean = false) {
        profileActivitiesPresenter.fetchUserActivities(forceUpdate)
    }

    override fun onStart() {
        super.onStart()
        profileActivitiesPresenter.attachView(this)
    }

    override fun onStop() {
        profileActivitiesPresenter.detachView(this)
        super.onStop()
    }

    override fun setState(state: ProfileActivitiesView.State) {
        viewStateDelegate.switchState(state)
        if (state is ProfileActivitiesView.State.Content) {
            with(state.profileActivitiesData) {
                // Green when the user already solved something today,
                // yellow otherwise.
                @ColorRes
                val streakTintColorRes =
                    if (isSolvedToday) {
                        R.color.color_overlay_green
                    } else {
                        R.color.color_overlay_yellow
                    }
                currentStreak.supportCompoundDrawablesTintList = ColorStateList
                    .valueOf(ContextCompat.getColor(requireContext(), streakTintColorRes))
                // Hide the counter entirely when the streak is zero.
                currentStreakCount.text = streak
                    .takeIf { it > 0 }
                    ?.toString()
                    .orEmpty()
                @StringRes
                val currentStreakRes =
                    when {
                        isSolvedToday ->
                            R.string.profile_activities_current_streak_active
                        streak > 0 ->
                            R.string.profile_activities_current_streak_continue
                        else ->
                            R.string.profile_activities_current_streak_start
                    }
                currentStreak.setText(currentStreakRes)
                maxStreakCount.text = maxStreak.toString()
            }
        }
    }
}
import { nextTick } from 'vue'
import { render } from '@testing-library/vue'
import BaseCard from './BaseCard.vue'
// Mounts <BaseCard /> with the given fileName prop and optional slot content.
function renderBaseCard({ fileName = '', slots = {} } = {}) {
  const options = { props: { fileName }, slots }
  return render(BaseCard, options)
}
describe('<BaseCard />', () => {
  it('should render only name by default', async () => {
    const { queryByTestId } = renderBaseCard({ fileName: 'Lorem Ipsum' })
    await nextTick()

    const cardName = queryByTestId('name')
    expect(cardName).toBeInTheDocument()
    expect(cardName).toHaveTextContent('Lorem Ipsum')
    // No slots were provided, so their containers must stay empty.
    expect(queryByTestId('head')?.innerHTML).toBeFalsy()
    expect(queryByTestId('details')?.innerHTML).toBeFalsy()
    expect(queryByTestId('bottom')?.innerHTML).toBeFalsy()
  })

  // Fix: the old description said "when is 35 characters", but the fixture is
  // 40 characters long and the component truncates it to 35 chars + ellipsis.
  it('should truncate name with an ellipsis when longer than 35 characters', async () => {
    const { queryByTestId } = renderBaseCard({
      fileName: 'File Name Very Much Long I Cant Read Now',
    })

    const cardName = queryByTestId('name')
    expect(cardName).toHaveTextContent('File Name Very Much Long I Cant Rea...')
  })

  it('should render slots correctly', async () => {
    const { findByTestId } = renderBaseCard({
      fileName: 'Lorem Ipsum',
      slots: {
        top: '<img data-testid="top-element" />',
        details: '<button data-testid="details-element" />',
        bottom: '<input data-testid="bottom-element" />',
      },
    })
    await nextTick()

    // findByTestId rejects (failing the test) if the slot was not rendered.
    await findByTestId('top-element')
    await findByTestId('details-element')
    await findByTestId('bottom-element')
  })
})
|
---
layout: page
title: Credits
#tagline: Supporting tagline
---
|
module Eval where
import Data.Text (append)
import Model
-- | Evaluate an expression to a literal value (Right) or an error (Left).
eval :: Expr -> LoxResult
eval (Literal l)      = Right l
eval (Binary e1 o e2) = evalBinary e1 o e2
eval (Unary o e)      = evalUnary o e
eval (Grouping e)     = eval e
-- | Evaluate the operand, then apply the unary operator to the result.
evalUnary :: UnaryOperator -> Expr -> LoxResult
evalUnary op expr =
  eval expr >>= \operand ->
    case op of
      UnaryMinus -> unaryNegate operand
      Bang       -> unaryNot operand
-- | Evaluate both operands left-to-right, then dispatch on the operator.
-- Comparisons always succeed (Literal has Ord/Eq); arithmetic may fail.
evalBinary :: Expr -> Operator -> Expr -> LoxResult
evalBinary le op re =
  eval le >>= \a ->
    eval re >>= \b ->
      let boolRes = Right . BoolLiteral
      in case op of
           BangEqual    -> boolRes (a /= b)
           Equal        -> boolRes (a == b)
           EqualEqual   -> boolRes (a == b)
           Greater      -> boolRes (a > b)
           GreaterEqual -> boolRes (a >= b)
           Less         -> boolRes (a < b)
           LessEqual    -> boolRes (a <= b)
           Slash        -> divide a b
           Star         -> multiply a b
           Plus         -> plus a b
           Minus        -> minus a b
-- | Numeric division; any non-numeric operand is an error.
divide :: Literal -> Literal -> LoxResult
divide (NumberLiteral n1) (NumberLiteral n2) = Right $ NumberLiteral (n1 / n2)
divide a b = Left ("Cannot divide " ++ show a ++ " by " ++ show b)
-- | Numeric multiplication; any non-numeric operand is an error.
multiply :: Literal -> Literal -> LoxResult
multiply (NumberLiteral n1) (NumberLiteral n2) = Right $ NumberLiteral (n1 * n2)
multiply a b = Left ("Cannot multiply " ++ show a ++ " by " ++ show b)
-- | '+' is overloaded: numeric addition or string concatenation;
-- mixed/unsupported operand types are an error.
plus :: Literal -> Literal -> LoxResult
plus (NumberLiteral n1) (NumberLiteral n2) = Right $ NumberLiteral (n1 + n2)
plus (StringLiteral s1) (StringLiteral s2) = Right $ StringLiteral (append s1 s2)
plus a b = Left ("Cannot plus " ++ show a ++ " by " ++ show b)
-- | Numeric subtraction; any non-numeric operand is an error.
minus :: Literal -> Literal -> LoxResult
minus (NumberLiteral n1) (NumberLiteral n2) = Right $ NumberLiteral (n1 - n2)
minus a b = Left ("Cannot minus " ++ show a ++ " by " ++ show b)
-- | Arithmetic negation; only defined for numbers.
unaryNegate :: Literal -> LoxResult
unaryNegate (NumberLiteral n) = Right $ NumberLiteral (negate n)
unaryNegate a = Left ("Cannot negate " ++ show a)
-- | Logical negation; only defined for booleans.
unaryNot :: Literal -> LoxResult
unaryNot (BoolLiteral b) = Right $ BoolLiteral (not b)
unaryNot a = Left ("Cannot logically invert " ++ show a)
|
use crate::pallet;
use substrate_fixed::types::U32F32;
/// Fraction of accepted claims among all resolved claims, as a fixed-point
/// `U32F32` value in `[0, 1]`.
///
/// Fix: the original divided unconditionally, so an empty claim list caused
/// a fixed-point division by zero (panic). An empty list now scores 0.
pub fn score_claims(claims: pallet::ResolvedClaims) -> U32F32 {
    // claims should be max 10
    let total_claims = claims.claims.len() as u32;
    if total_claims == 0 {
        return U32F32::from_num(0);
    }
    let accepted = claims.claims.iter().filter(|c| c.is_accepted).count() as u32;
    U32F32::from_num(accepted) / U32F32::from_num(total_claims)
}
|
package moleculeadmin.client.app.html.query
import moleculeadmin.client.app.html.AppElements
import moleculeadmin.client.app.html.common.DropdownMenu
import moleculeadmin.client.app.logic.query.QueryState._
import moleculeadmin.shared.ast.query.QueryDTO
import org.scalajs.dom.document
import org.scalajs.dom.html._
import org.scalajs.dom.raw.HTMLElement
import scalatags.JsDom.TypedTag
import scalatags.JsDom.all._
/** Sub-menu building blocks for the query screen: row-limit and grid
  * selectors, priority icons, and the dropdown menus listing recent, saved
  * and favorite queries. Pure presentation -- every mutating action is an
  * injected `QueryDTO => () => Unit` callback supplied by the caller. */
trait SubMenuElements extends AppElements with DropdownMenu {

  // Max row selector --------------------------------------------------------

  // <select> limiting how many result rows are rendered; -1 means "All".
  // The option matching the current `maxRows` reactive value is pre-selected.
  lazy val _maxRowsSelector =
    select(
      marginRight := 5,
      for ((v, label) <- Seq(
        (-1, "All"),
        (25, "25"),
        (50, "50"),
        (100, "100"),
        (500, "500"),
        (1000, "1K"),
        (5000, "5K"),
        (10000, "10K"),
        (50000, "50K"),
        (100000, "100K"),
        (500000, "500K"),
        (1000000, "1M")
      )) yield {
        if (v == maxRows.now)
          option(value := v, label, selected)
        else
          option(value := v, label)
      }
    ).render

  // Grid selector --------------------------------------------------------

  // <select> choosing the result-table grid style; mirrors `gridType`.
  lazy val _gridSelector = {
    select(
      marginRight := 6,
      for ((v, label) <- Seq(
        (0, "No grid"),
        (1, "Indent"),
        (2, "Full"),
      )) yield {
        if (v == gridType.now)
          option(value := v, label, selected)
        else
          option(value := v, label)
      }
    ).render
  }

  // Material "1".."5" icons: filled = priority active.
  lazy val prioOn = Seq(
    i(cls := "material-icons", "looks_one"),
    i(cls := "material-icons", "looks_two"),
    i(cls := "material-icons", "looks_3"),
    i(cls := "material-icons", "looks_4"),
    i(cls := "material-icons", "looks_5")
  )

  // Outlined variants: priority inactive.
  lazy val prioOff = Seq(
    i(cls := "material-icons material-icons-outlined", "looks_one"),
    i(cls := "material-icons material-icons-outlined", "looks_two"),
    i(cls := "material-icons material-icons-outlined", "looks_3"),
    i(cls := "material-icons material-icons-outlined", "looks_4"),
    i(cls := "material-icons material-icons-outlined", "looks_5")
  )

  // Queries -------------------------------------------------------------------

  /** Top-level "List" dropdown: shows the Recent / Saved / Favorite sections,
    * each only when its backing collection is non-empty. */
  def _subMenuQueryList(
    curMolecule: String,
    newFav: Seq[QueryDTO],
    queriesByPartNs: Seq[(String, Seq[(String, Seq[(String, QueryDTO)])])],
    recentQueries: Seq[QueryDTO],
    savedQueries: Seq[QueryDTO],
    favoriteQueries: Seq[QueryDTO],
    savedMolecules: Seq[String],
    favoriteMolecules: Seq[String],
    use: QueryDTO => () => Unit,
    upsert: QueryDTO => () => Unit,
    favorite: QueryDTO => () => Unit,
    unfavorite: QueryDTO => () => Unit,
    retract: QueryDTO => () => Unit,
  ): TypedTag[LI] =
    li(
      cls := "dropdown",
      a(href := "#", _shortcut("L", "ist")),
      div(
        cls := "dropdown-menu",
        id := "submenu-query-list",
        minWidth := 180,
        paddingTop := 8,
        paddingBottom := 5,
        if (recentQueries.nonEmpty)
          li(cls := "dropdown-submenu",
            "Recent queries",
            ul(
              cls := "dropdown-menu",
              li(
                _recentQueries(
                  curMolecule,
                  recentQueries,
                  savedMolecules,
                  favoriteMolecules,
                  use,
                  upsert,
                  favorite
                )
              ),
            )
          ) else (),
        if (savedQueries.nonEmpty)
          li(
            cls := "dropdown-submenu",
            width := "max-content",
            paddingRight := 10,
            "Saved queries",
            _savedQueries(
              curMolecule,
              queriesByPartNs,
              favoriteMolecules,
              use,
              favorite,
              unfavorite,
              retract
            )
          ) else (),
        if (favoriteQueries.nonEmpty || newFav.nonEmpty)
          li(
            paddingTop := 5,
            _favoriteQueries(
              curMolecule,
              newFav,
              favoriteQueries,
              use,
              upsert,
              unfavorite,
              retract
            ),
          ) else ()
      )
    )

  /** Table of recently used queries. The current molecule's row is not
    * clickable; "save"/"fav" links appear only when the molecule is not
    * already saved/favorited. */
  def _recentQueries(
    curMolecule: String,
    recentQueries: Seq[QueryDTO],
    savedMolecules: Seq[String],
    favoriteMolecules: Seq[String],
    use: QueryDTO => () => Unit,
    save: QueryDTO => () => Unit,
    favorite: QueryDTO => () => Unit
  ): TypedTag[Table] =
    table(
      cls := "tableRowLink",
      recentQueries.map { q =>
        val cur = q.molecule == curMolecule
        tr(
          cls := (if (cur) "current" else "other"),
          td(
            q.molecule,
            paddingRight := 20,
            if (cur) () else onclick := use(q)
          ),
          if (savedMolecules.contains(q.molecule))
            td("")
          else
            td(
              textAlign.right,
              a(cls := "discrete", href := "#", "save", onclick := save(q))
            ),
          if (favoriteMolecules.contains(q.molecule))
            td("")
          else
            td(
              textAlign.right,
              a(cls := "discrete", href := "#", "fav", onclick := favorite(q))
            ),
        )
      }
    )

  /** One submenu per namespace, each listing its saved queries. Rows carry a
    * retract (delete) cell and a fav/unfav toggle whose label is flipped
    * in-place in the DOM on click. */
  def _savedQueriesNs(
    curMolecule: String,
    queriesByNs: Seq[(String, Seq[(String, QueryDTO)])],
    favoriteMolecules: Seq[String],
    use: QueryDTO => () => Unit,
    fav: QueryDTO => () => Unit,
    unfav: QueryDTO => () => Unit,
    retract: QueryDTO => () => Unit
  ): Seq[TypedTag[LI]] = {
    // Build one table row for molecule m; (i, j) make the DOM ids unique.
    def row(m: String, q: QueryDTO, i: Int, j: Int): TypedTag[TableRow] = {
      // Mutable on purpose: tracks the toggle state across repeated clicks.
      var isFav = favoriteMolecules.contains(m)
      val rowId = s"saved-$i-$j"
      val favId = s"savedFav-$i-$j"
      val favCell = td(
        textAlign.right,
        a(
          cls := "discrete", href := "#",
          span(
            id := favId,
            if (isFav) "unfav" else "fav",
          ),
          onclick := { () =>
            // Swap the visible label, invoke the matching callback, flip state.
            val elem = document.getElementById(favId)
            elem.innerText = ""
            elem.appendChild(if (isFav) "fav".render else "unfav".render)
            if (isFav) unfav(q)() else fav(q)()
            isFav = !isFav
          }
        )
      )
      if (m == curMolecule) {
        tr(
          id := rowId,
          cls := "current",
          retractCell(rowId, retract(q)),
          favCell,
          td(m),
        )
      } else {
        tr(
          id := rowId,
          cls := "other",
          retractCell(rowId, retract(q)),
          favCell,
          td(
            a(
              href := "#",
              q.molecule,
              onclick := use(q)
            )
          )
        )
      }
    }
    queriesByNs.zipWithIndex.map { case ((ns, mm), i) =>
      li(cls := "dropdown-submenu",
        ns,
        ul(
          cls := "dropdown-menu",
          li(
            table(cls := "tableRowLink",
              mm.zipWithIndex.map { case ((m, q), j) =>
                row(m, q, i, j)
              }
            )
          )
        )
      )
    }
  }

  /** Saved-queries menu. With only the default "db.part/user" partition the
    * namespace submenus are shown directly; otherwise one submenu per
    * partition wraps them. */
  def _savedQueries(
    curMolecule: String,
    queriesByPartNs: Seq[(String, Seq[(String, Seq[(String, QueryDTO)])])],
    favoriteMolecules: Seq[String],
    use: QueryDTO => () => Unit,
    favorite: QueryDTO => () => Unit,
    unfavorite: QueryDTO => () => Unit,
    retract: QueryDTO => () => Unit
  ): TypedTag[UList] =
    ul(
      cls := "dropdown-menu",
      if (queriesByPartNs.head._1 == "db.part/user") {
        // No custom partitions - show nss directly
        _savedQueriesNs(
          curMolecule,
          queriesByPartNs.head._2,
          favoriteMolecules,
          use,
          favorite,
          unfavorite,
          retract
        )
      } else {
        queriesByPartNs.map { case (part, queriesByNs) =>
          li(cls := "dropdown-submenu",
            part,
            ul(
              cls := "dropdown-menu",
              _savedQueriesNs(
                curMolecule,
                queriesByNs,
                favoriteMolecules,
                use,
                favorite,
                unfavorite,
                retract
              )
            )
          )
        }
      }
    )

  /** Rows of the favorites table: an optional "save current query..." row
    * (when `newFav` is non-empty) followed by the numbered favorites. */
  def _favoriteQueryRows(
    curMolecule: String,
    newFav: Seq[QueryDTO],
    favoriteQueries: Seq[QueryDTO],
    use: QueryDTO => () => Unit,
    upsert: QueryDTO => () => Unit,
    unfav: QueryDTO => () => Unit,
    retract: QueryDTO => () => Unit
  ): Seq[TypedTag[TableRow]] = {
    newFav.map(query =>
      tr(
        cls := "other",
        onclick := upsert(query),
        th("␣", whiteSpace.nowrap),
        td(
          colspan := 3,
          "save current query...",
          whiteSpace.nowrap,
          paddingRight := 20
        )
      )
    ) ++ favoriteQueries.zipWithIndex.map { case (q, i) =>
      val rowId = "favorite" + i
      if (q.molecule == curMolecule) {
        tr(
          id := rowId,
          cls := "current",
          th(i + 1),
          retractCell(rowId, retract(q)),
          actionCell(rowId, unfav(q)),
          td(q.molecule)
        )
      } else {
        tr(
          id := rowId,
          cls := "other",
          th(i + 1),
          retractCell(rowId, retract(q)),
          actionCell(rowId, unfav(q)),
          td(
            a(
              href := "#",
              q.molecule,
              onclick := use(q)
            )
          )
        )
      }
    }
  }

  /** "x" cell that clears its row in the DOM and invokes the retract action. */
  def retractCell(rowId: String, retract: () => Unit): TypedTag[TableCell] = td(
    cls := "retract",
    a(
      href := "#",
      cls := "oi oi-x",
      paddingBottom := 6,
      onclick := { () =>
        document.getElementById(rowId).innerText = ""
        retract()
      }
    )
  )

  /** "unfav" cell that clears its row in the DOM and invokes the unfav action. */
  def actionCell(rowId: String, unfav: () => Unit): TypedTag[TableCell] = td(
    textAlign.left,
    a(
      href := "#",
      "unfav",
      onclick := { () =>
        document.getElementById(rowId).innerText = ""
        unfav()
      }
    )
  )

  /** Favorites table (see _favoriteQueryRows for the row semantics). */
  def _favoriteQueries(
    curMolecule: String,
    newFav: Seq[QueryDTO],
    favoriteQueries: Seq[QueryDTO],
    use: QueryDTO => () => Unit,
    upsert: QueryDTO => () => Unit,
    unfavorite: QueryDTO => () => Unit,
    retract: QueryDTO => () => Unit
  ): TypedTag[Table] =
    table(
      id := "querylist-favorites",
      cls := "tableRowLink",
      _favoriteQueryRows(
        curMolecule,
        newFav,
        favoriteQueries,
        use,
        upsert,
        unfavorite,
        retract
      )
    )

  // Grouped -------------------------------------------------------------------

  /** Generic dropdown sub-menu holding a list of checkbox elements. */
  def _subMenu(
    idStr: String,
    header: Frag,
    checkboxes: Seq[TypedTag[HTMLElement]],
  ): TypedTag[LI] =
    li(cls := "dropdown",
      a(href := "#", header),
      div(
        id := idStr,
        cls := "dropdown-menu",
        minWidth := 200,
        paddingBottom := 3,
        checkboxes
      )
    )

  // Shortcuts -----------------------------------------------------------------

  /** Keyboard-shortcuts dropdown (keyboard icon) wrapping the given tables. */
  def _subMenuShortcuts(shortcutTables: Frag*): TypedTag[LI] =
    li(
      cls := "dropdown",
      paddingLeft := 15,
      a(href := "#", i(cls := "far fa-keyboard")),
      div(
        cls := "dropdown-menu",
        id := "submenu-shortcuts",
        paddingBottom := 5,
        width := 270,
        shortcutTables
      )
    )

  /** One titled table of shortcut rows; p = bottom margin below the table. */
  def _shortCutsTable(header: String, p: Int, shortcuts: Frag*): TypedTag[Span] =
    span(
      h5(header, paddingBottom := 10),
      table(cls := "shortcuts",
        marginBottom := p,
        shortcuts)
    )

  /** Shortcut row with a square key badge; the whole row is clickable. */
  def _square(key: String, label: Frag, onclck: () => Unit): TypedTag[TableRow] =
    tr(
      cls := "other",
      th(span(key, cls := "box")),
      td(label),
      onclick := onclck
    )

  /** Shortcut row with a circular key badge; the whole row is clickable. */
  def _circle(key: String, label: Frag, onclck: () => Unit): TypedTag[TableRow] =
    tr(
      cls := "other",
      th(span(cls := "circle", key)),
      td(label),
      onclick := onclck
    )
}
|
<?php
namespace App\Http\Controllers;
use App\Models\Withdrawal;
use Illuminate\Http\Request;
use App\Services\TransactionService;
class WithdrawalController extends Controller
{
    // Number of withdrawals shown per page on the listing screen.
    protected $paginate_count = 15;

    // Service encapsulating wallet debits and withdrawal state transitions.
    protected $transactionService;

    public function __construct(TransactionService $transactionService)
    {
        $this->transactionService = $transactionService;
    }

    /**
     * Display the authenticated user's withdrawals, optionally filtered by
     * the `status` and/or `duration` query parameters.
     *
     * @return \Illuminate\Http\Response
     */
    public function index()
    {
        $withdrawals = Withdrawal::where('user_id', auth()->id());
        // filterByStatus / filterByDuration are query scopes on the model.
        if (request()->has('status')) {
            $withdrawals = $withdrawals->filterByStatus(request()->query('status'));
        }
        if (request()->has('duration')) {
            $withdrawals = $withdrawals->filterByDuration(request()->query('duration'));
        }
        // simplePaginate skips the COUNT query; withQueryString keeps the
        // active filters in the pagination links.
        $withdrawals = $withdrawals->simplePaginate($this->paginate_count)->withQueryString();
        return view('withdrawals', compact('withdrawals'));
    }

    /**
     * Show the form for creating a new withdrawal request.
     *
     * @return \Illuminate\Http\Response
     */
    public function create()
    {
        $user = auth()->user();
        return view('withdraw', compact('user'));
    }

    /**
     * Store a newly created withdrawal request after validating the amount
     * and checking the user's balance.
     *
     * @param  \Illuminate\Http\Request  $request
     * @return \Illuminate\Http\Response
     */
    public function store(Request $request)
    {
        $request->validate([
            'amount' => ['required', 'numeric', 'min:100000', 'max:300000000'],
        ]);
        // NOTE(review): * 100 presumably converts the submitted amount to
        // minor currency units before the balance check -- confirm that
        // makeWithdrawal() applies the same conversion.
        $amount = $request->amount * 100;
        $user = auth()->user();
        if (!$user->isSufficient($amount)) {
            return back()->with('error', 'Insufficient Balance. Credit your wallet as soon as possible.');
        }
        $this->transactionService->makeWithdrawal($request);
        return back()->with('success', 'Your withdrawal is being processed. Processing lasts within 24 hours.');
    }

    /**
     * Cancel (remove) the specified withdrawal request.
     *
     * @param  \App\Models\Withdrawal  $withdrawal
     * @return \Illuminate\Http\Response
     */
    public function destroy(Withdrawal $withdrawal)
    {
        // Cancelling a withdrawal request
        $this->transactionService->cancelWithdrawal($withdrawal);
        return back()->with('success', 'Your withdrawal request has been cancelled.');
    }

    // Marks the withdrawal as settled.
    // NOTE(review): no authorization check is visible in this method --
    // confirm that route middleware restricts settle/close to admins.
    public function settle(Withdrawal $withdrawal)
    {
        // Validates a transaction
        $withdrawal->update(['status' => 'succeed']);
        return back()->with('success', 'The transaction has been declared settled!');
    }

    // Marks the withdrawal as closed (see the authorization note on settle()).
    public function close(Withdrawal $withdrawal)
    {
        // Closes a transaction
        $withdrawal->update(['status' => 'closed']);
        // return $transaction;
        return back()->with('success', 'The transaction has been closed!');
    }
}
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.