// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License. See LICENSE in the project root for license information.
#if UNITY_EDITOR
using System;
using System.Collections.Generic;
using System.Xml;
using System.Xml.Linq;
namespace Microsoft.Build.Unity.ProjectGeneration.Templates.Xml
{
/// <summary>
/// This token is encoded as a comment that should be replaced.
/// </summary>
internal class XmlCommentTemplateToken : XProcessingInstruction, ITemplateToken
{
private readonly Guid token = Guid.NewGuid();
internal XmlCommentTemplateToken(string commentValue)
: base("somename", commentValue)
{
}
public override void WriteTo(XmlWriter writer)
{
XmlTemplateWriter xmlTemplateWriter = (XmlTemplateWriter)writer;
object value = xmlTemplateWriter.ReplacementSet.ReplacementEntries[token];
if (value is string stringValue)
{
writer.WriteRaw(stringValue);
}
else if (value is IEnumerable<string> valueSet)
{
foreach (string item in valueSet)
{
writer.WriteRaw(item);
}
}
else if (value is DelimitedStringSet delimitedStringSet)
{
bool firstWritten = false;
foreach (string item in delimitedStringSet.Items)
{
if (firstWritten)
{
writer.WriteRaw(delimitedStringSet.Delimiter);
}
writer.WriteRaw(item);
firstWritten = true;
}
}
else
{
throw new InvalidCastException($"Can't treat {value} as string or IEnumerable<string>");
}
}
public void AssignValue(TemplateReplacementSet replacementSet, object value)
{
replacementSet.ReplacementEntries[token] = value;
}
public void PrepareForReplacement(TemplateReplacementSet replacementSet)
{
// Do nothing
}
}
}
#endif
|
import random
class Color(object):
def __init__(self, r, g, b):
self.r = r
self.g = g
self.b = b
@classmethod
def random(cls):
r = random.randint(0, 255)
g = random.randint(0, 255)
b = random.randint(0, 255)
return Color(r, g, b)
# Tkinter color formatting
def to_hex(self):
return "#%02x%02x%02x" % (self.r, self.g, self.b)
@classmethod
def from_hex(cls, hex_string):
r = int(hex_string[1:3], 16)
g = int(hex_string[3:5], 16)
b = int(hex_string[5:7], 16)
return Color(r, g, b)
def __str__(self):
return f"Color({self.r},{self.g},{self.b})"
|
import styled from 'styled-components';
import { secondaryColor, media } from 'styles';
export default styled.div`
color: ${secondaryColor};
font-weight: 700;
text-align: center;
font-size: 5em;
margin-bottom: 5%;
${media.tablet`
margin-bottom: 20px;
`};
`;
|
#!/usr/bin/python
# -*- coding: ascii -*-
# $Id$
#
# Author: Filippo Rivato
# Date: 2015/10/04
__author__ = "Filippo Rivato"
__email__ = "[email protected]"
__name__ = _("Flatten")
__version__= "0.0.2"
from CNC import CNC,Block
from ToolsPage import Plugin
#==============================================================================
#Flatten class
#==============================================================================
class Flatten:
def __init__(self,name="Flatten"):
self.name = name
#----------------------------------------------------------------------
def make(self,app, XStart=0.0, YStart=0.0, FlatWidth=10., FlatHeight=10., \
FlatDepth=0, BorderPass=False, CutDirection="Climb", PocketType="Raster"):
#GCode Blocks
blocks = []
#Check parameters
if CutDirection == "":
app.setStatus(_("Flatten abort: Cut Direction is undefined"))
return
if PocketType == "":
app.setStatus(_("Flatten abort: Pocket Type is undefined"))
return
if FlatWidth <= 0 or FlatHeight <= 0 :
app.setStatus(_("Flatten abort: Flatten Area dimensions must be > 0"))
return
if FlatDepth > 0 :
app.setStatus(_("Flatten abort: Hey this is only for subtractive machine! Check depth!"))
return
#Add Region disabled to show worked area
block = Block(self.name + " Outline")
block.enable = False
block.append(CNC.zsafe())
xR,yR = self.RectPath(XStart,YStart,FlatWidth,FlatHeight)
for x,y in zip(xR,yR):
block.append(CNC.gline(x,y))
blocks.append(block)
# Load tool and material settings
toolDiam = CNC.vars['diameter']
toolRadius = toolDiam / 2.
#Calc tool diameter with Maximum Step Over allowed
StepOverInUnitMax = toolDiam * CNC.vars['stepover'] / 100.0
#Offset for Border Cut
BorderXStart = XStart + toolRadius
BorderYStart = YStart + toolRadius
BorderWidth = FlatWidth - toolDiam
BorderHeight = FlatHeight - toolDiam
BorderXEnd = XStart + FlatWidth - toolRadius
BorderYEnd = YStart + FlatHeight - toolRadius
PocketXStart = BorderXStart
PocketYStart = BorderYStart
PocketXEnd = BorderXEnd
PocketYEnd = BorderYEnd
#Calc space to work with/without border cut
WToWork = FlatWidth - toolDiam
HToWork = FlatHeight - toolDiam
if(WToWork < toolRadius or HToWork < toolRadius):
app.setStatus(_("Flatten abort: Flatten area is too small for this End Mill."))
return
#Prepare points for pocketing
xP=[]
yP=[]
#and border
xB=[]
yB=[]
#---------------------------------------------------------------------
#Raster approach
if PocketType == "Raster":
#Correct sizes if border is used
if(BorderPass):
PocketXStart += StepOverInUnitMax
PocketYStart += StepOverInUnitMax
PocketXEnd -= StepOverInUnitMax
PocketYEnd -= StepOverInUnitMax
WToWork -= (StepOverInUnitMax)
HToWork -= (StepOverInUnitMax)
#Calc number of pass
VerticalCount = (int)(HToWork / StepOverInUnitMax)
#Calc step minor of Max step
StepOverInUnit = HToWork / (VerticalCount +1)
flip = False
ActualY = PocketYStart
#Zig zag
if StepOverInUnit==0 : StepOverInUnit=0.001 #avoid infinite while loop
while (True):
#Zig
xP.append(self.ZigZag(flip,PocketXStart,PocketXEnd))
yP.append(ActualY)
flip = not flip
#Zag
xP.append(self.ZigZag(flip,PocketXStart,PocketXEnd))
yP.append(ActualY)
if(ActualY >= PocketYEnd - StepOverInUnitMax + StepOverInUnit):
break
#Up
ActualY += StepOverInUnit
xP.append(self.ZigZag(flip,PocketXStart,PocketXEnd))
yP.append(ActualY)
#Points for border cut depends on Zig/Zag end
if(BorderPass):
if flip:
xB,yB = self.RectPath(BorderXStart,BorderYEnd,BorderWidth,-BorderHeight)
else:
xB,yB = self.RectPath(BorderXEnd,BorderYEnd,-BorderWidth,-BorderHeight)
#Reverse in case of Climb
if CutDirection == "Climb":
xB = xB[::-1]
yB = yB[::-1]
#---------------------------------------------------------------------
#Offset approach
if PocketType == "Offset":
#Calc number of pass
VerticalCount = (int)(HToWork / StepOverInUnitMax)
HorrizontalCount = (int)(WToWork / StepOverInUnitMax)
#Make them odd
if VerticalCount%2 == 0 : VerticalCount += 1
if HorrizontalCount%2 == 0 : HorrizontalCount += 1
#Calc step minor of Max step
StepOverInUnitH = HToWork / (VerticalCount)
StepOverInUnitW = WToWork / (HorrizontalCount)
#Start from border to center
xS = PocketXStart
yS = PocketYStart
wS = WToWork
hS = HToWork
xC = 0
yC = 0
while (xC<=HorrizontalCount/2 and yC<=VerticalCount/2):
#Pocket offset points
xO,yO = self.RectPath(xS, yS, wS, hS)
if CutDirection == "Conventional":
xO = xO[::-1]
yO = yO[::-1]
xP = xP + xO
yP = yP + yO
xS+=StepOverInUnitH
yS+=StepOverInUnitW
hS-=2.0*StepOverInUnitH
wS-=2.0*StepOverInUnitW
xC += 1
yC += 1
#Reverse points to start from the inside (less stress on the tool)
xP = xP[::-1]
yP = yP[::-1]
#Blocks for pocketing
block = Block(self.name)
block.append("(Flatten from X=%g Y=%g)"%(XStart,YStart))
block.append("(W=%g x H=%g x D=%g)"%(FlatWidth,FlatHeight,FlatDepth))
block.append("(Approach: %s %s)" % (PocketType,CutDirection))
if BorderPass : block.append("(with border)")
#Move safe to first point
block.append(CNC.zsafe())
block.append(CNC.grapid(xP[0],yP[0]))
#Init Depth
currDepth = 0.
stepz = CNC.vars['stepz']
if stepz==0 : stepz=0.001 #avoid infinite while loop
#Create GCode from points
while True:
currDepth -= stepz
if currDepth < FlatDepth : currDepth = FlatDepth
block.append(CNC.zenter(currDepth))
block.append(CNC.gcode(1, [("f",CNC.vars["cutfeed"])]))
#Pocketing
for x,y in zip(xP,yP):
block.append(CNC.gline(x,y))
#Border cut if requested
for x,y in zip(xB,yB):
block.append(CNC.gline(x,y))
#Verify exit condition
if currDepth <= FlatDepth : break
#Move to the begin in a safe way
block.append(CNC.zsafe())
block.append(CNC.grapid(xP[0],yP[0]))
#Zsafe
block.append(CNC.zsafe())
blocks.append(block)
return blocks
#----------------------------------------------------------------------
def RectPath(self,x,y,w,h):
xR = []
yR = []
xR.append(x)
yR.append(y)
xR.append(x + w)
yR.append(y)
xR.append(x + w)
yR.append(y + h)
xR.append(x)
yR.append(y + h)
xR.append(x)
yR.append(y)
return (xR,yR)
#----------------------------------------------------------------------
def ZigZag(self,flip,zig,zag):
if flip:
return zig
else:
return zag
#==============================================================================
# Create a flatten surface
#==============================================================================
class Tool(Plugin):
__doc__ = _("Flatten an area in different ways")
def __init__(self, master):
Plugin.__init__(self, master, "Flatten")
self.icon = "flatten"
self.group = "CAM"
self.variables = [
("name", "db", "", _("Name")),
("XStart" , "mm", 0.0, _("X start")),
("YStart" , "mm", 0.0, _("Y start")),
("FlatWidth" , "mm", 30.0, _("Width to flatten")),
("FlatHeight" , "mm", 20.0, _("Height to flatten")),
("FlatDepth" , "mm", 0.0, _("Depth to flatten")),
("BorderPass" , "bool", True , _("Raster border")),
("CutDirection", "Climb,Conventional","Climb", _("Cut Direction")),
("PocketType" , "Raster,Offset" ,"Raster", _("Pocket type"))
]
self.buttons.append("exe")
# ----------------------------------------------------------------------
def execute(self, app):
n = self["name"]
if not n or n=="default": n="Flatten"
flatten = Flatten(n)
blocks = flatten.make(app,
self.fromMm("XStart"),
self.fromMm("YStart"),
self.fromMm("FlatWidth"),
self.fromMm("FlatHeight"),
self.fromMm("FlatDepth"),
self["BorderPass"],
self["CutDirection"],
self["PocketType"]
)
if blocks is not None:
active = app.activeBlock()
if active==0: active=1
app.gcode.insBlocks(active, blocks, "Flatten")
app.refresh()
app.setStatus(_("Flatten: Generated flatten surface"))
|
require "rails_helper"
RSpec.describe Queries::GetPublishIntent do
let(:timeout) { false }
before do
request_stub = stub_request(:get, %r{.*content-store.*/publish-intent#{base_path}})
if timeout
request_stub.to_timeout
else
request_stub.to_return(status: status, body: body.to_json)
end
end
context "when the content store responds with a 200" do
let(:base_path) { "/vat-rates" }
let(:status) { 200 }
let(:body) { { foo: "bar" } }
it "returns the body of the response from the content store" do
result = subject.call(base_path)
expect(result).to eq(body)
end
end
context "when the content store responds with a 404" do
let(:base_path) { "/missing" }
let(:status) { 404 }
let(:body) { {} }
it "raises a command error" do
expect {
subject.call(base_path)
}.to raise_error(CommandError, /could not find/i)
end
end
context "when the content store times out" do
let(:base_path) { "/timeout" }
let(:timeout) { true }
it "raises a command error" do
expect {
subject.call(base_path)
}.to raise_error(CommandError, /content store timed out/i)
end
end
end
|
$elastomer = Elastomer::Client.new url: ENV["ELASTICSEARCH_URL"] || "http://localhost:9200"
|
#!/bin/sh
# REQUIRES: command -v clangd
. test/lib.sh
cat >> .config/kak/kakrc << EOF
set-option global lsp_diagnostic_line_error_sign ' X'
set-option global lsp_diagnostic_line_hint_sign '¿ '
set-option global lsp_diagnostic_line_info_sign '¡ '
set-option global lsp_diagnostic_line_warning_sign 'W '
EOF
cat > main.c << EOF
void main(int argc, char** argv) {}
syntax error
EOF
test_tmux_kak_start main.c
test_tmux capture-pane -p | sed 2q
# CHECK: {{W }}void main(int argc, char** argv) {}
# CHECK: {{ X}}syntax error
test_tmux send-keys %:comment-line Enter
test_sleep
test_tmux capture-pane -p | sed 2q
# CHECK: {{ }}// void main(int argc, char** argv) {}
# CHECK: {{ }}// syntax error
|
# -*- encoding : utf-8 -*-
require 'rademade_admin/routing/mapper'
require 'simple_form'
require 'select2-rails'
require 'carrierwave'
require 'light_resizer'
module RademadeAdmin
class Engine < ::Rails::Engine
isolate_namespace RademadeAdmin
config.assets.paths << "#{config.root}/vendor/assets/javascript/bower_components"
initializer 'ckeditor.assets_precompile', :group => :all do |app|
filter_ckeditor_assets = Proc.new do |logical_path|
File.fnmatch('ckeditor/*', logical_path) \
&& ! [
'ckeditor/CHANGES',
'ckeditor/LICENSE',
'ckeditor/README',
'ckeditor/plugins/scayt/CHANGELOG',
'ckeditor/plugins/scayt/LICENSE',
'ckeditor/plugins/scayt/README',
'ckeditor/plugins/wsc/LICENSE',
'ckeditor/plugins/wsc/README',
'ckeditor/skins/moono/readme',
].include?(logical_path)
end
app.config.assets.precompile << filter_ckeditor_assets
app.config.assets.precompile += %w(rademade_admin.css rademade_admin.js rademade_admin/fav1.ico)
end
$LOAD_PATH << "#{config.root}/app/services/"
paths = %W(
#{config.root}/app/helpers/**/*.rb
#{config.root}/app/services/**/*.rb
#{config.root}/app/inputs/**/*.rb
#{config.root}/lib/rademade_admin/**/*.rb
)
paths.each do |path|
Dir[path].each { |f| require f }
end
end
end
|
import 'dart:developer';
import 'package:encrypted_shared_preferences/encrypted_shared_preferences.dart';
import 'package:flutter/material.dart';
class ThemeProvider with ChangeNotifier {
final _prefs = EncryptedSharedPreferences();
static const _keyTheme = 'themeApp';
static const _dark = 'dark';
static const _light = 'light';
static const _system = 'system';
ThemeMode? _themeMode;
ThemeMode? get getTheme => _themeMode;
ThemeProvider() {
_initTheme();
}
void _initTheme() async {
await _prefs.getString(_keyTheme).then((String value) {
log('Theme from storage: $value');
var themeMode = value.isEmpty ? _light : value;
if (themeMode == _light) {
_themeMode = ThemeMode.light;
}
if (themeMode == _dark) {
_themeMode = ThemeMode.dark;
}
if (themeMode == _system) {
_themeMode = ThemeMode.system;
}
_debugProvider();
notifyListeners();
});
}
void setDarkTheme(themeMode) async {
_themeMode = themeMode;
await _prefs.setString(_keyTheme, _dark);
_debugProvider();
notifyListeners();
}
void setLightTheme(themeMode) async {
_themeMode = themeMode;
await _prefs.setString(_keyTheme, _light);
_debugProvider();
notifyListeners();
}
void setSystemTheme(themeMode) async {
_themeMode = themeMode;
await _prefs.setString(_keyTheme, _system);
_debugProvider();
notifyListeners();
}
void _debugProvider() {
log('Theme mode: $_themeMode');
}
}
|
## Anot.js 2.x
> Anot is short for "Anot is not only a templateEngine". It is a compact, easy-to-use, high-performance front-end MVVM framework, forked from avalon.
> Version 2.0 was trimmed down substantially: compatibility code for non-modern browsers and the component API were removed, and web components were formally introduced.
>> The 2.x line is a brand-new version and only supports browsers that understand type="module".
```bash
# development mode
npm start
```
```bash
# production build
npm run prod
```
After the build finishes, two bundles are produced:
- anot.js, the standard build (requires a modern browser with ES6 module support)
- anot.touch.js, the build with touch events (requires a modern browser with ES6 module support)
### Documentation:
[Documentation](https://doui.cc/wiki/anot)
### Component library built on Anot.js
[Documentation](https://doui.cc)
|
## Redis [remote dictionary server]: NoSQL
### Overview
1. redis in a nutshell
- a key-value, {in-memory} open-source NoSQL database, vs. an RDBMS
- [clustered] high concurrency + high performance + high scalability + high availability
- multiple data structures [**essentially moves computation to the data** {the server exposes many operations | unlike memcache's plain string you can fetch just a few fields}] + very fast
- in-memory + asynchronous persistence + string values up to 512 MB
- used for [distributed] caches {very hot keys} + locks + data storage + related solutions
- features: IO {epoll} + threading {single-threaded command execution}
- **redis is not suited to operations that take a long time, or to storing very large blobs**
- progressive rehashing keeps it highly available; hash collisions are resolved by separate chaining [an array whose slots hold linked lists]

2. comparison with other k-v products
- redis data can be persisted: data in memory can be written to disk and loaded back into memory on restart [recommended: AOF [command-level; restarts get slow when there are many commands] + RDB [data-level snapshots; may lose some data]]
- redis provides the string, hash, list, set and zset data structures
- redis supports data backup via master-slave replication
3. core data structures



- string: int/sds [raw/embstr] + binary safe, a value can be up to 512 MB + caches, counters, distributed locks
- hash: ziplist, hashtable + storing large objects [when only parts of them need updating], e.g. a shopping cart:
```java
Map<string, Map<K, V>>
// 1. hset shopcart:uid productid count    add an item
//    hget shopcart:uid productid
// 2. hincrby shopcart:uid productid count increase an item count
// 3. hdel shopcart:uid productid          remove an item
// 4. hlen shopcart:uid                    total number of items
// 5. hgetall shopcart:uid                 all items
```
- list: ziplist, linkedlist + backed by a doubly linked (non-circular) list + e.g. an article feed
1. stack: LPUSH + LPOP
2. queue: LPUSH + RPOP
3. blocking queue: LPUSH + BRPOP
- set: intset, hashtable + **lotteries [SRANDMEMBER/SPOP]** / WeChat-style likes [visibility via sinter] / "people you may know" [sdiff] / common followees in a social graph [sinter]
- zset: ziplist, skiplist + skip list + leaderboards / trending searches
- bit: daily check-ins [SETBIT key offset(512m) value / GETBIT, BITCOUNT]
1. use one key per day with each uid as the offset, or `one key per member per month: upms:member:1:202001 0 1`
2. [link](https://github.com/Alice52/Alice52/issues/58#issue-971076463)
- hyperloglog: daily active users

4. redis as a database vs. redis as a cache
- when used as a high-performance cache, data safety cannot be guaranteed
- a cache holds hot data, not the full data set
- the cache should change as access patterns change
5. difference between redis (cache) and MySQL (storage):
- the DB has lower read/write performance and higher latency
6. [redis as a message queue vs. a real MQ](./09.mq.md)
- `messages live in memory [consuming RAM]` + `messages may be lost [streams are not]`
7. [pipeline](./11.pipeline.md): up to roughly 1,000,000 QPS on a single node
- it collapses many IO round trips into one, provided the pipelined commands have no causal dependency on each other
- when load testing with redis-benchmark, one important factor for the peak QPS is the number of commands per pipeline batch

### Installation
1. default install directory: `/usr/local/bin`
```shell
root@7d41c0bd290a:/usr/local/bin# ls -la
├── redis-benchmark # benchmarking tool
├── redis-check-aof # repair a broken AOF file
├── redis-check-rdb # repair a broken dump.rdb file
├── redis-cli # client entry point
├── redis-sentinel # sentinel
└── redis-server # server
```
2. start up and shut down
```shell
# after preparing a copy of the conf file
# start up
/usr/local/bin/redis-server /usr/local/etc/redis/redis.conf
# shut down
/usr/local/bin/redis-cli shutdown
/usr/local/bin/redis-cli -p 6379 shutdown
```
3. docker
```yaml
version: '3.0'
services:
redis:
image: registry.cn-shanghai.aliyuncs.com/alice52/dev-standalone-redis:20200930.4e34876
restart: 'on-failure:3'
container_name: dev-redis-standalone
ports:
- 6379:6379
volumes:
- /root/redis/standalone/data:/data
environment:
TZ: Asia/Shanghai
```
### config
0. options can be set in the config file & also at runtime via the CONFIG command
1. units
- 1k --> 1000 bytes
- 1kb --> 1024 bytes
- units are case insensitive so 1GB 1Gb 1gB are all the same.
2. INCLUDES
- Include one or more other config files here.
3. NETWORK
- daemonize: run as a daemon; when running in docker this needs to be changed to `no`
- pidfile: when running as a daemon, write the pid to the specified file
- port
- timeout: Close the connection after a client is idle for N seconds (0 to disable)
- bind:
- protected-mode: once a password (auth) is set, this can be changed to no
4. GENERAL
- log/loglevel/logfile: [debug / verbose / notice / warning]
- tcp-keepalive
- syslog-enabled / syslog-ident / syslog-facility
- databases
- daemonize: set to no under docker
5. SNAPSHOTTING
- an RDB file is a compressed snapshot of the entire in-memory data set
- save <seconds> <changes>
- rdbcompression: compress the stored snapshot, at some CPU cost
- rdbchecksum: after saving, verify the data with a CRC64 checksum, costing roughly 10% CPU
- dbfilename
- dir
6. APPEND ONLY MODE
- appendonly
- appendfilename
- appendfsync <[always / everysec/ no]>
- `no-appendfsync-on-rewrite no`: whether to skip appendfsync during rewrites; `no` favours data safety
- auto-aof-rewrite-percentage 100
- auto-aof-rewrite-min-size 64mb
7. durability
- aof-use-rdb-preamble no
8. MEMORY MANAGEMENT
- maxmemory:
- maxmemory-policy: the cache eviction policy
9. REPLICATION
- cluster-enabled
- cluster-config-file
- cluster-node-timeout
11. SECURITY
- requirepass: set the password used by auth [related to protected-mode]
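To tie the options above together, a small illustrative `redis.conf` fragment might look like the following (the directive names are standard redis options; the specific values are placeholders for illustration, not recommendations from these notes):
```conf
# SNAPSHOTTING: dump an RDB if at least 1 key changed within 900 s
save 900 1
rdbcompression yes
dbfilename dump.rdb
dir /data

# APPEND ONLY MODE
appendonly yes
appendfilename "appendonly.aof"
appendfsync everysec

# MEMORY MANAGEMENT
maxmemory 256mb
maxmemory-policy allkeys-lru

# SECURITY
requirepass my-strong-password
```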
### Basic commands
1. common
- bulk operations: mset/mget, hmset ...
- atomic operations: incr/decrby/hincrby
- common
```shell
# switch database
SELECT 0
# number of keys in the current database
DBSIZE
# clear the current DB / all DBs
FLUSHDB
FLUSHALL
```
- key commands: querying a key that does not exist returns nil
```js
del key
keys *
dump key
exists key
expire key second
ttl/pttl key
type key
move key db
persist key // remove the expiration
rename key newKey
```
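The same basic operations can be driven from application code; below is a minimal sketch using the redis-py client (the client library, host and key names are assumptions for illustration):
```python
import redis  # assumes the redis-py package

r = redis.Redis(host="localhost", port=6379, db=0)

r.set("counter", 0)
r.incr("counter")            # atomic increment
r.mset({"a": 1, "b": 2})     # bulk write
print(r.mget("a", "b"))      # bulk read -> [b'1', b'2']

r.expire("counter", 60)      # key expires in 60 s
print(r.ttl("counter"))      # remaining time to live
print(r.get("missing-key"))  # nonexistent key -> None (nil)
```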
|
/**
* "This vocabulary defines terms used in SHACL, the W3C Shapes Constraint Language."@en
* @type <http://www.w3.org/2002/07/owl#Ontology>
* @label-en W3C Shapes Constraint Language (SHACL) Vocabulary
 * @declare prefix: sh, namespace: http://www.w3.org/ns/shacl#
* @suggestedShapesGraph <http://www.w3.org/ns/shacl-shacl#>
*/
export declare namespace interfaces {
/** */
interface sh {
value: any;
}
/** */
interface AbstractResult extends AbstractResult {
value: any;
}
/** */
interface AndConstraintComponent {
value: any;
}
/** */
interface AndConstraintComponent_and {
value: any;
}
/** */
interface BlankNode {
value: any;
}
/** */
interface BlankNodeOrIRI {
value: any;
}
/** */
interface BlankNodeOrLiteral {
value: any;
}
/** */
interface ClassConstraintComponent {
value: any;
}
/** */
interface ClassConstraintComponent_class {
value: any;
}
/** */
interface ClosedConstraintComponent {
value: any;
}
/** */
interface ClosedConstraintComponent_closed {
value: any;
}
/** */
interface ClosedConstraintComponent_ignoredProperties {
value: any;
}
/** */
interface ConstraintComponent extends ConstraintComponent {
value: any;
}
/** */
interface DatatypeConstraintComponent {
value: any;
}
/** */
interface DatatypeConstraintComponent_datatype {
value: any;
}
/** */
interface DisjointConstraintComponent {
value: any;
}
/** */
interface DisjointConstraintComponent_disjoint {
value: any;
}
/** */
interface EqualsConstraintComponent {
value: any;
}
/** */
interface EqualsConstraintComponent_equals {
value: any;
}
/** */
interface ExpressionConstraintComponent {
value: any;
}
/** */
interface ExpressionConstraintComponent_expression {
value: any;
}
/** */
interface Function extends Function {
value: any;
}
/** */
interface HasValueConstraintComponent {
value: any;
}
/** */
interface HasValueConstraintComponent_hasValue {
value: any;
}
/** */
interface IRI {
value: any;
}
/** */
interface IRIOrLiteral {
value: any;
}
/** */
interface InConstraintComponent {
value: any;
}
/** */
interface InConstraintComponent_in {
value: any;
}
/** */
interface Info {
value: any;
}
/** */
interface JSConstraint extends JSConstraint {
value: any;
}
/** */
interface JSConstraint_js {
value: any;
}
/** */
interface JSConstraintComponent {
value: any;
}
/** */
interface JSExecutable extends JSExecutable {
value: any;
}
/** */
interface JSFunction extends JSFunction, JSFunction {
value: any;
}
/** */
interface JSLibrary extends JSLibrary {
value: any;
}
/** */
interface JSRule extends JSRule, JSRule {
value: any;
}
/** */
interface JSTarget extends JSTarget, JSTarget {
value: any;
}
/** */
interface JSTargetType extends JSTargetType, JSTargetType {
value: any;
}
/** */
interface JSValidator extends JSValidator, JSValidator {
value: any;
}
/** */
interface LanguageInConstraintComponent {
value: any;
}
/** */
interface LanguageInConstraintComponent_languageIn {
value: any;
}
/** */
interface LessThanConstraintComponent {
value: any;
}
/** */
interface LessThanConstraintComponent_lessThan {
value: any;
}
/** */
interface LessThanOrEqualsConstraintComponent {
value: any;
}
/** */
interface LessThanOrEqualsConstraintComponent_lessThanOrEquals {
value: any;
}
/** */
interface Literal {
value: any;
}
/** */
interface MaxCountConstraintComponent {
value: any;
}
/** */
interface MaxCountConstraintComponent_maxCount {
value: any;
}
/** */
interface MaxExclusiveConstraintComponent {
value: any;
}
/** */
interface MaxExclusiveConstraintComponent_maxExclusive {
value: any;
}
/** */
interface MaxInclusiveConstraintComponent {
value: any;
}
/** */
interface MaxInclusiveConstraintComponent_maxInclusive {
value: any;
}
/** */
interface MaxLengthConstraintComponent {
value: any;
}
/** */
interface MaxLengthConstraintComponent_maxLength {
value: any;
}
/** */
interface MinCountConstraintComponent {
value: any;
}
/** */
interface MinCountConstraintComponent_minCount {
value: any;
}
/** */
interface MinExclusiveConstraintComponent {
value: any;
}
/** */
interface MinExclusiveConstraintComponent_minExclusive {
value: any;
}
/** */
interface MinInclusiveConstraintComponent {
value: any;
}
/** */
interface MinInclusiveConstraintComponent_minInclusive {
value: any;
}
/** */
interface MinLengthConstraintComponent {
value: any;
}
/** */
interface MinLengthConstraintComponent_minLength {
value: any;
}
/** */
interface NodeConstraintComponent {
value: any;
}
/** */
interface NodeConstraintComponent_node {
value: any;
}
/** */
interface NodeKind extends NodeKind {
value: any;
}
/** */
interface NodeKindConstraintComponent {
value: any;
}
/** */
interface NodeKindConstraintComponent_nodeKind {
value: any;
}
/** */
interface NodeShape extends NodeShape {
value: any;
}
/** */
interface NotConstraintComponent {
value: any;
}
/** */
interface NotConstraintComponent_not {
value: any;
}
/** */
interface OrConstraintComponent {
value: any;
}
/** */
interface OrConstraintComponent_or {
value: any;
}
/** */
interface Parameter extends Parameter {
value: any;
}
/** */
interface Parameterizable extends Parameterizable {
value: any;
}
/** */
interface PatternConstraintComponent {
value: any;
}
/** */
interface PatternConstraintComponent_flags {
value: any;
}
/** */
interface PatternConstraintComponent_pattern {
value: any;
}
/** */
interface PrefixDeclaration extends PrefixDeclaration {
value: any;
}
/** */
interface PropertyConstraintComponent {
value: any;
}
/** */
interface PropertyConstraintComponent_property {
value: any;
}
/** */
interface PropertyGroup extends PropertyGroup {
value: any;
}
/** */
interface PropertyShape extends PropertyShape {
value: any;
}
/** */
interface QualifiedMaxCountConstraintComponent {
value: any;
}
/** */
interface QualifiedMaxCountConstraintComponent_qualifiedMaxCount {
value: any;
}
/** */
interface QualifiedMaxCountConstraintComponent_qualifiedValueShape {
value: any;
}
/** */
interface QualifiedMaxCountConstraintComponent_qualifiedValueShapesDisjoint {
value: any;
}
/** */
interface QualifiedMinCountConstraintComponent {
value: any;
}
/** */
interface QualifiedMinCountConstraintComponent_qualifiedMinCount {
value: any;
}
/** */
interface QualifiedMinCountConstraintComponent_qualifiedValueShape {
value: any;
}
/** */
interface QualifiedMinCountConstraintComponent_qualifiedValueShapesDisjoint {
value: any;
}
/** */
interface ResultAnnotation extends ResultAnnotation {
value: any;
}
/** */
interface Rule extends Rule {
value: any;
}
/** */
interface SPARQLAskExecutable extends SPARQLAskExecutable {
value: any;
}
/** */
interface SPARQLAskValidator extends SPARQLAskValidator, SPARQLAskValidator {
value: any;
}
/** */
interface SPARQLConstraint extends SPARQLConstraint {
value: any;
}
/** */
interface SPARQLConstraintComponent {
value: any;
}
/** */
interface SPARQLConstraintComponent_sparql {
value: any;
}
/** */
interface SPARQLConstructExecutable extends SPARQLConstructExecutable {
value: any;
}
/** */
interface SPARQLExecutable extends SPARQLExecutable {
value: any;
}
/** */
interface SPARQLFunction extends SPARQLFunction, SPARQLFunction, SPARQLFunction {
value: any;
}
/** */
interface SPARQLRule extends SPARQLRule, SPARQLRule {
value: any;
}
/** */
interface SPARQLSelectExecutable extends SPARQLSelectExecutable {
value: any;
}
/** */
interface SPARQLSelectValidator extends SPARQLSelectValidator, SPARQLSelectValidator {
value: any;
}
/** */
interface SPARQLTarget extends SPARQLTarget, SPARQLTarget, SPARQLTarget {
value: any;
}
/** */
interface SPARQLTargetType extends SPARQLTargetType, SPARQLTargetType, SPARQLTargetType {
value: any;
}
/** */
interface SPARQLUpdateExecutable extends SPARQLUpdateExecutable {
value: any;
}
/** */
interface Severity extends Severity {
value: any;
}
/** */
interface Shape extends Shape {
value: any;
}
/** */
interface Target extends Target {
value: any;
}
/** */
interface TargetType extends TargetType, TargetType {
value: any;
}
/** */
interface TripleRule extends TripleRule {
value: any;
}
/** */
interface UniqueLangConstraintComponent {
value: any;
}
/** */
interface UniqueLangConstraintComponent_uniqueLang {
value: any;
}
/** */
interface ValidationReport extends ValidationReport {
value: any;
}
/** */
interface ValidationResult extends ValidationResult {
value: any;
}
/** */
interface Validator extends Validator {
value: any;
}
/** */
interface Violation {
value: any;
}
/** */
interface Warning {
value: any;
}
/** */
interface XoneConstraintComponent {
value: any;
}
/** */
interface XoneConstraintComponent_xone {
value: any;
}
/** */
interface alternativePath {
value: any;
}
/** */
interface and {
value: any;
}
/** */
interface annotationProperty {
value: any;
}
/** */
interface annotationValue {
value: any;
}
/** */
interface annotationVarName {
value: any;
}
/** */
interface ask {
value: any;
}
/** */
interface Class {
value: any;
}
/** */
interface closed {
value: any;
}
/** */
interface condition {
value: any;
}
/** */
interface conforms {
value: any;
}
/** */
interface construct {
value: any;
}
/** */
interface datatype {
value: any;
}
/** */
interface deactivated {
value: any;
}
/** */
interface declare {
value: any;
}
/** */
interface defaultValue {
value: any;
}
/** */
interface description {
value: any;
}
/** */
interface detail {
value: any;
}
/** */
interface disjoint {
value: any;
}
/** */
interface entailment {
value: any;
}
/** */
interface equals {
value: any;
}
/** */
interface expression {
value: any;
}
/** */
interface filterShape {
value: any;
}
/** */
interface flags {
value: any;
}
/** */
interface focusNode {
value: any;
}
/** */
interface group {
value: any;
}
/** */
interface hasValue {
value: any;
}
/** */
interface ignoredProperties {
value: any;
}
/** */
interface In {
value: any;
}
/** */
interface intersection {
value: any;
}
/** */
interface inversePath {
value: any;
}
/** */
interface js {
value: any;
}
/** */
interface jsFunctionName {
value: any;
}
/** */
interface jsLibrary {
value: any;
}
/** */
interface jsLibraryURL {
value: any;
}
/** */
interface labelTemplate {
value: any;
}
/** */
interface languageIn {
value: any;
}
/** */
interface lessThan {
value: any;
}
/** */
interface lessThanOrEquals {
value: any;
}
/** */
interface maxCount {
value: any;
}
/** */
interface maxExclusive {
value: any;
}
/** */
interface maxInclusive {
value: any;
}
/** */
interface maxLength {
value: any;
}
/** */
interface message {
value: any;
}
/** */
interface minCount {
value: any;
}
/** */
interface minExclusive {
value: any;
}
/** */
interface minInclusive {
value: any;
}
/** */
interface minLength {
value: any;
}
/** */
interface name {
value: any;
}
/** */
interface namespace {
value: any;
}
/** */
interface node {
value: any;
}
/** */
interface nodeKind {
value: any;
}
/** */
interface nodeValidator {
value: any;
}
/** */
interface nodes {
value: any;
}
/** */
interface not {
value: any;
}
/** */
interface object {
value: any;
}
/** */
interface oneOrMorePath {
value: any;
}
/** */
interface optional {
value: any;
}
/** */
interface or {
value: any;
}
/** */
interface order {
value: any;
}
/** */
interface parameter {
value: any;
}
/** */
interface path {
value: any;
}
/** */
interface pattern {
value: any;
}
/** */
interface predicate {
value: any;
}
/** */
interface prefix {
value: any;
}
/** */
interface prefixes {
value: any;
}
/** */
interface property {
value: any;
}
/** */
interface propertyValidator {
value: any;
}
/** */
interface qualifiedMaxCount {
value: any;
}
/** */
interface qualifiedMinCount {
value: any;
}
/** */
interface qualifiedValueShape {
value: any;
}
/** */
interface qualifiedValueShapesDisjoint {
value: any;
}
/** */
interface result {
value: any;
}
/** */
interface resultAnnotation {
value: any;
}
/** */
interface resultMessage {
value: any;
}
/** */
interface resultPath {
value: any;
}
/** */
interface resultSeverity {
value: any;
}
/** */
interface returnType {
value: any;
}
/** */
interface rule {
value: any;
}
/** */
interface select {
value: any;
}
/** */
interface severity {
value: any;
}
/** */
interface shapesGraph {
value: any;
}
/** */
interface shapesGraphWellFormed {
value: any;
}
/** */
interface sourceConstraint {
value: any;
}
/** */
interface sourceConstraintComponent {
value: any;
}
/** */
interface sourceShape {
value: any;
}
/** */
interface sparql {
value: any;
}
/** */
interface subject {
value: any;
}
/** */
interface suggestedShapesGraph {
value: any;
}
/** */
interface target {
value: any;
}
/** */
interface targetClass {
value: any;
}
/** */
interface targetNode {
value: any;
}
/** */
interface targetObjectsOf {
value: any;
}
/** */
interface targetSubjectsOf {
value: any;
}
/** */
interface This {
value: any;
}
/** */
interface union {
value: any;
}
/** */
interface uniqueLang {
value: any;
}
/** */
interface update {
value: any;
}
/** */
interface validator {
value: any;
}
/** */
interface value {
value: any;
}
/** */
interface xone {
value: any;
}
/** */
interface zeroOrMorePath {
value: any;
}
/** */
interface zeroOrOnePath {
value: any;
}
}
|
#!/usr/bin/env ruby
# encoding: UTF-8
require 'optparse'
require 'druid-tools'
def doit(druid, flags)
puts "Processing #{druid.id}"
begin
item = Dor::Item.find(druid.druid)
ap({:item => item, :collections => item.collections}) if flags[:debug]
# remove all collections
item.collections.dup.each {|c| item.remove_collection(c)}
# add the new ones
flags[:collections].each do |k, collection|
item.add_collection(collection)
end
ap({:item => item, :collections => item.collections}) if flags[:debug]
unless item.allows_modification?
puts "WARNING: Item not editable: #{druid.id}"
item.open_new_version
end
item.save
item.close_version
rescue ActiveFedora::ObjectNotFoundError => e
puts "ERROR: #{e.message}"
end
end
# __MAIN__
begin
File.umask(002)
flags = {
:tmpdir => 'tmp',
:verbose => false,
:debug => false,
:collections => {},
:workspacedir => '/var/geomdtk/current/workspace'
}
OptionParser.new do |opts|
opts.banner = <<EOM
Usage: #{File.basename(__FILE__)} [options] [druid [druid...] | < druids]
EOM
opts.on('--collection DRUID', 'Collection for accession') do |druid|
flags[:collections][DruidTools::Druid.new(druid).id] = nil
end
opts.on('--tmpdir DIR', "Temporary directory for assembly (default: #{flags[:tmpdir]})") do |d|
flags[:tmpdir] = d
end
opts.on('-v', '--verbose', 'Run verbosely, use multiple times for debug level output') do
flags[:debug] = true if flags[:verbose] # -vv
flags[:verbose] = true
end
opts.on('--workspace DIR', "Workspace directory for assembly (default: #{flags[:workspacedir]})") do |d|
flags[:workspacedir] = d
end
end.parse!
[flags[:tmpdir], flags[:workspacedir]].each do |d|
raise ArgumentError, "Missing directory #{d}" unless File.directory? d
end
ap({:flags => flags}) if flags[:debug]
# Validate collection druids
flags[:collections].each do |druid,v|
begin
flags[:collections][druid] = Dor::Collection.find(druid)
rescue ActiveFedora::ObjectNotFoundError => e
puts "ERROR: Invalid collection #{druid}: #{e.message}"
exit(-1)
end
end
(ARGV.empty? ? STDIN : ARGV).each do |pid|
druid = DruidTools::Druid.new(pid.strip, flags[:workspacedir])
ap({:druid => druid}) if flags[:debug]
begin
doit(druid, flags)
rescue Exception => e
ap({:error => e})
end
end
rescue SystemCallError => e
$stderr.puts "ERROR: #{e.message}"
$stderr.puts e.backtrace
exit(-1)
end
|
cat > dataset.csv << EOF
f1,ans
0.1,0
0.7,1
0.6,1
0.2,0
0.8,1
EOF
|
import 'package:canton_design_system/canton_design_system.dart';
import 'package:flutter/cupertino.dart';
import 'package:flutter_slidable/flutter_slidable.dart';
import 'package:flutter_riverpod/flutter_riverpod.dart';
import 'package:notes_app/src/models/note.dart';
import 'package:notes_app/src/ui/providers/note_provider.dart';
class PinNoteAction extends ConsumerWidget {
const PinNoteAction(this.note, this.setState);
final Note note;
final void Function(void Function()) setState;
@override
Widget build(BuildContext context, ScopedReader watch) {
return Container(
margin: EdgeInsets.only(right: 10),
child: SlideAction(
decoration: ShapeDecoration(
color: Theme.of(context).colorScheme.onSurface,
shape: SquircleBorder(
radius: BorderRadius.circular(35),
),
),
child: Icon(
note.pinned! ? CupertinoIcons.pin_slash_fill : CupertinoIcons.pin_fill,
size: 27,
color: Theme.of(context).colorScheme.surface,
),
onTap: () {
setState(() {
watch(noteProvider.notifier).updateNote(note: note, pinned: !note.pinned!);
});
},
),
);
}
}
|
#!/bin/bash
## ABSOLUTE path to the spawn-fcgi binary
SPAWNFCGI="/usr/bin/spawn-fcgi"
## ABSOLUTE path to the PHP binary
FCGIPROGRAM="/usr/bin/php-cgi"
## ABSOLUTE path to UNIX socket
FCGISOCKET="/var/run/php.socket"
## uncomment the PHPRC line if you want an extra php.ini for this user
## store your custom php.ini in /var/www/fastcgi/fred/php.ini
## with a custom php.ini you can improve your security:
## just set open_basedir to the user's web folder
## Example: (add this line in your custom php.ini)
## open_basedir = /var/www/vhosts/fred/html
##
#PHPRC="/var/www/fastcgi/fred/"
## number of PHP children to spawn in addition to the default. Minimum of 2.
## Actual children = PHP_FCGI_CHILDREN + 1
PHP_FCGI_CHILDREN=5
## number of requests served by a single php process before it is restarted
PHP_FCGI_MAX_REQUESTS=1000
## IP addresses from which PHP will accept FastCGI connections
FCGI_WEB_SERVER_ADDRS="127.0.0.1"
# allowed environment variables, separated by spaces
ALLOWED_ENV="PATH USER"
## if this script is run as root switch to the following user
USERID=user
GROUPID=group
################## no config below this line
if test x$PHP_FCGI_CHILDREN = x; then
PHP_FCGI_CHILDREN=5
fi
export PHP_FCGI_MAX_REQUESTS
export FCGI_WEB_SERVER_ADDRS
export PHPRC
ALLOWED_ENV="$ALLOWED_ENV PHP_FCGI_MAX_REQUESTS FCGI_WEB_SERVER_ADDRS PHPRC"
# copy the allowed environment variables
E=
for i in $ALLOWED_ENV; do
E="$E $i=$(eval echo "\$$i")"
done
# clean environment and set up a new one
env - $E $SPAWNFCGI -s $FCGISOCKET -f $FCGIPROGRAM -u $USERID -g $GROUPID -C $PHP_FCGI_CHILDREN
|
class CompanySerializer < ActiveModel::Serializer
attributes :id, :name, :url, :description
has_many :positions
has_many :users
end
|
package net.yuzumone.tootrus.data.mastodon
import com.sys1yagi.mastodon4j.MastodonClient
import com.sys1yagi.mastodon4j.api.Range
import com.sys1yagi.mastodon4j.api.entity.Status
import com.sys1yagi.mastodon4j.api.method.Timelines
import javax.inject.Inject
import javax.inject.Named
interface TimelineRepository {
fun getTimeline(range: Range): List<Status>
}
class DefaultTimelineRepository @Inject constructor(
@Named("client") private val client: MastodonClient
): TimelineRepository {
override fun getTimeline(range: Range): List<Status> {
val result = Timelines(client).getHome().execute()
return result.part
}
}
|
class User
include DataMapper::Resource
property :id, Serial
property :name, String
property :email, String
property :username, String
property :role, String
property :created_at, DateTime
property :updated_at, DateTime
validates_presence_of :name, :username
validates_uniqueness_of :username
has n, :responses
end
|
/*
* The Shadow Simulator
* See LICENSE for licensing information
*/
use test_utils::set;
use test_utils::TestEnvironment as TestEnv;
// The number of random values to generate with each method.
const RGENLEN: usize = 200;
// The number of buckets to use when checking random value distribution.
const BUCKETLEN: usize = 10;
fn main() -> Result<(), String> {
// should we restrict the tests we run?
let filter_shadow_passing = std::env::args().any(|x| x == "--shadow-passing");
let filter_libc_passing = std::env::args().any(|x| x == "--libc-passing");
// should we summarize the results rather than exit on a failed test
let summarize = std::env::args().any(|x| x == "--summarize");
let mut tests: Vec<test_utils::ShadowTest<_, _>> = vec![
test_utils::ShadowTest::new(
"test_dev_urandom",
test_dev_urandom,
set![TestEnv::Libc, TestEnv::Shadow],
),
// Outside of Shadow, this test could block indefinitely.
test_utils::ShadowTest::new("test_dev_random", test_dev_random, set![TestEnv::Shadow]),
test_utils::ShadowTest::new("test_rand", test_rand, set![TestEnv::Libc, TestEnv::Shadow]),
test_utils::ShadowTest::new(
"test_getrandom",
test_getrandom,
set![TestEnv::Libc, TestEnv::Shadow],
),
];
if filter_shadow_passing {
tests = tests
.into_iter()
.filter(|x| x.passing(TestEnv::Shadow))
.collect()
}
if filter_libc_passing {
tests = tests
.into_iter()
.filter(|x| x.passing(TestEnv::Libc))
.collect()
}
test_utils::run_tests(&tests, summarize)?;
println!("Success.");
Ok(())
}
// This is just a quick check that the 0<=f<=100 fractions that are generated using
// the random APIs are "plausibly random"; its primary purpose is to test that the
// randomness API plumbing is working, but not to test the quality of the underlying
// RNGs. We just check that each decile of the distribution has at least one entry.
fn check_randomness(fracs: &[f64]) -> Result<(), String> {
let mut buckets = [0_u8; BUCKETLEN];
for f in fracs {
let percent = (f * 100_f64) as u8;
assert!(percent <= 100, "invalid random percent value: {}", percent,);
let j = percent as usize % BUCKETLEN;
buckets[j] += 1;
}
let fail = buckets.iter().any(|&i| i == 0);
println!("bucket values:");
for (i, val) in buckets.iter().enumerate() {
println!("bucket[{}] = {}", i, val);
}
if fail {
return Err("failed to get random values across entire range".to_string());
} else {
return Ok(());
}
}
fn test_path_helper(path: &str) -> Result<(), String> {
use std::io::Read;
let mut file =
std::fs::File::open(path).map_err(|e| format!("error: cannot open file: {:?}", e))?;
let mut values = [0_f64; RGENLEN];
for val in values.iter_mut() {
let mut rv = [0_u8; 4];
file.read_exact(&mut rv)
.map_err(|_| "error reading file".to_string())?;
*val = u32::from_be_bytes([rv[0], rv[1], rv[2], rv[3]]) as f64 / core::u32::MAX as f64;
}
check_randomness(&values)
}
fn test_dev_urandom() -> Result<(), String> {
test_path_helper("/dev/urandom")
}
fn test_dev_random() -> Result<(), String> {
test_path_helper("/dev/random")
}
fn test_rand() -> Result<(), String> {
let mut values = [0_f64; RGENLEN];
for val in values.iter_mut() {
let random_value = unsafe { libc::rand() };
if random_value < 0 || random_value > libc::RAND_MAX {
return Err("error: rand returned bytes outside of expected range".to_string());
}
*val = random_value as f64 / libc::RAND_MAX as f64;
}
check_randomness(&values)
}
fn test_getrandom() -> Result<(), String> {
let mut values = [0_f64; RGENLEN];
for val in values.iter_mut() {
let mut rv = [0_u8; 4];
// getrandom() was only added in glibc 2.25, so use syscall until all of
// our supported OS targets pick up the new libc call
// https://sourceware.org/legacy-ml/libc-alpha/2017-02/msg00079.html
let num_bytes = unsafe {
libc::syscall(
libc::SYS_getrandom,
rv.as_mut_ptr() as *mut libc::c_void,
rv.len(),
0,
)
};
if num_bytes <= 0 {
return Err("error: getrandom returned bytes outside of expected range".to_string());
}
*val = u32::from_be_bytes([rv[0], rv[1], rv[2], rv[3]]) as f64 / core::u32::MAX as f64;
}
check_randomness(&values)
}
|
; NOTE: Assertions have been autogenerated by utils/update_test_checks.py
; RUN: opt < %s -instcombine -S | FileCheck %s
; RUN: opt < %s -passes=instcombine -S | FileCheck %s
; TODO: Replace with boolean Phi.
define i1 @test_eq(i1 %cond) {
; CHECK-LABEL: @test_eq(
; CHECK-NEXT: entry:
; CHECK-NEXT: br i1 [[COND:%.*]], label [[IF_TRUE:%.*]], label [[IF_FALSE:%.*]]
; CHECK: if.true:
; CHECK-NEXT: br label [[MERGE:%.*]]
; CHECK: if.false:
; CHECK-NEXT: br label [[MERGE]]
; CHECK: merge:
; CHECK-NEXT: [[PHI:%.*]] = phi i32 [ 123, [[IF_TRUE]] ], [ 456, [[IF_FALSE]] ]
; CHECK-NEXT: br label [[EXIT:%.*]]
; CHECK: exit:
; CHECK-NEXT: [[COMPARE:%.*]] = icmp eq i32 [[PHI]], 456
; CHECK-NEXT: ret i1 [[COMPARE]]
;
entry:
br i1 %cond, label %if.true, label %if.false
if.true:
br label %merge
if.false:
br label %merge
merge:
%phi = phi i32 [123, %if.true], [456, %if.false]
br label %exit
exit:
%compare = icmp eq i32 %phi, 456
ret i1 %compare
}
define i1 @test_slt(i1 %cond) {
; CHECK-LABEL: @test_slt(
; CHECK-NEXT: entry:
; CHECK-NEXT: br i1 [[COND:%.*]], label [[IF_TRUE:%.*]], label [[IF_FALSE:%.*]]
; CHECK: if.true:
; CHECK-NEXT: br label [[MERGE:%.*]]
; CHECK: if.false:
; CHECK-NEXT: br label [[MERGE]]
; CHECK: merge:
; CHECK-NEXT: [[PHI:%.*]] = phi i32 [ 123, [[IF_TRUE]] ], [ 456, [[IF_FALSE]] ]
; CHECK-NEXT: br label [[EXIT:%.*]]
; CHECK: exit:
; CHECK-NEXT: [[COMPARE:%.*]] = icmp ult i32 [[PHI]], 456
; CHECK-NEXT: ret i1 [[COMPARE]]
;
entry:
br i1 %cond, label %if.true, label %if.false
if.true:
br label %merge
if.false:
br label %merge
merge:
%phi = phi i32 [123, %if.true], [456, %if.false]
br label %exit
exit:
%compare = icmp slt i32 %phi, 456
ret i1 %compare
}
define i1 @test_sle(i1 %cond) {
; CHECK-LABEL: @test_sle(
; CHECK-NEXT: entry:
; CHECK-NEXT: br i1 [[COND:%.*]], label [[IF_TRUE:%.*]], label [[IF_FALSE:%.*]]
; CHECK: if.true:
; CHECK-NEXT: br label [[MERGE:%.*]]
; CHECK: if.false:
; CHECK-NEXT: br label [[MERGE]]
; CHECK: merge:
; CHECK-NEXT: br label [[EXIT:%.*]]
; CHECK: exit:
; CHECK-NEXT: ret i1 true
;
entry:
br i1 %cond, label %if.true, label %if.false
if.true:
br label %merge
if.false:
br label %merge
merge:
%phi = phi i32 [123, %if.true], [456, %if.false]
br label %exit
exit:
%compare = icmp sle i32 %phi, 456
ret i1 %compare
}
define i1 @test_ne(i1 %cond) {
; CHECK-LABEL: @test_ne(
; CHECK-NEXT: entry:
; CHECK-NEXT: br i1 [[COND:%.*]], label [[IF_TRUE:%.*]], label [[IF_FALSE:%.*]]
; CHECK: if.true:
; CHECK-NEXT: br label [[MERGE:%.*]]
; CHECK: if.false:
; CHECK-NEXT: br label [[MERGE]]
; CHECK: merge:
; CHECK-NEXT: [[PHI:%.*]] = phi i32 [ 123, [[IF_TRUE]] ], [ 456, [[IF_FALSE]] ]
; CHECK-NEXT: br label [[EXIT:%.*]]
; CHECK: exit:
; CHECK-NEXT: [[COMPARE:%.*]] = icmp ne i32 [[PHI]], 456
; CHECK-NEXT: ret i1 [[COMPARE]]
;
entry:
br i1 %cond, label %if.true, label %if.false
if.true:
br label %merge
if.false:
br label %merge
merge:
%phi = phi i32 [123, %if.true], [456, %if.false]
br label %exit
exit:
%compare = icmp ne i32 %phi, 456
ret i1 %compare
}
|
using System;
using UniRx;
using UnityEngine.Networking;
public static class NetworkingExtensions
{
public static IObservable<string> ObserveRequestResult(this UnityWebRequest request)
{
if (request.downloadHandler == null)
{
request.downloadHandler = new DownloadHandlerBuffer();
}
return request.SendWebRequest().AsObservable().Select(_ =>
{
return request.downloadHandler.text;
});
}
}
|
#!/bin/bash
# Commands that you want to be run once connected can go here
cd /home/pi/iqube-web
/usr/bin/npm install
sudo /usr/bin/node /home/pi/iqube-web/app.js &
# uncomment for testing:
#/home/pi/bin/test-turnkey.sh
#startx /home/pi/raspberry-pi-turnkey/xinitrc_ap
startx /home/pi/raspberry-pi-turnkey/xinitrc
|
import 'dart:io';
import 'package:path/path.dart' as path;
final Directory externalStorage = new Directory(path.current);
|
<?php
namespace LeKoala\CommonExtensions;
use SilverStripe\ORM\DataObject;
use SilverStripe\Forms\FieldList;
use SilverStripe\ORM\DataExtension;
use LeKoala\CmsActions\CustomAction;
use SilverStripe\Forms\GridField\GridField;
/**
 * Makes a record lockable
 *
 * Unlocking requires a special action
*
* @property \LeKoala\CommonExtensions\LockableExtension|DataObject $owner
* @property boolean $IsLocked
*/
class LockableExtension extends DataExtension
{
private static $db = [
"IsLocked" => "Boolean"
];
public function updateCMSFields(FieldList $fields)
{
$fields->removeByName('IsLocked');
}
/**
* Use this in your model. It has to run last and cannot be done automatically
*
* if($this->hasExtension(LockableExtension::class)) {
* $this->lockFields($fields);
* }
*
* @param FieldList $fields
* @return void
*/
public function lockFields(FieldList $fields)
{
if (!$this->owner->IsLocked) {
return;
}
$fields->makeReadonly();
}
public function canEdit($member)
{
if ($this->owner->IsLocked) {
return false;
}
// DataExtension has no canEdit() to defer to; return null so default permission checks apply
return null;
}
public function updateCMSActions(FieldList $actions)
{
if ($this->owner->ID && !$this->owner->IsLocked) {
$lockRecord = new CustomAction("LockRecord", "Lock");
$lockRecord->setConfirmation("Are you sure you want to lock this record?");
$actions->push($lockRecord);
}
}
public function LockRecord($data, $form, $controller)
{
$this->owner->IsLocked = true;
$this->owner->write();
return 'Record locked';
}
}
|
#pragma once
#include "../Interfaces/hooks.h"
#include "../main.h"
//Hooks
namespace ThirdPerson
{
void OverrideView(CViewSetup* pSetup);
void FrameStageNotify(ClientFrameStage_t stage);
}
|
#include "match4.h"
unsigned char PLAYER_TURN = 0;
char PLAYER_PIECE[2] = {'X', 'O'};
unsigned char get_player_turn(void)
{
return (PLAYER_TURN);
}
unsigned char set_player_turn(unsigned char player)
{
if (player == 0 || player == 1)
{
PLAYER_TURN = player;
}
return (PLAYER_TURN);
}
unsigned char get_next_player_turn()
{
return ((~PLAYER_TURN) & 1);
}
char get_player_piece(void)
{
return (PLAYER_PIECE[PLAYER_TURN]);
}
|
package org.dnltsk.luggagelift.order.confirm
import org.dnltsk.luggagelift.luggage.LuggageRepository
import org.junit.Assert.assertEquals
import org.junit.Test
class OrderConfirmHttpControllerTest {
@Test
fun `correct number of luggageIds are generated`() {
val controller = OrderConfirmHttpController(LuggageRepository())
val fiveBags = 5
val fiveLuggageIds = controller.registerBagTrackIds(fiveBags)
assertEquals(fiveLuggageIds.size, fiveBags)
}
}
|
'use strict';
const Joi = require('joi');
const internals = {};
// Root schemas
exports.handler = [
Joi.string(),
Joi.object({
template: Joi.string(),
context: Joi.object(),
options: Joi.object()
})
];
// Manager schemas
exports.viewOverride = Joi.object({
path: [Joi.array().items(Joi.string()), Joi.string()],
relativeTo: Joi.string(),
compileOptions: Joi.object(),
runtimeOptions: Joi.object(),
layout: Joi.string().allow(false, true),
layoutKeyword: Joi.string(),
layoutPath: [Joi.array().items(Joi.string()), Joi.string()],
encoding: Joi.string(),
allowAbsolutePaths: Joi.boolean(),
allowInsecureAccess: Joi.boolean(),
contentType: Joi.string()
});
exports.viewBase = exports.viewOverride.keys({
partialsPath: [Joi.array().items(Joi.string()), Joi.string()],
helpersPath: [Joi.array().items(Joi.string()), Joi.string()],
isCached: Joi.boolean(),
compileMode: Joi.string().valid('sync', 'async'),
defaultExtension: Joi.string()
});
exports.manager = exports.viewBase.keys({
engines: Joi.object().required(),
context: [Joi.object(), Joi.func()]
});
exports.view = exports.viewBase.keys({
module: Joi.object({
compile: Joi.func().required()
})
.options({ allowUnknown: true })
.required()
});
|
-- |Processing of data from the lodestone and database
module Data.Krile.FFData
where
|
#!/usr/bin/env perl
use v5.18;
use warnings FATAL => 'all';
package ServedObject;
use Net::DBus;
use parent qw(Net::DBus::Object);
use Net::DBus::Exporter qw(com.zbentley.ipc.gelatin);
sub new {
my ( $class, $service ) = @_;
return $class->SUPER::new($service, "/ipc/gelatin");
}
dbus_method("test_method", ["string", "string"], ["string"]);
sub test_method {
my ( $self, $arg1, $arg2 ) = @_;
say "WHOA";
return "$arg1 :: $arg2";
}
1;
package main;
use Data::Printer;
use Net::DBus;
use Net::DBus::Reactor;
#my $address = local $ENV{DBUS_SESSION_BUS_ADDRESS} = "launchd:env=DBUS_LAUNCHD_SESSION_BUS_SOCKET";
my $address = local $ENV{DBUS_SESSION_BUS_ADDRESS} = "unix:path=tst";
say "Using address: $address";
my $bus = Net::DBus->find;
my $service = $bus->export_service("com.zbentley.ipc.gelatin");
my $object = ServedObject->new($service);
sleep 10;
Net::DBus::Reactor->main->run;
1;
|
# frozen_string_literal: true
require 'google/apis/options'
# these requires resolve load-order issues (the constants Google::Apis::ServerError and Signet::RemoteServerError are otherwise undefined when rescued in multiple places)
require 'google/apis/errors'
require 'signet/errors'
# As stated in https://github.com/googleapis/google-api-ruby-client#errors--retries,
# enabling retries is strongly encouraged but disabled by default. Large uploads
# that may hit timeouts will mainly benefit from this.
Google::Apis::RequestOptions.default.retries = 3 if Gitlab::Utils.to_boolean(ENV.fetch('ENABLE_GOOGLE_API_RETRIES', true))
|
# Base64URL
```js
import { Base64URL } from "https://code4fukui.github.io/Base64URL/Base64URL.js";
const bin = new Uint8Array([1, 2, 3]);
const s = Base64URL.encode(bin);
console.log(s);
const bin2 = Base64URL.decode(s);
console.log(bin2);
```
|
## ----------- General functions
name(d :: AbstractDeviation) = d.name;
short_description(d :: AbstractDeviation) = d.shortStr;
long_description(d :: AbstractDeviation) = d.longStr;
norm_p(d :: AbstractDeviation) = d.normP;
"""
$(SIGNATURES)
Retrieve data values
"""
get_data_values(d :: AbstractDeviation{F1}) where F1 = deepcopy(d.dataV);
"""
$(SIGNATURES)
Retrieve model values
"""
get_model_values(d :: AbstractDeviation{F1}) where F1 = deepcopy(d.modelV);
"""
$(SIGNATURES)
Retrieve std errors of data values. Not valid for all types of deviations.
Returns `nothing` if std errors are not set (are all 0).
"""
function get_std_errors(d :: AbstractDeviation{F1}) where F1
if all(d.stdV .== zero(F1))
return nothing
else
return deepcopy(d.stdV);
end
end
"""
$(SIGNATURES)
Set model values in an existing deviation.
"""
function set_model_values(d :: AbstractDeviation{F1}, modelV) where F1
dataV = get_data_values(d);
if typeof(modelV) != typeof(dataV)
println(modelV);
println(dataV);
error("Type mismatch in $(d.name): $(typeof(modelV)) vs $(typeof(dataV))");
end
@assert size(modelV) == size(dataV) "Size mismatch: $(size(modelV)) vs $(size(dataV))"
d.modelV = deepcopy(modelV);
return nothing
end
"""
$(SIGNATURES)
Retrieve weights. Returns scalar 1 for scalar deviations.
"""
function get_weights(d :: AbstractDeviation{F1}) where F1
return d.wtV
end
"""
$(SIGNATURES)
Set weights. Does nothing for Deviation types that do not have weights.
"""
function set_weights!(d :: AbstractDeviation{F1}, wtV) where F1
if isa(d, Deviation)
@assert typeof(wtV) == typeof(get_data_values(d))
@assert size(wtV) == size(get_data_values(d))
@assert all(wtV .> 0.0)
d.wtV = deepcopy(wtV);
end
return nothing
end
"""
$(SIGNATURES)
Validate a `Deviation`.
"""
validate_deviation(d :: AbstractDeviation) = true
## ------------- Computing the scalar deviation
"""
$(SIGNATURES)
Compute the scalar deviation between model and data values.
Using a weighted sum of deviations to a power. By default: simply mean abs deviation.
Note: Using a weighted norm would not increase the overall deviation for a moment that fits poorly.
"""
function scalar_deviation(modelV :: AbstractArray{F1}, dataV :: AbstractArray{F1},
wtV; p :: F1 = one(F1)) where F1 <: AbstractFloat
totalWt = sum(wtV);
@assert totalWt > 1e-8 "Total weight too small: $totalWt"
# Scaling `wtV` so it sums to 1 partially undoes the `^(1/p)` scaling below.
devV = (wtV ./ totalWt) .* (abs.(modelV .- dataV)) .^ p;
scalarDev = totalWt * sum(devV);
return scalarDev
end
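# Worked example for scalar_deviation above (illustrative numbers, not from the original source):
# with modelV = [1.0, 2.0], dataV = [1.5, 1.0], wtV = [1.0, 1.0] and p = 1,
# devV = (wtV ./ 2) .* abs.(modelV .- dataV) = [0.25, 0.5], so
# scalarDev = 2 * sum(devV) = 1.5, i.e. the plain weighted sum of absolute deviations,
# since the 1/totalWt scaling inside the sum is undone by the totalWt factor outside it.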
scalar_deviation(model :: F1, data :: F1, wt :: F1;
p :: F1 = one(F1)) where F1 <: AbstractFloat =
wt * (abs(model - data) ^ p);
## --------------- Display
# This is never called for concrete types (why?)
Base.show(io :: IO, d :: AbstractDeviation{F1}) where F1 =
Base.print(io, "$(name(d)): ", short_description(d));
## Formatted short deviation for display
function short_display(d :: AbstractDeviation{F1}; inclScalarWt :: Bool = true) where F1
_, scalarStr = scalar_dev(d, inclScalarWt = inclScalarWt);
return d.shortStr * ": " * scalarStr;
end
"""
$(SIGNATURES)
Show a deviation using the show function contained in its definition.
Optionally, a file path can be provided. If none is provided, the path inside the deviation is used.
"""
function show_deviation(d :: AbstractDeviation{F1}; showModel :: Bool = true, fPath :: String = "") where F1
return d.showFct(d, showModel = showModel, fPath = fPath)
end
function open_show_path(d :: AbstractDeviation{F1};
fPath :: String = "", writeMode :: String = "w") where F1
if isempty(fPath)
showPath = d.showPath;
else
showPath = fPath;
end
if isempty(showPath)
io = stdout;
else
io = open(showPath, writeMode);  # honor the writeMode argument ("w" by default)
end
return io
end
function close_show_path(d :: AbstractDeviation{F1}, io) where F1
if io != stdout
close(io);
end
end
# -------------
|
# vortex-cypher-layers
layer configurations for my Vortex Cypher
## Default Layer
This is the stock Vortex Cypher layout without any modifications.

|
<?php
declare(strict_types=1);
namespace Emonkak\Validation\Tests\Type;
use Emonkak\Validation\Collector\CollectorInterface;
use Emonkak\Validation\Type\ArrayOf;
use Emonkak\Validation\Type\TypeInterface;
use PHPUnit\Framework\TestCase;
/**
* @covers \Emonkak\Validation\Type\ArrayOf
*/
class ArrayOfTest extends TestCase
{
/**
* @dataProvider providerGetDeclaration
*/
public function testGetDeclaration($itemDeclaration, $expectedDeclaration): void
{
$itemType = $this->createMock(TypeInterface::class);
$itemType
->expects($this->once())
->method('getDeclaration')
->willReturn($itemDeclaration);
$arrayOf = new ArrayOf($itemType);
$this->assertSame($itemType, $arrayOf->getItemType());
$this->assertSame($expectedDeclaration, $arrayOf->getDeclaration());
}
public function providerGetDeclaration(): array
{
return [
['integer', 'integer[]'],
['(integer|string)', '(integer|string)[]'],
];
}
public function testValidateReturnsTrue(): void
{
$key = 'foo';
$value = [
123,
456,
789,
];
$collector = $this->createMock(CollectorInterface::class);
$itemType = $this->createMock(TypeInterface::class);
$type = new ArrayOf($itemType);
$collector
->expects($this->never())
->method('collectTypeError');
$itemType
->expects($this->at(0))
->method('validate')
->with(
$this->identicalTo($key . '[0]'),
$this->identicalTo($value[0]),
$this->identicalTo($collector)
)
->willReturn(true);
$itemType
->expects($this->at(1))
->method('validate')
->with(
$this->identicalTo($key . '[1]'),
$this->identicalTo($value[1]),
$this->identicalTo($collector)
)
->willReturn(true);
$itemType
->expects($this->at(2))
->method('validate')
->with(
$this->identicalTo($key . '[2]'),
$this->identicalTo($value[2]),
$this->identicalTo($collector)
)
->willReturn(true);
$this->assertTrue($type->validate($key, $value, $collector));
}
public function testValidateReturnsFalse(): void
{
$key = 'foo';
$value = [
123,
true,
false,
];
$collector = $this->createMock(CollectorInterface::class);
$itemType = $this->createMock(TypeInterface::class);
$type = new ArrayOf($itemType);
$collector
->expects($this->never())
->method('collectTypeError');
$itemType
->expects($this->at(0))
->method('validate')
->with(
$this->identicalTo($key . '[0]'),
$this->identicalTo($value[0]),
$this->identicalTo($collector)
)
->willReturn(true);
$itemType
->expects($this->at(1))
->method('validate')
->with(
$this->identicalTo($key . '[1]'),
$this->identicalTo($value[1]),
$this->identicalTo($collector)
)
->willReturn(false);
$itemType
->expects($this->at(2))
->method('validate')
->with(
$this->identicalTo($key . '[2]'),
$this->identicalTo($value[2]),
$this->identicalTo($collector)
)
->willReturn(false);
$this->assertFalse($type->validate($key, $value, $collector));
}
public function testValidateWithNull(): void
{
$key = 'foo';
$value = null;
$collector = $this->createMock(CollectorInterface::class);
$itemType = $this->createMock(TypeInterface::class);
$type = new ArrayOf($itemType);
$collector
->expects($this->once())
->method('collectTypeError')
->with(
$this->identicalTo($key),
$this->identicalTo($value),
$this->identicalTo($type)
);
$itemType
->expects($this->never())
->method('validate');
$this->assertFalse($type->validate($key, $value, $collector));
}
}
|
import { Component, OnInit } from '@angular/core';
// Imports
import { AngularFirestore, AngularFirestoreCollection } from '@angular/fire/firestore';
import { Observable } from 'rxjs';
import { Item } from 'src/app/interfaces/item.interface';
@Component({
selector: 'app-photos',
templateUrl: './photos.component.html',
styles: [
]
})
export class PhotosComponent implements OnInit {
private itemsCollection: AngularFirestoreCollection<Item>;
public items: Observable<Item[]>;
constructor(
private angularFirestorage: AngularFirestore
) {
this.itemsCollection = this.angularFirestorage.collection<Item>('img');
this.items = this.itemsCollection.valueChanges();
}
ngOnInit(): void {
}
}
|
/**
* File: RandomWorld.java
* Author: William Forte
* Time: 9:52:46 AM
* Date: Mar 24, 2016
* Project: Survival
* Package: survival.main.generation.worlds
*/
package survival.main.generation.worlds;
import java.awt.Graphics2D;
import java.util.Random;
import backbone.engine.main.BackboneGameStateManager;
import survival.main.entity.creatures.Player;
import survival.main.generation.Block;
import survival.main.generation.BlockType;
import survival.main.generation.World;
/**
* File: RandomWorld.java
* Language: Java
* Author: Will 40
* Data Created: Mar 24, 2016
* Time Created: 9:52:46 AM
* Project: Survival
* Package: survival.main.generation.worlds
*/
public class RandomWorld extends World {
public RandomWorld(BackboneGameStateManager gsm, int width, int height) {
super(gsm);
this.width = width;
this.height = height;
loadWorld();
}
private void loadRandomWorld(int border_size) {
for(int y = border_size; y < height - border_size; y++) {
for(int x = border_size; x < width - border_size; x++) {
BlockType blocktype = BlockType.values()[new Random().nextInt(BlockType.values().length)];
if(blocktype == BlockType.BRICK) blocktype = BlockType.GRASS;
if(new Random().nextInt(2) == 0) blocktype = BlockType.GRASS;
block_manager.addBlock(
new Block(
x * block_size,
y * block_size,
blocktype)
.setSolid(blocktype.isSolid()));
}
}
loadBorder(border_size);
}
private void loadBorder(int border_size) {
for(int y = 0; y < height; y++) {
for(int x = 0; x < width; x++) {
if(y >= 0 && y < border_size || y >= height - border_size && y < height) {
block_manager.addBlock(
new Block(
x * block_size,
y * block_size,
BlockType.BRICK)
.setSolid(true));
}
if(x >= 0 && x < border_size || x >= width - border_size && x < width) {
block_manager.addBlock(
new Block(
x * block_size,
y * block_size,
BlockType.BRICK)
.setSolid(true));
}
}
}
}
/* (non-Javadoc)
* @see survival.main.generation.World#loadWorld()
*/
@Override
public void loadWorld() {
loadRandomWorld(3);
super.loadWorld();
}
/* (non-Javadoc)
* @see survival.main.generation.World#tick()
*/
@Override
public void tick() {
setWorldVariables(worldxpos, worldypos);
super.tick();
}
/* (non-Javadoc)
* @see survival.main.generation.World#render(java.awt.Graphics2D)
*/
@Override
public void render(Graphics2D g) {
super.render(g);
}
/* (non-Javadoc)
* @see survival.main.generation.World#keyPressed(int)
*/
@Override
public void keyPressed(int k) {
getPlayer().keyPressed(k);
}
/* (non-Javadoc)
* @see survival.main.generation.World#keyReleased(int)
*/
@Override
public void keyReleased(int k) {
getPlayer().keyReleased(k);
}
}
|
import {Routes, RouterModule} from '@angular/router';
import {Maps} from './maps.component';
import {GoogleMaps} from './components/googleMaps/googleMaps.component';
import {NgModule} from "@angular/core";
import {MarkerResolver} from "./components/googleMaps/marker-resolver.service";
import {MarkerService} from "./components/googleMaps/marker.service";
// noinspection TypeScriptValidateTypes
const routes: Routes = [
{
path: '',
component: Maps,
children: [
{path: 'googlemaps', component: GoogleMaps, resolve: {marker: MarkerResolver}}
]
}
];
//export const routing = RouterModule.forChild(routes);
@NgModule({
imports: [
RouterModule.forChild(routes)
],
exports: [
RouterModule
],
providers: [
MarkerResolver
]
})
export class MapsRoutingModule { }
|
package org.torproject.android.service.vpn;
/*
* Copyright (c) 2013, Psiphon Inc.
* All rights reserved.
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*
*/
import android.annotation.TargetApi;
import android.os.Build;
import android.os.ParcelFileDescriptor;
import android.util.Log;
import java.net.DatagramSocket;
import java.net.Socket;
public class Tun2Socks
{
public static interface IProtectSocket
{
boolean doVpnProtect(Socket socket);
boolean doVpnProtect(DatagramSocket socket);
};
private static final String TAG = Tun2Socks.class.getSimpleName();
private static final boolean LOGD = true;
private static Thread mThread;
private static ParcelFileDescriptor mVpnInterfaceFileDescriptor;
private static int mVpnInterfaceMTU;
private static String mVpnIpAddress;
private static String mVpnNetMask;
private static String mSocksServerAddress;
private static String mUdpgwServerAddress;
private static boolean mUdpgwTransparentDNS;
// Note: this class isn't a singleton, but you can't run more
// than one instance due to the use of global state (the lwip
// module, etc.) in the native code.
public static void init () {
System.loadLibrary("tun2socks");
}
public static void Start(
ParcelFileDescriptor vpnInterfaceFileDescriptor,
int vpnInterfaceMTU,
String vpnIpAddress,
String vpnNetMask,
String socksServerAddress,
String udpgwServerAddress,
boolean udpgwTransparentDNS)
{
mVpnInterfaceFileDescriptor = vpnInterfaceFileDescriptor;
mVpnInterfaceMTU = vpnInterfaceMTU;
mVpnIpAddress = vpnIpAddress;
mVpnNetMask = vpnNetMask;
mSocksServerAddress = socksServerAddress;
mUdpgwServerAddress = udpgwServerAddress;
mUdpgwTransparentDNS = udpgwTransparentDNS;
if (mVpnInterfaceFileDescriptor != null)
runTun2Socks(
mVpnInterfaceFileDescriptor.detachFd(),
mVpnInterfaceMTU,
mVpnIpAddress,
mVpnNetMask,
mSocksServerAddress,
mUdpgwServerAddress,
mUdpgwTransparentDNS ? 1 : 0);
}
public static void Stop()
{
terminateTun2Socks();
}
public static void logTun2Socks(
String level,
String channel,
String msg)
{
String logMsg = level + "(" + channel + "): " + msg;
if (0 == level.compareTo("ERROR"))
{
Log.e(TAG, logMsg);
}
else
{
if (LOGD) Log.d(TAG, logMsg);
}
}
private native static int runTun2Socks(
int vpnInterfaceFileDescriptor,
int vpnInterfaceMTU,
String vpnIpAddress,
String vpnNetMask,
String socksServerAddress,
String udpgwServerAddress,
int udpgwTransparentDNS);
private native static void terminateTun2Socks();
} |
namespace Line.Messaging
{
/// <summary>
/// Response from Get User Profile API.
/// https://developers.line.me/en/docs/messaging-api/reference/#get-profile
/// </summary>
public class UserProfile
{
/// <summary>
/// Display name
/// </summary>
public string DisplayName { get; set; }
/// <summary>
/// User ID
/// </summary>
public string UserId { get; set; }
/// <summary>
/// Language
/// </summary>
public string language { get; set; }
/// <summary>
/// Image URL
/// </summary>
public string PictureUrl { get; set; }
/// <summary>
/// Status message
/// </summary>
public string StatusMessage { get; set; }
}
}
|
package org.odk.collect.android.support
import android.os.Bundle
import android.os.Handler
import android.os.Looper
import androidx.fragment.app.FragmentManager
import org.odk.collect.geo.maps.MapFragment
import org.odk.collect.geo.maps.MapPoint
class FakeClickableMapFragment : MapFragment {
private var idCounter = 1
private var featureClickListener: MapFragment.FeatureListener? = null
override fun applyConfig(config: Bundle?) { }
override fun addTo(
fragmentManager: FragmentManager,
containerId: Int,
readyListener: MapFragment.ReadyListener?,
errorListener: MapFragment.ErrorListener?
) {
readyListener?.onReady(this)
}
override fun getCenter(): MapPoint {
return MapPoint(0.0, 0.0)
}
override fun getZoom(): Double {
return 1.0
}
override fun setCenter(center: MapPoint?, animate: Boolean) {}
override fun zoomToPoint(center: MapPoint?, animate: Boolean) {}
override fun zoomToPoint(center: MapPoint?, zoom: Double, animate: Boolean) {}
override fun zoomToBoundingBox(
points: MutableIterable<MapPoint>?,
scaleFactor: Double,
animate: Boolean
) {}
override fun addMarker(point: MapPoint?, draggable: Boolean, iconAnchor: String?): Int {
val id = idCounter++
return id
}
override fun setMarkerIcon(featureId: Int, drawableId: Int) {}
override fun getMarkerPoint(featureId: Int): MapPoint {
return MapPoint(0.0, 0.0)
}
override fun addDraggablePoly(points: MutableIterable<MapPoint>, closedPolygon: Boolean): Int {
return -1
}
override fun appendPointToPoly(featureId: Int, point: MapPoint) {}
override fun removePolyLastPoint(featureId: Int) {}
override fun getPolyPoints(featureId: Int): MutableList<MapPoint> {
return mutableListOf()
}
override fun removeFeature(featureId: Int) {}
override fun clearFeatures() {}
override fun setClickListener(listener: MapFragment.PointListener?) {}
override fun setLongPressListener(listener: MapFragment.PointListener?) {}
override fun setFeatureClickListener(listener: MapFragment.FeatureListener?) {
featureClickListener = listener
}
override fun setDragEndListener(listener: MapFragment.FeatureListener?) {}
override fun setGpsLocationEnabled(enabled: Boolean) {}
override fun getGpsLocation(): MapPoint? {
return null
}
override fun getLocationProvider(): String? {
return null
}
override fun runOnGpsLocationReady(listener: MapFragment.ReadyListener) {}
override fun setGpsLocationListener(listener: MapFragment.PointListener?) {}
override fun setRetainMockAccuracy(retainMockAccuracy: Boolean) {}
fun clickOnFeature(featureId: Int) {
var done = false
Handler(Looper.getMainLooper()).post {
featureClickListener?.onFeature(featureId)
done = true
}
while (!done) {
Thread.sleep(1)
}
}
}
|
export const environment = {
apiGatewayBaseUrl: 'https://{APIKEY}.execute-api.{REGION}.amazonaws.com',
production: true,
};
|
package es.weso.rdfshape.server.api.routes.data.logic.operations
import cats.effect.IO
import es.weso.rdfshape.server.api.routes.data.logic.operations.DataQuery.{
DataQueryResult,
successMessage
}
import es.weso.rdfshape.server.api.routes.data.logic.types.Data
import es.weso.rdfshape.server.api.routes.endpoint.logic.query.SparqlQuery
import io.circe.syntax.EncoderOps
import io.circe.{Encoder, Json}
/** Data class representing the output of a data-information operation
*
* @param inputData RDF input data (contains content and format information)
* @param inputQuery Sparql query input
* @param result Object of type [[DataQueryResult]] containing the properties extracted from the data
*/
final case class DataQuery private (
override val inputData: Data,
inputQuery: SparqlQuery,
result: DataQueryResult
) extends DataOperation(successMessage, inputData) {}
/** Static utilities to perform SPARQL queries on RDF data
*/
private[api] object DataQuery {
private val successMessage = "Query executed successfully"
/** Given an input data and query, perform the query on the data
*
* @param data Input data to be queried
* @param query Input SPARQL query
* @return A [[DataQuery]] object with the query results (see also [[DataQueryResult]])
*/
def dataQuery(data: Data, query: SparqlQuery): IO[DataQuery] =
query.rawQuery match {
case Left(err) => IO.raiseError(new RuntimeException(err))
case Right(raw) =>
for {
rdf <- data.toRdf() // Get the RDF reader
resultJson <- rdf.use(
_.queryAsJson(raw)
) // Perform query
} yield DataQuery( // Form the results object
inputData = data,
inputQuery = query,
result = DataQueryResult(
json = resultJson
)
)
}
/** Case class representing the results to be returned when performing a data-query operation
* @note Currently limited to JSON formatted results for convenience
*/
final case class DataQueryResult(
json: Json
)
/** Encoder for [[DataQuery]]
*/
implicit val encodeDataQueryOperation: Encoder[DataQuery] =
(dataQuery: DataQuery) =>
Json.fromFields(
List(
("message", Json.fromString(dataQuery.successMessage)),
("data", dataQuery.inputData.asJson),
("query", dataQuery.inputQuery.asJson),
("result", dataQuery.result.json)
)
)
}
|
;RUN: not ch6_read_ir %s 2> %t.err
;RUN: FileCheck --input-file=%t.err %s
;CHECK: Error while processing the bitcode file
|
alias cp='cp'
wget --user=hassio --password='wukong2019hassio' ftp://127.0.0.1/share/wukongdata/config.yml
cp -f config.yml /root/.wukong/config.yml
python3 wukong.py
|
<?php
declare(strict_types=1);
namespace DigitalCz\DigiSign\Resource;
use ArrayObject;
use DigitalCz\DigiSign\Exception\RuntimeException;
use Psr\Http\Message\ResponseInterface;
/**
* @template T
*
* @extends ArrayObject<int|string, T>
*/
class Collection extends ArrayObject implements ResourceInterface
{
/** @var ResponseInterface Original API response */
protected $_response; // phpcs:ignore
/**
* @var class-string<T>
*/
protected $resourceClass;
/**
* @param mixed[] $result
* @param class-string<T> $resourceClass
*/
public function __construct(array $result, string $resourceClass)
{
$this->resourceClass = $resourceClass;
$items = array_map(static function (array $itemValue) use ($resourceClass) {
return new $resourceClass($itemValue);
}, $result);
parent::__construct($items);
}
public function getResponse(): ResponseInterface
{
if (!isset($this->_response)) {
throw new RuntimeException('Only resource returned from client has API response set');
}
return $this->_response;
}
public function setResponse(ResponseInterface $response): void
{
$this->_response = $response;
}
/**
* @return array<T>
*/
public function getResult(): array
{
return $this->getArrayCopy();
}
/**
* @return array<int|string, array<mixed>>
*/
public function toArray(): array
{
return array_map(
static function (BaseResource $item): array {
return $item->toArray();
},
$this->getArrayCopy()
);
}
public function self(): string
{
throw new RuntimeException('Resource has no self link.');
}
/**
* @throws RuntimeException
*/
public function id(): string
{
        throw new RuntimeException('Collection doesn\'t have an ID.');
}
/**
* @return array<int|string, array<mixed>>
*/
public function jsonSerialize(): array
{
return $this->toArray();
}
/**
* @return class-string<T>
*/
public function getResourceClass(): string
{
return $this->resourceClass;
}
}
|
package vampirefsm.controllers
import javax.inject.{Inject, Singleton}
import journeys.VampireFSMJourneyService
import play.api.data.Form
import play.api.data.Forms._
import play.api.data.validation.{Constraint, Invalid, Valid, ValidationError}
import play.api.i18n.{I18nSupport, MessagesApi}
import play.api.mvc._
import play.api.{Configuration, Environment}
import uk.gov.hmrc.agentmtdidentifiers.model.Arn
import uk.gov.hmrc.http.HeaderCarrier
import uk.gov.hmrc.play.bootstrap.controller.FrontendController
import uk.gov.hmrc.play.fsm.JourneyController
import vampirefsm.connectors.FrontendAuthConnector
import vampirefsm.journeys.VampireFSMJourneyModel.State._
import vampirefsm.journeys.VampireFSMJourneyModel.Transitions._
import vampirefsm.views.html
import scala.concurrent.ExecutionContext
@Singleton
class VampireFSMController @Inject()(
override val messagesApi: MessagesApi,
val journeyService: VampireFSMJourneyService,
val authConnector: FrontendAuthConnector,
val env: Environment)(implicit val configuration: Configuration,
ec: ExecutionContext)
extends FrontendController
with JourneyController[HeaderCarrier]
with I18nSupport
with AuthActions {
override def context(implicit rh: RequestHeader): HeaderCarrier = hc
import VampireFSMController._
val AsAgent: WithAuthorised[Arn] = { implicit request: Request[Any] =>
withAuthorisedAsAgent(_)
}
def root: Action[AnyContent] =
Action(Redirect(routes.VampireFSMController.start()))
def start: Action[AnyContent] = actionShowStateWhenAuthorised(AsAgent) {
case Start =>
}
def submitStart: Action[AnyContent] = action { implicit request =>
whenAuthorised(AsAgent)(started)(redirect)
}
def showAge = actionShowStateWhenAuthorised(AsAgent) {
case Age =>
}
def submitAge: Action[AnyContent] = action { implicit request =>
whenAuthorisedWithForm(AsAgent)(ageForm)(selectedAge)
}
def showBiteMarks: Action[AnyContent] =
actionShowStateWhenAuthorised(AsAgent) {
case _: BiteMarks =>
}
def submitBiteMarks: Action[AnyContent] = action { implicit request =>
whenAuthorisedWithForm(AsAgent)(biteMarksForm)(selectedBiteMarks)
}
def showSun: Action[AnyContent] = actionShowStateWhenAuthorised(AsAgent) {
case _: Sun =>
}
def submitSun: Action[AnyContent] = action { implicit request =>
whenAuthorisedWithForm(AsAgent)(sunForm)(selectedSun)
}
def showEnd: Action[AnyContent] = actionShowStateWhenAuthorised(AsAgent) {
case _: End =>
}
override def getCallFor(state: journeyService.model.State)(
implicit request: Request[_]): Call = state match {
case Start => routes.VampireFSMController.start()
case Age => routes.VampireFSMController.showAge()
case _: BiteMarks => routes.VampireFSMController.showBiteMarks()
case _: Sun => routes.VampireFSMController.showSun()
case _: End => routes.VampireFSMController.showEnd()
}
override def renderState(
state: journeyService.model.State,
breadcrumbs: journeyService.Breadcrumbs,
formWithErrors: Option[Form[_]])(implicit request: Request[_]): Result =
state match {
case Start => Ok(html.start())
case Age => Ok(html.age(ageForm))
case _: BiteMarks => Ok(html.bite_marks(biteMarksForm))
case _: Sun => Ok(html.sun(sunForm))
case End(likelihood) => Ok(html.end(likelihood))
}
}
object VampireFSMController {
import play.api.data.Form
def confirmationChoice(errorMessage: String): Constraint[String] =
Constraint[String] { fieldValue: String =>
if (fieldValue.trim.nonEmpty) Valid
else Invalid(ValidationError(errorMessage))
}
def ageForm: Form[Int] =
Form(
mapping(
"age" -> optional(text)
.transform[String](_.getOrElse(""), s => Some(s))
.verifying(confirmationChoice("nothing selected"))
)(choice => choice.toInt)(confirmation => Some(confirmation.toString)))
def biteMarksForm: Form[Boolean] =
Form(
mapping(
"accepted" -> optional(text)
.transform[String](_.getOrElse(""), s => Some(s))
.verifying(confirmationChoice("no choice"))
)(choice => choice.toBoolean)(confirmation =>
Some(confirmation.toString)))
def sunForm: Form[String] =
Form(
mapping(
"sun" -> optional(text)
.transform[String](_.getOrElse(""), s => Some(s))
.verifying(confirmationChoice("no choice"))
)(choice => choice)(confirmation => Some(confirmation.toString)))
}
|
<?php
namespace App\Http\Controllers;
use Illuminate\Http\Request;
use App\Models\Korisnik;
class KorisnikController extends Controller
{
public function addKorisnika()
{
return view("welcome");
}
public function selectData()
{
$korisnici = Korisnik::all();
return view("list-data", [
"korisnici" => $korisnici
]);
}
public function storeKorisnika(Request $request)
{
$korisnik_obj = new Korisnik;
//set values
$korisnik_obj->name = $request->name;
$korisnik_obj->lastname = $request->lastname;
$korisnik_obj->mobile = $request->mobile;
$korisnik_obj->home = $request->home;
//save
$korisnik_obj->save();
//flash msg
$request->session()->flash("success");
//redirect
return redirect("dashboard");
}
}
|
import { Matrix4 } from 'three'
import { getComponent } from '@xrengine/engine/src/ecs/functions/ComponentFunctions'
import { Object3DComponent } from '@xrengine/engine/src/scene/components/Object3DComponent'
import { accessSelectionState } from '../services/SelectionServices'
const IDENTITY_MAT_4 = new Matrix4().identity()
export function getSpaceMatrix() {
const selectedEntities = accessSelectionState().selectedEntities.value
if (selectedEntities.length === 0) return IDENTITY_MAT_4
const lastSelectedEntity = selectedEntities[selectedEntities.length - 1]
const obj3d = getComponent(lastSelectedEntity, Object3DComponent).value
obj3d.updateMatrixWorld()
if (!obj3d.parent) return IDENTITY_MAT_4
return obj3d.parent.matrixWorld
}
|
---
title: Status
category: Backend
order: 4
---
### Goals for this Release
* Real name entry upon registration
* Support for comments
* Ability to download episodes in advance of listening to them
### Next Release
The Android app has a 1 month release cycle.
|
SECTION rodata_font
SECTION rodata_font_8x8
PUBLIC _zx7_font_8x8_zx_system
PUBLIC _zx7_font_8x8_zx_system_end
_zx7_font_8x8_zx_system:
BINARY "font_8x8_zx_system.bin.zx7"
_zx7_font_8x8_zx_system_end:
|
import 'package:flutter/material.dart';
class AnimatedListStateWidget extends StatefulWidget {
AnimatedListStateWidget({Key? key}) : super(key: key);
@override
  _AnimatedListStateWidgetState createState() =>
      _AnimatedListStateWidgetState();
}
class _AnimatedListStateWidgetState extends State<AnimatedListStateWidget> {
final GlobalKey<AnimatedListState> _listKey = GlobalKey();
List<String> _data = ['12', 'ss'];
void _addItem() {
final int _index = _data.length;
_data.insert(_index, _index.toString());
_listKey.currentState?.insertItem(_index);
}
  void _removeItem() {
    if (_data.isEmpty) return; // guard against removing from an empty list
    final int _index = _data.length - 1;
    _listKey.currentState?.removeItem(
      _index,
      (context, animation) => Container(),
    );
    _data.removeAt(_index);
  }
@override
Widget build(BuildContext context) {
return Scaffold(
appBar: AppBar(
title: Text('AnimatedListState'),
),
floatingActionButton: Column(
mainAxisAlignment: MainAxisAlignment.end,
children: [
FloatingActionButton(
onPressed: _removeItem,
child: Text('减'),
),
FloatingActionButton(
onPressed: _addItem,
child: Text('加'),
),
],
),
body: Container(
padding: EdgeInsets.all(20),
child: Column(
children: [
Expanded(
child: AnimatedList(
key: _listKey,
initialItemCount: _data.length,
itemBuilder: (
BuildContext context,
int index,
Animation<double> animation,
) {
return SizeTransition(
sizeFactor: animation,
child: Card(
child: ListTile(
title: Text(_data[index]),
),
),
);
},
),
),
],
),
),
);
}
}
|
import argparse
import itertools
import json
import matplotlib.pyplot as plt
import numpy as np
import os
import pandas as pd
from common import add_db_args
from common import add_plot_limit_args
from common import set_db_connection
from common import set_plot_limits
from matplotlib.colors import LogNorm
from plot_estimator import _format_datum
from plot_estimator import _extract_data
def parse_args(*argument_list):
parser = argparse.ArgumentParser()
parser.add_argument('infiles', type=argparse.FileType('r'), nargs='+',
help='File(s) with inliers & outliers with their scores '
'outputted by macrobase')
parser.add_argument('--histogram-bins', default=100, type=int)
parser.add_argument('--restrict-to', choices=['inliers', 'outliers'],
help='Plots 2d histogram of outliers or inliers')
parser.add_argument('--columns', nargs=1, default=['metrics.*'],
help='Data to include in the plot')
parser.add_argument('--legend-loc', default='best')
parser.add_argument('--no-scores', action='store_false', default=True, dest='plot_scores')
parser.add_argument('--savefig')
add_plot_limit_args(parser)
add_db_args(parser)
args = parser.parse_args(*argument_list)
return args
def _format_data(infile, args):
print 'formatting data from file %s' % infile.name
raw_data = json.load(infile)
dimensions = len(_format_datum(raw_data['inliers'][0], args.columns)) - 1
assert dimensions == 1
outliers = _extract_data(raw_data, 'outliers', args.columns, args.x_limits, None)
return os.path.basename(infile.name).rsplit('.')[0], list(outliers)
def plot_histograms(args):
classifiers = {}
data, labels = [], []
for _file in args.infiles:
label, content = _format_data(_file, args)
labels.append(label)
X, _ = zip(*content)
data.append(X)
plt.hist(data, args.histogram_bins, histtype='bar', stacked=False, label=labels)
plt.legend(loc=args.legend_loc)
set_plot_limits(plt, args)
if args.savefig is not None:
filename = args.savefig
modifiers = []
if args.x_limits:
modifiers.append('X=%d,%d' % tuple(args.x_limits))
if args.y_limits:
modifiers.append('Y=%d,%d' % tuple(args.y_limits))
name, ext = filename.rsplit('.')
new_filename = '{old_name}-{modifiers}.{ext}'.format(old_name=name, modifiers='-'.join(modifiers), ext=ext)
print 'saving figure to - ', new_filename
plt.savefig(new_filename, dpi=320)
plt.clf()
else:
plt.show()
if __name__ == '__main__':
args = parse_args()
plot_histograms(args)
|
<?php
/**
* @file
* Include this file to include all classes of Glamus Utils.
*
* For more information on how to install check README.md.
*/
require_once dirname(__DIR__) . "/src/FileUtils.php";
require_once dirname(__DIR__) . "/src/JsonHandler.php";
require_once dirname(__DIR__) . "/src/StringUtils.php";
require_once dirname(__DIR__) . "/src/DateUtils.php";
require_once dirname(__DIR__) . "/src/Logger.php";
|
/**
* \file
* \copyright
* Copyright (c) 2012-2022, OpenGeoSys Community (http://www.opengeosys.org)
* Distributed under a Modified BSD License.
* See accompanying file LICENSE.txt or
* http://www.opengeosys.org/project/license
*
*/
#pragma once
#include <boost/property_tree/ptree.hpp>
#include <functional>
#include <map>
#include <memory>
#include <optional>
#include <typeindex>
#include <utility>
#include <vector>
extern template class boost::property_tree::basic_ptree<
std::string, std::string, std::less<>>;
namespace BaseLib
{
class ConfigTree;
/*! Check if \c conf has been read entirely and invalidate it.
*
* This method can safely be called on \c nullptr's.
*
* \see ConfigTree::checkAndInvalidate()
*/
void checkAndInvalidate(ConfigTree* const conf);
//! \overload
void checkAndInvalidate(std::unique_ptr<ConfigTree> const& conf);
//! \overload
void checkAndInvalidate(ConfigTree& conf);
template <typename Iterator>
class Range;
/*!
* Wrapper around a Boost Property Tree with some basic error reporting
* features.
*
* Features. This class:
* * makes sure that every configuration setting in a Property Tree is read
 * exactly once. If some setting is not read (e.g. due to a typo), a warning
* message is generated. The message contains a hint where it occurred.
* * enforces a naming scheme of settings: letters a-z, numbers 0-9, underscore
* * provides some functionality to read lists of values using range-based for
* loops.
* * has rather long method names that are easily greppable from the source
* code. So a list of supported configuration options can be easily obtained
* from the source code.
*
 * The purpose of this class is to reduce, or avoid entirely, the amount of
 * error-handling code in routines that take configuration parameters.
*
* Most methods of this class check that they have not been called before for
* the same \c ConfigTree and the same parameter. This behaviour helps to
* enforce that every parameter is read exactly once during parsing of the
* configuration settings.
*
* The most notable restriction of this class when compared to plain tree
* traversal is, that one must know all the XML tags (i.e. configuration
* parameters) at compile time. It is not possible to read from this class,
* which configuration parameters are present in the tree. This restriction,
 * however, is intentional, because it provides the possibility to get all existing
* configuration parameters from the source code.
*
* This class maintains a read counter for each parameter accessed through any
* of its methods. Read counters are increased with every read (the only
* exception being the peekConfigParameter() method). The destructor finally
 * decreases the read counter for every tag/attribute it finds on the current
* level of the XML tree. If the increases/decreases don't cancel each other,
* warning messages are generated. This check can also be enforced before
* destruction by using the BaseLib::checkAndInvalidate() functions.
*
* The design of this class entails some limitations compared to traversing a
* plain tree, e.g., it is not possible to obtain a list of tags or attributes
* from the tree, but one has to explicitly query the specific tags/attributes
* one is interested in. That way it is possible to get all used configuration
* parameters directly from the source code where this class is used, and to
* maintain the quality of the configuration parameter documentation.
*
* Instances of this class only keep a reference to the underlying
* <tt>boost::property_tree</tt>. Therefore it is necessary that the underlying
* property tree stays intact as long as any instance---i.e. the top level
* ConfigTree and any of its children---reference it. In order to simplify the
* handling of this dependence, the class ConfigTreeTopLevel can be used.
*
* The construction of a ConfigTree from the content of an XML file can be done
* with the function BaseLib::makeConfigTree(), which performs many error
* checks. For limitations of the used XML parser, please have a look at that
* function's documentation.
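 *
 * A minimal usage sketch (assuming a ConfigTree \c config obtained, e.g., via
 * BaseLib::makeConfigTree(); the tag names are made up for illustration only):
 * \code{.cpp}
 * // required scalar parameter <timestep>
 * auto const dt = config.getConfigParameter<double>("timestep");
 * // optional parameter with a default value
 * auto const prefix =
 *     config.getConfigParameter<std::string>("output_prefix", "out");
 * // iterate over all <process> subtrees on the current level
 * for (auto const& process_config : config.getConfigSubtreeList("process"))
 * {
 *     auto const name = process_config.getConfigParameter<std::string>("name");
 * }
 * \endcode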
*/
class ConfigTree final
{
public:
/*! A wrapper around a Boost Iterator for iterating over ranges of subtrees.
*
* The methods of this class tell the associated (parent) \c ConfigTree
* object when a setting has been parsed.
*/
class SubtreeIterator
: public std::iterator<std::input_iterator_tag, ConfigTree>
{
public:
using Iterator = boost::property_tree::ptree::const_assoc_iterator;
explicit SubtreeIterator(Iterator const& it, std::string const& root,
ConfigTree const& parent)
: it_(it), tagname_(root), parent_(parent)
{
}
SubtreeIterator& operator++()
{
++it_;
has_incremented_ = true;
return *this;
}
ConfigTree operator*()
{
// if this iterator has been incremented since the last dereference,
// tell the parent_ instance that a subtree now has been parsed.
if (has_incremented_)
{
has_incremented_ = false;
parent_.markVisited(tagname_, Attr::TAG, false);
}
return ConfigTree(it_->second, parent_, tagname_);
}
bool operator==(SubtreeIterator const& other) const
{
return it_ == other.it_;
}
bool operator!=(SubtreeIterator const& other) const
{
return it_ != other.it_;
}
private:
bool has_incremented_ = true;
Iterator it_;
protected:
std::string const tagname_;
ConfigTree const& parent_;
};
/*! A wrapper around a Boost Iterator for iterating over ranges of
* parameters.
*
* The methods of this class tell the associated (parent) \c ConfigTree
* object when a setting has been parsed.
*/
class ParameterIterator : public SubtreeIterator
{
public:
//! Inherit the constructor
using SubtreeIterator::SubtreeIterator;
ConfigTree operator*()
{
auto st = SubtreeIterator::operator*();
if (st.hasChildren())
{
parent_.error("The requested parameter <" + tagname_ +
"> has child elements.");
}
return st;
}
};
/*!
* A wrapper around a Boost Iterator for iterating over ranges of values.
*
* The methods of this class tell the associated (parent) \c ConfigTree
* object when a setting has been parsed.
*/
template <typename ValueType>
class ValueIterator
: public std::iterator<std::input_iterator_tag, ValueType>
{
public:
using Iterator = boost::property_tree::ptree::const_assoc_iterator;
explicit ValueIterator(Iterator const& it, std::string const& root,
ConfigTree const& parent)
: it_(it), tagname_(root), parent_(parent)
{
}
ValueIterator<ValueType>& operator++()
{
++it_;
has_incremented_ = true;
return *this;
}
ValueType operator*()
{
// if this iterator has been incremented since the last dereference,
// tell the parent_ instance that a setting now has been parsed.
if (has_incremented_)
{
has_incremented_ = false;
parent_.markVisited<ValueType>(tagname_, Attr::TAG, false);
}
return ConfigTree(it_->second, parent_, tagname_)
.getValue<ValueType>();
}
bool operator==(ValueIterator<ValueType> const& other) const
{
return it_ == other.it_;
}
bool operator!=(ValueIterator<ValueType> const& other) const
{
return it_ != other.it_;
}
private:
bool has_incremented_ = true;
Iterator it_;
std::string const tagname_;
ConfigTree const& parent_;
};
//! The tree being wrapped by this class.
using PTree = boost::property_tree::ptree;
/*! Type of the function objects used as callbacks.
*
* Arguments of the callback:
* \arg \c filename the file being from which this ConfigTree has been read.
* \arg \c path the path in the tree where the message was generated.
* \arg \c message the message to be printed.
*/
using Callback = std::function<void(const std::string& filename,
const std::string& path,
const std::string& message)>;
/*!
* Creates a new instance wrapping the given Boost Property Tree.
*
* \param top_level_tree the top level Boost Property Tree
* \param filename the file from which the \c tree has been read
* \param error_cb callback function to be called on error.
* \param warning_cb callback function to be called on warning.
*
* The callback functions must be valid callable functions, i.e. not
* nullptr's. They are configurable in order to make unit tests of this
* class easier. They should not be provided in production code!
*
* Defaults are strict: By default, both callbacks are set to the same
* function, i.e., warnings will also result in program abortion!
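     *
     * A minimal sketch of a custom callback as it might be used in a test
     * (the lambda body is illustrative only and assumes <iostream>):
     * \code{.cpp}
     * ConfigTree::Callback const log_only =
     *     [](std::string const& filename, std::string const& path,
     *        std::string const& message)
     * { std::cerr << filename << ":" << path << ": " << message << "\n"; };
     * \endcode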
*/
explicit ConfigTree(PTree&& top_level_tree,
std::string filename,
Callback error_cb,
Callback warning_cb);
//! copying is not compatible with the semantics of this class
ConfigTree(ConfigTree const&) = delete;
//! After being moved from, \c other is in an undefined state and must not
//! be used anymore!
ConfigTree(ConfigTree&& other);
//! copying is not compatible with the semantics of this class
ConfigTree& operator=(ConfigTree const&) = delete;
//! After being moved from, \c other is in an undefined state and must not
//! be used anymore!
ConfigTree& operator=(ConfigTree&& other);
//! Used to get the project file name.
std::string const& getProjectFileName() const { return filename_; }
/*! \name Methods for directly accessing parameter values
*
*/
//!\{
/*! Get parameter \c param of type \c T from the configuration tree.
*
* \return the value looked for.
*
* \pre \c param must not have been read before from this ConfigTree.
*/
template <typename T>
T getConfigParameter(std::string const& param) const;
/*! Get parameter \c param of type \c T from the configuration tree or the
* \c default_value.
*
* This method has a similar behaviour as getConfigParameter(std::string
* const&) except the \c default_value is returned if the attribute has not
* been found.
*
* \pre \c param must not have been read before from this ConfigTree.
*/
template <typename T>
T getConfigParameter(std::string const& param,
T const& default_value) const;
/*! Get parameter \c param of type \c T from the configuration tree if
* present
*
* This method has a similar behaviour as getConfigParameter(std::string
* const&) except no errors are raised. Rather it can be told from the
* return value if the parameter could be read.
*
* \pre \c param must not have been read before from this ConfigTree.
*/
template <typename T>
std::optional<T> getConfigParameterOptional(std::string const& param) const;
/*! Fetches all parameters with name \c param from the current level of the
* tree.
*
     * The return value is suitable to be used with range-based for-loops.
*
* \pre \c param must not have been read before from this ConfigTree.
*/
template <typename T>
Range<ValueIterator<T>> getConfigParameterList(
std::string const& param) const;
//!\}
/*! \name Methods for accessing parameters that have attributes
*
* The <tt>getConfigParameter...()</tt> methods in this group---note: they
* do not have template parameters---check that the queried parameters do
* not have any children (apart from XML attributes); if they do, error() is
* called.
*
* The support for parameters with attributes is limited in the sense that
* it is not possible to peek/check them. However, such functionality can
* easily be added on demand.
*/
//!\{
/*! Get parameter \c param from the configuration tree.
*
* \return the subtree representing the requested parameter
*
* \pre \c param must not have been read before from this ConfigTree.
*/
ConfigTree getConfigParameter(std::string const& root) const;
/*! Get parameter \c param from the configuration tree if present.
*
* \return the subtree representing the requested parameter
*
* \pre \c param must not have been read before from this ConfigTree.
*/
std::optional<ConfigTree> getConfigParameterOptional(
std::string const& root) const;
/*! Fetches all parameters with name \c param from the current level of the
* tree.
*
     * The return value is suitable to be used with range-based for-loops.
*
* \pre \c param must not have been read before from this ConfigTree.
*/
Range<ParameterIterator> getConfigParameterList(
std::string const& param) const;
/*! Get the plain data contained in the current level of the tree.
*
* \return the data converted to the type \c T
*
* \pre The data must not have been read before.
*/
template <typename T>
T getValue() const;
/*! Get XML attribute \c attr of type \c T for the current parameter.
*
* \return the requested attribute's value.
*
* \pre \c attr must not have been read before from the current parameter.
*/
template <typename T>
T getConfigAttribute(std::string const& attr) const;
/*! Get XML attribute \c attr of type \c T for the current parameter or the
* \c default_value.
*
* This method has a similar behaviour as getConfigAttribute(std::string
* const&) except the \c default_value is returned if the attribute has not
* been found.
*
* \return the requested attribute's value.
*
* \pre \c attr must not have been read before from the current parameter.
*/
template <typename T>
T getConfigAttribute(std::string const& attr, T const& default_value) const;
/*! Get XML attribute \c attr of type \c T for the current parameter if
* present.
*
* \return the requested attribute's value.
*
* \pre \c attr must not have been read before from the current parameter.
*/
template <typename T>
std::optional<T> getConfigAttributeOptional(std::string const& attr) const;
//!\}
/*! \name Methods for peeking and checking parameters
*
* To be used in builder/factory functions: E.g., one can peek a parameter
* denoting the type of an object to generate in the builder, and check the
* type parameter in the constructor of the generated object.
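     *
     * A minimal sketch of that pattern (the tag name and values are
     * illustrative only):
     * \code{.cpp}
     * // in the builder: peek the <type> tag to select what to construct
     * auto const type = config.peekConfigParameter<std::string>("type");
     * // ... dispatch on `type` ...
     * // in the constructor of the constructed object: consume the same tag
     * config.checkConfigParameter("type", "linear");
     * \endcode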
*/
//!\{
/*! Peek at a parameter \c param of type \c T from the configuration tree.
*
* This method is an exception to the single-read rule. It is meant to be
* used to tell from a ConfigTree instance where to pass that instance on
* for further processing.
*
* But in order that the requested parameter counts as "completely parsed",
* it has to be read through some other method, too.
*
* Return value and error behaviour are the same as for
* getConfigParameter<T>(std::string const&).
*/
template <typename T>
T peekConfigParameter(std::string const& param) const;
/*! Assert that \c param has the given \c value.
*
* Convenience method combining getConfigParameter(std::string const&) with
* a check.
*/
template <typename T>
void checkConfigParameter(std::string const& param, T const& value) const;
//! Make checkConfigParameter() work for string literals.
template <typename Ch>
void checkConfigParameter(std::string const& param, Ch const* value) const;
//!\}
/*! \name Methods for accessing subtrees
*/
//!\{
/*! Get the subtree rooted at \c root
*
* If \c root is not found error() is called.
*
* \pre \c root must not have been read before from this ConfigTree.
*/
ConfigTree getConfigSubtree(std::string const& root) const;
/*! Get the subtree rooted at \c root if present
*
* \pre \c root must not have been read before from this ConfigTree.
*/
std::optional<ConfigTree> getConfigSubtreeOptional(
std::string const& root) const;
/*! Get all subtrees that have a root \c root from the current level of the
* tree.
*
     * The return value is suitable to be used with range-based for-loops.
*
* \pre \c root must not have been read before from this ConfigTree.
*/
Range<SubtreeIterator> getConfigSubtreeList(std::string const& root) const;
//!\}
/*! \name Methods for ignoring parameters
*/
//!\{
/*! Tell this instance to ignore parameter \c param.
*
* This method is used to avoid warning messages.
*
* \pre \c param must not have been read before from this ConfigTree.
*/
void ignoreConfigParameter(std::string const& param) const;
/*! Tell this instance to ignore all parameters \c param on the current
* level of the tree.
*
* This method is used to avoid warning messages.
*
* \pre \c param must not have been read before from this ConfigTree.
*/
void ignoreConfigParameterAll(std::string const& param) const;
/*! Tell this instance to ignore the XML attribute \c attr.
*
* This method is used to avoid warning messages.
*
* \pre \c attr must not have been read before from this ConfigTree.
*/
void ignoreConfigAttribute(std::string const& attr) const;
//!\}
//! The destructor performs the check if all nodes at the current level of
//! the tree have been read. Errors raised by the check are swallowed. Use
//! assertNoSwallowedErrors() manually to check for those.
~ConfigTree();
//! Default error callback function
//! Will throw std::runtime_error
static void onerror(std::string const& filename, std::string const& path,
std::string const& message);
//! Default warning callback function
//! Will print a warning message
static void onwarning(std::string const& filename, std::string const& path,
std::string const& message);
//! Asserts that there have not been any errors reported in the destructor.
static void assertNoSwallowedErrors();
private:
//! Default implementation of reading a value of type T.
template <typename T>
std::optional<T> getConfigParameterOptionalImpl(std::string const& param,
T* /*unused*/) const;
//! Implementation of reading a vector of values of type T.
template <typename T>
std::optional<std::vector<T>> getConfigParameterOptionalImpl(
std::string const& param, std::vector<T>* /*unused*/) const;
struct CountType
{
int count;
std::type_index type;
};
//! Used to indicate if dealing with XML tags or XML attributes
enum class Attr : bool
{
TAG = false,
ATTR = true
};
//! Used for wrapping a subtree
explicit ConfigTree(PTree const& tree, ConfigTree const& parent,
std::string const& root);
/*! Called if an error occurs. Will call the error callback.
*
* This method only acts as a helper method and throws std::runtime_error.
*/
[[noreturn]] void error(std::string const& message) const;
//! Called for printing warning messages. Will call the warning callback.
//! This method only acts as a helper method.
void warning(std::string const& message) const;
//! Checks if \c key complies with the rules [a-z0-9_].
void checkKeyname(std::string const& key) const;
//! Used to generate the path of a subtree.
std::string joinPaths(std::string const& p1, std::string const& p2) const;
//! Asserts that the \c key has not been read yet.
void checkUnique(std::string const& key) const;
//! Asserts that the attribute \c attr has not been read yet.
void checkUniqueAttr(std::string const& attr) const;
/*! Keeps track of the key \c key and its value type \c T.
*
* This method asserts that a key is read always with the same type.
*
     * \param peek_only if true, do not change the read-count of the given
* key.
*/
template <typename T>
CountType& markVisited(std::string const& key, Attr const is_attr,
bool peek_only) const;
/*! Keeps track of the key \c key and its value type ConfigTree.
*
* This method asserts that a key is read always with the same type.
*
     * \param peek_only if true, do not change the read-count of the given
* key.
*/
CountType& markVisited(std::string const& key, Attr const is_attr,
bool const peek_only) const;
    //! Used in the destructor to compute the difference between the number of reads
//! of a parameter and the number of times it exists in the ConfigTree
void markVisitedDecrement(Attr const is_attr, std::string const& key) const;
//! Checks if this tree has any children.
bool hasChildren() const;
/*! Checks if the top level of this tree has been read entirely (and not too
* often).
*
* \post This method also invalidates the instance, i.e., afterwards it must
* not be used anymore!
*/
void checkAndInvalidate();
    //! returns a short string suitable for error/warning messages
static std::string shortString(std::string const& s);
//! Root of the tree.
//!
//! Owned by all ConfigTree instances that might access any part of it.
std::shared_ptr<PTree const> top_level_tree_;
//! The wrapped tree.
PTree const* tree_;
//! A path printed in error/warning messages.
std::string path_;
//! The path of the file from which this tree has been read.
std::string filename_;
//! A pair (is attribute, tag/attribute name).
using KeyType = std::pair<Attr, std::string>;
//! A map KeyType -> (count, type) keeping track which parameters have been
//! read how often and which datatype they have.
//!
//! This member will be written to when reading from the config tree.
//! Therefore it has to be mutable in order to be able to read from
//! constant instances, e.g., those passed as constant references to
//! temporaries.
mutable std::map<KeyType, CountType> visited_params_;
//! Indicates if the plain data contained in this tree has already been
//! read.
mutable bool have_read_data_ = false;
Callback onerror_; //!< Custom error callback.
Callback onwarning_; //!< Custom warning callback.
//! Character separating two path components.
static const char pathseparator;
//! Set of allowed characters as the first letter of a key name.
static const std::string key_chars_start;
//! Set of allowed characters in a key name.
static const std::string key_chars;
friend void checkAndInvalidate(ConfigTree* const conf);
friend void checkAndInvalidate(ConfigTree& conf);
friend void checkAndInvalidate(std::unique_ptr<ConfigTree> const& conf);
};
} // namespace BaseLib
#include "ConfigTree-impl.h"
|
package com.ajoshow.mock.web.dto;
import com.ajoshow.mock.domain.*;
import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.annotation.JsonUnwrapped;
/**
* Created by andychu on 2017/4/22.
*/
@JsonIgnoreProperties(ignoreUnknown = true)
public class AssetDto {
private Integer id;
private Data data;
private Title title;
@JsonProperty("img")
private Image image;
private Link link;
@JsonUnwrapped
private AssetMeta meta;
public Integer getId() {
return id;
}
public void setId(Integer id) {
this.id = id;
}
public Data getData() {
return data;
}
public void setData(Data data) {
this.data = data;
}
public Title getTitle() {
return title;
}
public void setTitle(Title title) {
this.title = title;
}
public Link getLink() {
return link;
}
public void setLink(Link link) {
this.link = link;
}
public Image getImage() {
return image;
}
public void setImage(Image image) {
this.image = image;
}
public AssetMeta getMeta() {
return meta;
}
public void setMeta(AssetMeta meta) {
this.meta = meta;
}
@Override
public String toString() {
final StringBuilder sb = new StringBuilder("Asset{");
sb.append("id=").append(id);
if(data != null)
sb.append(", data=").append(data);
if(title != null)
sb.append(", title=").append(title);
if(link != null)
sb.append(", link=").append(link);
if(image != null)
sb.append(", image=").append(image);
if(meta != null)
sb.append(", meta=").append(meta);
sb.append('}');
return sb.toString();
}
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
AssetDto asset = (AssetDto) o;
if (id != null ? !id.equals(asset.id) : asset.id != null) return false;
if (data != null ? !data.equals(asset.data) : asset.data != null) return false;
if (title != null ? !title.equals(asset.title) : asset.title != null) return false;
if (image != null ? !image.equals(asset.image) : asset.image != null) return false;
return link != null ? link.equals(asset.link) : asset.link == null;
}
@Override
public int hashCode() {
int result = id != null ? id.hashCode() : 0;
result = 31 * result + (data != null ? data.hashCode() : 0);
result = 31 * result + (title != null ? title.hashCode() : 0);
result = 31 * result + (image != null ? image.hashCode() : 0);
result = 31 * result + (link != null ? link.hashCode() : 0);
return result;
}
}
|
<?php
namespace App\Jobs;
use App\Mail\DeleteReviewMailable;
use Illuminate\Contracts\Queue\ShouldQueue;
use Illuminate\Queue\SerializesModels;
use Illuminate\Support\Facades\Mail;
class DeleteReviewJob extends Job implements ShouldQueue
{
use SerializesModels;
/**
* Create a new job instance.
*
* @return void
*/
private $email;
private $reason;
public function __construct($email,$reason)
{
$this->email = $email;
$this->reason = $reason;
}
/**
* Execute the job.
*
* @return void
*/
public function handle()
{
Mail::to($this->email)->send(new DeleteReviewMailable($this->reason));
}
}
|
---
layout: post
title: Halte au magasin funèbre
description: Pamphlet anti-"marché noir" !
authors:
- Dirty Henry
wordpress_id: 339
date: "2006-11-16 12:38:00 +0100"
categories:
- Artistes
tags:
- Oasis
cover: noel-gallagher-live.jpg
---
Enough already, you and your mates!
So there are no seats left for the "Noel and Gem semi-unplugged show" five
minutes after bookings open, fine, except that half an hour later you find
them for sale on eBay at 60 or 70€. You guys are a pain! You deprived me of
Noel this year; I'll deprive you of Easter next year!
|
# bin
Stash spot for my ~/bin and /usr/local/bin. All comments and criticism
are much appreciated.
If there is any licensed code in here, please notify me! It is not my
intention to claim others' code as my own, and I try my best to include
a link to the original source when this is the case.
|
/* @flow */
import { FetchHistoryArguments, HistoryResponse, ModulesInject } from '../../flow_interfaces';
import operationConstants from '../../constants/operations';
import utils from '../../utils';
export function getOperation(): string {
return operationConstants.PNDeleteMessagesOperation;
}
export function validateParams(modules: ModulesInject, incomingParams: FetchHistoryArguments) {
let { channel } = incomingParams;
let { config } = modules;
if (!channel) return 'Missing channel';
if (!config.subscribeKey) return 'Missing Subscribe Key';
}
export function useDelete() {
return true;
}
export function getURL(modules: ModulesInject, incomingParams: FetchHistoryArguments): string {
let { channel } = incomingParams;
let { config } = modules;
return `/v3/history/sub-key/${config.subscribeKey}/channel/${utils.encodeString(channel)}`;
}
export function getRequestTimeout({ config }: ModulesInject): number {
  return config.getTransactionTimeout();
}
export function isAuthSupported(): boolean {
return true;
}
export function prepareParams(modules: ModulesInject, incomingParams: FetchHistoryArguments): Object {
const { start, end } = incomingParams;
let outgoingParams: Object = {};
if (start) outgoingParams.start = start;
if (end) outgoingParams.end = end;
return outgoingParams;
}
export function handleResponse(modules: ModulesInject, serverResponse: Object): HistoryResponse {
return serverResponse.payload;
}
|
/*
* Copyright (c) Intellinium SAS, 2014-present
*
* SPDX-License-Identifier: Apache-2.0
*/
package io.runtime.mcumgr.response.log;
import com.fasterxml.jackson.annotation.JsonCreator;
import com.fasterxml.jackson.annotation.JsonProperty;
import io.runtime.mcumgr.response.McuMgrResponse;
public class McuMgrLevelListResponse extends McuMgrResponse {
@JsonProperty("level_map")
public String[] level_map;
@JsonCreator
public McuMgrLevelListResponse() {}
}
|
<?php
namespace App;
trait CacheFlushableAfterCreatedModelTrait
{
public static function bootCacheFlushableAfterCreatedModelTrait()
{
static::created(function (){
\Cache::tags([self::class])->flush();
});
}
} |
package ch.dreipol.multiplatform.reduxsample.shared.utils
import java.util.*
actual fun AppLanguage.Companion.fromLocale(): AppLanguage {
return fromValue(Locale.getDefault().language)
} |
package com.motiz88.rctmidi.webmidi.impl;
import com.facebook.react.bridge.ReadableMap;
public class MIDIOptions {
private boolean software = false;
private boolean sysex = false;
public boolean getSoftware() {
return software;
}
public boolean getSysex() {
return sysex;
}
public MIDIOptions(ReadableMap options) {
if (options == null)
return;
if (options.hasKey("software"))
software = options.getBoolean("software");
if (options.hasKey("sysex"))
sysex = options.getBoolean("sysex");
}
} |
import { Request, Response } from "express"
import { ExampleService } from '../services/exampleService'
export class ExampleController{
public exampleService: ExampleService
constructor() {
this.exampleService = new ExampleService()
}
public async dispatchPub(req: Request, res: Response){
await this.exampleService.sendMessageToQueue()
res.status(200).json({ok: true})
}
} |
#include <iostream>
#include <numeric>
using namespace std;
int mult(int x, int y) { return x * y; }
void src1013()
{
    // P361: defining a function object from an ordinary function
    cout << "--->" << "Code 10-13 (defining a function object from an ordinary function)" << "<---" << endl;
int A[] = { 1, 2, 3, 4, 5 };
const int N = sizeof(A) / sizeof(int);
cout << "The result by multipling all elements in A is:"
<< accumulate(A, A + N, 1, mult) // 将普通函数 mult() 传递给通用算法
<< endl;
}
|
import { ASTNode, NameNode, DefinitionNode } from 'graphql';
export declare type NamedDefinitionNode = DefinitionNode & {
name?: NameNode;
};
export declare function resetComments(): void;
export declare function collectComment(node: NamedDefinitionNode): void;
export declare function pushComment(node: any, entity: string, field?: string, argument?: string): void;
export declare function printComment(comment: string): string;
/**
* Converts an AST into a string, using one set of reasonable
* formatting rules.
*/
export declare function printWithComments(ast: ASTNode): any;
|
# bad args
Code
encode_unit(2, prune_method()$values, direction = "forward")
Condition
Error in `encode_unit()`:
! `x` should be a dials parameter object.
i This is an internal error in the dials package, please report it to the package authors.
---
Code
encode_unit(z, prune_method()$values, direction = "forwards")
Condition
Error in `encode_unit()`:
! `direction` should be either 'forward' or 'backward'
i This is an internal error in the dials package, please report it to the package authors.
---
Code
encode_unit(x, prune_method()$values, direction = "forward")
Condition
Error in `encode_unit()`:
! `value` should be a numeric vector.
i This is an internal error in the dials package, please report it to the package authors.
---
Code
encode_unit(z, 1, direction = "forward")
Condition
Error in `encode_unit()`:
! `value` should be a character vector.
i This is an internal error in the dials package, please report it to the package authors.
---
Code
encode_unit(x, matrix(letters[1:4], ncol = 2), direction = "forward")
Condition
Error in `encode_unit()`:
! `value` should be a numeric vector.
i This is an internal error in the dials package, please report it to the package authors.
---
Code
encode_unit(x, matrix(1:4, ncol = 2), direction = "forward")
Condition
Error in `encode_unit()`:
! `value` should be a numeric vector.
i This is an internal error in the dials package, please report it to the package authors.
---
Code
encode_unit(z, 1, direction = "forward")
Condition
Error in `encode_unit()`:
! `value` should be a character vector.
i This is an internal error in the dials package, please report it to the package authors.
---
Code
encode_unit(z, matrix(1:4, ncol = 2), direction = "forward")
Condition
Error in `encode_unit()`:
! `value` should be a character vector.
i This is an internal error in the dials package, please report it to the package authors.
---
Code
encode_unit(z, matrix(letters[1:4], ncol = 2), direction = "forward")
Condition
Error in `encode_unit()`:
! `value` should be a character vector.
i This is an internal error in the dials package, please report it to the package authors.
---
Code
encode_unit(x, prune_method()$values, direction = "backward")
Condition
Error in `encode_unit()`:
! `value` should be a numeric vector.
i This is an internal error in the dials package, please report it to the package authors.
---
Code
encode_unit(z, prune_method()$values, direction = "backward")
Condition
Error in `encode_unit()`:
! Values should be on [0, 1].
i This is an internal error in the dials package, please report it to the package authors.
---
Code
encode_unit(x, 1:2, direction = "backward")
Condition
Error in `encode_unit()`:
! Values should be on [0, 1].
i This is an internal error in the dials package, please report it to the package authors.
---
Code
encode_unit(z, 1:2, direction = "backward")
Condition
Error in `encode_unit()`:
! Values should be on [0, 1].
i This is an internal error in the dials package, please report it to the package authors.
|
package com.dietsodasoftware.yail.xmlrpc.model.customfields;
import com.dietsodasoftware.yail.xmlrpc.model.CustomField;
import com.dietsodasoftware.yail.xmlrpc.model.CustomField.Field;
import com.dietsodasoftware.yail.xmlrpc.model.CustomField.FormFieldFormatting;
import com.dietsodasoftware.yail.xmlrpc.model.CustomField.Model;
/**
* User: wendel.schultz
* Date: 8/29/14
*/
class SimpleNamedCustomField extends SimpleOperationCustomField implements NamedCustomField {
private final CustomField field;
SimpleNamedCustomField(CustomField field) {
super((String) field.getFieldValue(Field.Name));
this.field = field;
}
@Override
public Model getModel() {
return field.getEntityModel();
}
@Override
public String getLabel() {
return field.getFieldValue(Field.Label);
}
@Override
public FormFieldFormatting getFieldFormat() {
return field.getFieldFormat();
}
@Override
public String getValues(){
return field.getFieldValue(Field.Values);
}
}
|
namespace UglyToad.PdfPig.DocumentLayoutAnalysis.Export.Alto
{
using System;
using System.ComponentModel;
using System.Diagnostics;
using System.Xml.Serialization;
public partial class AltoDocument
{
/// <summary>
/// [Alto] Modern OCR software stores information on glyph level. A glyph is essentially a character or ligature.
/// Accordingly the value for the glyph element will be defined as follows:
/// Pre-composed representation = base + combining character(s) (decomposed representation)
/// See http://www.fileformat.info/info/unicode/char/0101/index.htm
/// "U+0101" = (U+0061) + (U+0304)
/// "combining characters" ("base characters" in combination with non-spacing marks or characters which are combined to one) are represented as one "glyph", e.g.áàâ.
///
/// <para>Each glyph has its own coordinate information and must be separately addressable as a distinct object.
/// Correction and verification processes can be carried out for individual characters.</para>
///
        /// <para>Post-OCR analysis of the text as well as adaptive OCR algorithms must be able to record information on glyph level.
        /// In order to reproduce the decision of the OCR software, optional characters must be recorded. These are called variants.</para>
/// The OCR software evaluates each variant and picks the one with the highest confidence score as the glyph.
/// The confidence score expresses how confident the OCR software is that a single glyph had been recognized correctly.</para>
///
        /// <para>The glyph elements are in the order of the word. Each glyph needs to be recorded to build up the whole word sequence.</para>
///
/// <para>The glyph’s CONTENT attribute is no replacement for the string’s CONTENT attribute.
/// Due to post-processing steps such as correction the values of both attributes may be inconsistent.</para>
/// </summary>
[EditorBrowsable(EditorBrowsableState.Never)]
[Serializable]
[DebuggerStepThrough]
[XmlType(Namespace = "http://www.loc.gov/standards/alto/ns-v4#")]
public class AltoGlyph : AltoPositionedElement
{
private float gc;
/// <remarks/>
public AltoShape Shape { get; set; }
/// <summary>
/// Alternative (combined) character for the glyph, outlined by OCR engine or similar recognition processes.
            /// In case the variant is two (combining) characters, both characters are outlined in one Variant element.
/// E.g. a Glyph element with CONTENT="m" can have a Variant element with the content "rn".
/// <para>Details for different use-cases see on the samples on GitHub.</para>
/// </summary>
[XmlElement("Variant")]
public AltoVariant[] Variant { get; set; }
/// <remarks/>
[XmlAttribute("ID", DataType = "ID")]
public string Id { get; set; }
/// <summary>
/// CONTENT contains the precomposed representation (combining character) of the character from the parent String element.
            /// The sequence position of the Glyph element matches the position of the character in the String.
/// </summary>
[XmlAttribute("CONTENT")]
public string Content { get; set; }
/// <summary>
            /// This GC attribute records a float value between 0.0 and 1.0 that expresses the level of confidence for the glyph, where 1 is certain.
            /// This attribute is optional. If it is not available, the default value for the glyph is "0".
///
/// <para>The GC attribute semantic is the same as the WC attribute on the String element and VC on Variant element.</para>
/// </summary>
[XmlAttribute("GC")]
public float Gc
{
get => gc;
set
{
gc = value;
if (!float.IsNaN(value)) GcSpecified = true;
}
}
/// <remarks/>
[XmlIgnore]
public bool GcSpecified { get; set; }
/// <remarks/>
public override string ToString()
{
return Content;
}
}
}
}
|
create or replace package file_pkg_version is
pkg_version varchar2(200) := '1.0';
function get_version return varchar2;
end file_pkg_version;
/
|
package org.firstinspires.ftc.teamcode;
import com.qualcomm.robotcore.eventloop.opmode.OpMode;
import com.qualcomm.robotcore.eventloop.opmode.TeleOp;
import com.qualcomm.robotcore.hardware.PwmControl;
import com.qualcomm.robotcore.hardware.Servo;
import com.qualcomm.robotcore.hardware.ServoControllerEx;
import org.firstinspires.ftc.teamcode.FlamingPhoenix.Arm;
/**
* Created by HwaA1 on 11/2/2017.
*/
public class Farrm extends OpMode {
Servo shoulder;
Servo elbow;
Servo wrist;
Servo wristation;
Servo finger;
Arm arm;
@Override
public void init() {
shoulder = hardwareMap.servo.get("shoulder");
elbow = hardwareMap.servo.get("elbow");
wrist = hardwareMap.servo.get("wrist");
finger = hardwareMap.servo.get("finger");
wristation = hardwareMap.servo.get("wristation");
ServoControllerEx servoController = (ServoControllerEx) shoulder.getController();
int shoulderServoPort = shoulder.getPortNumber();
PwmControl.PwmRange shoulderPwmRange = new PwmControl.PwmRange(968, 1748);
servoController.setServoPwmRange(shoulderServoPort, shoulderPwmRange);
ServoControllerEx elbowController = (ServoControllerEx) elbow.getController();
int elbowServoPort = elbow.getPortNumber();
PwmControl.PwmRange elbowPwmRange = new PwmControl.PwmRange(700, 2300);
elbowController.setServoPwmRange(elbowServoPort, elbowPwmRange);
ServoControllerEx wristController = (ServoControllerEx) wrist.getController();
int wristServoPort = wrist.getPortNumber();
PwmControl.PwmRange wristPwmRange = new PwmControl.PwmRange(750, 2250);
wristController.setServoPwmRange(wristServoPort, wristPwmRange);
double shoulderInitialize = 1;
shoulder.setPosition(shoulderInitialize);
elbow.setPosition(1);
wrist.setPosition(1);
wristation.setPosition(1);
finger.setPosition(1);
arm = new Arm(shoulder, elbow, wrist, wristation, finger, shoulderInitialize, this);
telemetry.addData("shoulder", shoulder.getPosition());
telemetry.addData("elbow", elbow.getPosition());
telemetry.update();
}
@Override
public void loop() {
/*double shoulderPos = shoulder.getPosition();
if(gamepad1.dpad_down) {
shoulderPos -= .001;
} else if(gamepad1.dpad_up) {
shoulderPos += .001;
}
shoulder.setPosition(shoulderPos);
//shoulder.setPosition(1);
double elbowPos = elbow.getPosition();
if(gamepad1.a) {
elbowPos -= .005;
} else if(gamepad1.y) {
elbowPos += .005;
} else {}
elbow.setPosition(elbowPos);*/
arm.moveArm(gamepad1);
if (gamepad1.y) {
arm.moveOutOfWay();
} else if(gamepad1.a) {
arm.pullArmBack();
}
telemetry.addData("shoulder", shoulder.getPosition());
telemetry.addData("elbow", elbow.getPosition());
telemetry.update();
}
}
|
import 'dart:async';
import 'dart:io';
import 'package:path/path.dart';
import 'package:path_provider/path_provider.dart';
import 'package:sqflite/sqflite.dart';
class CartDatabase {
static final CartDatabase _instance = CartDatabase._();
static Database _database;
CartDatabase._();
factory CartDatabase() {
return _instance;
}
Future<Database> get db async {
if (_database != null) {
return _database;
}
_database = await init();
return _database;
}
Future<Database> init() async {
Directory directory = await getApplicationDocumentsDirectory();
String dbPath = join(directory.path, 'database1.db');
var database = openDatabase(dbPath,
version: 1, onCreate: _onCreate, onUpgrade: _onUpgrade);
return database;
}
void _onCreate(Database db, int version) {
db.execute('''
CREATE TABLE cart(
itemId INTEGER)
''');
print("Database was created!");
}
void _onUpgrade(Database db, int oldVersion, int newVersion) {
// Run migration according database versions
}
Future<int> addToCart(int itemId) async {
var client = await db;
return client.rawInsert('INSERT INTO cart(itemID) VALUES ($itemId)');
}
Future<List<int>> fetchAll() async {
var client = await db;
var res = await client.query('cart');
List<int> itemIds = [];
if (res.isNotEmpty) {
res.forEach((element) {
itemIds.add(element['itemId']);
});
return itemIds;
}
return itemIds;
}
Future<void> removeFromCart(int itemId) async {
var client = await db;
return client.delete('cart', where: 'itemId = ?', whereArgs: [itemId]);
}
Future<void> emptyCart() async {
var client = await db;
return client.delete('cart');
}
Future closeDb() async {
var client = await db;
client.close();
}
}
|
package lacour.vincent.calculadora
import net.objecthunter.exp4j.ExpressionBuilder
import java.lang.Exception
class Evaluator(var label: String) {
fun addElement(element: Char): String {
this.label += element
return this.label
}
fun removeLastElement(): String {
this.label = label.dropLast(1)
return this.label
}
fun clear(): String {
this.label = ""
return this.label
}
fun eval(): String {
try {
val convertedLabel = this.label
.replace("÷", "/")
.replace("×", "*")
.replace("%", "*0.01")
val expression = ExpressionBuilder(convertedLabel).build()
val res: Double = expression.evaluate()
return res.toString()
} catch (e: Exception) {
return "Error"
} finally {
this.label = ""
}
}
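    // Illustrative note (added, not part of the original class):
    //   Evaluator("12×3÷4").eval() returns "9.0"; the label is reset to ""
    //   after every eval() call because of the finally block above.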
} |
# frozen_string_literal: true
ENV['RAILS_ENV'] ||= 'test'
require_relative '../config/environment'
require 'rails/test_help'
require 'webmock/minitest'
require 'info_server'
require 'simplecov'
SimpleCov.start 'rails' do
add_filter 'Rakefile'
add_filter '.rake'
add_filter '/app/controllers/graphql_controller.rb'
add_filter '/app/controllers/overrides'
add_filter '/app/graphql/types'
add_filter '/app/graphql/subscriptions'
add_filter '/app/jobs/'
add_filter '/app/mailers/'
add_filter '/app/middleware/response_logger_middleware.rb'
add_filter '/app/models/application_record.rb'
add_filter '/bin/'
add_filter '/lib/scheduler.rb'
end
puts 'Starting SimpleCov'
require 'cancancan'
module ActiveSupport
class TestCase
include FactoryBot::Syntax::Methods
include ActionMailer::TestHelper
setup :database_fixture
INFO_SERVER_NAME = Rails.configuration.nonces['info_server_name']
def info_server_headers(method, path, payload)
next_nonce = InfoServer.current_nonce + 1
{ 'ACCESS-NONCE': next_nonce,
'ACCESS-SIGN': InfoServer.access_signature(
method,
path,
next_nonce,
payload
),
'CONTENT-TYPE': 'application/json' }
end
def auth_headers(eth_key)
user = User.find_by(address: eth_key.address.downcase)
challenge = create(:user_challenge, user_id: user.id)
params = {
address: user.address,
challenge_id: challenge.id,
signature: eth_key.personal_sign(challenge.challenge),
message: challenge.challenge
}
put authorization_path, params: params
::JSON.parse(@response.body)
.fetch('result', {})
.slice('access-token', 'client', 'uid')
end
def create_auth_user(**kwargs)
key = Eth::Key.new
user = create(:user, address: key.address, **kwargs)
[user, auth_headers(key), key]
end
def email_fixture
ActionMailer::Base.deliveries.clear
end
def database_fixture
ActiveRecord::Base.transaction do
Transaction.delete_all
CommentLike.delete_all
ProposalLike.delete_all
Proposal.delete_all
Comment.delete_all
CommentHierarchy.delete_all
Challenge.delete_all
Kyc.delete_all
Group.delete_all
User.delete_all
Nonce.delete_all
create(:server_nonce, server: Rails.configuration.nonces['info_server_name'])
create(:server_nonce, server: Rails.configuration.nonces['self_server_name'])
create(:group, name: Group.groups[:kyc_officer])
create(:group, name: Group.groups[:forum_admin])
end
end
def info_get(path, payload: {}, headers: {}, **kwargs)
info_path = "#{path}?payload=#{payload}"
info_headers = info_server_headers('GET', info_path, payload)
get(info_path,
headers: headers.merge(info_headers),
**kwargs)
end
def info_post(path, payload: {}, headers: {}, **kwargs)
info_headers = info_server_headers('POST', path, payload)
post(path,
params: { payload: payload }.to_json,
headers: headers.merge(info_headers),
env: { 'RAW_POST_DATA' => { payload: payload }.to_json },
**kwargs)
end
def info_put(path, payload: {}, headers: {}, **kwargs)
info_headers = info_server_headers('PUT', path, payload)
put(path,
params: { payload: payload }.to_json,
headers: headers.merge(info_headers),
env: { 'RAW_POST_DATA' => { payload: payload }.to_json },
**kwargs)
end
def info_delete(path, payload: {}, headers: {}, **kwargs)
info_headers = info_server_headers('DELETE', path, payload)
delete(path,
params: { payload: payload }.to_json,
headers: headers.merge(info_headers),
env: { 'RAW_POST_DATA' => { payload: payload }.to_json },
**kwargs)
end
def assert_self_nonce_increased
current_nonce = InfoServer.current_nonce
yield
assert_operator InfoServer.current_nonce, :>, current_nonce
end
end
end
|
FactoryGirl.define do
factory :delivery_organisation do
sequence(:natural_key) { |n| 'D%05d' % n }
name 'Government Delivery Organisation'
website 'http://example.com/government-delivery-organisation'
department
end
end
|
#!/usr/bin/env bash
#############################
# Include scripts
#############################
source /bootstrap/configuration.sh
source /bootstrap/environment.sh
source /bootstrap/semaphore-dependencies.sh
check_dependency "keystone"
#############################
# variables and environment
#############################
get_environment
SQL_SCRIPT=/bootstrap/keystone.sql
############################
# CONFIGURE KEYSTONE
############################
# call to the function from configuration.sh
re_write_file "/controller/keystone/keystone.conf" "/etc/keystone/"
fix_configs $SQL_SCRIPT
############################
# DATABASE BOOTSTRAP
############################
mkdir /etc/keystone/fernet-keys
chmod 0750 /etc/keystone/fernet-keys/
echo "xRFeIEUineSD9EnHlraby90RAxIkekN_ZdGNhdZ2u3M=">/etc/keystone/fernet-keys/0
if ! does_db_exist keystone; then
# create database keystone
mysql -uroot -p$MYSQL_ROOT_PASSWORD -h $MYSQL_HOST <$SQL_SCRIPT
# Populate the Identity service database
keystone-manage db_sync
# Initialize Fernet keys
#keystone-manage fernet_setup --keystone-user root --keystone-group root
mv /etc/keystone/default_catalog.templates /etc/keystone/default_catalog
# start keystone service and wait
uwsgi --http 0.0.0.0:35357 --wsgi-file $(which keystone-wsgi-admin) &
sleep 5
# Initialize account
    export OS_TOKEN=$ADMIN_TOKEN
openstack service create --name keystone --description "Openstack Identity" identity
openstack endpoint create --region $REGION identity public https://$KEYSTONE_OFUSCADO/v3
openstack endpoint create --region $REGION identity internal http://$KEYSTONE_HOSTNAME:5000/v3
openstack endpoint create --region $REGION identity admin http://$KEYSTONE_HOSTNAME:35357/v3
openstack domain create --description "Default Domain" default
openstack project create --domain default --description "Admin Project" admin
openstack project create --domain default --description "Service Project" services
openstack user create --domain default --password $ADMIN_PASSWORD admin
openstack role create admin
openstack role create user
openstack role add --project admin --user admin admin
    unset OS_TOKEN
fi
#############################
# Write openrc to disk
#############################
cat >~/openrc <<EOF
export OS_PROJECT_DOMAIN_NAME=default
export OS_USER_DOMAIN_NAME=default
export OS_PROJECT_NAME=admin
export OS_USERNAME=admin
export OS_PASSWORD=$ADMIN_PASSWORD
export OS_AUTH_URL=http://$KEYSTONE_HOSTNAME:35357/v3
export OS_IDENTITY_API_VERSION=3
export OS_IMAGE_API_VERSION=2
export OS_INTERFACE=internal
EOF
#############################
# reboot services
#############################
pkill uwsgi
sleep 5
uwsgi --http 0.0.0.0:5000 --wsgi-file $(which keystone-wsgi-public) &
sleep 5
uwsgi --http 0.0.0.0:35357 --wsgi-file $(which keystone-wsgi-admin)
|
using System.Collections.Generic;
using System.Threading.Tasks;
using Newtonsoft.Json.Linq;
namespace Strive.Core.Services.Permissions
{
public interface IPermissionLayersAggregator
{
ValueTask<Dictionary<string, JValue>> FetchAggregatedPermissions(Participant participant);
ValueTask<List<PermissionLayer>> FetchParticipantPermissionLayers(Participant participant);
}
}
|
package org.dbtools.licensemanager
import org.gradle.api.tasks.Input
import org.gradle.api.tasks.Optional
import javax.inject.Inject
/**
* Extension for LicenseManager plugin
*/
open class LicenseManagerExtension @Inject constructor(
defaultOutputDir: String
) {
@get:Optional
@get:Input
var excludeArtifactIds: List<String> = emptyList()
@get:Optional
@get:Input
var excludeGroups: List<String> = emptyList()
/**
* Default output directory
*/
@get:Optional
@get:Input
var outputDirs: List<String> = listOf(defaultOutputDir)
/**
* Name of file without extension
*/
@get:Input
var outputFilename: String = "licenses"
@get:Optional
@get:Input
var variantName: String? = null
@get:Input
var createHtmlReport: Boolean = true
@get:Input
var createJsonReport: Boolean = false
@get:Input
var createCsvReport: Boolean = false
} |
//------------------------------------------------------------------------------
// <auto-generated>
// This code was generated by Entitas.CodeGeneration.Plugins.ComponentLookupGenerator.
//
// Changes to this file may cause incorrect behavior and will be lost if
// the code is regenerated.
// </auto-generated>
//------------------------------------------------------------------------------
public static class GameComponentsLookup {
public const int Asset = 0;
public const int AssetLoaded = 1;
public const int Block = 2;
public const int Color = 3;
public const int ColorListener = 4;
public const int Combo = 5;
public const int Destroyed = 6;
public const int Element = 7;
public const int ElementType = 8;
public const int Exsplosive = 9;
public const int FieldMoved = 10;
public const int GameDestroyedListener = 11;
public const int Id = 12;
public const int InCombo = 13;
public const int Matched = 14;
public const int Movable = 15;
public const int Position = 16;
public const int PositionListener = 17;
public const int RestartHappened = 18;
public const int Reward = 19;
public const int Selected = 20;
public const int SelectedListener = 21;
public const int SelectedRemovedListener = 22;
public const int SelectionId = 23;
public const int TotalComponents = 24;
public static readonly string[] componentNames = {
"Asset",
"AssetLoaded",
"Block",
"Color",
"ColorListener",
"Combo",
"Destroyed",
"Element",
"ElementType",
"Exsplosive",
"FieldMoved",
"GameDestroyedListener",
"Id",
"InCombo",
"Matched",
"Movable",
"Position",
"PositionListener",
"RestartHappened",
"Reward",
"Selected",
"SelectedListener",
"SelectedRemovedListener",
"SelectionId"
};
public static readonly System.Type[] componentTypes = {
typeof(AssetComponent),
typeof(AssetLoadedComponent),
typeof(BlockComponent),
typeof(ColorComponent),
typeof(ColorListenerComponent),
typeof(ComboComponent),
typeof(DestroyedComponent),
typeof(ElementComponent),
typeof(ElementTypeComponent),
typeof(ExsplosiveComponent),
typeof(FieldMovedComponent),
typeof(GameDestroyedListenerComponent),
typeof(IdComponent),
typeof(InComboComponent),
typeof(MatchedComponent),
typeof(MovableComponent),
typeof(PositionComponent),
typeof(PositionListenerComponent),
typeof(RestartHappenedComponent),
typeof(RewardComponent),
typeof(SelectedComponent),
typeof(SelectedListenerComponent),
typeof(SelectedRemovedListenerComponent),
typeof(SelectionIdComponent)
};
}
|
--FILE--
<?php
require \implode(\DIRECTORY_SEPARATOR, [__DIR__, '..', '..', 'vendor', 'autoload.php']);
function throwException()
{
defer($_, function () {
echo "after exception\n";
});
echo "before exception\n";
throw new \Exception('My exception');
}
try {
throwException();
} catch (\Exception $e) {
echo "exception has been caught\n";
}
?>
--EXPECT--
before exception
after exception
exception has been caught
|
# frozen_string_literal: true
#
# A presenter which, for a patient, displays any HD preferences (for example when or where they
# have HD) that do not match their current HD profile.
#
require_dependency "renalware/hd"
module Renalware
module HD
class UnmetPreferencesPresenter
include ActionView::Helpers
COMMON_ATTRIBUTES = %i(schedule_definition other_schedule hospital_unit).freeze
delegate(*COMMON_ATTRIBUTES, to: :hd_preference_set, prefix: :preferred, allow_nil: true)
delegate(*COMMON_ATTRIBUTES, to: :hd_profile, prefix: :current, allow_nil: true)
delegate(:notes, :entered_on, to: :hd_preference_set)
delegate(:to_s, :to_param, :hd_profile, :hd_preference_set, to: :patient)
def initialize(patient)
@patient = patient
end
# Returns the HD::PreferenceSet setting if it differs from that in the HD::Profile
# If the preference is unmet, wrap in a <b> tag. Yield the value so the template
# has a chance to format it before it is wrapped.
def preferred(attribute)
value = public_send(:"preferred_#{attribute}")
value = yield(value) if block_given?
return value if preference_satisfied?(attribute)
tag.b(value)
end
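      # Illustrative usage only (added; assumes the hospital unit responds to #name):
      #   presenter = UnmetPreferencesPresenter.new(patient)
      #   presenter.preferred(:hospital_unit) { |unit| unit&.name }
      # renders the preferred unit name, wrapped in a <b> tag when it differs
      # from the current HD profile.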
def preference_satisfied?(attribute)
preferred = public_send(:"preferred_#{attribute}")
current = public_send(:"current_#{attribute}")
return true if preferred.blank? || preferred == current
false
end
private
attr_reader :patient
end
end
end
|
const evaluate = require("../src/evaluator");
describe("Evaluating primitives", () => {
test("It should properly evaluate a number", () => {
expect(evaluate(JSON.stringify(42))).toEqual(42);
expect(evaluate(JSON.stringify(3.1415))).toEqual(3.1415);
});
test("It should properly evaluate a string", () => {
expect(evaluate(JSON.stringify("hello"))).toEqual("hello");
});
test("It should properly evaluate a boolean", () => {
expect(evaluate(JSON.stringify(true))).toEqual(true);
expect(evaluate(JSON.stringify(false))).toEqual(false);
});
test("It should properly evaluate null", () => {
expect(evaluate(JSON.stringify(null))).toEqual(null);
});
});
describe("Evaluating arrays", () => {
test("Evaluate an empty array", () => {
expect(evaluate(JSON.stringify([]))).toEqual([]);
});
test("Evaluate one-dimensional arrays", () => {
expect(evaluate(JSON.stringify([1, 2, 3]))).toEqual([1, 2, 3]);
expect(evaluate(JSON.stringify(["a", "b", "c"]))).toEqual(["a", "b", "c"]);
});
test("Evaluate two-dimensional arrays", () => {
const json = JSON.stringify([
[1, 2, 3],
[4, 5, 6],
[7, 8, 9],
]);
const arr = [
[1, 2, 3],
[4, 5, 6],
[7, 8, 9],
];
expect(evaluate(json)).toEqual(arr);
});
});
describe("Evaluating objects", () => {
test("Evaluate an empty object", () => {
expect(evaluate(JSON.stringify({}))).toEqual({});
});
test("Evaluate a simple object", () => {
const json = JSON.stringify({ a: "hi", b: 42 });
expect(evaluate(json)).toEqual({ a: "hi", b: 42 });
});
test("Evaluate an object with an array for a value", () => {
const json = JSON.stringify({ arr: [1, 2, 3], test: "hello" });
expect(evaluate(json)).toEqual({ arr: [1, 2, 3], test: "hello" });
});
test("Evaluate nested objects", () => {
const json = JSON.stringify({
obj: { x: 47, y: 42 },
a: true,
hi: "there",
});
expect(evaluate(json)).toEqual({
obj: { x: 47, y: 42 },
a: true,
hi: "there",
});
});
test("Evaluate a complex object", () => {
const json = JSON.stringify({
obj: {
nested: true,
obj2: { desc: "This is a nested nested object" },
arr: [1, 2, 3, 4],
},
nil: null,
});
expect(evaluate(json)).toEqual({
obj: {
nested: true,
obj2: { desc: "This is a nested nested object" },
arr: [1, 2, 3, 4],
},
nil: null,
});
});
});
|
-- begin SAMPLER_CUSTOMER
create table SAMPLER_CUSTOMER (
ID varchar(36) not null,
VERSION integer not null,
CREATE_TS timestamp,
CREATED_BY varchar(50),
UPDATE_TS timestamp,
UPDATED_BY varchar(50),
DELETE_TS timestamp,
DELETED_BY varchar(50),
--
NAME varchar(50) not null,
LAST_NAME varchar(100) not null,
AGE integer,
ACTIVE boolean not null,
GRADE integer,
AVATAR_ID varchar(36),
--
primary key (ID)
)^
-- end SAMPLER_CUSTOMER
-- begin SAMPLER_ORDER
create table SAMPLER_ORDER (
ID varchar(36) not null,
CREATE_TS timestamp,
CREATED_BY varchar(50),
VERSION integer,
UPDATE_TS timestamp,
UPDATED_BY varchar(50),
DELETE_TS timestamp,
DELETED_BY varchar(50),
--
CUSTOMER_ID varchar(36),
DATE_ date not null,
AMOUNT decimal(19, 2),
DESCRIPTION varchar(255),
--
primary key (ID)
)^
-- end SAMPLER_ORDER
-- begin SAMPLER_TASK
create table SAMPLER_TASK (
ID varchar(36) not null,
CREATE_TS timestamp,
CREATED_BY varchar(50),
VERSION integer,
UPDATE_TS timestamp,
UPDATED_BY varchar(50),
DELETE_TS timestamp,
DELETED_BY varchar(50),
--
NAME varchar(255) not null,
DUE_DATE timestamp,
ASSIGNEE_ID varchar(36),
PARENT_TASK_ID varchar(36),
--
primary key (ID)
)^
-- end SAMPLER_TASK
-- begin SAMPLER_PRODUCT
create table SAMPLER_PRODUCT (
ID varchar(36) not null,
VERSION integer not null,
CREATE_TS timestamp,
CREATED_BY varchar(50),
UPDATE_TS timestamp,
UPDATED_BY varchar(50),
DELETE_TS timestamp,
DELETED_BY varchar(50),
--
NAME varchar(255) not null,
PRICE decimal(19, 2) not null,
--
primary key (ID)
)^
-- end SAMPLER_PRODUCT
-- begin SAMPLER_INVOICE_ITEM
create table SAMPLER_INVOICE_ITEM (
ID varchar(36) not null,
CREATE_TS timestamp,
CREATED_BY varchar(50),
VERSION integer,
UPDATE_TS timestamp,
UPDATED_BY varchar(50),
DELETE_TS timestamp,
DELETED_BY varchar(50),
--
PRODUCT_ID varchar(36),
QUANTITY decimal(19, 3),
ORDER_ID varchar(36),
--
primary key (ID)
)^
-- end SAMPLER_INVOICE_ITEM
-- begin SAMPLER_DATE_VALUE
create table SAMPLER_DATE_VALUE (
ID varchar(36) not null,
CREATE_TS timestamp,
CREATED_BY varchar(50),
VERSION integer,
UPDATE_TS timestamp,
UPDATED_BY varchar(50),
DELETE_TS timestamp,
DELETED_BY varchar(50),
--
DATE_ date not null,
VALUE_ integer not null,
--
primary key (ID)
)^
-- end SAMPLER_DATE_VALUE
-- begin SAMPLER_TRANSPORT_COUNT
create table SAMPLER_TRANSPORT_COUNT (
ID varchar(36) not null,
CREATE_TS timestamp,
CREATED_BY varchar(50),
VERSION integer,
UPDATE_TS timestamp,
UPDATED_BY varchar(50),
DELETE_TS timestamp,
DELETED_BY varchar(50),
--
YEAR_ integer not null,
CARS integer not null,
MOTORCYCLES integer not null,
BICYCLES integer not null,
--
primary key (ID)
)^
-- end SAMPLER_TRANSPORT_COUNT
-- begin SAMPLER_INCOME_EXPENSES
create table SAMPLER_INCOME_EXPENSES (
ID varchar(36) not null,
VERSION integer not null,
CREATE_TS timestamp,
CREATED_BY varchar(50),
UPDATE_TS timestamp,
UPDATED_BY varchar(50),
DELETE_TS timestamp,
DELETED_BY varchar(50),
--
YEAR_ integer not null,
INCOME double precision not null,
EXPENSES double precision not null,
ALPHA double precision,
DASH_LENGTH_LINE integer,
DASH_LENGTH_COLUMN integer,
ADDITIONAL varchar(255),
--
primary key (ID)
)^
-- end SAMPLER_INCOME_EXPENSES
-- begin SAMPLER_COUNTRY_GROWTH
create table SAMPLER_COUNTRY_GROWTH (
ID varchar(36) not null,
CREATE_TS timestamp,
CREATED_BY varchar(50),
VERSION integer,
UPDATE_TS timestamp,
UPDATED_BY varchar(50),
DELETE_TS timestamp,
DELETED_BY varchar(50),
--
COUNTRY varchar(255) not null,
FLAG varchar(255),
YEAR2014 double precision not null,
YEAR2015 double precision not null,
--
primary key (ID)
)^
-- end SAMPLER_COUNTRY_GROWTH
-- begin SAMPLER_POINT_PAIR
create table SAMPLER_POINT_PAIR (
ID varchar(36) not null,
CREATE_TS timestamp,
CREATED_BY varchar(50),
VERSION integer,
UPDATE_TS timestamp,
UPDATED_BY varchar(50),
DELETE_TS timestamp,
DELETED_BY varchar(50),
--
AX double precision not null,
AY double precision not null,
BX double precision not null,
BY_ double precision not null,
--
primary key (ID)
)^
-- end SAMPLER_POINT_PAIR
-- begin SAMPLER_COUNTRY_LITRES
create table SAMPLER_COUNTRY_LITRES (
ID varchar(36) not null,
CREATE_TS timestamp,
CREATED_BY varchar(50),
VERSION integer,
UPDATE_TS timestamp,
UPDATED_BY varchar(50),
DELETE_TS timestamp,
DELETED_BY varchar(50),
--
COUNTRY varchar(255) not null,
LITRES double precision not null,
--
primary key (ID)
)^
-- end SAMPLER_COUNTRY_LITRES
-- begin SAMPLER_TITLE_VALUE
create table SAMPLER_TITLE_VALUE (
ID varchar(36) not null,
CREATE_TS timestamp,
CREATED_BY varchar(50),
VERSION integer,
UPDATE_TS timestamp,
UPDATED_BY varchar(50),
DELETE_TS timestamp,
DELETED_BY varchar(50),
--
TITLE varchar(255) not null,
VALUE_ integer not null,
--
primary key (ID)
)^
-- end SAMPLER_TITLE_VALUE
-- begin SAMPLER_SEGMENT
create table SAMPLER_SEGMENT (
ID varchar(36) not null,
CREATE_TS timestamp,
CREATED_BY varchar(50),
VERSION integer,
UPDATE_TS timestamp,
UPDATED_BY varchar(50),
DELETE_TS timestamp,
DELETED_BY varchar(50),
--
START_ integer,
DURATION integer,
COLOR varchar(255),
TASK_ varchar(255),
TASK_SPAN_ID varchar(36),
INDEX_ integer,
--
primary key (ID)
)^
-- end SAMPLER_SEGMENT
-- begin SAMPLER_TASK_SPAN
create table SAMPLER_TASK_SPAN (
ID varchar(36) not null,
CREATE_TS timestamp,
CREATED_BY varchar(50),
VERSION integer,
UPDATE_TS timestamp,
UPDATED_BY varchar(50),
DELETE_TS timestamp,
DELETED_BY varchar(50),
--
CATEGORY varchar(255),
--
primary key (ID)
)^
-- end SAMPLER_TASK_SPAN
-- begin SAMPLER_DATE_TASK_SPAN
create table SAMPLER_DATE_TASK_SPAN (
ID varchar(36) not null,
VERSION integer not null,
CREATE_TS timestamp,
CREATED_BY varchar(50),
UPDATE_TS timestamp,
UPDATED_BY varchar(50),
DELETE_TS timestamp,
DELETED_BY varchar(50),
--
CATEGORY varchar(255),
--
primary key (ID)
)^
-- end SAMPLER_DATE_TASK_SPAN
-- begin SAMPLER_DATE_SEGMENT
create table SAMPLER_DATE_SEGMENT (
ID varchar(36) not null,
VERSION integer not null,
CREATE_TS timestamp,
CREATED_BY varchar(50),
UPDATE_TS timestamp,
UPDATED_BY varchar(50),
DELETE_TS timestamp,
DELETED_BY varchar(50),
--
START_ date,
END_ date,
COLOR varchar(255),
TASK_ varchar(255),
TASK_SPAN_ID varchar(36),
--
primary key (ID)
)^
-- end SAMPLER_DATE_SEGMENT
-- begin SAMPLER_CALENDAR_EVENT
create table SAMPLER_CALENDAR_EVENT (
ID varchar(36) not null,
VERSION integer not null,
CREATE_TS timestamp,
CREATED_BY varchar(50),
UPDATE_TS timestamp,
UPDATED_BY varchar(50),
DELETE_TS timestamp,
DELETED_BY varchar(50),
--
CAPTION varchar(255),
DESCRIPTION varchar(255),
START_DATE timestamp,
END_DATE timestamp,
STYLENAME varchar(255),
--
primary key (ID)
)^
-- end SAMPLER_CALENDAR_EVENT
-- begin SAMPLER_POINT_VALUE
create table SAMPLER_POINT_VALUE (
ID varchar(36) not null,
VERSION integer not null,
CREATE_TS timestamp,
CREATED_BY varchar(50),
UPDATE_TS timestamp,
UPDATED_BY varchar(50),
DELETE_TS timestamp,
DELETED_BY varchar(50),
--
X double precision,
Y double precision,
VALUE_ integer,
--
primary key (ID)
)^
-- end SAMPLER_POINT_VALUE
-- begin SAMPLER_COLOR
create table SAMPLER_COLOR (
ID varchar(36) not null,
VERSION integer not null,
CREATE_TS timestamp,
CREATED_BY varchar(50),
UPDATE_TS timestamp,
UPDATED_BY varchar(50),
DELETE_TS timestamp,
DELETED_BY varchar(50),
--
NAME varchar(255),
HEX varchar(255),
--
primary key (ID)
)^
-- end SAMPLER_COLOR
-- begin SAMPLER_TIP_INFO
create table SAMPLER_TIP_INFO (
ID varchar(36) not null,
VERSION integer not null,
CREATE_TS timestamp,
CREATED_BY varchar(50),
UPDATE_TS timestamp,
UPDATED_BY varchar(50),
DELETE_TS timestamp,
DELETED_BY varchar(50),
--
TOTAL_BILL decimal(19, 2) not null,
TIP decimal(19, 2) not null,
SMOKER boolean not null,
SIZE_ integer not null,
SEX varchar(50) not null,
DAY_ varchar(50) not null,
TIME_ varchar(50) not null,
--
primary key (ID)
)^
-- end SAMPLER_TIP_INFO
-- begin SAMPLER_COUNTRY_SALES
create table SAMPLER_COUNTRY_SALES (
ID varchar(36) not null,
VERSION integer not null,
CREATE_TS timestamp,
CREATED_BY varchar(50),
UPDATE_TS timestamp,
UPDATED_BY varchar(50),
DELETE_TS timestamp,
DELETED_BY varchar(50),
--
PRODUCT varchar(255),
CATEGORY varchar(255),
COUNTRY varchar(255),
SALES integer,
EXPENSE integer,
--
primary key (ID)
)^
-- end SAMPLER_COUNTRY_SALES
-- begin SAMPLER_EMPLOYEE
create table SAMPLER_EMPLOYEE (
ID varchar(36) not null,
VERSION integer not null,
CREATE_TS timestamp,
CREATED_BY varchar(50),
UPDATE_TS timestamp,
UPDATED_BY varchar(50),
DELETE_TS timestamp,
DELETED_BY varchar(50),
--
NAME varchar(255),
LAST_NAME varchar(255),
AGE integer,
DEPARTMENT integer,
EXPERIENCE integer,
SALARY decimal(19, 2),
--
primary key (ID)
)^
-- end SAMPLER_EMPLOYEE
-- begin SAMPLER_LOCATION
create table SAMPLER_LOCATION (
ID varchar(36) not null,
VERSION integer not null,
CREATE_TS timestamp,
CREATED_BY varchar(50),
UPDATE_TS timestamp,
UPDATED_BY varchar(50),
DELETE_TS timestamp,
DELETED_BY varchar(50),
--
NAME varchar(255),
    POINT varchar(100),
INTENSITY double precision,
--
primary key (ID)
)^
-- end SAMPLER_LOCATION
|
import {
generateId,
ADD_CARD_SUCCESS,
UPDATE_CARD_SUCCESS,
UPDATE_SELECTED_DECK,
GOTO_NEXT_CARD_INDEX,
GOTO_CARD_INDEX
} from '../actions'
import { combineReducers } from 'redux'
function decksReducer(decks = {}, action) {
if (action.type === ADD_CARD_SUCCESS) {
let card = action.payload.card
if (decks[card.deck]) {
let updatedCardList = [...decks[card.deck].cards, card]
let updatedDeck = Object.assign({}, decks[card.deck], {cards: updatedCardList})
return Object.assign({}, decks, {[card.deck]: updatedDeck})
} else {
// Initialize a new deck with an array of 1 card
let newDeck = {
cards: [card],
currentCardIndex: 0
}
return Object.assign({}, decks, {[card.deck]: newDeck})
}
}
if (action.type === UPDATE_CARD_SUCCESS) {
let card = action.payload.card
let updatedCards = decks[card.deck].cards.map((existingCard) => {
if (existingCard.id === card.id) {
return Object.assign({}, existingCard, card)
} else {
return existingCard
}
})
let updatedDeck = Object.assign({}, decks[card.deck], {cards: updatedCards})
return Object.assign({}, decks, {[action.payload.card.deck]: updatedDeck})
}
if (action.type === GOTO_NEXT_CARD_INDEX) {
let deckId = action.payload.deckId
let currentCardIndex = decks[deckId].currentCardIndex
let wrongOrUnansweredCards = decks[deckId].cards.filter((card) => {
// TODO: Here is where we could search for all the wrong or unanswered
// cards so that the next card we go to is not just one that has already
// been answered correctly. If ALL cards have been answered correctly then
// this should just go back to the very first card in the list and start
// all over again. See (possibly)
// http://rackt.org/redux/docs/recipes/ComputingDerivedData.html
})
let nextIndex = (decks[deckId].currentCardIndex + 1) % decks[deckId].cards.length
let updatedDeck = Object.assign({}, decks[deckId], {currentCardIndex: nextIndex})
return Object.assign({}, decks, {[deckId]: updatedDeck})
}
if (action.type === GOTO_CARD_INDEX) {
let deckId = action.payload.deckId
// In case cardIndex is out of range, take the mod of the number of cards
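    // e.g. with a 3-card deck, a requested cardIndex of 4 wraps to index 1 (4 % 3)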
let nextIndex = action.payload.cardIndex % decks[deckId].cards.length
let updatedDeck = Object.assign({}, decks[deckId], {currentCardIndex: nextIndex})
return Object.assign({}, decks, {[deckId]: updatedDeck})
}
// Assert: nothing to do
return decks
}
function selectedDeckReducer(selectedDeck = null, action) {
if (action.type === UPDATE_SELECTED_DECK) {
return action.payload
}
return selectedDeck
}
const rootReducer = combineReducers({
decks: decksReducer,
selectedDeck: selectedDeckReducer
})
export default rootReducer
|
from typing import List
from functools import partial
import torch
from torch import nn
from catalyst.metrics.functional import dice
class DiceLoss(nn.Module):
"""The Dice loss.
DiceLoss = 1 - dice score
    dice score = 2 * intersection / (intersection + union) = \
= 2 * tp / (2 * tp + fp + fn)
"""
def __init__(
self,
class_dim: int = 1,
mode: str = "macro",
weights: List[float] = None,
eps: float = 1e-7,
):
"""
Args:
            class_dim: indicates class dimension (K) for
                ``outputs`` and ``targets`` tensors (default = 1)
            mode: class summation strategy. Must be one of ['micro', 'macro',
                'weighted']. If mode='micro', classes are ignored and the metric
                is calculated globally. If mode='macro', the metric is
                calculated per class and then averaged over all classes.
                If mode='weighted', the metric is calculated per class and then
                summed over all classes with weights.
            weights: class weights (for mode="weighted")
eps: epsilon to avoid zero division
"""
super().__init__()
assert mode in ["micro", "macro", "weighted"]
self.loss_fn = partial(
dice,
eps=eps,
class_dim=class_dim,
threshold=None,
mode=mode,
weights=weights,
)
def forward(self, outputs: torch.Tensor, targets: torch.Tensor) -> torch.Tensor:
"""Calculates loss between ``logits`` and ``target`` tensors."""
dice_score = self.loss_fn(outputs, targets)
return 1 - dice_score
__all__ = ["DiceLoss"]
|
extern crate bio;
mod pairwise;
use pyo3::prelude::*;
use pyo3::wrap_pyfunction;
use bio::alignment::distance;
use crate::pairwise::{Aligner, Alignment};
#[pyfunction]
fn levenshtein(a: &str, b: &str) -> PyResult<u32> {
Ok(distance::levenshtein(a.as_bytes(), b.as_bytes()))
}
#[pymodule]
fn pyrustbio(_py: Python, m: &PyModule) -> PyResult<()> {
let pairwise_module = PyModule::new(_py, "pairwise")?;
pairwise_module.add_class::<Aligner>()?;
pairwise_module.add_class::<Alignment>()?;
let distance_module = PyModule::new(_py, "distance")?;
distance_module
.add_function(wrap_pyfunction!(levenshtein, distance_module)?)
.unwrap();
m.add_submodule(pairwise_module)?;
m.add_submodule(distance_module)?;
Ok(())
}
|
package com.j0rsa.cracker.tracker.service
import arrow.core.Either
import arrow.core.Right
import arrow.core.Left
import com.j0rsa.cracker.tracker.NotFound
import com.j0rsa.cracker.tracker.TrackerError
import com.j0rsa.cracker.tracker.handler.TagRow
import com.j0rsa.cracker.tracker.model.*
import com.j0rsa.cracker.tracker.repository.ActionRepository
import com.j0rsa.cracker.tracker.repository.HabitRepository
import com.j0rsa.cracker.tracker.repository.TagRepository
import com.j0rsa.cracker.tracker.repository.UserRepository
import org.jetbrains.exposed.sql.*
object TagService {
fun findAll(userId: UserId) = TagRepository.findAll(
userId
)
.map { it.toRow() }
fun createTagIfNotExist(user: User) = { tag: TagRow ->
TagRepository.findOneForUser(tag.name, user.idValue())
?: (TagRepository.findOne(tag.name)
?.also(addUserToTag(user))
?: Tag.new {
name = tag.name
users = SizedCollection(listOf(user))
})
}
fun update(userId: UserId, tag: TagRow): Either<TrackerError, TagRow> =
TagRepository.findOneByIdForUser(tag.id, userId)?.let { oldTag ->
val user = UserRepository.findOne(userId)!!
val newTag = createTagIfNotExist(user)(tag)
updateHabitTags(oldTag, newTag)
updateActionTags(oldTag, newTag)
oldTag.delete()
Right(newTag.toRow())
} ?: Left(NotFound)
private fun updateHabitTags(tag: Tag, newTag: Tag) {
val recordsToAddNewTag =
HabitRepository.findAllWithOneTagWithoutAnother(
tag.idValue(),
newTag.idValue()
)
recordsToAddNewTag.map { it.tags = SizedCollection(it.tags + newTag) }
}
private fun updateActionTags(tag: Tag, newTag: Tag) {
val recordsToAddNewTag =
ActionRepository.findAllWithOneTagWithoutAnother(
tag.idValue(),
newTag.idValue()
)
recordsToAddNewTag.map { it.tags = SizedCollection(it.tags + newTag) }
}
fun createTagsIfNotExist(user: User, tags: List<TagRow>) = tags.map(
createTagIfNotExist(user)
)
fun createTagsIfNotExist(userId: UserId, tags: List<TagRow>) =
createTagsIfNotExist(
UserRepository.findOne(
userId
)!!, tags
)
private fun addUserToTag(user: User) = { foundTag: Tag ->
foundTag.users = SizedCollection(foundTag.users + user)
}
} |
<?php
namespace App\Http\Controllers\Front\Blog;
use App\Http\Controllers\Controller;
use Illuminate\Http\Request;
use App\Models\Admin\WebsiteInfo;
class BlogController extends Controller
{
public function index(){
$model = new WebsiteInfo();
$model_data = $model->wherepermalink('blog')->wherestatus('0')->get();
return view('front.blogs.blogs',compact('model_data'));
}
public function blog_detail($id){
$model = new WebsiteInfo();
$model_data = $model->whereid($id)->get();
$blogs = $model->wherepermalink('blog')->wherestatus('0')->inRandomOrder()->limit(20)->get();
return view('front.blogs.blog_detail',compact('model_data','blogs'));
}
}
|
class CreatePalettes < ActiveRecord::Migration
def change
create_table :palettes do |t|
t.string :palette_name
t.string :color1
t.string :color2
t.string :color3
t.string :color4
t.string :color5
t.timestamps null: false
end
end
end
|
var expect = require('chai').expect;
var sinon = require('sinon');
module.exports = function(Bookshelf) {
describe('Model Registry', function() {
before(function() {
this._relation = sinon.spy(Bookshelf.Model.prototype, '_relation');
this.morphTo = sinon.spy(Bookshelf.Model.prototype, 'morphTo');
});
after(function() {
this._relation.restore();
this.morphTo.restore();
});
beforeEach(function () {
this._relation.reset();
this.morphTo.reset();
});
before(function() {
Bookshelf.plugin('registry');
});
describe('Registering Models', function() {
beforeEach(function() {
Bookshelf._models = {};
this.Model = Bookshelf.Model.extend({
tableName: 'records'
});
this.ModelObj = Bookshelf.model('Model', this.Model);
});
it('returns the registered model', function() {
expect(this.ModelObj).to.equal(this.Model);
});
it('assigns the model the name', function() {
expect(Bookshelf.model('Model')).to.equal(this.Model);
});
it('assigns the tableName', function() {
expect(Bookshelf.model('Model').prototype.tableName).to.equal('records');
});
it('throws when there is a name conflict', function() {
expect(Bookshelf.model.bind(Bookshelf, 'Model', Bookshelf.Model)).to.throw();
});
});
describe('Registering Models with plain object', function() {
var noop = function() {};
beforeEach(function() {
Bookshelf._models = {};
this.Model = Bookshelf.model('Model', {
tableName: 'records'
}, {
noop: noop
});
});
it('assigns the model the name', function() {
expect(Bookshelf.model('Model')).to.equal(this.Model);
});
it('assigns the tableName', function() {
expect(Bookshelf.model('Model').prototype.tableName).to.equal('records');
});
it('assigns static props', function() {
expect(Bookshelf.model('Model').noop).to.equal(noop);
});
it('throws when there is a name conflict', function() {
expect(Bookshelf.model.bind(Bookshelf, 'Model', Bookshelf.Model)).to.throw();
});
});
describe('Registering Collections', function() {
beforeEach(function() {
Bookshelf._collections = {};
this.Collection = Bookshelf.Collection.extend({
property: {}
});
this.collection = Bookshelf.collection('Collection', this.Collection);
});
it('returns the registered collection', function() {
expect(this.collection).to.equal(this.Collection);
});
it('gives the collection a name', function() {
expect(Bookshelf.collection('Collection')).to.equal(this.Collection);
});
it('throws when there is a name conflict', function() {
expect(Bookshelf.collection.bind(Bookshelf, 'Collection', Bookshelf.Collection)).to.throw();
});
});
describe('Custom Relations', function() {
beforeEach(function() {
var related = this.relatedModel = Bookshelf.model('Related', Bookshelf.Model.extend({
tableName: 'related'
}));
this.relatedCollection = Bookshelf.collection('CRelated', Bookshelf.Collection.extend({
property: {}
}));
var Model = Bookshelf.Model.extend({
_hasOne: function() {
return this.hasOne('Related');
},
_normalHasOne: function() {
return this.hasOne(related);
},
_hasMany: function() {
return this.hasMany('CRelated');
},
_morphTo: function() {
return this.morphTo('morphable', 'Related', 'Related');
},
throughTest: function() {
return this.hasMany('CRelated').through('Related');
}
});
this.model = new Model();
});
afterEach(function () {
delete Bookshelf._models;
delete Bookshelf._collections;
});
it('resolves a string name to a model', function() {
expect(this.model._hasOne().relatedData.target).to.equal(this.relatedModel);
});
it('falls back to a collection if no model is found', function() {
expect(this.model._hasMany().relatedData.target).to.equal(this.relatedCollection);
});
it('can still accept a model constructor', function() {
expect(this.model._normalHasOne().relatedData.target).to.equal(this.relatedModel);
});
it('applies the resolved model to the original method', function() {
this.model._hasOne();
expect(this._relation).to.have.been.calledWith('hasOne', this.relatedModel);
});
it('allows for *-through relations', function() {
var relation = this.model.throughTest();
expect(relation.relatedData.throughTableName).to.equal('related');
});
describe('morphTo', function() {
it('resolves all arguments', function() {
// Wrap in a try/catch because Bookshelf actually
          // evaluates morph targets and we don't care that the
// target is not a valid morph model
try {
this.model._morphTo();
} catch (e) {
expect(this.morphTo).to.have.been.calledWith('morphable', this.relatedModel, this.relatedModel);
}
});
});
});
describe('bookshelf.resolve', function() {
it('resolves the path to a model with a custom function', function() {
var one = Bookshelf.Model.extend({});
var two = Bookshelf.Model.extend({});
Bookshelf.resolve = function(name) {
return (name === 'one' ? one : name === 'two' ? two : void 0);
};
expect(Bookshelf.model('one')).to.equal(one);
expect(Bookshelf.model('two')).to.equal(two);
expect(Bookshelf.model('three')).to.equal(void 0);
});
});
});
};
|
// IGNORE_BACKEND: JS_IR
// EXPECTED_REACHABLE_NODES: 1110
package foo
fun box(): String {
var b: Byte = 0x7F
b++
if (b.toInt() != -0x80) return "fail1a: $b"
b--
if (b.toInt() != 0x7F) return "fail1b: $b"
var s: Short = 0x7FFF
s++
if (s.toInt() != -0x8000) return "fail2a: $s"
s--
if (s.toInt() != 0x7FFF) return "fail2b: $s"
var i: Int = 0x7FFFFFFF
i++
if (i != -0x80000000) return "fail3a: $i"
i--
if (i != 0x7FFFFFFF) return "fail3b: $i"
return "OK"
} |
docker_id="openmcp"
imagename="keti-coap-generator"
version="v1.1"
# make image
docker build -t $docker_id/$imagename:$version . && \
# push image
docker push $docker_id/$imagename:$version
|
part of 'images_bloc.dart';
abstract class ImagesState extends Equatable {
final String query;
const ImagesState(this.query);
}
class ImagesInitial extends ImagesState {
const ImagesInitial() : super("");
@override
List<Object> get props => [];
}
class ImagesLoading extends ImagesState {
const ImagesLoading({required String query}) : super(query);
@override
List<Object> get props => [];
}
class ImagesLoaded extends ImagesState {
final List<ImageData> images;
const ImagesLoaded({required this.images, required String query})
: super(query);
@override
List<Object?> get props => [images];
}
class ImagesError extends ImagesState {
const ImagesError({required String query}) : super(query);
@override
List<Object> get props => [];
}
|
package org.bouncycastle.cavp.test;
public interface DigestProcessor
{
void update(byte[] msg);
byte[] digest();
}
|
#!/usr/bin/env python3
from typing import List, Tuple
import re
import sys
from PIL import Image
import hashlib
in_image_dir = sys.argv[1]
out_image_dir = sys.argv[2]
parse_ocr_re = re.compile(r'^([^:]+):(\d+),(\d+),(\d+),(\d+):(.*)$')
page_limit = 24
def unquote(t):
return t.replace('_', ' ')
def clean(t):
return t.rstrip(' ')
def parse_ocr_item(item: str) -> Tuple[str, Tuple[int, int, int, int], str]:
m = parse_ocr_re.search(item)
if m:
return (m.group(1), (int(m.group(2)), int(m.group(3)), int(m.group(4)), int(m.group(5))), unquote(m.group(6)))
else:
raise "???"
def parse_ocr(line: str) -> List[Tuple[str, Tuple[int, int, int, int], str]]:
if line == '':
return []
return [parse_ocr_item(item) for item in line.split()]
def string_hash(s):
return hashlib.md5(s.encode('utf-8')).hexdigest()
def item_hash(doc_id, pair):
ix, _ = pair
    return string_hash(f'{doc_id}-{ix}')
def new_tuple(t, ix, v):
l = list(t)
l[ix] = v
return tuple(l)
def process_file(png_file, expected):
exp = parse_ocr(expected)
doc_id = png_file
doc_id = re.sub(r'\.png$', '', doc_id)
bbox_id = 1
img = Image.open(f'{in_image_dir}/{png_file}')
width, height = img.size
if page_limit is not None:
exp = [p[1] for p in sorted(
(sorted(enumerate(exp), key=lambda p: item_hash(doc_id, p)))[0:page_limit],
key=lambda p: p[0])]
for _, area, content in exp:
image_ok = True
if area[2] > width:
image_ok = False
if area[3] > height:
image_ok = False
if image_ok:
cropped_img = img.crop(area)
out_png = f'{out_image_dir}/{doc_id}-{bbox_id}.png'
cropped_img.save(out_png)
bbox_id += 1
print('\t'.join([out_png, clean(content)]))
else:
print(f'Something wrong with {doc_id}', file=sys.stderr)
for line in sys.stdin:
line = line.rstrip('\n')
png_file, expected = line.split('\t')
process_file(png_file, expected)
|
define(["require", "exports", "SeedModules.AngularUI/modules/boot"], function (require, exports, boot) {
"use strict";
exports.__esModule = true;
var settings = JSON.parse(document.getElementById('seed-ui').getAttribute('data-site'));
var ngTableDefaults = {
options: {},
schema: {},
params: {
count: settings.pageSize
},
settings: {
counts: settings.pageCounts.split(/[,?]/)
}
};
boot.value('SeedModules.AngularUI/modules/configs/ngTableDefaults', ngTableDefaults);
});
//# sourceMappingURL=ngTableDefaults.js.map |
package org.devshred.tracks.waypoints
import org.devshred.tracks.utils.getCoordinatesFromGoogleMapsLink
import org.devshred.tracks.waypoints.PoiType.FOOD
open class Buffet(coordinates: Coordinates) : CustomPointOfInterest(coordinates, name = "Buffet", type = FOOD) {
companion object {
fun fromRow(row: List<Any>): Buffet {
if ((row[10] as String).isEmpty()) return EmptyBuffet
return Buffet(getCoordinatesFromGoogleMapsLink(row[10] as String))
}
}
fun isEmptyBuffet() = this === EmptyBuffet
}
object EmptyBuffet : Buffet(coordinatesFromDouble(0.0, 0.0))
|
package path
import "testing"
var PrefixPairs = []struct {
A string
B string
}{
{"", ""},
{"foo", ""},
{"", "foo"},
{"foobar", "foobar"},
{"abcd", "abef"},
}
func BenchmarkCompareAfterPrefix(b *testing.B) {
for i := 0; i < b.N; i++ {
for _, pair := range PrefixPairs {
CompareAfterPrefix(pair.A, pair.B)
}
}
}
func BenchmarkCommonPrefixLen(b *testing.B) {
for i := 0; i < b.N; i++ {
for _, pair := range PrefixPairs {
CommonPrefixLen(pair.A, pair.B)
}
}
}
func BenchmarkCompareIgnoringPrefix(b *testing.B) {
for i := 0; i < b.N; i++ {
for _, pair := range PrefixPairs {
CompareIgnoringPrefix(pair.A, pair.B)
}
}
}
|