_id
stringlengths 64
64
| repository
stringlengths 6
84
| name
stringlengths 4
110
| content
stringlengths 0
248k
| license
null | download_url
stringlengths 89
454
| language
stringclasses 7
values | comments
stringlengths 0
74.6k
| code
stringlengths 0
248k
|
---|---|---|---|---|---|---|---|---|
5942f414236232f42895c82a555e6716db1a1c86907267f3da68777ea5b8c459 | feeley/etos | rt-gambit.scm | ; File: "rt-gambit.scm"
Copyright ( C ) 1999 , , , All Rights Reserved .
RunTime library for EtoS - Gambit version
(declare
(standard-bindings)
(extended-bindings)
(block)
(not safe)
(inlining-limit 900)
( inlining - limit 300 )
)
(##define-macro (gensymbol . rest)
`(string->symbol (string-append "$" (symbol->string (gensym ,@rest)))))
(##define-macro (continuation-save! v proc)
`(let ((v ,v) (proc ,proc))
(continuation-capture
(lambda (cont)
(vector-set! v 0 cont)
(proc v)))))
(##define-macro (continuation-restore v val)
`(continuation-return (vector-ref ,v 0) ,val))
(##define-macro (erl-fix? x) `(##fixnum? ,x))
(##define-macro (erl-sub? x) `(##subtyped? ,x))
(##define-macro (erl-spc? x) `(##special? ,x))
(##define-macro (erl-con? x) `(##pair? ,x))
; We can assume here that (erl-sub? x) is true...
(##define-macro (erl-big? x) `(##subtyped.bignum? ,x))
(##define-macro (erl-flo? x) `(##subtyped.flonum? ,x))
(##define-macro (erl-ato? x) `(##subtyped.symbol? ,x))
(##define-macro (erl-vec? x) `(##subtyped.vector? ,x))
(##define-macro (erl-chr? x) `(##char? ,x))
(##define-macro (erl-nil? x) `(##null? ,x))
(##define-macro (erl-int? x)
`(let ((a ,x)) (or (erl-fix? a) (and (erl-sub? a) (erl-big? a)))))
(##define-macro (erl-num? x)
`(let ((a ,x)) (or (erl-fix? a)
(and (erl-sub? a) (or (erl-flo? a) (erl-big? a))))))
(##define-macro (big.= x y) `(##bignum.= ,x ,y))
(##define-macro (erl-big=k x k) `(big.= ,x ,k))
(##define-macro (flo.= x y) `(##flonum.= ,x ,y))
(##define-macro (erl-flo=k x k) `(flo.= ,x ,k))
(##define-macro (fix.= x y) `(##fixnum.= ,x ,y))
(##define-macro (erl-fix=k x k) `(fix.= ,x ,k))
(##define-macro (ato.= x y) `(eq? ,x ,y))
(##define-macro (erl-ato=k x k) `(ato.= ,x ,k))
(##define-macro (chr.= x y) `(eq? ,x ,y))
(##define-macro (erl-chr=k x k) `(chr.= ,x ,k))
(##define-macro (num.= x y) `(= ,x ,y))
(##define-macro (fix.< x y) `(##fixnum.< ,x ,y))
(##define-macro (big.< x y) `(##bignum.< ,x ,y))
(##define-macro (flo.< x y) `(##flonum.< ,x ,y))
(##define-macro (num.< x y) `(< ,x ,y))
(##define-macro (fix.u+ x y) `(##fixnum.+ ,x ,y))
(##define-macro (big.+ x y) `(##bignum.+ ,x ,y))
(##define-macro (flo.+ x y) `(##flonum.+ ,x ,y))
(##define-macro (num.+ x y) `(+ ,x ,y))
(##define-macro (fix.u- x y) `(##fixnum.- ,x ,y))
(##define-macro (big.- x y) `(##bignum.- ,x ,y))
(##define-macro (flo.- x y) `(##flonum.- ,x ,y))
(##define-macro (num.- x y) `(- ,x ,y))
(##define-macro (fix.bor x y) `(##fixnum.bitwise-ior ,x ,y))
(##define-macro (fix.bxor x y) `(##fixnum.bitwise-xor ,x ,y))
(##define-macro (fix.band x y) `(##fixnum.bitwise-and ,x ,y))
(##define-macro (fix.bnot x) `(##fixnum.bitwise-not ,x))
(##define-macro (fix.u* x y) `(##fixnum.* ,x ,y))
(##define-macro (big.* x y) `(##bignum.* ,x ,y))
(##define-macro (flo.* x y) `(##flonum.* ,x ,y))
(##define-macro (num.* x y) `(* ,x ,y))
(##define-macro (flo./ x y) `(##flonum./ ,x ,y))
(##define-macro (fix.div x y) `(##fixnum.quotient ,x ,y))
(##define-macro (big.div x y) `(##bignum.quotient ,x ,y))
(##define-macro (fix.rem x y) `(##fixnum.remainder ,x ,y))
(##define-macro (big.rem x y) `(##bignum.remainder ,x ,y))
(##define-macro (fix.mod x y) `(##fixnum.modulo ,x ,y))
(##define-macro (big.mod x y) `(##bignum.modulo ,x ,y))
(##define-macro (fix.even? x) `(##fixnum.even? ,x))
;; Full type tests
(##define-macro (erl-fixnum? x) `(##fixnum? ,x))
(##define-macro (erl-bignum? x) `(##bignum? ,x))
(##define-macro (erl-flonum? x) `(##flonum? ,x))
(##define-macro (erl-atom? x) `(##symbol? ,x))
(##define-macro (erl-byte? x)
`(let ((a ,x))
(and (erl-fix? a) (fix.< a 256) (fix.< -1 a))))
(##define-macro (erl-boolean? x)
`(let ((a ,x)) (or (erl-ato=k a 'true) (erl-ato=k a 'false))))
(##define-macro (erl-cons? x) `(erl-con? ,x))
(##define-macro (erl-char? x) `(##char? ,x))
;; Longer arithmetic macros
(##define-macro (if-fix? x y z) (if (##fixnum? x) y z))
(##define-macro (if-int? x y z) (if (or (##fixnum? x) (##bignum? x)) y z))
(##define-macro (if-zero-fix? x y z) (if (and (##fixnum? x) (= x 0)) y z))
(##define-macro (if-pos-fix? x y z) (if (and (##fixnum? x) (> x 0)) y z))
(##define-macro (if-neg-fix? x y z) (if (and (##fixnum? x) (< x 0)) y z))
(##define-macro (if-non-neg-fix? x y z) (if (and (##fixnum? x) (>= x 0)) y z))
(##define-macro (fixnum-specialized-=:= x y general-case)
`(if-fix? ,x
(fix.= ,x ,y)
(if-fix? ,y
(fix.= ,x ,y)
(let ((a ,x) (b ,y))
(if (or (erl-fix? a) (erl-fix? b))
(fix.= a b)
(,general-case a b))))))
(##define-macro (fixnum-specialized-< x y general-case)
`(if-fix? ,x
(let ((b ,y))
(if (erl-fix? b) (fix.< ,x b) (,general-case ,x b)))
(if-fix? ,y
(let ((a ,x))
(if (erl-fix? a) (fix.< a ,y) (,general-case a ,y)))
(let ((a ,x) (b ,y))
(if (and (erl-fix? a) (erl-fix? b))
(fix.< a b)
(,general-case a b))))))
(##define-macro (fixnum-specialized-+ x y general-case)
`(if-zero-fix? ,x
(let ((b ,y))
(if (erl-fix? b)
b
(,general-case ,x b)))
(if-pos-fix? ,x
(let ((b ,y))
(if (and (erl-fix? b) (fix.< b (fix.u+ ,x b)))
(fix.u+ ,x b)
(,general-case ,x b)))
(if-neg-fix? ,x
(let ((b ,y))
(if (and (erl-fix? b) (fix.< (fix.u+ ,x b) b))
(fix.u+ ,x b)
(,general-case ,x b)))
(if-zero-fix? ,y
(let ((a ,x))
(if (erl-fix? a)
a
(,general-case a ,y)))
(if-pos-fix? ,y
(let ((a ,x))
(if (and (erl-fix? a) (fix.< a (fix.u+ a ,y)))
(fix.u+ a ,y)
(,general-case a ,y)))
(if-neg-fix? ,y
(let ((a ,x))
(if (and (erl-fix? a) (fix.< (fix.u+ a ,y) a))
(fix.u+ a ,y)
(,general-case a ,y)))
(let ((a ,x) (b ,y))
(if (and (erl-fix? a)
(erl-fix? b)
(or (fix.< (fix.bxor a b) 0)
(not (fix.< (fix.bxor (fix.u+ a b) b) 0))))
(fix.u+ a b)
(,general-case a b))))))))))
(##define-macro (fixnum-specialized-- x y general-case)
`(if-zero-fix? ,y
(let ((a ,x))
(if (erl-fix? a)
a
(,general-case a ,y)))
(if-pos-fix? ,y
(let ((a ,x))
(if (and (erl-fix? a) (fix.< (fix.u- a ,y) a))
(fix.u- a ,y)
(,general-case a ,y)))
(if-neg-fix? ,y
(let ((a ,x))
(if (and (erl-fix? a) (fix.< a (fix.u- a ,y)))
(fix.u- a ,y)
(,general-case a ,y)))
(let ((a ,x) (b ,y))
(if (and (erl-fix? a)
(erl-fix? b)
(or (not (fix.< (fix.bxor a b) 0))
(fix.< (fix.bxor (fix.u- a b) b) 0)))
(fix.u- a b)
(,general-case a b)))))))
(##define-macro (fixnum-specialized-* x y general-case)
`(,general-case ,x ,y))
(##define-macro (fixnum-specialized-div x y general-case)
`(if-fix? ,y
(let ((a ,x))
(if (erl-fix? a)
(fix.div a ,y)
(,general-case a ,y)))
(let ((a ,x) (b ,y))
(if (and (erl-fix? a) (erl-fix? b))
(fix.div a b)
(,general-case ,x ,y)))))
(##define-macro (fixnum-specialized-mod x y general-case)
`(if-fix? ,y
(let ((a ,x))
(if (erl-fix? a)
(fix.mod a ,y)
(,general-case a ,y)))
(let ((a ,x) (b ,y))
(if (and (erl-fix? a) (erl-fix? b))
(fix.mod a b)
(,general-case ,x ,y)))))
(##define-macro (fixnum-specialized-rem x y general-case)
`(if-fix? ,y
(let ((a ,x))
(if (erl-fix? a)
(fix.rem a ,y)
(,general-case a ,y)))
(let ((a ,x) (b ,y))
(if (and (erl-fix? a) (erl-fix? b))
(fix.rem a b)
(,general-case ,x ,y)))))
(##define-macro (fixnum-specialized-bor x y general-case)
`(if-fix? ,x
(let ((b ,y))
(if (erl-fix? b)
(fix.bor ,x b)
(,general-case ,x b)))
(if-fix? ,y
(let ((a ,x))
(if (erl-fix? a)
(fix.bor a ,y)
(,general-case a ,y)))
(let ((a ,x) (b ,y))
(if (and (erl-fix? a) (erl-fix? b))
(fix.bor a b)
(,general-case a b))))))
(##define-macro (fixnum-specialized-bxor x y general-case)
`(if-fix? ,x
(let ((b ,y))
(if (erl-fix? b)
(fix.bxor ,x b)
(,general-case ,x b)))
(if-fix? ,y
(let ((a ,x))
(if (erl-fix? a)
(fix.bxor a ,y)
(,general-case a ,y)))
(let ((a ,x) (b ,y))
(if (and (erl-fix? a) (erl-fix? b))
(fix.bxor a b)
(,general-case a b))))))
(##define-macro (fixnum-specialized-band x y general-case)
`(if-fix? ,x
(let ((b ,y))
(if (erl-fix? b)
(fix.band ,x b)
(,general-case ,x b)))
(if-fix? ,y
(let ((a ,x))
(if (erl-fix? a)
(fix.band a ,y)
(,general-case a ,y)))
(let ((a ,x) (b ,y))
(if (and (erl-fix? a) (erl-fix? b))
(fix.band a b)
(,general-case a b))))))
(##define-macro (fixnum-specialized-bsl x y general-case)
`(if-zero-fix? ,y
(let ((a ,x))
(if (erl-fix? a)
a
(,general-case a ,y)))
(,general-case ,x ,y)))
(##define-macro (fixnum-specialized-bsr x y general-case)
`(if-zero-fix? ,y
(let ((a ,x))
(if (erl-fix? a)
a
(,general-case a ,y)))
(,general-case ,x ,y)))
(##define-macro (number-specialized-=:= x y general-case)
`(if-fix? ,x
(eq? ,x ,y)
(if-fix? ,y
(eq? ,x ,y)
(if-float? ,x
(let ((b ,y))
(if (erl-flonum? b) (flo.= ,x b) (,general-case ,x b)))
(if-float? ,y
(let ((a ,x))
(if (erl-flonum? a) (flo.= a ,y) (,general-case a ,y)))
(let ((a ,x) (b ,y))
(cond ((or (erl-fix? a) (erl-fix? b))
(eq? a b))
((and (erl-flonum? a) (erl-flonum? b))
(flo.= a b))
(else
(,general-case a b)))))))))
(##define-macro (number-specialized-== x y general-case)
`(if-fix? ,x
(let ((b ,y))
(if (erl-fix? b) (fix.= ,x b) (,general-case ,x b)))
(if-fix? ,y
(let ((a ,x))
(if (erl-fix? a) (fix.= a ,y) (,general-case a ,y)))
(if-float? ,x
(let ((b ,y))
(if (erl-flonum? b) (flo.= ,x b) (,general-case ,x b)))
(if-float? ,y
(let ((a ,x))
(if (erl-flonum? a) (flo.= a ,y) (,general-case a ,y)))
(let ((a ,x) (b ,y))
(cond ((erl-fix? a)
(if (erl-fix? b) (fix.= a b) (,general-case a b)))
((erl-flonum? a)
(if (erl-flonum? b) (flo.= a b) (,general-case a b)))
(else
(,general-case a b)))))))))
(##define-macro (number-specialized-< x y general-case)
`(if-fix? ,x
(let ((b ,y))
(if (erl-fix? b) (fix.< ,x b) (,general-case ,x b)))
(if-fix? ,y
(let ((a ,x))
(if (erl-fix? a) (fix.< a ,y) (,general-case a ,y)))
(if-float? ,x
(let ((b ,y))
(if (erl-flonum? b) (flo.< ,x b) (,general-case ,x b)))
(if-float? ,y
(let ((a ,x))
(if (erl-flonum? a) (flo.< a ,y) (,general-case a ,y)))
(let ((a ,x) (b ,y))
(cond ((erl-fix? a)
(if (erl-fix? b) (fix.< a b) (,general-case a b)))
((erl-flonum? a)
(if (erl-flonum? b) (flo.< a b) (,general-case a b)))
(else
(,general-case a b)))))))))
(##define-macro (number-specialized-+ x y general-case)
`(if-zero-fix? ,x
(let ((b ,y))
(if (num? b)
b
(,general-case ,x b)))
(if-pos-fix? ,x
(let* ((b ,y) (res (fix.u+ ,x b)))
(if (and (erl-fix? b) (fix.< b res))
res
(,general-case ,x b)))
(if-neg-fix? ,x
(let* ((b ,y) (res (fix.u+ ,x b)))
(if (and (erl-fix? b) (fix.< res b))
res
(,general-case ,x b)))
(if-float? ,x
(let ((b ,y))
(if (erl-flonum? b)
(flo.+ ,x b)
(,general-case ,x b)))
(if-zero-fix? ,y
(let ((a ,x))
(if (num? a)
a
(,general-case a ,y)))
(if-pos-fix? ,y
(let ((a ,x))
(if (and (erl-fix? a) (fix.< a (fix.u+ a ,y)))
(fix.u+ a ,y)
(,general-case a ,y)))
(if-neg-fix? ,y
(let ((a ,x))
(if (and (erl-fix? a) (fix.< (fix.u+ a ,y) a))
(fix.u+ a ,y)
(,general-case a ,y)))
(if-float? ,y
(let ((a ,x))
(if (erl-flonum? a)
(flo.+ a ,y)
(,general-case a ,y)))
(let ((a ,x) (b ,y))
(cond ((erl-fix? a)
(if (and (erl-fix? b)
(or (fix.< (fix.bxor a b) 0)
(not (fix.< (fix.bxor (fix.u+ a b) b)
0))))
(fix.u+ a b)
(,general-case a b)))
((erl-flonum? a)
(if (erl-flonum? b)
(flo.+ a b)
(,general-case a b)))
(else
(,general-case a b)))))))))))))
(##define-macro (number-specialized-- x y general-case)
`(if-zero-fix? ,y
(let ((a ,x))
(if (erl-fix? a)
a
(,general-case a ,y)))
(if-pos-fix? ,y
; (let* ((a ,x) (res (fix.u- a ,y)))
; (if (and (fix.< res a) (erl-fix? a))
; res
; (,general-case a ,y)))
(let ((a ,x))
(if (and (erl-fix? a) (fix.< (fix.u- a ,y) a))
(fix.u- a ,y)
(,general-case a ,y)))
(if-neg-fix? ,y
(let* ((a ,x) (res (fix.u- a ,y)))
(if (and (fix.< a res) (erl-fix? a))
res
(,general-case a ,y)))
(if-float? ,y
(let ((a ,x))
(if (erl-flonum? a)
(flo.- a ,y)
(,general-case a ,y)))
(let ((a ,x) (b ,y))
(cond ((erl-fix? a)
(let ((res (fix.u- a b)))
(if (and (or (not (fix.< (fix.bxor a b) 0))
(fix.< (fix.bxor res b) 0))
(erl-fix? b))
res
(,general-case a b))))
((erl-flonum? a)
(if (erl-flonum? a)
(flo.- a b)
(,general-case a b)))
(else
(,general-case a b)))))))))
(##define-macro (number-specialized-* x y general-case)
`(if-int? ,x
(,general-case ,x ,y)
(if-int? ,y
(,general-case ,x ,y)
(if-float? ,x
(let ((b ,y))
(if (erl-flonum? b)
(flo.* ,x b)
(,general-case ,x b)))
(if-float? ,y
(let ((a ,x))
(if (erl-flonum? a)
(flo.* a ,y)
(,general-case a ,y)))
(let ((a ,x) (b ,y))
(if (and (erl-sub? a) (erl-sub? b) (erl-flo? a) (erl-flo? b))
(flo.* a b)
(,general-case a b))))))))
(##define-macro (number-specialized-/ x y general-case)
`(if-non-zero-float? ,y
(let ((a ,x))
(if (and (erl-sub? a) (erl-flo? a))
(flo./ a ,y)
(,general-case a ,y)))
(let ((a ,x) (b ,y))
(if (and (erl-sub? a) (erl-sub? b) (erl-flo? a) (erl-flo? b) (not (flo.= b 0.0)))
(flo./ a b)
(,general-case a b)))))
(##define-macro (number-specialized-abs x general-case)
`(if-non-neg-fix? ,x
,x
(if-float? ,x
(flo.abs ,x)
(let ((a ,x))
(if (erl-flonum? a)
(flo.abs a)
(,general-case a))))))
(##define-macro (number-specialized-float x general-case)
`(if-int? ,x
(exact->inexact ,x)
(if-float? ,x
,x
(let ((a ,x))
(cond
((erl-fix? a) (exact->inexact a))
((erl-flonum? a) a)
(else (,general-case a)))))))
(##define-macro (number-specialized-round x general-case)
`(if-int? ,x
,x
(if-float? ,x
(inexact->exact (flo.round ,x))
(let ((a ,x))
(cond
((erl-flonum? a) (inexact->exact (flo.round a)))
((erl-fix? a) a)
(else (,general-case a)))))))
(##define-macro (number-specialized-sign x general-case)
`(if-non-neg-num? ,x
0
(if-neg-num? ,x
1
(let ((a ,x))
(cond
((erl-fix? a) (if (fix.< a 0) 1 0))
((erl-flonum? a) (if (flo.< a 0.0) 1 0))
(else (,general-case a)))))))
(##define-macro (number-specialized-trunc x general-case)
`(if-int? ,x
,x
(if-float? ,x
(inexact->exact (flo.trunc ,x))
(let ((a ,x))
(cond
((erl-flonum? a) (inexact->exact (flo.trunc a)))
((erl-fix? a) a)
(else (,general-case a)))))))
; chars
(##define-macro (chr.->integer x) `(char->integer ,x))
; integer arithmetic
(##define-macro (int.= x y) `(fixnum-specialized-=:= ,x ,y =))
(##define-macro (int.< x y) `(fixnum-specialized-< ,x ,y <))
(##define-macro (int.+ x y) `(fixnum-specialized-+ ,x ,y +))
(##define-macro (int.- x y) `(fixnum-specialized-- ,x ,y -))
(##define-macro (int.* x y) `(fixnum-specialized-* ,x ,y *))
(##define-macro (int.div x y) `(fixnum-specialized-div ,x ,y quotient))
(##define-macro (int.rem x y) `(fixnum-specialized-rem ,x ,y remainder))
(##define-macro (int.even? x) `(even? ,x))
; floating-point arithmetic
(##define-macro (if-float? x y z) (if (##flonum? x) y z))
(##define-macro (if-non-zero-float? x y z)
(if (and (##flonum? x) (not (= x 0.0))) y z))
(##define-macro (if-non-neg-num? x y z)
(if (and (number? x) (>= x 0)) y z))
(##define-macro (if-neg-num? x y z)
(if (and (number? x) (< x 0)) y z))
(##define-macro (flo.abs x) `(##flonum.abs ,x))
(##define-macro (flo.acos x) `(##flonum.acos ,x))
(##define-macro (flo.acosh x)
`(let ((a ,x))
(flo.log (flo.+ a (flo.sqrt (flo.- (flo.* a a) 1.))))))
(##define-macro (flo.asin x) `(##flonum.asin ,x))
(##define-macro (flo.asinh x)
`(let ((a ,x))
(flo.log (flo.+ a (flo.sqrt (flo.+ (flo.* a a) 1.))))))
(##define-macro (flo.atan x) `(##flonum.atan ,x))
(##define-macro (flo.atan2 x y) `(##flonum.atan ,x ,y))
(##define-macro (flo.atanh x)
`(let ((a ,x))
(flo.* .5 (flo.log (flo./ (flo.+ 1. a) (flo.- 1. a))))))
(##define-macro (flo.cos x) `(##flonum.cos ,x))
(##define-macro (flo.cosh x)
`(let ((a ,x))
(flo./ (flo.- (flo.exp a) (flo.exp (flo.- 0. a))) 2.)))
(##define-macro (flo.erf x) `'not_implemented_yet)
` ( flo.- 1 . ( flo.erf , x ) ) )
(##define-macro (flo.exp x) `(##flonum.exp ,x))
(##define-macro (flo.log x) `(##flonum.log ,x))
(##define-macro (flo.log10 x) `(flo./ (flo.log ,x) ,(##flonum.log 10.)))
(##define-macro (flo.pow x y) `(flo.exp (flo.* ,y (flo.log ,x))))
(##define-macro (flo.round x) `(##flonum.round ,x))
(##define-macro (flo.sin x) `(##flonum.sin ,x))
(##define-macro (flo.sinh x)
`(let ((a ,x))
(flo./ (flo.+ (flo.exp a) (flo.exp (flo.- 0. a))) 2.)))
(##define-macro (flo.sqrt x) `(##flonum.sqrt ,x))
(##define-macro (flo.tan x) `(##flonum.tan ,x))
(##define-macro (flo.tanh x)
`(let ((a x))
(let ((ea (flo.exp a)) (e-a (flo.exp (flo.- 0. a))))
`(flo./ (flo.+ ea e-a) (flo.- ea e-a)))))
(##define-macro (flo.trunc x) `(##flonum.truncate ,x))
(##define-macro (erl-nil) ''())
(##define-macro (erl-cons x y) `(cons ,x ,y))
(##define-macro (erl-hd x) `(car ,x))
(##define-macro (erl-tl x) `(cdr ,x))
(##define-macro (erl-list . elems) `(list ,@elems))
(##define-macro (erl-append . lists) `(append ,@lists))
(##define-macro (erl-tuple . elems) `(vector 'tuple ,@elems))
(##define-macro (erl-tuple-size x) `(fix.u- (erl-vector-length ,x) 1))
(##define-macro (erl-tuple-ref x i) `(##vector-ref ,x ,i))
(##define-macro (erl-vector . elems) `(vector ,@elems))
(##define-macro (erl-vector-length v) `(##vector-length ,v))
(##define-macro (erl-vector-ref v i) `(##vector-ref ,v ,i))
(##define-macro (erl-vector-set! v i k) `(##vector-set! ,v ,i ,k))
(##define-macro (erl-make-vector n) `(make-vector ,n))
(##define-macro (erl-function arit lam) `(vector 'function ,arit ,lam))
(##define-macro (erl-function-arity f) `(vector-ref ,f 1))
(##define-macro (erl-function-lambda f) `(vector-ref ,f 2))
(##define-macro (erl-make-binary u8 off siz)
`(vector 'binary ,u8 ,off ,siz))
(##define-macro (erl-u8vector->binary u8)
`(let ((a ,u8))
(erl-make-binary a 0 (u8vector-length a))))
(##define-macro (erl-binary-u8vector x) `(vector-ref ,x 1))
(##define-macro (erl-binary-offset x) `(vector-ref ,x 2))
(##define-macro (erl-binary-size x) `(vector-ref ,x 3))
(##define-macro (erl-vector? x)
`(let ((a ,x))
(and (erl-sub? a) (erl-vec? a))))
(##define-macro (erl-tuple? x)
`(let ((a ,x))
(and (erl-sub? a) (erl-vec? a) (erl-ato=k (erl-vector-ref a 0) 'tuple))))
(##define-macro (erl-pid? x)
`(let ((a ,x))
(and (erl-sub? a) (erl-vec? a) (erl-ato=k (erl-vector-ref a 0) 'pid))))
(##define-macro (erl-port? x)
`(let ((a ,x))
(and (erl-sub? a) (erl-vec? a) (erl-ato=k (erl-vector-ref a 0) 'port))))
(##define-macro (erl-ref? x)
`(let ((a ,x))
(and (erl-sub? a) (erl-vec? a) (erl-ato=k (erl-vector-ref a 0) 'ref))))
(##define-macro (erl-binary? x)
`(let ((a ,x))
(and (erl-sub? a) (erl-vec? a) (erl-ato=k (erl-vector-ref a 0) 'binary))))
(##define-macro (erl-function? x)
`(let ((a ,x))
(and (erl-sub? a)
(erl-vec? a)
(erl-ato=k (erl-vector-ref a 0) 'function))))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
BIFS
;; abs/1 (Guard BIF)
(##define-macro (erl-tst-abs/1 x)
`(number-specialized-abs ,x erl-generic-tst-abs/1))
(##define-macro (erl-safe-abs/1 x)
`(number-specialized-abs ,x abs))
(##define-macro (erl-abs/1 x)
`(number-specialized-abs ,x erl-generic-abs/1))
;; apply/3
(##define-macro (erl-apply/3 x y z) `(erlang:apply/3 ,x ,y ,z))
;; atom_to_list/1
(##define-macro (erl-atom_to_list/1 x) `(erlang:atom_to_list/1 ,x))
atom_to_string/1 ( PROPOSED BIF )
(##define-macro (erl-atom_to_string/1 x) `(erlang:atom_to_string/1 ,x))
;; binary_to_list/1
(##define-macro (erl-binary_to_list/1 x) `(erlang:binary_to_list/1 ,x))
;; binary_to_list/3
(##define-macro (erl-binary_to_list/3 x y z)
`(erlang:binary_to_list/3 ,x ,y ,z))
;; binary_to_string/1 (PROPOSED BIF)
(##define-macro (erl-binary_to_string/1 x) `(erlang:binary_to_string/1 ,x))
char_to_integer/1 ( PROPOSED BIF )
(##define-macro (erl-char_to_integer/1 x) `(erlang:char_to_integer/1 ,x))
concat_binary/1
(##define-macro (erl-concat_binary/1 x) `(erlang:concat_binary/1 ,x))
;; date/0
(##define-macro (erl-date/0) `(erlang:date/0))
;; element/2 (Guard BIF)
(##define-macro (erl-tst-element/2 x y) `(erl-generic-tst-element/2 ,x ,y))
(##define-macro (erl-element/2 x y)
`(let ((x ,x) (y ,y))
(if (and (erl-fixnum? x)
(erl-tuple? y))
(if (and (fix.< 0 x)
(fix.< x (erl-vector-length y)))
(erl-vector-ref y x)
(erl-exit-badindex))
(erl-exit-badarg))))
;; erase/0
(##define-macro (erl-erase/0) `(erlang:erase/0))
;; erase/1
(##define-macro (erl-erase/1 x) `(erlang:erase/1 x))
;; exit/1
(##define-macro (erl-exit/1 x) `(erlang:exit/1 ,x))
;; exit/2
(##define-macro (erl-exit/2 x y) `(erlang:exit/2 ,x ,y))
;; float/1 (Guard BIF)
(##define-macro (erl-tst-float/1 x)
`(number-specialized-float ,x erl-generic-tst-float/1))
(##define-macro (erl-safe-float/1 x) `(exact->inexact ,x))
(##define-macro (erl-float/1 x)
`(number-specialized-float ,x erl-generic-float/1))
;; float_to_list/1
(##define-macro (erl-float_to_list/1 x)
`(erlang:float_to_list/1 ,x))
;; get/0
(##define-macro (erl-get/0) `(erlang:get/0))
;; get/1
(##define-macro (erl-get/1 x) `(erlang:get/1 ,x))
;; get_keys/1
(##define-macro (erl-get_keys/1 x) `(erlang:get_keys/1 x))
group_leader/0
(##define-macro (erl-group_leader/0)
`(process-group-leader node.current-process))
;; group_leader/2
(##define-macro (erl-group_leader/2 x y) `(erlang:group_leader/2 ,x ,y))
;; hash/2
(##define-macro (erl-hash/2 x y) `(erlang:hash/2 ,x ,y))
;; hd/1 (Guard BIF)
(##define-macro (erl-hd/1 x) `(erl-generic-hd/1 ,x))
integer_to_char/1 ( PROPOSED BIF )
(##define-macro (erl-integer_to_char/1 x)
`(erlang:integer_to_char/1 ,x))
;; integer_to_list/1
(##define-macro (erl-integer_to_list/1 x)
`(erlang:integer_to_list/1 ,x))
;; integer_to_string/1 (PROPOSED BIF)
(##define-macro (erl-integer_to_string/1 x)
`(erlang:integer_to_string/1 ,x))
;; is_alive/0
(##define-macro (erl-is_alive/0) 'node.communicating)
;; is_atom/1 (Recognizer BIF)
(##define-macro (erl-is_atom/1 x) `(if (erl-atom? ,x) 'true 'false))
;; is_binary/1 (Recognizer BIF)
(##define-macro (erl-is_binary/1 x) `(if (erl-binary? ,x) 'true 'false))
;; is_char/1 (Recognizer BIF)
(##define-macro (erl-is_char/1 x) `(if (erl-chr? ,x) 'true 'false))
is_compound/1 ( Recognizer BIF )
(##define-macro (erl-is_compound/1 x)
`(let ((a ,x))
(if (or (erl-nil? a) (erl-con? a) (erl-tuple? a))
'true
'false)))
;; is_cons/1 (Recognizer BIF)
(##define-macro (erl-is_cons/1 x) `(if (erl-con? ,x) 'true 'false))
;; is_float/1 (Recognizer BIF)
(##define-macro (erl-is_float/1 x) `(if (erl-flonum? ,x) 'true 'false))
is_function/1 ( Recognizer BIF )
(##define-macro (erl-is_function/1 x) `(if (erl-function? ,x) 'true 'false))
;; is_integer/1 (Recognizer BIF)
(##define-macro (erl-is_integer/1 x) `(if (erl-int? ,x) 'true 'false))
;; is_list/1 (Recognizer BIF)
(##define-macro (erl-is_list/1 x) `(if (erl-lst? ,x) 'true 'false))
;; is_null/1 (Recognizer BIF)
(##define-macro (erl-is_null/1 x) `(if (erl-nil? ,x) 'true 'false))
;; is_number/1 (Recognizer BIF)
(##define-macro (erl-is_number/1 x) `(if (erl-num? ,x) 'true 'false))
;; is_pid/1 (Recognizer BIF)
(##define-macro (erl-is_pid/1 x) `(if (erl-pid? ,x) 'true 'false))
;; is_port/1 (Recognizer BIF)
(##define-macro (erl-is_port/1 x) `(if (erl-port? ,x) 'true 'false))
;; is_ref/1 (Recognizer BIF)
(##define-macro (erl-is_ref/1 x) `(if (erl-ref? ,x) 'true 'false))
;; is_string/1 (Recognizer BIF)
(##define-macro (erl-is_string/1 x) `(if (erl-str? ,x) 'true 'false))
( Recognizer BIF )
(##define-macro (erl-is_tuple/1 x) `(if (erl-tuple? ,x) 'true 'false))
;; length/1 (Guard BIF)
(##define-macro (erl-tst-length/1 x) `(erl-generic-tst-length/1 ,x))
(##define-macro (erl-safe-length/1 x) `(length ,x))
(##define-macro (erl-length/1 x) `(erlang:length/1 ,x))
link/1
(##define-macro (erl-link/1 x) `(erlang:link/1 ,x))
;; list_to_atom/1
(##define-macro (erl-list_to_atom/1 x) `(erlang:list_to_atom/1 ,x))
;; list_to_binary/1
(##define-macro (erl-safe-list_to_binary/1 x)
`(erl-u8vector->binary (list->u8vector ,x)))
(##define-macro (erl-list_to_binary/1 x) `(erlang:list_to_binary/1 ,x))
;; list_to_float/1
(##define-macro (erl-list_to_float/1 x) `(erlang:list_to_float/1 ,x))
;; list_to_integer/1
(##define-macro (erl-list_to_integer/1 x) `(erlang:list_to_integer/1 ,x))
;; list_to_string/1 (PROPOSED BIF)
(##define-macro (erl-safe-list_to_string/1 x)
`(map integer->char ,x))
(##define-macro (erl-list_to_string/1 x) `(erlang:list_to_string/1 ,x))
;; list_to_tuple/1
(##define-macro (erl-list_to_tuple/1 x) `(erlang:list_to_tuple/1 ,x))
make_ref/0
(##define-macro (erl-make_ref/0 x) `(erlang:make_ref/0 ,x))
node/0 ( Guard BIF )
(##define-macro (erl-node/0)
`(process-node node.current-process))
;; node/1 (Guard BIF)
(##define-macro (erl-tst-node/1 x) `(erl-generic-tst-node/1 ,x))
(##define-macro (erl-safe-node/1 x) `(erl-generic-safe-node/1 ,x))
(##define-macro (erl-node/1 x) `(erlang:node/1 ,x))
;; now/0
(##define-macro (erl-now/0)
`(let* ((us (current-time-in-usecs))
(s (quotient us 1000000)))
(erl-tuple (quotient s 1000000)
(modulo s 1000000)
(modulo us 1000000))))
open_port/2
(##define-macro (erl-open_port/2 x y) `(erlang:open_port/2 ,x ,y))
;; port_close/1
(##define-macro (erl-port_close/1 x) `(erl-generic-port_close/1 ,x))
;; port_info/1
(##define-macro (erl-port_info/1 x) `(erlang:port_info/1 ,x))
;; port_info/2
(##define-macro (erl-port_info/2 x y) `(erlang:port_info/2 ,x ,y))
;; ports/0
(##define-macro (erl-ports/0) `(erlang:ports/0))
;; process_info/2
(##define-macro (erl-process_info/2 x y) `(erlang:process_info/2 ,x ,y))
;; process_flag/2
(##define-macro (erl-process_flag/2 x y) `(erlang:process_flag/2 ,x ,y))
;; processes/0
(##define-macro (erl-processes/0) `(erlang:processes/0))
(##define-macro (erl-put/2 x y) `(erlang:put/2 ,x ,y))
register/2
(##define-macro (erl-register/2 x y) `(erlang:register/2 ,x ,y))
;; registered/0
(##define-macro (erl-registered/0) `(erlang:registered/0))
;; round/1 (Guard BIF)
(##define-macro (erl-tst-round/1 x)
`(number-specialized-round ,x erl-generic-tst-round/1))
(##define-macro (erl-safe-round/1 x)
`(number-specialized-round ,x round))
(##define-macro (erl-round/1 x)
`(number-specialized-round ,x erl-generic-round/1))
self/0 ( Guard BIF )
(##define-macro (erl-self/0) `(process-pid node.current-process))
;; setelement/3
(##define-macro (erl-setelement/3 x y z) `(erlang:setelement/3 ,x ,y ,z))
;; sign/1 (Guard BIF)
(##define-macro (erl-tst-sign/1 x)
`(number-specialized-sign ,x erl-generic-tst-sign/1))
(##define-macro (erl-safe-sign/1 x)
`(number-specialized-sign ,x erl-generic-safe-sign/1))
(##define-macro (erl-sign/1 x)
`(number-specialized-sign ,x erl-generic-sign/1))
;; size/1 (Guard BIF)
(##define-macro (erl-tst-size/1 x) `(erl-generic-tst-size/1 ,x))
(##define-macro (erl-safe-size/1 x) `(erl-generic-safe-size/1 ,x))
(##define-macro (erl-size/1 x) `(erlang:size/1 ,x))
spawn/3
(##define-macro (erl-spawn/3 x y z) `(erlang:spawn/3 ,x ,y ,z))
spawn_link/3
(##define-macro (erl-spawn_link/3 x y z) `(erlang:spawn_link/3 ,x ,y ,z))
;; split_binary/2
(##define-macro (erl-split_binary/2 x y) `(erlang:split_binary/2 ,x ,y))
statistics/1
(##define-macro (erl-statistics/1 x) `(erlang:statistics/1 ,x))
string_to_list/1 ( PROPOSED BIF )
(##define-macro (erl-string_to_list/1 x) `(erlang:string_to_list/1 ,x))
;; throw/1
(##define-macro (erl-throw/1 x) `(erlang:throw/1 ,x))
;; time/0
(##define-macro (erl-time/0) `(erlang:time/0))
;; tl/1 (Guard BIF)
(##define-macro (erl-tl/1 x) `(erl-generic-tl/1 ,x))
;; trunc/1 (Guard BIF)
(##define-macro (erl-tst-trunc/1 x)
`(number-specialized-trunc ,x erl-generic-tst-trunc/1))
(##define-macro (erl-safe-trunc/1 x)
`(number-specialized-trunc ,x truncate))
(##define-macro (erl-trunc/1 x)
`(number-specialized-trunc ,x erl-generic-trunc/1))
;; tuple_to_list/1
(##define-macro (erl-tuple_to_list/1 x) `(erlang:tuple_to_list/1 ,x))
;; unlink/1
(##define-macro (erl-unlink/1 x) `(erlang:unlink/1 ,x))
;; unregister/1
(##define-macro (erl-unregister/1 x) `(erlang:unregister/1 ,x))
whereis/1
(##define-macro (erl-whereis/1 x) `(erlang:whereis/1 ,x))
(##define-macro (erl-=:= x y)
`(number-specialized-=:= ,x ,y erl-generic-=:=))
(##define-macro (erl-== x y)
`(number-specialized-== ,x ,y erl-generic-==))
(##define-macro (erl-< x y)
`(number-specialized-< ,x ,y erl-generic-<))
;;;;;;;;;;;;;;;;;;;;
Operators ' BIFs '
;; +/1 (Guard BIF)
(##define-macro (erl-tst-+/1 x)
`(number-specialized-+ 0 ,x erl-generic-tst-+/2))
(##define-macro (erl-safe-+/1 x)
`(number-specialized-+ 0 ,x +))
(##define-macro (erl-+/1 x)
`(number-specialized-+ 0 ,x erl-generic-+/2))
;; -/1 (Guard BIF)
(##define-macro (erl-tst--/1 x)
`(number-specialized-- 0 ,x erl-generic-tst--/2))
(##define-macro (erl-safe--/1 x)
`(number-specialized-- 0 ,x -))
(##define-macro (erl--/1 x)
`(number-specialized-- 0 ,x erl-generic--/2))
;; bnot/1 (Guard BIF)
(##define-macro (erl-tst-bnot/1 x)
`(fixnum-specialized-- -1 ,x erl-generic-tst--/2))
(##define-macro (erl-safe-bnot/1 x)
`(fixnum-specialized-- -1 ,x -))
(##define-macro (erl-bnot/1 x)
`(fixnum-specialized-- -1 ,x erl-generic--/2))
not/1 ( Guard BIF )
(##define-macro (erl-tst-not/1 x) `(erl-generic-tst-not/1 ,x))
(##define-macro (erl-safe-not/1 x) `(erl-generic-safe-not/1 ,x))
(##define-macro (erl-not/1 x) `(erl-generic-not/1 ,x))
;; +/2 (Guard BIF)
(##define-macro (erl-tst-+/2 x y)
`(number-specialized-+ ,x ,y erl-generic-tst-+/2))
(##define-macro (erl-safe-+/2 x y)
`(number-specialized-+ ,x ,y +))
(##define-macro (erl-+/2 x y)
`(number-specialized-+ ,x ,y erl-generic-+/2))
;; -/2 (Guard BIF)
(##define-macro (erl-tst--/2 x y)
`(number-specialized-- ,x ,y erl-generic-tst--/2))
(##define-macro (erl-safe--/2 x y)
`(number-specialized-- ,x ,y -))
(##define-macro (erl--/2 x y)
`(number-specialized-- ,x ,y erl-generic--/2))
;; bor/2 (Guard BIF)
(##define-macro (erl-tst-bor/2 x y)
`(fixnum-specialized-bor ,x ,y erl-generic-tst-bor/2))
(##define-macro (erl-safe-bor/2 x y)
`(fixnum-specialized-bor ,x ,y int.bor))
(##define-macro (erl-bor/2 x y)
`(fixnum-specialized-bor ,x ,y erl-generic-bor/2))
;; bxor/2 (Guard BIF)
(##define-macro (erl-tst-bxor/2 x y)
`(fixnum-specialized-bxor ,x ,y erl-generic-tst-bxor/2))
(##define-macro (erl-safe-bxor/2 x y)
`(fixnum-specialized-bxor ,x ,y int.bxor))
(##define-macro (erl-bxor/2 x y)
`(fixnum-specialized-bxor ,x ,y erl-generic-bxor/2))
;; bsl/2 (Guard BIF)
(##define-macro (erl-tst-bsl/2 x y)
`(fixnum-specialized-bsl ,x ,y erl-generic-tst-bsl/2))
(##define-macro (erl-safe-bsl/2 x y)
`(fixnum-specialized-bsl ,x ,y int.bsl))
(##define-macro (erl-bsl/2 x y)
`(fixnum-specialized-bsl ,x ,y erl-generic-bsl/2))
;; bsr/2 (Guard BIF)
(##define-macro (erl-tst-bsr/2 x y)
`(fixnum-specialized-bsr ,x ,y erl-generic-tst-bsr/2))
(##define-macro (erl-safe-bsr/2 x y)
`(fixnum-specialized-bsr ,x ,y int.bsl))
(##define-macro (erl-bsr/2 x y)
`(fixnum-specialized-bsr ,x ,y erl-generic-bsr/2))
;; */2 (Guard BIF)
(##define-macro (erl-tst-*/2 x y)
`(number-specialized-* ,x ,y erl-generic-tst-*/2))
(##define-macro (erl-safe-*/2 x y)
`(number-specialized-* ,x ,y *))
(##define-macro (erl-*/2 x y)
`(number-specialized-* ,x ,y erl-generic-*/2))
;; //2 (Guard BIF)
(##define-macro (erl-tst-//2 x y)
`(number-specialized-/ ,x ,y erl-generic-tst-//2))
(##define-macro (erl-safe-//2 x y)
`(number-specialized-/ ,x ,y erl-generic-safe-//2))
(##define-macro (erl-//2 x y)
`(number-specialized-/ ,x ,y erl-generic-//2))
;; ///2 (Guard BIF)
(##define-macro (erl-tst-///2 x y)
`(fixnum-specialized-// ,x ,y erl-generic-tst-///2))
(##define-macro (erl-safe-///2 x y)
`(fixnum-specialized-// ,x ,y erl-generic-safe-///2))
(##define-macro (erl-///2 x y)
`(fixnum-specialized-// ,x ,y erl-generic-///2))
;; div/2 (Guard BIF)
(##define-macro (erl-tst-div/2 x y)
`(fixnum-specialized-div ,x ,y erl-generic-tst-div/2))
(##define-macro (erl-safe-div/2 x y)
`(fixnum-specialized-div ,x ,y erl-generic-safe-div/2))
(##define-macro (erl-div/2 x y)
`(fixnum-specialized-div ,x ,y erl-generic-div/2))
;; mod/2 (Guard BIF)
(##define-macro (erl-tst-mod/2 x y)
`(fixnum-specialized-mod ,x ,y erl-generic-tst-mod/2))
(##define-macro (erl-safe-mod/2 x y)
`(fixnum-specialized-mod ,x ,y erl-generic-safe-mod/2))
(##define-macro (erl-mod/2 x y)
`(fixnum-specialized-mod ,x ,y erl-generic-mod/2))
rem/2 ( Guard BIF )
(##define-macro (erl-tst-rem/2 x y)
`(fixnum-specialized-rem ,x ,y erl-generic-tst-rem/2))
(##define-macro (erl-safe-rem/2 x y)
`(fixnum-specialized-rem ,x ,y erl-generic-safe-rem/2))
(##define-macro (erl-rem/2 x y)
`(fixnum-specialized-rem ,x ,y erl-generic-rem/2))
;; band/2 (Guard BIF)
(##define-macro (erl-tst-band/2 x y)
`(fixnum-specialized-band ,x ,y erl-generic-tst-band/2))
(##define-macro (erl-safe-band/2 x y)
`(fixnum-specialized-band ,x ,y int.band))
(##define-macro (erl-band/2 x y)
`(fixnum-specialized-band ,x ,y erl-generic-band/2))
;; ++/2
(##define-macro (erl-safe-++/2 x y) `(erl-generic-safe-++/2 ,x ,y))
(##define-macro (erl-++/2 x y) `(erl-generic-++/2 ,x ,y))
;; --/2 (Guard BIF) ??
(##define-macro (erl---/2 x y) `(erl-generic---/2 ,x ,y))
;; or/2
(##define-macro (erl-or/2 x y) `(erl-generic-or/2 ,x ,y))
;; xor/2
(##define-macro (erl-xor/2 x y) `(erl-generic-xor/2 ,x ,y))
(##define-macro (erl-and/2 x y) `(erl-generic-and/2 ,x ,y))
Comparison ' BIFs '
;; =:=/2
(##define-macro (erl-=:=/2 x y)
`(number-specialized-=:= ,x ,y erl-generic-=:=))
;; =/=/2
(##define-macro (erl-=/=/2 x y) `(not (erl-=:=/2 ,x ,y)))
;; ==/2
(##define-macro (erl-==/2 x y)
`(number-specialized-== ,x ,y erl-generic-==))
;; /=/2
(##define-macro (erl-/=/2 x y) `(not (erl-==/2 ,x ,y)))
;; </2
(##define-macro (erl-</2 x y)
`(number-specialized-< ,x ,y erl-generic-<))
;; >/2
(##define-macro (erl->/2 x y) `(erl-</2 ,y ,x))
;; >=/2
(##define-macro (erl->=/2 x y) `(not (erl-</2 ,x ,y)))
;; <=/2
(##define-macro (erl-<=/2 x y) `(not (erl-</2 ,y ,x)))
(##define-macro (erl-send/2 x y) `(erl-generic-send/2 ,x ,y))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; Semi BIFS
;; Cannot be invoked without 'erlang:' prefix
;; check_process_code/2
(##define-macro (erl-check_process_code/2 x y)
`(erlang:check_process_code/2 ,x ,y))
;; delete_module/1
(##define-macro (erl-delete_module/1 x)
`(erlang:delete_module/1 ,x))
etos_rootdir/0 ( ETOS SPECIFIC BIF )
(##define-macro (erl-etos_rootdir/0) `(erlang:etos_rootdir/0))
;; get_cookie/0
(##define-macro (erl-get_cookie/0) 'node.magic_cookie)
;; halt/0
(##define-macro (erl-halt/0) `(erlang:halt/0))
;; hash/2
(##define-macro (erl-hash/2 x y) `(erlang:hash/2 ,x ,y))
load_module/2 ( ETOS SPECIFIC VERSION )
(##define-macro (erl-load_module/2 x y) `(erlang:load_module/2 ,x ,y))
;; m_acos/1
m_acosh/1
;; m_asin/1
m_asinh/1
;; m_atan/1
;; m_atan2/2
m_atanh/1
;; m_cos/1
;; m_cosh/1
;; m_erf/1
;; m_erfc/1
m_exp/1
;; m_log/1
;; m_log10/1
;; m_pow/2
;; m_sin/1
;; m_sinh/1
;; m_sqrt/1
;; m_tan/1
;; m_tanh/1
;; module_loaded/1
(##define-macro (erl-module_loaded/1 x) `(erlang:module_loaded/1 ,x))
;; preloaded/0
(##define-macro (erl-preloaded/0) `(erlang:preloaded/0))
;; purge_module/1
(##define-macro (erl-purge_module/1 x) `(erlang:purge_module/1 ,x))
;; set_cookie/2
(##define-macro (erl-set_cookie/2 x y) `(erlang:set_cookie/2 ,x ,y))
;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; List comprehension utils
(##define-macro (erl-map f l) `(map ,f ,l))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
Erlang function definition
;; Returns a function definition from its name passed as an atom
;; will return #f if undefined
(##define-macro (erl-function-ref x)
(if (and (pair? x)
(eq? (car x) 'quote)
(pair? (cdr x))
(null? (cddr x)))
`(and (not (##unbound? ,(cadr x)))
,(cadr x))
`(erl-generic-function-ref ,x)))
;;;;;;;;;;;;
;; Processes
(##define-macro (make-process
state
continuation
mailbox
mailbox-probe
wake-up
trap-exit
pid
linked-pids
group-leader
error-handler
node
priority
mailbox-probe-cont
abrupt-stack
dict
initial-call
exit-hook)
; state of process can be: running, ready, waiting, dead
`(let ((process
(vector #f
#f
,state
,continuation
,mailbox
,mailbox-probe
,wake-up
,trap-exit
,pid
,linked-pids
,group-leader
,error-handler
,node
,priority
,mailbox-probe-cont
,abrupt-stack
,dict
,initial-call
,exit-hook
)))
(process-succ-set! process process)
(process-prev-set! process process)
process))
(##define-macro (process-succ process)
`(vector-ref ,process 0))
(##define-macro (process-succ-set! process succ)
`(vector-set! ,process 0 ,succ))
(##define-macro (process-prev process)
`(vector-ref ,process 1))
(##define-macro (process-prev-set! process prev)
`(vector-set! ,process 1 ,prev))
(##define-macro (process-state process)
`(vector-ref ,process 2))
(##define-macro (process-state-set! process state)
`(vector-set! ,process 2 ,state))
(##define-macro (process-continuation process)
`(vector-ref ,process 3))
(##define-macro (process-continuation-set! process cont)
`(vector-set! ,process 3 ,cont))
(##define-macro (process-mailbox process)
`(vector-ref ,process 4))
(##define-macro (process-mailbox-probe process)
`(vector-ref ,process 5))
(##define-macro (process-mailbox-probe-set! process probe)
`(vector-set! ,process 5 ,probe))
(##define-macro (process-wake-up process)
`(vector-ref ,process 6))
(##define-macro (process-wake-up-set! process cont)
`(vector-set! ,process 6 ,cont))
(##define-macro (process-trap-exit process)
`(vector-ref ,process 7))
(##define-macro (process-trap-exit-set! process cont)
`(vector-set! ,process 7 ,cont))
(##define-macro (process-pid process)
`(vector-ref ,process 8))
(##define-macro (process-pid-set! process pid)
`(vector-set! ,process 8 ,pid))
(##define-macro (process-linked-pids process)
`(vector-ref ,process 9))
(##define-macro (process-linked-pids-set! process pids)
`(vector-set! ,process 9 ,pids))
(##define-macro (process-group-leader process)
`(vector-ref ,process 10))
(##define-macro (process-group-leader-set! process group-leader)
`(vector-set! ,process 10 ,group-leader))
(##define-macro (process-error-handler process)
`(vector-ref ,process 11))
(##define-macro (process-error-handler-set! process error-handler)
`(vector-set! ,process 11 ,error-handler))
(##define-macro (process-node process)
`(vector-ref ,process 12))
(##define-macro (process-priority process)
`(vector-ref ,process 13))
(##define-macro (process-priority-set! process priority)
`(vector-set! ,process 13 ,priority))
(##define-macro (process-mailbox-probe-cont process)
`(vector-ref ,process 14))
(##define-macro (process-mailbox-probe-cont-set! process priority)
`(vector-set! ,process 14 ,priority))
(##define-macro (process-abrupt-stack process)
`(vector-ref ,process 15))
(##define-macro (process-abrupt-stack-set! process abrupt-stack)
`(vector-set! ,process 15 ,abrupt-stack))
(##define-macro (process-dict process)
`(vector-ref ,process 16))
(##define-macro (process-dict-set! process dict)
`(vector-set! ,process 16 ,dict))
(##define-macro (process-initial-call process)
`(vector-ref ,process 17))
(##define-macro (process-initial-call-set! process initial-call)
`(vector-set! ,process 17 ,initial-call))
(##define-macro (process-exit-hook process)
`(vector-ref ,process 18))
(##define-macro (process-exit-hook-set! process exit-hook)
`(vector-set! ,process 18 ,exit-hook))
(##define-macro (erl-error_handler)
`(process-error-handler node.current-process))
(##define-macro (erl-push-abrupt! cont)
`(process-abrupt-stack-set!
node.current-process
(cons ,cont
(process-abrupt-stack node.current-process))))
(##define-macro (erl-pop-abrupt!)
`(process-abrupt-stack-set!
node.current-process
(cdr (process-abrupt-stack node.current-process))))
(##define-macro (erl-abrupt-top)
`(car (process-abrupt-stack node.current-process)))
(##define-macro (erl-receive_accept)
`(with-no-interrupts
(lambda ()
(timer-interrupt-disable!)
(queue-extract! (process-mailbox node.current-process)
(process-mailbox-probe node.current-process))
(timer-interrupt-enable!))))
(##define-macro (erl-receive_first timeout thunk)
`(,thunk
(with-no-interrupts
(lambda ()
(process-wake-up-set!
node.current-process
,(if (and (pair? timeout)
(eq? (car timeout) 'quote)
(pair? (cdr timeout))
(eq? (cadr timeout) (string->symbol "infinity")))
#f
`(if-non-neg-fix? ,timeout
(int.+ (current-time-in-msecs) ,timeout)
(let ((timeout ,timeout))
(if (eq? timeout infinity-atom)
#f
(if (and (erl-int? timeout)
(not (int.< timeout 0)))
(int.+ (current-time-in-msecs) timeout)
(erl-exit-badarg)))))))
(continuation-save!
(process-mailbox-probe-cont node.current-process)
(lambda (cont)
(let ((probe (queue-probe (process-mailbox node.current-process))))
(process-mailbox-probe-set! node.current-process probe)
(erl-receive_check probe cont))))
(let ((next (queue-next (process-mailbox-probe node.current-process))))
(if (pair? next)
(car next)
'$timeout))))))
(##define-macro (erl-receive_next)
`(let ((probe (queue-next (process-mailbox-probe node.current-process)))
(cont (process-mailbox-probe-cont node.current-process)))
(process-mailbox-probe-set! node.current-process probe)
(erl-receive_check probe cont)))
(##define-macro (erl-receive_check probe cont)
`(with-no-interrupts
(lambda ()
(if (pair? (queue-next ,probe))
(continuation-restore ,cont 'dummy)
(let ((wake-up (process-wake-up node.current-process)))
(if (or (not wake-up)
(int.< (current-time-in-msecs) wake-up))
(process-suspend-waiting-with-continuation! ,cont)
(continuation-restore ,cont 'dummy)))))))
;;;;;;;
;; PIDs
(##define-macro (make-pid process id node creation)
`(vector 'pid ,process ,id ,node ,creation))
(##define-macro (erl-pid-process pid) `(vector-ref ,pid 1))
(##define-macro (erl-pid-id pid) `(vector-ref ,pid 2))
(##define-macro (erl-pid-node pid) `(vector-ref ,pid 3))
(##define-macro (erl-pid-creation pid) `(vector-ref ,pid 4))
(##define-macro (erl-pid-local? pid) `(eq? (erl-pid-node pid) node.name))
;;;;;;;
;; Refs
(##define-macro (make-ref id node creation)
`(vector 'ref ,id ,node ,creation))
(##define-macro (erl-ref-id ref) `(vector-ref ,ref 1))
(##define-macro (erl-ref-node ref) `(vector-ref ,ref 2))
(##define-macro (erl-ref-creation ref) `(vector-ref ,ref 3))
;;;;;;;;
Ports
(##define-macro (make-port pidx s_res owner packeting binary? linked-pids
in? eof? creation node id opened?)
`(vector 'port ,pidx ,s_res ,owner ,packeting ,binary? ,linked-pids
,in? ,eof? ,creation ,node ,id ,opened?))
(##define-macro (erl-port-pidx port) `(vector-ref ,port 1))
(##define-macro (erl-port-s_res port) `(vector-ref ,port 2))
(##define-macro (erl-port-owner port) `(vector-ref ,port 3))
(##define-macro (erl-port-owner-set! port owner)
`(vector-set! ,port 3 ,owner))
(##define-macro (erl-port-packeting port) `(vector-ref ,port 4))
(##define-macro (erl-port-io_type port) `(vector-ref ,port 5))
(##define-macro (erl-port-linked-pids port) `(vector-ref ,port 6))
(##define-macro (erl-port-linked-pids-set! port linked-pids)
`(vector-set! ,port 6 ,linked-pids))
(##define-macro (erl-port-in? port) `(vector-ref ,port 7))
(##define-macro (erl-port-eof? port) `(vector-ref ,port 8))
(##define-macro (erl-port-creation port) `(vector-ref ,port 9))
(##define-macro (erl-port-node port) `(vector-ref ,port 10))
(##define-macro (erl-port-id port) `(vector-ref ,port 11))
(##define-macro (erl-port-opened? port) `(vector-ref ,port 12))
(##define-macro (erl-port-opened?-set! port opened?)
`(vector-set! ,port 12 ,opened?))
( # # define - macro ( make - port pidx s_res owner packeting binary ? in ? eof ? )
; `(let ((p (vector 'port
; ,pidx ;; pidx
; ,s_res ;; start result
; ,owner ;; owner's PID
; ,packeting ;; packeting size
; ,binary? ;; receive binaries?
; (erl-nil) ;; linked processes
; ,in? ;; may receive input?
, eof ? ; ; the eof flag
; node.creation ;; creation
; node.name ;; node
; node.port-count ;; id
; )))
( set ! node.port - count ( int.+ node.port - count 1 ) )
( vector - set ! port - table idx p )
; p))
;; try to convert ioterm to binary, #f if failure
(##define-macro (erl-ioterm_to_binary x)
`(let ((a ,x))
(if (erl-binary? a)
a
(let ((r (erl-generic-ioterm_to_list a)))
(and r (erl-safe-list_to_binary/1 r))))))
Timer interrupts
(##define-macro (setup-time!) #f)
(##define-macro (advance-time!) #f)
(##define-macro (current-time-in-usecs)
`(with-no-interrupts
(lambda ()
(inexact->exact (flround (fl* 1e6 (time->seconds (current-time))))))))
(##define-macro (current-time-in-msecs)
`(with-no-interrupts
(lambda ()
(inexact->exact (flround (fl* 1e3 (time->seconds (current-time))))))))
(##define-macro (current-cputime-in-msecs)
`(with-no-interrupts
(lambda ()
(inexact->exact (flround (fl* 1e3 (f64vector-ref (##process-statistics) 0)))))))
(##define-macro (timer-interrupt-disable!)
`(set! timer-interrupt-allowed? #f))
(##define-macro (timer-interrupt-enable!)
`(set! timer-interrupt-allowed? #t))
(##define-macro (with-no-interrupts thunk)
`(let ()
(##declare (not interrupts-enabled))
(,thunk)))
(##define-macro (allow-interrupts)
`(##declare (interrupts-enabled)))
(##define-macro (add-timer-interrupt-job job)
`(##interrupt-vector-set! 1 ,job))
(##define-macro (cleanup-timer-interrupt!)
`(##interrupt-vector-set! 1 (lambda () #f)))
;;;;;;;;;;;;;;
FIFO Queues
(##define-macro (make-queue)
`(let ((q (cons '() '())))
(set-car! q q)
q))
(##define-macro (queue-empty? q)
`(let ((q ,q))
(eq? (car q) q)))
(##define-macro (queue-probe q) q)
(##define-macro (queue-next p) `(cdr ,p))
(##define-macro (queue-add-to-tail! q x)
`(let ((q ,q))
(with-no-interrupts
(lambda ()
(let ((cell (cons ,x '())))
(set-cdr! (car q) cell)
(set-car! q cell))))))
(##define-macro (queue-extract! q probe)
`(let ((q ,q) (probe ,probe))
(with-no-interrupts
(lambda ()
(let ((curr (cdr probe)))
(if (eq? curr (car q)) ; last element?
(set-car! q probe))
(set-cdr! probe (cdr curr)))))))
;;;;;;;;;;
;; Signals
;; Assume local for now.
(##define-macro (erl-group-leader-signal! dest_pid new_gl)
`(process-group-leader-set! (erl-pid-process ,dest_pid) ,new_gl))
(##define-macro (erl-link-signal! dest_pid)
`(process-link! (erl-pid-process ,dest_pid) (erl-self/0)))
(##define-macro (erl-unlink-signal! dest_pid)
`(process-unlink! (erl-pid-process ,dest_pid) (erl-self/0)))
(##define-macro (erl-message-signal! dest_pid msg)
`(process-deliver! (erl-pid-process ,dest_pid) ,msg))
(##define-macro (erl-info-signal dest_pid prop)
`(process-get-property (erl-pid-process ,dest_pid) ,prop))
;; This one should check the trap flag
;; also self-signals!!!
(##define-macro (erl-exit-signal! dest_pid reason)
`(process-die! (erl-pid-process ,dest_pid) ,reason))
;------------------------------------------------------------------------------
Added by for ETOS 2.2
(##define-macro (erl-function-unbound? m f a)
(let ((var (string->symbol (string-append m ":" f "/" (number->string a)))))
`(##unbound? (##global-var-ref (##make-global-var ',var)))))
(##define-macro (erl-undefined-function m f)
(define-macro (erl-atom<-string str) `(string->symbol ,str));********kludge
(let ((mod (erl-atom<-string m))
(fun (erl-atom<-string f)))
`(lambda args (erl-undefined-function-handler args ',mod ',fun))))
(##define-macro (erl-function-set! global-var val)
`(##global-var-set! (##make-global-var ',global-var) ,val))
(define-macro (erl-false) `'false)
(define-macro (erl-true) `'true)
(define-macro (erl-impossible-obj1) `#f)
(define-macro (erl-impossible-obj2) `#t)
;(define-macro (erl-atom? x) `(symbol? ,x))
(define-macro (erl-atom<-string str) `(string->symbol ,str))
(define-macro (erl-atom->string atom) `(symbol->string ,atom))
;(define-macro (erl-false) `#f)
;(define-macro (erl-true) `#t)
;(define-macro (erl-impossible-obj1) `'false)
;(define-macro (erl-impossible-obj2) `'true)
;
;(define-macro (erl-atom? x) `(let ((x ,x)) (or (symbol? x) (boolean? x))))
;
;(define-macro (erl-atom<-string str)
` ( let ( ( atom ( string->symbol , ) ) )
; (cond ((eq? atom 'false) #f)
; ((eq? atom 'true) #t)
; (else atom))))
;
;(define-macro (erl-atom->string atom)
; `(let ((atom ,atom))
; (symbol->string
; (cond ((eq? atom #f) 'false)
; ((eq? atom #t) 'true)
; (else atom)))))
;(define-macro (erl-char? x)
` ( let ( ( x , x ) ) ( and ( erl - fix ? x ) ( not ( fix . < x 0 ) ) ( not ( fix . < 65535 x ) ) ) ) )
;
( define - macro ( erl - char<-char c ) ` ( char->integer , c ) ) ; hope for
;(define-macro (erl-char->char c) `(integer->char ,c))
;(define-macro (erl-char? x) `(char? ,x))
(define-macro (erl-char<-char c) c)
(define-macro (erl-char->char c) c)
(define-macro (erl-float<-real n) `(exact->inexact ,n))
(define-macro (erl-float->real n) n)
(define-macro (erl-int<-exact-integer n) n)
(define-macro (erl-int->exact-integer n) n)
(define-macro (erl-true? x) `(not (eq? ,x (erl-false))))
(define-macro (erl-equal? x y) `(equal? ,x ,y))
;(define-macro (erl-nil) `'())
;(define-macro (erl-nil? x) `(null? ,x))
;(define-macro (erl-cons x y) `(cons ,x ,y))
;(define-macro (erl-cons? x) `(pair? ,x))
;(define-macro (erl-hd x) `(car ,x))
;(define-macro (erl-tl x) `(cdr ,x))
( define - macro ( erl - list . elems ) ` ( list , @elems ) )
( define - macro ( erl - append . lists ) ` ( append , ) )
(define-macro (erl-list<-list lst) lst)
;(define-macro (erl-tuple? x)
; `(let ((x ,x)) (and (vector? x) (fix.< 0 (vector-length x)) (eq? (vector-ref x 0) 'tuple))))
( define - macro ( erl - tuple . elems ) ` ( vector ' tuple , @elems ) )
( define - macro ( erl - tuple - size tup ) ` ( fix.u- ( vector - length , tup ) 1 ) )
( define - macro ( erl - tuple - ref tup i ) ` ( vector - ref , tup , i ) )
(define-macro (erl-tuple<-list lst) `(list->vector (cons 'tuple ,lst)))
| null | https://raw.githubusercontent.com/feeley/etos/da9f089c1a7232d97827f8aa4f4b0862b7c5551f/compiler/rt-gambit.scm | scheme | File: "rt-gambit.scm"
We can assume here that (erl-sub? x) is true...
Full type tests
Longer arithmetic macros
(let* ((a ,x) (res (fix.u- a ,y)))
(if (and (fix.< res a) (erl-fix? a))
res
(,general-case a ,y)))
chars
integer arithmetic
floating-point arithmetic
abs/1 (Guard BIF)
apply/3
atom_to_list/1
binary_to_list/1
binary_to_list/3
binary_to_string/1 (PROPOSED BIF)
date/0
element/2 (Guard BIF)
erase/0
erase/1
exit/1
exit/2
float/1 (Guard BIF)
float_to_list/1
get/0
get/1
get_keys/1
group_leader/2
hash/2
hd/1 (Guard BIF)
integer_to_list/1
integer_to_string/1 (PROPOSED BIF)
is_alive/0
is_atom/1 (Recognizer BIF)
is_binary/1 (Recognizer BIF)
is_char/1 (Recognizer BIF)
is_cons/1 (Recognizer BIF)
is_float/1 (Recognizer BIF)
is_integer/1 (Recognizer BIF)
is_list/1 (Recognizer BIF)
is_null/1 (Recognizer BIF)
is_number/1 (Recognizer BIF)
is_pid/1 (Recognizer BIF)
is_port/1 (Recognizer BIF)
is_ref/1 (Recognizer BIF)
is_string/1 (Recognizer BIF)
length/1 (Guard BIF)
list_to_atom/1
list_to_binary/1
list_to_float/1
list_to_integer/1
list_to_string/1 (PROPOSED BIF)
list_to_tuple/1
node/1 (Guard BIF)
now/0
port_close/1
port_info/1
port_info/2
ports/0
process_info/2
process_flag/2
processes/0
registered/0
round/1 (Guard BIF)
setelement/3
sign/1 (Guard BIF)
size/1 (Guard BIF)
split_binary/2
throw/1
time/0
tl/1 (Guard BIF)
trunc/1 (Guard BIF)
tuple_to_list/1
unlink/1
unregister/1
+/1 (Guard BIF)
-/1 (Guard BIF)
bnot/1 (Guard BIF)
+/2 (Guard BIF)
-/2 (Guard BIF)
bor/2 (Guard BIF)
bxor/2 (Guard BIF)
bsl/2 (Guard BIF)
bsr/2 (Guard BIF)
*/2 (Guard BIF)
//2 (Guard BIF)
///2 (Guard BIF)
div/2 (Guard BIF)
mod/2 (Guard BIF)
band/2 (Guard BIF)
++/2
--/2 (Guard BIF) ??
or/2
xor/2
=:=/2
=/=/2
==/2
/=/2
</2
>/2
>=/2
<=/2
Semi BIFS
Cannot be invoked without 'erlang:' prefix
check_process_code/2
delete_module/1
get_cookie/0
halt/0
hash/2
m_acos/1
m_asin/1
m_atan/1
m_atan2/2
m_cos/1
m_cosh/1
m_erf/1
m_erfc/1
m_log/1
m_log10/1
m_pow/2
m_sin/1
m_sinh/1
m_sqrt/1
m_tan/1
m_tanh/1
module_loaded/1
preloaded/0
purge_module/1
set_cookie/2
List comprehension utils
Returns a function definition from its name passed as an atom
will return #f if undefined
Processes
state of process can be: running, ready, waiting, dead
PIDs
Refs
`(let ((p (vector 'port
,pidx ;; pidx
,s_res ;; start result
,owner ;; owner's PID
,packeting ;; packeting size
,binary? ;; receive binaries?
(erl-nil) ;; linked processes
,in? ;; may receive input?
; the eof flag
node.creation ;; creation
node.name ;; node
node.port-count ;; id
)))
p))
try to convert ioterm to binary, #f if failure
last element?
Signals
Assume local for now.
This one should check the trap flag
also self-signals!!!
------------------------------------------------------------------------------
********kludge
(define-macro (erl-atom? x) `(symbol? ,x))
(define-macro (erl-false) `#f)
(define-macro (erl-true) `#t)
(define-macro (erl-impossible-obj1) `'false)
(define-macro (erl-impossible-obj2) `'true)
(define-macro (erl-atom? x) `(let ((x ,x)) (or (symbol? x) (boolean? x))))
(define-macro (erl-atom<-string str)
(cond ((eq? atom 'false) #f)
((eq? atom 'true) #t)
(else atom))))
(define-macro (erl-atom->string atom)
`(let ((atom ,atom))
(symbol->string
(cond ((eq? atom #f) 'false)
((eq? atom #t) 'true)
(else atom)))))
(define-macro (erl-char? x)
hope for
(define-macro (erl-char->char c) `(integer->char ,c))
(define-macro (erl-char? x) `(char? ,x))
(define-macro (erl-nil) `'())
(define-macro (erl-nil? x) `(null? ,x))
(define-macro (erl-cons x y) `(cons ,x ,y))
(define-macro (erl-cons? x) `(pair? ,x))
(define-macro (erl-hd x) `(car ,x))
(define-macro (erl-tl x) `(cdr ,x))
(define-macro (erl-tuple? x)
`(let ((x ,x)) (and (vector? x) (fix.< 0 (vector-length x)) (eq? (vector-ref x 0) 'tuple)))) |
Copyright ( C ) 1999 , , , All Rights Reserved .
RunTime library for EtoS - Gambit version
(declare
(standard-bindings)
(extended-bindings)
(block)
(not safe)
(inlining-limit 900)
( inlining - limit 300 )
)
(##define-macro (gensymbol . rest)
`(string->symbol (string-append "$" (symbol->string (gensym ,@rest)))))
(##define-macro (continuation-save! v proc)
`(let ((v ,v) (proc ,proc))
(continuation-capture
(lambda (cont)
(vector-set! v 0 cont)
(proc v)))))
(##define-macro (continuation-restore v val)
`(continuation-return (vector-ref ,v 0) ,val))
(##define-macro (erl-fix? x) `(##fixnum? ,x))
(##define-macro (erl-sub? x) `(##subtyped? ,x))
(##define-macro (erl-spc? x) `(##special? ,x))
(##define-macro (erl-con? x) `(##pair? ,x))
(##define-macro (erl-big? x) `(##subtyped.bignum? ,x))
(##define-macro (erl-flo? x) `(##subtyped.flonum? ,x))
(##define-macro (erl-ato? x) `(##subtyped.symbol? ,x))
(##define-macro (erl-vec? x) `(##subtyped.vector? ,x))
(##define-macro (erl-chr? x) `(##char? ,x))
(##define-macro (erl-nil? x) `(##null? ,x))
(##define-macro (erl-int? x)
`(let ((a ,x)) (or (erl-fix? a) (and (erl-sub? a) (erl-big? a)))))
(##define-macro (erl-num? x)
`(let ((a ,x)) (or (erl-fix? a)
(and (erl-sub? a) (or (erl-flo? a) (erl-big? a))))))
(##define-macro (big.= x y) `(##bignum.= ,x ,y))
(##define-macro (erl-big=k x k) `(big.= ,x ,k))
(##define-macro (flo.= x y) `(##flonum.= ,x ,y))
(##define-macro (erl-flo=k x k) `(flo.= ,x ,k))
(##define-macro (fix.= x y) `(##fixnum.= ,x ,y))
(##define-macro (erl-fix=k x k) `(fix.= ,x ,k))
(##define-macro (ato.= x y) `(eq? ,x ,y))
(##define-macro (erl-ato=k x k) `(ato.= ,x ,k))
(##define-macro (chr.= x y) `(eq? ,x ,y))
(##define-macro (erl-chr=k x k) `(chr.= ,x ,k))
(##define-macro (num.= x y) `(= ,x ,y))
(##define-macro (fix.< x y) `(##fixnum.< ,x ,y))
(##define-macro (big.< x y) `(##bignum.< ,x ,y))
(##define-macro (flo.< x y) `(##flonum.< ,x ,y))
(##define-macro (num.< x y) `(< ,x ,y))
(##define-macro (fix.u+ x y) `(##fixnum.+ ,x ,y))
(##define-macro (big.+ x y) `(##bignum.+ ,x ,y))
(##define-macro (flo.+ x y) `(##flonum.+ ,x ,y))
(##define-macro (num.+ x y) `(+ ,x ,y))
(##define-macro (fix.u- x y) `(##fixnum.- ,x ,y))
(##define-macro (big.- x y) `(##bignum.- ,x ,y))
(##define-macro (flo.- x y) `(##flonum.- ,x ,y))
(##define-macro (num.- x y) `(- ,x ,y))
(##define-macro (fix.bor x y) `(##fixnum.bitwise-ior ,x ,y))
(##define-macro (fix.bxor x y) `(##fixnum.bitwise-xor ,x ,y))
(##define-macro (fix.band x y) `(##fixnum.bitwise-and ,x ,y))
(##define-macro (fix.bnot x) `(##fixnum.bitwise-not ,x))
(##define-macro (fix.u* x y) `(##fixnum.* ,x ,y))
(##define-macro (big.* x y) `(##bignum.* ,x ,y))
(##define-macro (flo.* x y) `(##flonum.* ,x ,y))
(##define-macro (num.* x y) `(* ,x ,y))
(##define-macro (flo./ x y) `(##flonum./ ,x ,y))
(##define-macro (fix.div x y) `(##fixnum.quotient ,x ,y))
(##define-macro (big.div x y) `(##bignum.quotient ,x ,y))
(##define-macro (fix.rem x y) `(##fixnum.remainder ,x ,y))
(##define-macro (big.rem x y) `(##bignum.remainder ,x ,y))
(##define-macro (fix.mod x y) `(##fixnum.modulo ,x ,y))
(##define-macro (big.mod x y) `(##bignum.modulo ,x ,y))
(##define-macro (fix.even? x) `(##fixnum.even? ,x))
(##define-macro (erl-fixnum? x) `(##fixnum? ,x))
(##define-macro (erl-bignum? x) `(##bignum? ,x))
(##define-macro (erl-flonum? x) `(##flonum? ,x))
(##define-macro (erl-atom? x) `(##symbol? ,x))
(##define-macro (erl-byte? x)
`(let ((a ,x))
(and (erl-fix? a) (fix.< a 256) (fix.< -1 a))))
(##define-macro (erl-boolean? x)
`(let ((a ,x)) (or (erl-ato=k a 'true) (erl-ato=k a 'false))))
(##define-macro (erl-cons? x) `(erl-con? ,x))
(##define-macro (erl-char? x) `(##char? ,x))
(##define-macro (if-fix? x y z) (if (##fixnum? x) y z))
(##define-macro (if-int? x y z) (if (or (##fixnum? x) (##bignum? x)) y z))
(##define-macro (if-zero-fix? x y z) (if (and (##fixnum? x) (= x 0)) y z))
(##define-macro (if-pos-fix? x y z) (if (and (##fixnum? x) (> x 0)) y z))
(##define-macro (if-neg-fix? x y z) (if (and (##fixnum? x) (< x 0)) y z))
(##define-macro (if-non-neg-fix? x y z) (if (and (##fixnum? x) (>= x 0)) y z))
(##define-macro (fixnum-specialized-=:= x y general-case)
`(if-fix? ,x
(fix.= ,x ,y)
(if-fix? ,y
(fix.= ,x ,y)
(let ((a ,x) (b ,y))
(if (or (erl-fix? a) (erl-fix? b))
(fix.= a b)
(,general-case a b))))))
(##define-macro (fixnum-specialized-< x y general-case)
`(if-fix? ,x
(let ((b ,y))
(if (erl-fix? b) (fix.< ,x b) (,general-case ,x b)))
(if-fix? ,y
(let ((a ,x))
(if (erl-fix? a) (fix.< a ,y) (,general-case a ,y)))
(let ((a ,x) (b ,y))
(if (and (erl-fix? a) (erl-fix? b))
(fix.< a b)
(,general-case a b))))))
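; Overflow check for fixnum addition: overflow is impossible when the operands
; have opposite signs ((fix.bxor a b) is negative) or when the wrapped sum
; still has the same sign as one of the operands.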
(##define-macro (fixnum-specialized-+ x y general-case)
`(if-zero-fix? ,x
(let ((b ,y))
(if (erl-fix? b)
b
(,general-case ,x b)))
(if-pos-fix? ,x
(let ((b ,y))
(if (and (erl-fix? b) (fix.< b (fix.u+ ,x b)))
(fix.u+ ,x b)
(,general-case ,x b)))
(if-neg-fix? ,x
(let ((b ,y))
(if (and (erl-fix? b) (fix.< (fix.u+ ,x b) b))
(fix.u+ ,x b)
(,general-case ,x b)))
(if-zero-fix? ,y
(let ((a ,x))
(if (erl-fix? a)
a
(,general-case a ,y)))
(if-pos-fix? ,y
(let ((a ,x))
(if (and (erl-fix? a) (fix.< a (fix.u+ a ,y)))
(fix.u+ a ,y)
(,general-case a ,y)))
(if-neg-fix? ,y
(let ((a ,x))
(if (and (erl-fix? a) (fix.< (fix.u+ a ,y) a))
(fix.u+ a ,y)
(,general-case a ,y)))
(let ((a ,x) (b ,y))
(if (and (erl-fix? a)
(erl-fix? b)
(or (fix.< (fix.bxor a b) 0)
(not (fix.< (fix.bxor (fix.u+ a b) b) 0))))
(fix.u+ a b)
(,general-case a b))))))))))
(##define-macro (fixnum-specialized-- x y general-case)
`(if-zero-fix? ,y
(let ((a ,x))
(if (erl-fix? a)
a
(,general-case a ,y)))
(if-pos-fix? ,y
(let ((a ,x))
(if (and (erl-fix? a) (fix.< (fix.u- a ,y) a))
(fix.u- a ,y)
(,general-case a ,y)))
(if-neg-fix? ,y
(let ((a ,x))
(if (and (erl-fix? a) (fix.< a (fix.u- a ,y)))
(fix.u- a ,y)
(,general-case a ,y)))
(let ((a ,x) (b ,y))
(if (and (erl-fix? a)
(erl-fix? b)
(or (not (fix.< (fix.bxor a b) 0))
(fix.< (fix.bxor (fix.u- a b) b) 0)))
(fix.u- a b)
(,general-case a b)))))))
(##define-macro (fixnum-specialized-* x y general-case)
`(,general-case ,x ,y))
(##define-macro (fixnum-specialized-div x y general-case)
`(if-fix? ,y
(let ((a ,x))
(if (erl-fix? a)
(fix.div a ,y)
(,general-case a ,y)))
(let ((a ,x) (b ,y))
(if (and (erl-fix? a) (erl-fix? b))
(fix.div a b)
(,general-case ,x ,y)))))
(##define-macro (fixnum-specialized-mod x y general-case)
`(if-fix? ,y
(let ((a ,x))
(if (erl-fix? a)
(fix.mod a ,y)
(,general-case a ,y)))
(let ((a ,x) (b ,y))
(if (and (erl-fix? a) (erl-fix? b))
(fix.mod a b)
(,general-case ,x ,y)))))
(##define-macro (fixnum-specialized-rem x y general-case)
`(if-fix? ,y
(let ((a ,x))
(if (erl-fix? a)
(fix.rem a ,y)
(,general-case a ,y)))
(let ((a ,x) (b ,y))
(if (and (erl-fix? a) (erl-fix? b))
(fix.rem a b)
(,general-case ,x ,y)))))
(##define-macro (fixnum-specialized-bor x y general-case)
`(if-fix? ,x
(let ((b ,y))
(if (erl-fix? b)
(fix.bor ,x b)
(,general-case ,x b)))
(if-fix? ,y
(let ((a ,x))
(if (erl-fix? a)
(fix.bor a ,y)
(,general-case a ,y)))
(let ((a ,x) (b ,y))
(if (and (erl-fix? a) (erl-fix? b))
(fix.bor a b)
(,general-case a b))))))
(##define-macro (fixnum-specialized-bxor x y general-case)
`(if-fix? ,x
(let ((b ,y))
(if (erl-fix? b)
(fix.bxor ,x b)
(,general-case ,x b)))
(if-fix? ,y
(let ((a ,x))
(if (erl-fix? a)
(fix.bxor a ,y)
(,general-case a ,y)))
(let ((a ,x) (b ,y))
(if (and (erl-fix? a) (erl-fix? b))
(fix.bxor a b)
(,general-case a b))))))
(##define-macro (fixnum-specialized-band x y general-case)
`(if-fix? ,x
(let ((b ,y))
(if (erl-fix? b)
(fix.band ,x b)
(,general-case ,x b)))
(if-fix? ,y
(let ((a ,x))
(if (erl-fix? a)
(fix.band a ,y)
(,general-case a ,y)))
(let ((a ,x) (b ,y))
(if (and (erl-fix? a) (erl-fix? b))
(fix.band a b)
(,general-case a b))))))
(##define-macro (fixnum-specialized-bsl x y general-case)
`(if-zero-fix? ,y
(let ((a ,x))
(if (erl-fix? a)
a
(,general-case a ,y)))
(,general-case ,x ,y)))
(##define-macro (fixnum-specialized-bsr x y general-case)
`(if-zero-fix? ,y
(let ((a ,x))
(if (erl-fix? a)
a
(,general-case a ,y)))
(,general-case ,x ,y)))
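; The number-specialized-* macros additionally open-code the flonum case;
; bignums and mixed-type operands are left to the general-case procedure.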
(##define-macro (number-specialized-=:= x y general-case)
`(if-fix? ,x
(eq? ,x ,y)
(if-fix? ,y
(eq? ,x ,y)
(if-float? ,x
(let ((b ,y))
(if (erl-flonum? b) (flo.= ,x b) (,general-case ,x b)))
(if-float? ,y
(let ((a ,x))
(if (erl-flonum? a) (flo.= a ,y) (,general-case a ,y)))
(let ((a ,x) (b ,y))
(cond ((or (erl-fix? a) (erl-fix? b))
(eq? a b))
((and (erl-flonum? a) (erl-flonum? b))
(flo.= a b))
(else
(,general-case a b)))))))))
(##define-macro (number-specialized-== x y general-case)
`(if-fix? ,x
(let ((b ,y))
(if (erl-fix? b) (fix.= ,x b) (,general-case ,x b)))
(if-fix? ,y
(let ((a ,x))
(if (erl-fix? a) (fix.= a ,y) (,general-case a ,y)))
(if-float? ,x
(let ((b ,y))
(if (erl-flonum? b) (flo.= ,x b) (,general-case ,x b)))
(if-float? ,y
(let ((a ,x))
(if (erl-flonum? a) (flo.= a ,y) (,general-case a ,y)))
(let ((a ,x) (b ,y))
(cond ((erl-fix? a)
(if (erl-fix? b) (fix.= a b) (,general-case a b)))
((erl-flonum? a)
(if (erl-flonum? b) (flo.= a b) (,general-case a b)))
(else
(,general-case a b)))))))))
(##define-macro (number-specialized-< x y general-case)
`(if-fix? ,x
(let ((b ,y))
(if (erl-fix? b) (fix.< ,x b) (,general-case ,x b)))
(if-fix? ,y
(let ((a ,x))
(if (erl-fix? a) (fix.< a ,y) (,general-case a ,y)))
(if-float? ,x
(let ((b ,y))
(if (erl-flonum? b) (flo.< ,x b) (,general-case ,x b)))
(if-float? ,y
(let ((a ,x))
(if (erl-flonum? a) (flo.< a ,y) (,general-case a ,y)))
(let ((a ,x) (b ,y))
(cond ((erl-fix? a)
(if (erl-fix? b) (fix.< a b) (,general-case a b)))
((erl-flonum? a)
(if (erl-flonum? b) (flo.< a b) (,general-case a b)))
(else
(,general-case a b)))))))))
(##define-macro (number-specialized-+ x y general-case)
`(if-zero-fix? ,x
(let ((b ,y))
(if (num? b)
b
(,general-case ,x b)))
(if-pos-fix? ,x
(let* ((b ,y) (res (fix.u+ ,x b)))
(if (and (erl-fix? b) (fix.< b res))
res
(,general-case ,x b)))
(if-neg-fix? ,x
(let* ((b ,y) (res (fix.u+ ,x b)))
(if (and (erl-fix? b) (fix.< res b))
res
(,general-case ,x b)))
(if-float? ,x
(let ((b ,y))
(if (erl-flonum? b)
(flo.+ ,x b)
(,general-case ,x b)))
(if-zero-fix? ,y
(let ((a ,x))
(if (num? a)
a
(,general-case a ,y)))
(if-pos-fix? ,y
(let ((a ,x))
(if (and (erl-fix? a) (fix.< a (fix.u+ a ,y)))
(fix.u+ a ,y)
(,general-case a ,y)))
(if-neg-fix? ,y
(let ((a ,x))
(if (and (erl-fix? a) (fix.< (fix.u+ a ,y) a))
(fix.u+ a ,y)
(,general-case a ,y)))
(if-float? ,y
(let ((a ,x))
(if (erl-flonum? a)
(flo.+ a ,y)
(,general-case a ,y)))
(let ((a ,x) (b ,y))
(cond ((erl-fix? a)
(if (and (erl-fix? b)
(or (fix.< (fix.bxor a b) 0)
(not (fix.< (fix.bxor (fix.u+ a b) b)
0))))
(fix.u+ a b)
(,general-case a b)))
((erl-flonum? a)
(if (erl-flonum? b)
(flo.+ a b)
(,general-case a b)))
(else
(,general-case a b)))))))))))))
(##define-macro (number-specialized-- x y general-case)
`(if-zero-fix? ,y
(let ((a ,x))
(if (erl-fix? a)
a
(,general-case a ,y)))
(if-pos-fix? ,y
(let ((a ,x))
(if (and (erl-fix? a) (fix.< (fix.u- a ,y) a))
(fix.u- a ,y)
(,general-case a ,y)))
(if-neg-fix? ,y
(let* ((a ,x) (res (fix.u- a ,y)))
(if (and (fix.< a res) (erl-fix? a))
res
(,general-case a ,y)))
(if-float? ,y
(let ((a ,x))
(if (erl-flonum? a)
(flo.- a ,y)
(,general-case a ,y)))
(let ((a ,x) (b ,y))
(cond ((erl-fix? a)
(let ((res (fix.u- a b)))
(if (and (or (not (fix.< (fix.bxor a b) 0))
(fix.< (fix.bxor res b) 0))
(erl-fix? b))
res
(,general-case a b))))
((erl-flonum? a)
                  (if (erl-flonum? b)
(flo.- a b)
(,general-case a b)))
(else
(,general-case a b)))))))))
(##define-macro (number-specialized-* x y general-case)
`(if-int? ,x
(,general-case ,x ,y)
(if-int? ,y
(,general-case ,x ,y)
(if-float? ,x
(let ((b ,y))
(if (erl-flonum? b)
(flo.* ,x b)
(,general-case ,x b)))
(if-float? ,y
(let ((a ,x))
(if (erl-flonum? a)
(flo.* a ,y)
(,general-case a ,y)))
(let ((a ,x) (b ,y))
(if (and (erl-sub? a) (erl-sub? b) (erl-flo? a) (erl-flo? b))
(flo.* a b)
(,general-case a b))))))))
(##define-macro (number-specialized-/ x y general-case)
`(if-non-zero-float? ,y
(let ((a ,x))
(if (and (erl-sub? a) (erl-flo? a))
(flo./ a ,y)
(,general-case a ,y)))
(let ((a ,x) (b ,y))
(if (and (erl-sub? a) (erl-sub? b) (erl-flo? a) (erl-flo? b) (not (flo.= b 0.0)))
(flo./ a b)
(,general-case a b)))))
(##define-macro (number-specialized-abs x general-case)
`(if-non-neg-fix? ,x
,x
(if-float? ,x
(flo.abs ,x)
(let ((a ,x))
(if (erl-flonum? a)
(flo.abs a)
(,general-case a))))))
(##define-macro (number-specialized-float x general-case)
`(if-int? ,x
(exact->inexact ,x)
(if-float? ,x
,x
(let ((a ,x))
(cond
((erl-fix? a) (exact->inexact a))
((erl-flonum? a) a)
(else (,general-case a)))))))
(##define-macro (number-specialized-round x general-case)
`(if-int? ,x
,x
(if-float? ,x
(inexact->exact (flo.round ,x))
(let ((a ,x))
(cond
((erl-flonum? a) (inexact->exact (flo.round a)))
((erl-fix? a) a)
(else (,general-case a)))))))
(##define-macro (number-specialized-sign x general-case)
`(if-non-neg-num? ,x
0
(if-neg-num? ,x
1
(let ((a ,x))
(cond
((erl-fix? a) (if (fix.< a 0) 1 0))
((erl-flonum? a) (if (flo.< a 0.0) 1 0))
(else (,general-case a)))))))
(##define-macro (number-specialized-trunc x general-case)
`(if-int? ,x
,x
(if-float? ,x
(inexact->exact (flo.trunc ,x))
(let ((a ,x))
(cond
((erl-flonum? a) (inexact->exact (flo.trunc a)))
((erl-fix? a) a)
(else (,general-case a)))))))
(##define-macro (chr.->integer x) `(char->integer ,x))
(##define-macro (int.= x y) `(fixnum-specialized-=:= ,x ,y =))
(##define-macro (int.< x y) `(fixnum-specialized-< ,x ,y <))
(##define-macro (int.+ x y) `(fixnum-specialized-+ ,x ,y +))
(##define-macro (int.- x y) `(fixnum-specialized-- ,x ,y -))
(##define-macro (int.* x y) `(fixnum-specialized-* ,x ,y *))
(##define-macro (int.div x y) `(fixnum-specialized-div ,x ,y quotient))
(##define-macro (int.rem x y) `(fixnum-specialized-rem ,x ,y remainder))
(##define-macro (int.even? x) `(even? ,x))
(##define-macro (if-float? x y z) (if (##flonum? x) y z))
(##define-macro (if-non-zero-float? x y z)
(if (and (##flonum? x) (not (= x 0.0))) y z))
(##define-macro (if-non-neg-num? x y z)
(if (and (number? x) (>= x 0)) y z))
(##define-macro (if-neg-num? x y z)
(if (and (number? x) (< x 0)) y z))
(##define-macro (flo.abs x) `(##flonum.abs ,x))
(##define-macro (flo.acos x) `(##flonum.acos ,x))
(##define-macro (flo.acosh x)
`(let ((a ,x))
(flo.log (flo.+ a (flo.sqrt (flo.- (flo.* a a) 1.))))))
(##define-macro (flo.asin x) `(##flonum.asin ,x))
(##define-macro (flo.asinh x)
`(let ((a ,x))
(flo.log (flo.+ a (flo.sqrt (flo.+ (flo.* a a) 1.))))))
(##define-macro (flo.atan x) `(##flonum.atan ,x))
(##define-macro (flo.atan2 x y) `(##flonum.atan ,x ,y))
(##define-macro (flo.atanh x)
`(let ((a ,x))
(flo.* .5 (flo.log (flo./ (flo.+ 1. a) (flo.- 1. a))))))
(##define-macro (flo.cos x) `(##flonum.cos ,x))
(##define-macro (flo.cosh x)
  `(let ((a ,x))
     (flo./ (flo.+ (flo.exp a) (flo.exp (flo.- 0. a))) 2.)))
(##define-macro (flo.erf x) `'not_implemented_yet)
; `(flo.- 1. (flo.erf ,x))
(##define-macro (flo.exp x) `(##flonum.exp ,x))
(##define-macro (flo.log x) `(##flonum.log ,x))
(##define-macro (flo.log10 x) `(flo./ (flo.log ,x) ,(##flonum.log 10.)))
(##define-macro (flo.pow x y) `(flo.exp (flo.* ,y (flo.log ,x))))
(##define-macro (flo.round x) `(##flonum.round ,x))
(##define-macro (flo.sin x) `(##flonum.sin ,x))
(##define-macro (flo.sinh x)
  `(let ((a ,x))
     (flo./ (flo.- (flo.exp a) (flo.exp (flo.- 0. a))) 2.)))
(##define-macro (flo.sqrt x) `(##flonum.sqrt ,x))
(##define-macro (flo.tan x) `(##flonum.tan ,x))
(##define-macro (flo.tanh x)
  `(let ((a ,x))
     (let ((ea (flo.exp a)) (e-a (flo.exp (flo.- 0. a))))
       (flo./ (flo.- ea e-a) (flo.+ ea e-a)))))
(##define-macro (flo.trunc x) `(##flonum.truncate ,x))
(##define-macro (erl-nil) ''())
(##define-macro (erl-cons x y) `(cons ,x ,y))
(##define-macro (erl-hd x) `(car ,x))
(##define-macro (erl-tl x) `(cdr ,x))
(##define-macro (erl-list . elems) `(list ,@elems))
(##define-macro (erl-append . lists) `(append ,@lists))
(##define-macro (erl-tuple . elems) `(vector 'tuple ,@elems))
(##define-macro (erl-tuple-size x) `(fix.u- (erl-vector-length ,x) 1))
(##define-macro (erl-tuple-ref x i) `(##vector-ref ,x ,i))
(##define-macro (erl-vector . elems) `(vector ,@elems))
(##define-macro (erl-vector-length v) `(##vector-length ,v))
(##define-macro (erl-vector-ref v i) `(##vector-ref ,v ,i))
(##define-macro (erl-vector-set! v i k) `(##vector-set! ,v ,i ,k))
(##define-macro (erl-make-vector n) `(make-vector ,n))
(##define-macro (erl-function arit lam) `(vector 'function ,arit ,lam))
(##define-macro (erl-function-arity f) `(vector-ref ,f 1))
(##define-macro (erl-function-lambda f) `(vector-ref ,f 2))
(##define-macro (erl-make-binary u8 off siz)
`(vector 'binary ,u8 ,off ,siz))
(##define-macro (erl-u8vector->binary u8)
`(let ((a ,u8))
(erl-make-binary a 0 (u8vector-length a))))
(##define-macro (erl-binary-u8vector x) `(vector-ref ,x 1))
(##define-macro (erl-binary-offset x) `(vector-ref ,x 2))
(##define-macro (erl-binary-size x) `(vector-ref ,x 3))
(##define-macro (erl-vector? x)
`(let ((a ,x))
(and (erl-sub? a) (erl-vec? a))))
(##define-macro (erl-tuple? x)
`(let ((a ,x))
(and (erl-sub? a) (erl-vec? a) (erl-ato=k (erl-vector-ref a 0) 'tuple))))
(##define-macro (erl-pid? x)
`(let ((a ,x))
(and (erl-sub? a) (erl-vec? a) (erl-ato=k (erl-vector-ref a 0) 'pid))))
(##define-macro (erl-port? x)
`(let ((a ,x))
(and (erl-sub? a) (erl-vec? a) (erl-ato=k (erl-vector-ref a 0) 'port))))
(##define-macro (erl-ref? x)
`(let ((a ,x))
(and (erl-sub? a) (erl-vec? a) (erl-ato=k (erl-vector-ref a 0) 'ref))))
(##define-macro (erl-binary? x)
`(let ((a ,x))
(and (erl-sub? a) (erl-vec? a) (erl-ato=k (erl-vector-ref a 0) 'binary))))
(##define-macro (erl-function? x)
`(let ((a ,x))
(and (erl-sub? a)
(erl-vec? a)
(erl-ato=k (erl-vector-ref a 0) 'function))))
; BIFS
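; Most BIFs come in three variants: erl-tst-X falls back to an
; erl-generic-tst-X handler, erl-safe-X to a plain Scheme operation or an
; erl-generic-safe-X handler, and erl-X to the erl-generic-X handler.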
(##define-macro (erl-tst-abs/1 x)
`(number-specialized-abs ,x erl-generic-tst-abs/1))
(##define-macro (erl-safe-abs/1 x)
`(number-specialized-abs ,x abs))
(##define-macro (erl-abs/1 x)
`(number-specialized-abs ,x erl-generic-abs/1))
(##define-macro (erl-apply/3 x y z) `(erlang:apply/3 ,x ,y ,z))
(##define-macro (erl-atom_to_list/1 x) `(erlang:atom_to_list/1 ,x))
; atom_to_string/1 (PROPOSED BIF)
(##define-macro (erl-atom_to_string/1 x) `(erlang:atom_to_string/1 ,x))
(##define-macro (erl-binary_to_list/1 x) `(erlang:binary_to_list/1 ,x))
(##define-macro (erl-binary_to_list/3 x y z)
`(erlang:binary_to_list/3 ,x ,y ,z))
(##define-macro (erl-binary_to_string/1 x) `(erlang:binary_to_string/1 ,x))
; char_to_integer/1 (PROPOSED BIF)
(##define-macro (erl-char_to_integer/1 x) `(erlang:char_to_integer/1 ,x))
; concat_binary/1
(##define-macro (erl-concat_binary/1 x) `(erlang:concat_binary/1 ,x))
(##define-macro (erl-date/0) `(erlang:date/0))
(##define-macro (erl-tst-element/2 x y) `(erl-generic-tst-element/2 ,x ,y))
(##define-macro (erl-element/2 x y)
`(let ((x ,x) (y ,y))
(if (and (erl-fixnum? x)
(erl-tuple? y))
(if (and (fix.< 0 x)
(fix.< x (erl-vector-length y)))
(erl-vector-ref y x)
(erl-exit-badindex))
(erl-exit-badarg))))
(##define-macro (erl-erase/0) `(erlang:erase/0))
(##define-macro (erl-erase/1 x) `(erlang:erase/1 ,x))
(##define-macro (erl-exit/1 x) `(erlang:exit/1 ,x))
(##define-macro (erl-exit/2 x y) `(erlang:exit/2 ,x ,y))
(##define-macro (erl-tst-float/1 x)
`(number-specialized-float ,x erl-generic-tst-float/1))
(##define-macro (erl-safe-float/1 x) `(exact->inexact ,x))
(##define-macro (erl-float/1 x)
`(number-specialized-float ,x erl-generic-float/1))
(##define-macro (erl-float_to_list/1 x)
`(erlang:float_to_list/1 ,x))
(##define-macro (erl-get/0) `(erlang:get/0))
(##define-macro (erl-get/1 x) `(erlang:get/1 ,x))
(##define-macro (erl-get_keys/1 x) `(erlang:get_keys/1 ,x))
; group_leader/0
(##define-macro (erl-group_leader/0)
`(process-group-leader node.current-process))
(##define-macro (erl-group_leader/2 x y) `(erlang:group_leader/2 ,x ,y))
(##define-macro (erl-hash/2 x y) `(erlang:hash/2 ,x ,y))
(##define-macro (erl-hd/1 x) `(erl-generic-hd/1 ,x))
; integer_to_char/1 (PROPOSED BIF)
(##define-macro (erl-integer_to_char/1 x)
`(erlang:integer_to_char/1 ,x))
(##define-macro (erl-integer_to_list/1 x)
`(erlang:integer_to_list/1 ,x))
(##define-macro (erl-integer_to_string/1 x)
`(erlang:integer_to_string/1 ,x))
(##define-macro (erl-is_alive/0) 'node.communicating)
(##define-macro (erl-is_atom/1 x) `(if (erl-atom? ,x) 'true 'false))
(##define-macro (erl-is_binary/1 x) `(if (erl-binary? ,x) 'true 'false))
(##define-macro (erl-is_char/1 x) `(if (erl-chr? ,x) 'true 'false))
; is_compound/1 (Recognizer BIF)
(##define-macro (erl-is_compound/1 x)
`(let ((a ,x))
(if (or (erl-nil? a) (erl-con? a) (erl-tuple? a))
'true
'false)))
(##define-macro (erl-is_cons/1 x) `(if (erl-con? ,x) 'true 'false))
(##define-macro (erl-is_float/1 x) `(if (erl-flonum? ,x) 'true 'false))
; is_function/1 (Recognizer BIF)
(##define-macro (erl-is_function/1 x) `(if (erl-function? ,x) 'true 'false))
(##define-macro (erl-is_integer/1 x) `(if (erl-int? ,x) 'true 'false))
(##define-macro (erl-is_list/1 x) `(if (erl-lst? ,x) 'true 'false))
(##define-macro (erl-is_null/1 x) `(if (erl-nil? ,x) 'true 'false))
(##define-macro (erl-is_number/1 x) `(if (erl-num? ,x) 'true 'false))
(##define-macro (erl-is_pid/1 x) `(if (erl-pid? ,x) 'true 'false))
(##define-macro (erl-is_port/1 x) `(if (erl-port? ,x) 'true 'false))
(##define-macro (erl-is_ref/1 x) `(if (erl-ref? ,x) 'true 'false))
(##define-macro (erl-is_string/1 x) `(if (erl-str? ,x) 'true 'false))
; is_tuple/1 (Recognizer BIF)
(##define-macro (erl-is_tuple/1 x) `(if (erl-tuple? ,x) 'true 'false))
(##define-macro (erl-tst-length/1 x) `(erl-generic-tst-length/1 ,x))
(##define-macro (erl-safe-length/1 x) `(length ,x))
(##define-macro (erl-length/1 x) `(erlang:length/1 ,x))
; link/1
(##define-macro (erl-link/1 x) `(erlang:link/1 ,x))
(##define-macro (erl-list_to_atom/1 x) `(erlang:list_to_atom/1 ,x))
(##define-macro (erl-safe-list_to_binary/1 x)
`(erl-u8vector->binary (list->u8vector ,x)))
(##define-macro (erl-list_to_binary/1 x) `(erlang:list_to_binary/1 ,x))
(##define-macro (erl-list_to_float/1 x) `(erlang:list_to_float/1 ,x))
(##define-macro (erl-list_to_integer/1 x) `(erlang:list_to_integer/1 ,x))
(##define-macro (erl-safe-list_to_string/1 x)
`(map integer->char ,x))
(##define-macro (erl-list_to_string/1 x) `(erlang:list_to_string/1 ,x))
(##define-macro (erl-list_to_tuple/1 x) `(erlang:list_to_tuple/1 ,x))
; make_ref/0
(##define-macro (erl-make_ref/0) `(erlang:make_ref/0))
; node/0 (Guard BIF)
(##define-macro (erl-node/0)
`(process-node node.current-process))
(##define-macro (erl-tst-node/1 x) `(erl-generic-tst-node/1 ,x))
(##define-macro (erl-safe-node/1 x) `(erl-generic-safe-node/1 ,x))
(##define-macro (erl-node/1 x) `(erlang:node/1 ,x))
(##define-macro (erl-now/0)
`(let* ((us (current-time-in-usecs))
(s (quotient us 1000000)))
(erl-tuple (quotient s 1000000)
(modulo s 1000000)
(modulo us 1000000))))
; open_port/2
(##define-macro (erl-open_port/2 x y) `(erlang:open_port/2 ,x ,y))
(##define-macro (erl-port_close/1 x) `(erl-generic-port_close/1 ,x))
(##define-macro (erl-port_info/1 x) `(erlang:port_info/1 ,x))
(##define-macro (erl-port_info/2 x y) `(erlang:port_info/2 ,x ,y))
(##define-macro (erl-ports/0) `(erlang:ports/0))
(##define-macro (erl-process_info/2 x y) `(erlang:process_info/2 ,x ,y))
(##define-macro (erl-process_flag/2 x y) `(erlang:process_flag/2 ,x ,y))
(##define-macro (erl-processes/0) `(erlang:processes/0))
(##define-macro (erl-put/2 x y) `(erlang:put/2 ,x ,y))
; register/2
(##define-macro (erl-register/2 x y) `(erlang:register/2 ,x ,y))
(##define-macro (erl-registered/0) `(erlang:registered/0))
(##define-macro (erl-tst-round/1 x)
`(number-specialized-round ,x erl-generic-tst-round/1))
(##define-macro (erl-safe-round/1 x)
`(number-specialized-round ,x round))
(##define-macro (erl-round/1 x)
`(number-specialized-round ,x erl-generic-round/1))
; self/0 (Guard BIF)
(##define-macro (erl-self/0) `(process-pid node.current-process))
(##define-macro (erl-setelement/3 x y z) `(erlang:setelement/3 ,x ,y ,z))
(##define-macro (erl-tst-sign/1 x)
`(number-specialized-sign ,x erl-generic-tst-sign/1))
(##define-macro (erl-safe-sign/1 x)
`(number-specialized-sign ,x erl-generic-safe-sign/1))
(##define-macro (erl-sign/1 x)
`(number-specialized-sign ,x erl-generic-sign/1))
(##define-macro (erl-tst-size/1 x) `(erl-generic-tst-size/1 ,x))
(##define-macro (erl-safe-size/1 x) `(erl-generic-safe-size/1 ,x))
(##define-macro (erl-size/1 x) `(erlang:size/1 ,x))
; spawn/3
(##define-macro (erl-spawn/3 x y z) `(erlang:spawn/3 ,x ,y ,z))
; spawn_link/3
(##define-macro (erl-spawn_link/3 x y z) `(erlang:spawn_link/3 ,x ,y ,z))
(##define-macro (erl-split_binary/2 x y) `(erlang:split_binary/2 ,x ,y))
; statistics/1
(##define-macro (erl-statistics/1 x) `(erlang:statistics/1 ,x))
; string_to_list/1 (PROPOSED BIF)
(##define-macro (erl-string_to_list/1 x) `(erlang:string_to_list/1 ,x))
(##define-macro (erl-throw/1 x) `(erlang:throw/1 ,x))
(##define-macro (erl-time/0) `(erlang:time/0))
(##define-macro (erl-tl/1 x) `(erl-generic-tl/1 ,x))
(##define-macro (erl-tst-trunc/1 x)
`(number-specialized-trunc ,x erl-generic-tst-trunc/1))
(##define-macro (erl-safe-trunc/1 x)
`(number-specialized-trunc ,x truncate))
(##define-macro (erl-trunc/1 x)
`(number-specialized-trunc ,x erl-generic-trunc/1))
(##define-macro (erl-tuple_to_list/1 x) `(erlang:tuple_to_list/1 ,x))
(##define-macro (erl-unlink/1 x) `(erlang:unlink/1 ,x))
(##define-macro (erl-unregister/1 x) `(erlang:unregister/1 ,x))
; whereis/1
(##define-macro (erl-whereis/1 x) `(erlang:whereis/1 ,x))
(##define-macro (erl-=:= x y)
`(number-specialized-=:= ,x ,y erl-generic-=:=))
(##define-macro (erl-== x y)
`(number-specialized-== ,x ,y erl-generic-==))
(##define-macro (erl-< x y)
`(number-specialized-< ,x ,y erl-generic-<))
; Operators 'BIFs'
(##define-macro (erl-tst-+/1 x)
`(number-specialized-+ 0 ,x erl-generic-tst-+/2))
(##define-macro (erl-safe-+/1 x)
`(number-specialized-+ 0 ,x +))
(##define-macro (erl-+/1 x)
`(number-specialized-+ 0 ,x erl-generic-+/2))
(##define-macro (erl-tst--/1 x)
`(number-specialized-- 0 ,x erl-generic-tst--/2))
(##define-macro (erl-safe--/1 x)
`(number-specialized-- 0 ,x -))
(##define-macro (erl--/1 x)
`(number-specialized-- 0 ,x erl-generic--/2))
(##define-macro (erl-tst-bnot/1 x)
`(fixnum-specialized-- -1 ,x erl-generic-tst--/2))
(##define-macro (erl-safe-bnot/1 x)
`(fixnum-specialized-- -1 ,x -))
(##define-macro (erl-bnot/1 x)
`(fixnum-specialized-- -1 ,x erl-generic--/2))
; not/1 (Guard BIF)
(##define-macro (erl-tst-not/1 x) `(erl-generic-tst-not/1 ,x))
(##define-macro (erl-safe-not/1 x) `(erl-generic-safe-not/1 ,x))
(##define-macro (erl-not/1 x) `(erl-generic-not/1 ,x))
(##define-macro (erl-tst-+/2 x y)
`(number-specialized-+ ,x ,y erl-generic-tst-+/2))
(##define-macro (erl-safe-+/2 x y)
`(number-specialized-+ ,x ,y +))
(##define-macro (erl-+/2 x y)
`(number-specialized-+ ,x ,y erl-generic-+/2))
(##define-macro (erl-tst--/2 x y)
`(number-specialized-- ,x ,y erl-generic-tst--/2))
(##define-macro (erl-safe--/2 x y)
`(number-specialized-- ,x ,y -))
(##define-macro (erl--/2 x y)
`(number-specialized-- ,x ,y erl-generic--/2))
(##define-macro (erl-tst-bor/2 x y)
`(fixnum-specialized-bor ,x ,y erl-generic-tst-bor/2))
(##define-macro (erl-safe-bor/2 x y)
`(fixnum-specialized-bor ,x ,y int.bor))
(##define-macro (erl-bor/2 x y)
`(fixnum-specialized-bor ,x ,y erl-generic-bor/2))
(##define-macro (erl-tst-bxor/2 x y)
`(fixnum-specialized-bxor ,x ,y erl-generic-tst-bxor/2))
(##define-macro (erl-safe-bxor/2 x y)
`(fixnum-specialized-bxor ,x ,y int.bxor))
(##define-macro (erl-bxor/2 x y)
`(fixnum-specialized-bxor ,x ,y erl-generic-bxor/2))
(##define-macro (erl-tst-bsl/2 x y)
`(fixnum-specialized-bsl ,x ,y erl-generic-tst-bsl/2))
(##define-macro (erl-safe-bsl/2 x y)
`(fixnum-specialized-bsl ,x ,y int.bsl))
(##define-macro (erl-bsl/2 x y)
`(fixnum-specialized-bsl ,x ,y erl-generic-bsl/2))
(##define-macro (erl-tst-bsr/2 x y)
`(fixnum-specialized-bsr ,x ,y erl-generic-tst-bsr/2))
(##define-macro (erl-safe-bsr/2 x y)
  `(fixnum-specialized-bsr ,x ,y int.bsr))
(##define-macro (erl-bsr/2 x y)
`(fixnum-specialized-bsr ,x ,y erl-generic-bsr/2))
(##define-macro (erl-tst-*/2 x y)
`(number-specialized-* ,x ,y erl-generic-tst-*/2))
(##define-macro (erl-safe-*/2 x y)
`(number-specialized-* ,x ,y *))
(##define-macro (erl-*/2 x y)
`(number-specialized-* ,x ,y erl-generic-*/2))
(##define-macro (erl-tst-//2 x y)
`(number-specialized-/ ,x ,y erl-generic-tst-//2))
(##define-macro (erl-safe-//2 x y)
`(number-specialized-/ ,x ,y erl-generic-safe-//2))
(##define-macro (erl-//2 x y)
`(number-specialized-/ ,x ,y erl-generic-//2))
(##define-macro (erl-tst-///2 x y)
`(fixnum-specialized-// ,x ,y erl-generic-tst-///2))
(##define-macro (erl-safe-///2 x y)
`(fixnum-specialized-// ,x ,y erl-generic-safe-///2))
(##define-macro (erl-///2 x y)
`(fixnum-specialized-// ,x ,y erl-generic-///2))
(##define-macro (erl-tst-div/2 x y)
`(fixnum-specialized-div ,x ,y erl-generic-tst-div/2))
(##define-macro (erl-safe-div/2 x y)
`(fixnum-specialized-div ,x ,y erl-generic-safe-div/2))
(##define-macro (erl-div/2 x y)
`(fixnum-specialized-div ,x ,y erl-generic-div/2))
(##define-macro (erl-tst-mod/2 x y)
`(fixnum-specialized-mod ,x ,y erl-generic-tst-mod/2))
(##define-macro (erl-safe-mod/2 x y)
`(fixnum-specialized-mod ,x ,y erl-generic-safe-mod/2))
(##define-macro (erl-mod/2 x y)
`(fixnum-specialized-mod ,x ,y erl-generic-mod/2))
; rem/2 (Guard BIF)
(##define-macro (erl-tst-rem/2 x y)
`(fixnum-specialized-rem ,x ,y erl-generic-tst-rem/2))
(##define-macro (erl-safe-rem/2 x y)
`(fixnum-specialized-rem ,x ,y erl-generic-safe-rem/2))
(##define-macro (erl-rem/2 x y)
`(fixnum-specialized-rem ,x ,y erl-generic-rem/2))
(##define-macro (erl-tst-band/2 x y)
`(fixnum-specialized-band ,x ,y erl-generic-tst-band/2))
(##define-macro (erl-safe-band/2 x y)
`(fixnum-specialized-band ,x ,y int.band))
(##define-macro (erl-band/2 x y)
`(fixnum-specialized-band ,x ,y erl-generic-band/2))
(##define-macro (erl-safe-++/2 x y) `(erl-generic-safe-++/2 ,x ,y))
(##define-macro (erl-++/2 x y) `(erl-generic-++/2 ,x ,y))
(##define-macro (erl---/2 x y) `(erl-generic---/2 ,x ,y))
(##define-macro (erl-or/2 x y) `(erl-generic-or/2 ,x ,y))
(##define-macro (erl-xor/2 x y) `(erl-generic-xor/2 ,x ,y))
(##define-macro (erl-and/2 x y) `(erl-generic-and/2 ,x ,y))
; Comparison 'BIFs'
(##define-macro (erl-=:=/2 x y)
`(number-specialized-=:= ,x ,y erl-generic-=:=))
(##define-macro (erl-=/=/2 x y) `(not (erl-=:=/2 ,x ,y)))
(##define-macro (erl-==/2 x y)
`(number-specialized-== ,x ,y erl-generic-==))
(##define-macro (erl-/=/2 x y) `(not (erl-==/2 ,x ,y)))
(##define-macro (erl-</2 x y)
`(number-specialized-< ,x ,y erl-generic-<))
(##define-macro (erl->/2 x y) `(erl-</2 ,y ,x))
(##define-macro (erl->=/2 x y) `(not (erl-</2 ,x ,y)))
(##define-macro (erl-<=/2 x y) `(not (erl-</2 ,y ,x)))
(##define-macro (erl-send/2 x y) `(erl-generic-send/2 ,x ,y))
(##define-macro (erl-check_process_code/2 x y)
`(erlang:check_process_code/2 ,x ,y))
(##define-macro (erl-delete_module/1 x)
`(erlang:delete_module/1 ,x))
; etos_rootdir/0 (ETOS SPECIFIC BIF)
(##define-macro (erl-etos_rootdir/0) `(erlang:etos_rootdir/0))
(##define-macro (erl-get_cookie/0) 'node.magic_cookie)
(##define-macro (erl-halt/0) `(erlang:halt/0))
(##define-macro (erl-hash/2 x y) `(erlang:hash/2 ,x ,y))
; load_module/2 (ETOS SPECIFIC VERSION)
(##define-macro (erl-load_module/2 x y) `(erlang:load_module/2 ,x ,y))
; m_acosh/1
; m_asinh/1
; m_atanh/1
; m_exp/1
(##define-macro (erl-module_loaded/1 x) `(erlang:module_loaded/1 ,x))
(##define-macro (erl-preloaded/0) `(erlang:preloaded/0))
(##define-macro (erl-purge_module/1 x) `(erlang:purge_module/1 ,x))
(##define-macro (erl-set_cookie/2 x y) `(erlang:set_cookie/2 ,x ,y))
(##define-macro (erl-map f l) `(map ,f ,l))
; Erlang function definition
(##define-macro (erl-function-ref x)
(if (and (pair? x)
(eq? (car x) 'quote)
(pair? (cdr x))
(null? (cddr x)))
`(and (not (##unbound? ,(cadr x)))
,(cadr x))
`(erl-generic-function-ref ,x)))
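; A process is a vector: slots 0 and 1 link it into a circular doubly-linked
; list (succ/prev) and the remaining slots hold the fields named below.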
(##define-macro (make-process
state
continuation
mailbox
mailbox-probe
wake-up
trap-exit
pid
linked-pids
group-leader
error-handler
node
priority
mailbox-probe-cont
abrupt-stack
dict
initial-call
exit-hook)
`(let ((process
(vector #f
#f
,state
,continuation
,mailbox
,mailbox-probe
,wake-up
,trap-exit
,pid
,linked-pids
,group-leader
,error-handler
,node
,priority
,mailbox-probe-cont
,abrupt-stack
,dict
,initial-call
,exit-hook
)))
(process-succ-set! process process)
(process-prev-set! process process)
process))
(##define-macro (process-succ process)
`(vector-ref ,process 0))
(##define-macro (process-succ-set! process succ)
`(vector-set! ,process 0 ,succ))
(##define-macro (process-prev process)
`(vector-ref ,process 1))
(##define-macro (process-prev-set! process prev)
`(vector-set! ,process 1 ,prev))
(##define-macro (process-state process)
`(vector-ref ,process 2))
(##define-macro (process-state-set! process state)
`(vector-set! ,process 2 ,state))
(##define-macro (process-continuation process)
`(vector-ref ,process 3))
(##define-macro (process-continuation-set! process cont)
`(vector-set! ,process 3 ,cont))
(##define-macro (process-mailbox process)
`(vector-ref ,process 4))
(##define-macro (process-mailbox-probe process)
`(vector-ref ,process 5))
(##define-macro (process-mailbox-probe-set! process probe)
`(vector-set! ,process 5 ,probe))
(##define-macro (process-wake-up process)
`(vector-ref ,process 6))
(##define-macro (process-wake-up-set! process cont)
`(vector-set! ,process 6 ,cont))
(##define-macro (process-trap-exit process)
`(vector-ref ,process 7))
(##define-macro (process-trap-exit-set! process cont)
`(vector-set! ,process 7 ,cont))
(##define-macro (process-pid process)
`(vector-ref ,process 8))
(##define-macro (process-pid-set! process pid)
`(vector-set! ,process 8 ,pid))
(##define-macro (process-linked-pids process)
`(vector-ref ,process 9))
(##define-macro (process-linked-pids-set! process pids)
`(vector-set! ,process 9 ,pids))
(##define-macro (process-group-leader process)
`(vector-ref ,process 10))
(##define-macro (process-group-leader-set! process group-leader)
`(vector-set! ,process 10 ,group-leader))
(##define-macro (process-error-handler process)
`(vector-ref ,process 11))
(##define-macro (process-error-handler-set! process error-handler)
`(vector-set! ,process 11 ,error-handler))
(##define-macro (process-node process)
`(vector-ref ,process 12))
(##define-macro (process-priority process)
`(vector-ref ,process 13))
(##define-macro (process-priority-set! process priority)
`(vector-set! ,process 13 ,priority))
(##define-macro (process-mailbox-probe-cont process)
`(vector-ref ,process 14))
(##define-macro (process-mailbox-probe-cont-set! process priority)
`(vector-set! ,process 14 ,priority))
(##define-macro (process-abrupt-stack process)
`(vector-ref ,process 15))
(##define-macro (process-abrupt-stack-set! process abrupt-stack)
`(vector-set! ,process 15 ,abrupt-stack))
(##define-macro (process-dict process)
`(vector-ref ,process 16))
(##define-macro (process-dict-set! process dict)
`(vector-set! ,process 16 ,dict))
(##define-macro (process-initial-call process)
`(vector-ref ,process 17))
(##define-macro (process-initial-call-set! process initial-call)
`(vector-set! ,process 17 ,initial-call))
(##define-macro (process-exit-hook process)
`(vector-ref ,process 18))
(##define-macro (process-exit-hook-set! process exit-hook)
`(vector-set! ,process 18 ,exit-hook))
(##define-macro (erl-error_handler)
`(process-error-handler node.current-process))
(##define-macro (erl-push-abrupt! cont)
`(process-abrupt-stack-set!
node.current-process
(cons ,cont
(process-abrupt-stack node.current-process))))
(##define-macro (erl-pop-abrupt!)
`(process-abrupt-stack-set!
node.current-process
(cdr (process-abrupt-stack node.current-process))))
(##define-macro (erl-abrupt-top)
`(car (process-abrupt-stack node.current-process)))
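; receive is implemented with a probe (a cursor into the mailbox queue), a
; saved continuation and an optional wake-up time: erl-receive_check resumes
; the continuation when a message is available or the timeout has expired,
; and suspends the process otherwise.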
(##define-macro (erl-receive_accept)
`(with-no-interrupts
(lambda ()
(timer-interrupt-disable!)
(queue-extract! (process-mailbox node.current-process)
(process-mailbox-probe node.current-process))
(timer-interrupt-enable!))))
(##define-macro (erl-receive_first timeout thunk)
`(,thunk
(with-no-interrupts
(lambda ()
(process-wake-up-set!
node.current-process
,(if (and (pair? timeout)
(eq? (car timeout) 'quote)
(pair? (cdr timeout))
(eq? (cadr timeout) (string->symbol "infinity")))
#f
`(if-non-neg-fix? ,timeout
(int.+ (current-time-in-msecs) ,timeout)
(let ((timeout ,timeout))
(if (eq? timeout infinity-atom)
#f
(if (and (erl-int? timeout)
(not (int.< timeout 0)))
(int.+ (current-time-in-msecs) timeout)
(erl-exit-badarg)))))))
(continuation-save!
(process-mailbox-probe-cont node.current-process)
(lambda (cont)
(let ((probe (queue-probe (process-mailbox node.current-process))))
(process-mailbox-probe-set! node.current-process probe)
(erl-receive_check probe cont))))
(let ((next (queue-next (process-mailbox-probe node.current-process))))
(if (pair? next)
(car next)
'$timeout))))))
(##define-macro (erl-receive_next)
`(let ((probe (queue-next (process-mailbox-probe node.current-process)))
(cont (process-mailbox-probe-cont node.current-process)))
(process-mailbox-probe-set! node.current-process probe)
(erl-receive_check probe cont)))
(##define-macro (erl-receive_check probe cont)
`(with-no-interrupts
(lambda ()
(if (pair? (queue-next ,probe))
(continuation-restore ,cont 'dummy)
(let ((wake-up (process-wake-up node.current-process)))
(if (or (not wake-up)
(int.< (current-time-in-msecs) wake-up))
(process-suspend-waiting-with-continuation! ,cont)
(continuation-restore ,cont 'dummy)))))))
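; Pids, refs and ports are tagged vectors, in the same style as tuples.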
(##define-macro (make-pid process id node creation)
`(vector 'pid ,process ,id ,node ,creation))
(##define-macro (erl-pid-process pid) `(vector-ref ,pid 1))
(##define-macro (erl-pid-id pid) `(vector-ref ,pid 2))
(##define-macro (erl-pid-node pid) `(vector-ref ,pid 3))
(##define-macro (erl-pid-creation pid) `(vector-ref ,pid 4))
(##define-macro (erl-pid-local? pid) `(eq? (erl-pid-node ,pid) node.name))
(##define-macro (make-ref id node creation)
`(vector 'ref ,id ,node ,creation))
(##define-macro (erl-ref-id ref) `(vector-ref ,ref 1))
(##define-macro (erl-ref-node ref) `(vector-ref ,ref 2))
(##define-macro (erl-ref-creation ref) `(vector-ref ,ref 3))
; Ports
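; A port is a tagged vector holding its index, underlying resource, owning
; pid, packeting and I/O flags, linked pids and identity information
; (creation, node, id).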
(##define-macro (make-port pidx s_res owner packeting binary? linked-pids
in? eof? creation node id opened?)
`(vector 'port ,pidx ,s_res ,owner ,packeting ,binary? ,linked-pids
,in? ,eof? ,creation ,node ,id ,opened?))
(##define-macro (erl-port-pidx port) `(vector-ref ,port 1))
(##define-macro (erl-port-s_res port) `(vector-ref ,port 2))
(##define-macro (erl-port-owner port) `(vector-ref ,port 3))
(##define-macro (erl-port-owner-set! port owner)
`(vector-set! ,port 3 ,owner))
(##define-macro (erl-port-packeting port) `(vector-ref ,port 4))
(##define-macro (erl-port-io_type port) `(vector-ref ,port 5))
(##define-macro (erl-port-linked-pids port) `(vector-ref ,port 6))
(##define-macro (erl-port-linked-pids-set! port linked-pids)
`(vector-set! ,port 6 ,linked-pids))
(##define-macro (erl-port-in? port) `(vector-ref ,port 7))
(##define-macro (erl-port-eof? port) `(vector-ref ,port 8))
(##define-macro (erl-port-creation port) `(vector-ref ,port 9))
(##define-macro (erl-port-node port) `(vector-ref ,port 10))
(##define-macro (erl-port-id port) `(vector-ref ,port 11))
(##define-macro (erl-port-opened? port) `(vector-ref ,port 12))
(##define-macro (erl-port-opened?-set! port opened?)
`(vector-set! ,port 12 ,opened?))
; (##define-macro (make-port pidx s_res owner packeting binary? in? eof?)
;   (set! node.port-count (int.+ node.port-count 1))
;   (vector-set! port-table idx p)
(##define-macro (erl-ioterm_to_binary x)
`(let ((a ,x))
(if (erl-binary? a)
a
(let ((r (erl-generic-ioterm_to_list a)))
(and r (erl-safe-list_to_binary/1 r))))))
; Timer interrupts
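; Times are read from Gambit's clock primitives and converted to integral
; microseconds/milliseconds; the timer job is installed in slot 1 of the
; interrupt vector.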
(##define-macro (setup-time!) #f)
(##define-macro (advance-time!) #f)
(##define-macro (current-time-in-usecs)
`(with-no-interrupts
(lambda ()
(inexact->exact (flround (fl* 1e6 (time->seconds (current-time))))))))
(##define-macro (current-time-in-msecs)
`(with-no-interrupts
(lambda ()
(inexact->exact (flround (fl* 1e3 (time->seconds (current-time))))))))
(##define-macro (current-cputime-in-msecs)
`(with-no-interrupts
(lambda ()
(inexact->exact (flround (fl* 1e3 (f64vector-ref (##process-statistics) 0)))))))
(##define-macro (timer-interrupt-disable!)
`(set! timer-interrupt-allowed? #f))
(##define-macro (timer-interrupt-enable!)
`(set! timer-interrupt-allowed? #t))
(##define-macro (with-no-interrupts thunk)
`(let ()
(##declare (not interrupts-enabled))
(,thunk)))
(##define-macro (allow-interrupts)
`(##declare (interrupts-enabled)))
(##define-macro (add-timer-interrupt-job job)
`(##interrupt-vector-set! 1 ,job))
(##define-macro (cleanup-timer-interrupt!)
`(##interrupt-vector-set! 1 (lambda () #f)))
; FIFO Queues
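; A queue is a single cons cell whose cdr heads the list of elements and
; whose car points at the last cell (or at the queue itself when empty), so
; adding to the tail is O(1).  A probe is the cell that precedes the element
; of interest.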
(##define-macro (make-queue)
`(let ((q (cons '() '())))
(set-car! q q)
q))
(##define-macro (queue-empty? q)
`(let ((q ,q))
(eq? (car q) q)))
(##define-macro (queue-probe q) q)
(##define-macro (queue-next p) `(cdr ,p))
(##define-macro (queue-add-to-tail! q x)
`(let ((q ,q))
(with-no-interrupts
(lambda ()
(let ((cell (cons ,x '())))
(set-cdr! (car q) cell)
(set-car! q cell))))))
(##define-macro (queue-extract! q probe)
  `(let ((q ,q) (probe ,probe))
     (with-no-interrupts
      (lambda ()
        (let ((curr (cdr probe)))
          (if (eq? curr (car q))
              (set-car! q probe))
          (set-cdr! probe (cdr curr)))))))
(##define-macro (erl-group-leader-signal! dest_pid new_gl)
`(process-group-leader-set! (erl-pid-process ,dest_pid) ,new_gl))
(##define-macro (erl-link-signal! dest_pid)
`(process-link! (erl-pid-process ,dest_pid) (erl-self/0)))
(##define-macro (erl-unlink-signal! dest_pid)
`(process-unlink! (erl-pid-process ,dest_pid) (erl-self/0)))
(##define-macro (erl-message-signal! dest_pid msg)
`(process-deliver! (erl-pid-process ,dest_pid) ,msg))
(##define-macro (erl-info-signal dest_pid prop)
`(process-get-property (erl-pid-process ,dest_pid) ,prop))
(##define-macro (erl-exit-signal! dest_pid reason)
`(process-die! (erl-pid-process ,dest_pid) ,reason))
; Added for ETOS 2.2
(##define-macro (erl-function-unbound? m f a)
(let ((var (string->symbol (string-append m ":" f "/" (number->string a)))))
`(##unbound? (##global-var-ref (##make-global-var ',var)))))
(##define-macro (erl-undefined-function m f)
(let ((mod (erl-atom<-string m))
(fun (erl-atom<-string f)))
`(lambda args (erl-undefined-function-handler args ',mod ',fun))))
(##define-macro (erl-function-set! global-var val)
`(##global-var-set! (##make-global-var ',global-var) ,val))
(define-macro (erl-false) `'false)
(define-macro (erl-true) `'true)
(define-macro (erl-impossible-obj1) `#f)
(define-macro (erl-impossible-obj2) `#t)
(define-macro (erl-atom<-string str) `(string->symbol ,str))
(define-macro (erl-atom->string atom) `(symbol->string ,atom))
; `(let ((atom (string->symbol , )))
; `(let ((x ,x)) (and (erl-fix? x) (not (fix.< x 0)) (not (fix.< 65535 x))))
(define-macro (erl-char<-char c) c)
(define-macro (erl-char->char c) c)
(define-macro (erl-float<-real n) `(exact->inexact ,n))
(define-macro (erl-float->real n) n)
(define-macro (erl-int<-exact-integer n) n)
(define-macro (erl-int->exact-integer n) n)
(define-macro (erl-true? x) `(not (eq? ,x (erl-false))))
(define-macro (erl-equal? x y) `(equal? ,x ,y))
; (define-macro (erl-list . elems) `(list ,@elems))
; (define-macro (erl-append . lists) `(append ,@lists))
(define-macro (erl-list<-list lst) lst)
; (define-macro (erl-tuple . elems) `(vector 'tuple ,@elems))
; (define-macro (erl-tuple-size tup) `(fix.u- (vector-length ,tup) 1))
; (define-macro (erl-tuple-ref tup i) `(vector-ref ,tup ,i))
(define-macro (erl-tuple<-list lst) `(list->vector (cons 'tuple ,lst)))
(* File: newick.mli, from biocaml/phylogenetics
   https://raw.githubusercontent.com/biocaml/phylogenetics/6250d5edcf0930698473c64911e74a300c94e11c/lib/newick.mli *)
include module type of Newick_ast
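(* Parsing and printing of phylogenetic trees in Newick format, and
   conversion to and from the generic [Tree] representation. *)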
val from_file : string -> (t, [> error]) result
val from_file_exn : string -> t
val from_string : string -> (t, [> error]) result
val from_string_exn : string -> t
val of_tree :
?node_id:('a -> string option) ->
?node_tags:('a -> tag list) ->
?leaf_id:('b -> string option) ->
?leaf_tags:('b -> tag list) ->
?branch_length:('c -> float option) ->
?parent_branch:float ->
('a, 'b, 'c) Tree.t ->
t
val to_string : t -> string
val to_file : t -> string -> unit
module Tree_repr : sig
type ast = t
type node_info = {
name : string option ;
tags : tag list ;
}
type tree = (node_info, node_info, float option) Tree.t
type branch = (node_info, node_info, float option) Tree.branch
type t =
| Tree of tree
| Branch of branch
val of_ast : ast -> t
val to_ast : t -> ast
val map_inner_tree : t -> f:(tree -> tree) -> t
val with_inner_tree : t -> f:(tree -> 'a) -> 'a
end
-- File: castConcat.hs, from nikivazou/verified_string_matching
-- https://raw.githubusercontent.com/nikivazou/verified_string_matching/abdd611a0758467f776c59c3d6c9e4705d36a3a0/src/AutoProofs/castConcat.hs
#define IncludedcastConcat
{-@ automatic-instances castConcat @-}
castConcat :: RString -> RString -> RString -> RString -> List Integer -> Proof
{-@ castConcat :: tg:RString -> xi:RString -> yi:RString -> zi:RString
  -> xis:List (GoodIndex xi tg)
  -> { map (castGoodIndexRight tg xi (yi <+> zi)) xis == map (castGoodIndexRight tg (xi <+> yi) zi) (map (castGoodIndexRight tg xi yi) xis)} @-}
castConcat tg xi yi zi xis
= mapCastId tg xi (yi <+> zi) xis
&&& mapCastId tg xi yi xis
&&& mapCastId tg (xi <+> yi) zi (map (castGoodIndexRight tg xi yi) xis)
| null | https://raw.githubusercontent.com/nikivazou/verified_string_matching/abdd611a0758467f776c59c3d6c9e4705d36a3a0/src/AutoProofs/castConcat.hs | haskell | #define IncludedcastConcat
@ automatic - instances castConcat @
castConcat :: RString -> RString -> RString -> RString -> List Integer -> Proof
@ : : tg : RString - > xi : RString - > yi : RString - > zi : RString
- > xis : List ( GoodIndex xi tg )
- > { map ( castGoodIndexRight tg xi ( yi < + > zi ) ) xis = = map ( castGoodIndexRight tg ( xi < + > yi ) ) ( map ( castGoodIndexRight tg ) xis ) } @
-> xis:List (GoodIndex xi tg)
-> { map (castGoodIndexRight tg xi (yi <+> zi)) xis == map (castGoodIndexRight tg (xi <+> yi) zi) (map (castGoodIndexRight tg xi yi) xis)} @-}
castConcat tg xi yi zi xis
= mapCastId tg xi (yi <+> zi) xis
&&& mapCastId tg xi yi xis
&&& mapCastId tg (xi <+> yi) zi (map (castGoodIndexRight tg xi yi) xis)
|
|
;;; File: default-if-empty.lisp, from JunSuzukiJapan/cl-reex
;;; https://raw.githubusercontent.com/JunSuzukiJapan/cl-reex/94928c7949c235b41902138d9e4a5654b92d67eb/src/operator/default-if-empty.lisp
(in-package :cl-user)
(defpackage cl-reex.operator.default-if-empty
(:use :cl)
(:import-from :cl-reex.observer
:observer
:on-next
:on-error
:on-completed)
(:import-from :cl-reex.observable
:observable
:dispose
:is-active
:set-error
:set-completed
:set-disposed
:subscribe)
(:import-from :cl-reex.macro.operator-table
:set-one-arg-operator)
(:import-from :cl-reex.operator
:operator )
(:export :operator-default-if-empty
:default-if-empty
:make-operator-default-if-empty))
(in-package :cl-reex.operator.default-if-empty)
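;; default-if-empty forwards every item from the source unchanged; if the
;; source completes without having emitted anything, the default value is
;; emitted just before completion.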
(defclass operator-default-if-empty (operator)
((default :initarg :default
:accessor default )
(has-some-item :initarg :has-some-item
:initform nil
:accessor has-some-item ))
(:documentation "Default-If-Empty operator"))
(defun make-operator-default-if-empty (observable default)
(make-instance 'operator-default-if-empty
:observable observable
:default default ))
(defmethod on-next ((op operator-default-if-empty) x)
(when (is-active op)
(setf (has-some-item op) t)
(on-next (observer op) x) ))
(defmethod on-error ((op operator-default-if-empty) x)
(when (is-active op)
(on-error (observer op) x)
(set-error op) ))
(defmethod on-completed ((op operator-default-if-empty))
(when (is-active op)
(when (not (has-some-item op))
(on-next (observer op) (default op)) )
(on-completed (observer op)) ))
(set-one-arg-operator 'default-if-empty 'make-operator-default-if-empty)
| null | https://raw.githubusercontent.com/JunSuzukiJapan/cl-reex/94928c7949c235b41902138d9e4a5654b92d67eb/src/operator/default-if-empty.lisp | lisp | (in-package :cl-user)
(defpackage cl-reex.operator.default-if-empty
(:use :cl)
(:import-from :cl-reex.observer
:observer
:on-next
:on-error
:on-completed)
(:import-from :cl-reex.observable
:observable
:dispose
:is-active
:set-error
:set-completed
:set-disposed
:subscribe)
(:import-from :cl-reex.macro.operator-table
:set-one-arg-operator)
(:import-from :cl-reex.operator
:operator )
(:export :operator-default-if-empty
:default-if-empty
:make-operator-default-if-empty))
(in-package :cl-reex.operator.default-if-empty)
(defclass operator-default-if-empty (operator)
((default :initarg :default
:accessor default )
(has-some-item :initarg :has-some-item
:initform nil
:accessor has-some-item ))
(:documentation "Default-If-Empty operator"))
(defun make-operator-default-if-empty (observable default)
(make-instance 'operator-default-if-empty
:observable observable
:default default ))
(defmethod on-next ((op operator-default-if-empty) x)
(when (is-active op)
(setf (has-some-item op) t)
(on-next (observer op) x) ))
(defmethod on-error ((op operator-default-if-empty) x)
(when (is-active op)
(on-error (observer op) x)
(set-error op) ))
(defmethod on-completed ((op operator-default-if-empty))
(when (is-active op)
(when (not (has-some-item op))
(on-next (observer op) (default op)) )
(on-completed (observer op)) ))
(set-one-arg-operator 'default-if-empty 'make-operator-default-if-empty)
|
|
;; File: history_spec.cljc, from fulcrologic/statecharts
;; https://raw.githubusercontent.com/fulcrologic/statecharts/9a081be7da28ba9f9e2f7cdca75d1be4c030e3e0/src/test/com/fulcrologic/statecharts/algorithms/v20150901/history_spec.cljc
(ns com.fulcrologic.statecharts.algorithms.v20150901.history-spec
(:require [com.fulcrologic.statecharts.elements :refer
[state initial parallel final transition raise on-entry on-exit
data-model assign script history log]]
[com.fulcrologic.statecharts :as sc]
[com.fulcrologic.statecharts.chart :as chart]
[com.fulcrologic.statecharts.testing :as testing]
[com.fulcrologic.statecharts.data-model.operations :as ops]
[fulcro-spec.core :refer [specification assertions =>]]))
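;; Each spec drives a statechart through a sequence of events and asserts the
;; resulting active states, exercising shallow and deep history states,
;; including history nested inside parallel regions.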
(specification
"history0"
(let [chart (chart/statechart
{:initial :a}
(state {:id :a} (transition {:target :h, :event :t1}))
(state {:id :b, :initial :b1}
(history {:id :h} (transition {:target :b2}))
(state {:id :b1})
(state {:id :b2} (transition {:event :t2, :target :b3}))
(state {:id :b3} (transition {:event :t3, :target :a}))))
env (testing/new-testing-env {:statechart chart} {})]
(testing/start! env)
(assertions (testing/in? env :a) => true)
(testing/run-events! env :t1)
(assertions (testing/in? env :b2) => true)
(testing/run-events! env :t2)
(assertions (testing/in? env :b3) => true)
(testing/run-events! env :t3)
(assertions (testing/in? env :a) => true)
(testing/run-events! env :t1)
(assertions (testing/in? env :b3) => true)))
(specification
"history1"
(let [chart (chart/statechart
{:initial :a}
(state {:id :a} (transition {:target :h, :event :t1}))
(state
{:id :b, :initial :b1}
(history {:id :h, :type :deep} (transition {:target :b1.2}))
(state
{:id :b1, :initial :b1.1}
(state {:id :b1.1})
(state {:id :b1.2} (transition {:event :t2, :target :b1.3}))
(state {:id :b1.3} (transition {:event :t3, :target :a})))))
env (testing/new-testing-env {:statechart chart} {})]
(testing/start! env)
(assertions (testing/in? env :a) => true)
(testing/run-events! env :t1)
(assertions (testing/in? env :b1.2) => true)
(testing/run-events! env :t2)
(assertions (testing/in? env :b1.3) => true)
(testing/run-events! env :t3)
(assertions (testing/in? env :a) => true)
(testing/run-events! env :t1)
(assertions (testing/in? env :b1.3) => true)))
(specification
"history2"
(let [chart
(chart/statechart
{:initial :a}
(state {:id :a} (transition {:target :h, :event :t1}))
(state
{:id :b, :initial :b1}
(history {:id :h, :type :shallow} (transition {:target :b1.2}))
(state
{:id :b1, :initial :b1.1}
(state {:id :b1.1})
(state {:id :b1.2} (transition {:event :t2, :target :b1.3}))
(state {:id :b1.3} (transition {:event :t3, :target :a})))))
env (testing/new-testing-env {:statechart chart} {})]
(testing/start! env)
(assertions (testing/in? env :a) => true)
(testing/run-events! env :t1)
(assertions (testing/in? env :b1.2) => true)
(testing/run-events! env :t2)
(assertions (testing/in? env :b1.3) => true)
(testing/run-events! env :t3)
(assertions (testing/in? env :a) => true)
(testing/run-events! env :t1)
(assertions (testing/in? env :b1.1) => true)))
(specification
"history3"
(let [chart
(chart/statechart
{:initial :a}
(state {:id :a}
(transition {:target :p, :event :t1})
(transition {:target :h, :event :t4}))
(parallel
{:id :p}
(history {:id :h, :type :deep} (transition {:target :b}))
(state {:id :b, :initial :b1}
(state {:id :b1} (transition {:target :b2, :event :t2}))
(state {:id :b2}))
(state {:id :c, :initial :c1}
(state {:id :c1} (transition {:target :c2, :event :t2}))
(state {:id :c2}))
(transition {:target :a, :event :t3})))
env (testing/new-testing-env {:statechart chart} {})]
(testing/start! env)
(assertions (testing/in? env :a) => true)
(testing/run-events! env :t1)
(assertions (testing/in? env :b1) => true (testing/in? env :c1) => true)
(testing/run-events! env :t2)
(assertions (testing/in? env :b2) => true (testing/in? env :c2) => true)
(testing/run-events! env :t3)
(assertions (testing/in? env :a) => true)
(testing/run-events! env :t4)
(assertions (testing/in? env :b2) => true (testing/in? env :c2) => true)))
(specification
"history4"
(let [chart
(chart/statechart
{:initial :a}
(state {:id :a}
(transition {:target :p, :event :t1})
(transition {:target :p, :event :t6})
(transition {:target :hp, :event :t9}))
(parallel
{:id :p}
(history {:id :hp, :type :deep} (transition {:target :b}))
(state
{:id :b, :initial :hb}
(history {:id :hb, :type :deep} (transition {:target :b1}))
(state
{:id :b1, :initial :b1.1}
(state {:id :b1.1} (transition {:target :b1.2, :event :t2}))
(state {:id :b1.2} (transition {:target :b2, :event :t3})))
(state {:id :b2, :initial :b2.1}
(state {:id :b2.1}
(transition {:target :b2.2, :event :t4}))
(state {:id :b2.2}
(transition {:target :a, :event :t5})
(transition {:target :a, :event :t8}))))
(state
{:id :c, :initial :hc}
(history {:id :hc, :type :shallow} (transition {:target :c1}))
(state
{:id :c1, :initial :c1.1}
(state {:id :c1.1} (transition {:target :c1.2, :event :t2}))
(state {:id :c1.2} (transition {:target :c2, :event :t3})))
(state {:id :c2, :initial :c2.1}
(state {:id :c2.1}
(transition {:target :c2.2, :event :t4})
(transition {:target :c2.2, :event :t7}))
(state {:id :c2.2})))))
env (testing/new-testing-env {:statechart chart} {})]
(testing/start! env)
(assertions (testing/in? env :a) => true)
(testing/run-events! env :t1)
(assertions (testing/in? env :b1.1) => true (testing/in? env :c1.1) => true)
(testing/run-events! env :t2)
(assertions (testing/in? env :b1.2) => true (testing/in? env :c1.2) => true)
(testing/run-events! env :t3)
(assertions (testing/in? env :b2.1) => true (testing/in? env :c2.1) => true)
(testing/run-events! env :t4)
(assertions (testing/in? env :b2.2) => true (testing/in? env :c2.2) => true)
(testing/run-events! env :t5)
(assertions (testing/in? env :a) => true)
(testing/run-events! env :t6)
(assertions (testing/in? env :b2.2) => true (testing/in? env :c2.1) => true)
(testing/run-events! env :t7)
(assertions (testing/in? env :b2.2) => true (testing/in? env :c2.2) => true)
(testing/run-events! env :t8)
(assertions (testing/in? env :a) => true)
(testing/run-events! env :t9)
(assertions (testing/in? env :b2.2)
=>
true
(testing/in? env :c2.2)
=>
true)))
(specification
"history4b"
(let [chart
(chart/statechart
{:initial :a}
(state {:id :a}
(transition {:target :p, :event :t1})
(transition {:target [:hb :hc], :event :t6})
(transition {:target :hp, :event :t9}))
(parallel
{:id :p}
(history {:id :hp, :type :deep} (transition {:target :b}))
(state
{:id :b, :initial :hb}
(history {:id :hb, :type :deep} (transition {:target :b1}))
(state
{:id :b1, :initial :b1.1}
(state {:id :b1.1} (transition {:target :b1.2, :event :t2}))
(state {:id :b1.2} (transition {:target :b2, :event :t3})))
(state {:id :b2, :initial :b2.1}
(state {:id :b2.1}
(transition {:target :b2.2, :event :t4}))
(state {:id :b2.2}
(transition {:target :a, :event :t5})
(transition {:target :a, :event :t8}))))
(state
{:id :c, :initial :hc}
(history {:id :hc, :type :shallow} (transition {:target :c1}))
(state
{:id :c1, :initial :c1.1}
(state {:id :c1.1} (transition {:target :c1.2, :event :t2}))
(state {:id :c1.2} (transition {:target :c2, :event :t3})))
(state {:id :c2, :initial :c2.1}
(state {:id :c2.1}
(transition {:target :c2.2, :event :t4})
(transition {:target :c2.2, :event :t7}))
(state {:id :c2.2})))))
env (testing/new-testing-env {:statechart chart} {})]
(testing/start! env)
(assertions (testing/in? env :a) => true)
(testing/run-events! env :t1)
(assertions (testing/in? env :b1.1) => true (testing/in? env :c1.1) => true)
(testing/run-events! env :t2)
(assertions (testing/in? env :b1.2) => true (testing/in? env :c1.2) => true)
(testing/run-events! env :t3)
(assertions (testing/in? env :b2.1) => true (testing/in? env :c2.1) => true)
(testing/run-events! env :t4)
(assertions (testing/in? env :b2.2) => true (testing/in? env :c2.2) => true)
(testing/run-events! env :t5)
(assertions (testing/in? env :a) => true)
(testing/run-events! env :t6)
(assertions (testing/in? env :b2.2) => true (testing/in? env :c2.1) => true)
(testing/run-events! env :t7)
(assertions (testing/in? env :b2.2) => true (testing/in? env :c2.2) => true)
(testing/run-events! env :t8)
(assertions (testing/in? env :a) => true)
(testing/run-events! env :t9)
(assertions (testing/in? env :b2.2)
=>
true
(testing/in? env :c2.2)
=>
true)))
(specification
"history5"
(let [chart
(chart/statechart
{:initial :a}
(parallel
{:id :a}
(history {:id :ha, :type :deep} (transition {:target :b}))
(parallel
{:id :b}
(parallel
{:id :c}
(parallel
{:id :d}
(parallel
{:id :e}
(state
{:id :i, :initial :i1}
(state {:id :i1} (transition {:target :i2, :event :t1}))
(state {:id :i2} (transition {:target :l, :event :t2})))
(state {:id :j}))
(state {:id :h}))
(state {:id :g}))
(state {:id :f, :initial :f1}
(state {:id :f1} (transition {:target :f2, :event :t1}))
(state {:id :f2})))
(state {:id :k}))
(state {:id :l} (transition {:target :ha, :event :t3})))
env (testing/new-testing-env {:statechart chart} {})]
(testing/start! env)
(assertions
(testing/in? env :i1) => true
(testing/in? env :j) => true
(testing/in? env :h) => true
(testing/in? env :g) => true
(testing/in? env :f1) => true
(testing/in? env :k) => true)
(testing/run-events! env :t1)
(assertions
(testing/in? env :i2) => true
(testing/in? env :j) => true
(testing/in? env :h) => true
(testing/in? env :g) => true
(testing/in? env :f2) => true
(testing/in? env :k) => true)
(testing/run-events! env :t2)
(assertions (testing/in? env :l) => true)
(testing/run-events! env :t3)
(assertions
(testing/in? env :i2) => true
(testing/in? env :j) => true
(testing/in? env :h) => true
(testing/in? env :g) => true
(testing/in? env :f2) => true
(testing/in? env :k) => true)))
(specification
"history6"
(let [chart (chart/statechart
{:initial :a}
(data-model
{:expr {:x 2}})
(state {:id :a}
(transition {:target :h, :event :t1}))
(state {:id :b, :initial :b1}
(on-entry {}
(assign {:location :x, :expr (fn [_ {:keys [x] :as env}] (* x 3))}))
(history {:id :h} (transition {:target :b2}))
(state {:id :b1})
(state {:id :b2}
(on-entry {}
(assign {:location :x, :expr (fn [_ {:keys [x]}] (* x 5))}))
(transition {:event :t2, :target :b3}))
(state {:id :b3}
(on-entry {}
(assign {:location :x, :expr (fn [_ {:keys [x]}] (* x 7))}))
(transition {:event :t3, :target :a}))
(transition
{:event :t4, :target :success, :cond (fn [_ {:keys [x]}] (= x 4410))})
(transition
{:event :t4, :target :really-fail, :cond (fn [_ {:keys [x]}] (= x 1470))})
(transition {:event :t4, :target :fail}))
(state {:id :success})
(state {:id :fail})
(state {:id :really-fail}))
env (testing/new-testing-env {:statechart chart :mocking-options {:run-unmocked? true}}
{})]
(testing/start! env)
(assertions (testing/in? env :a) => true)
(testing/run-events! env :t1)
(assertions (testing/in? env :b2) => true)
(testing/run-events! env :t2)
(assertions (testing/in? env :b3) => true)
(testing/run-events! env :t3)
(assertions (testing/in? env :a) => true)
(testing/run-events! env :t1)
(assertions (testing/in? env :b3) => true)
(testing/run-events! env :t4)
(assertions (testing/in? env :success) => true)))
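;; Reading of the numbers above (illustrative, not part of the upstream
;; test): x starts at 2; :t1 enters :b (*3) and the history default :b2
;; (*5), :t2 enters :b3 (*7), giving 2*3*5*7 = 210; :t3 leaves to :a, and
;; the second :t1 re-enters :b (*3 -> 630) with history restoring :b3
;; (*7 -> 4410), which is what the :t4 guard for :success checks; 1470
;; (= 210*7) would instead mean :b3 was re-entered without :b's on-entry.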
| null | https://raw.githubusercontent.com/fulcrologic/statecharts/9a081be7da28ba9f9e2f7cdca75d1be4c030e3e0/src/test/com/fulcrologic/statecharts/algorithms/v20150901/history_spec.cljc | clojure | (ns com.fulcrologic.statecharts.algorithms.v20150901.history-spec
(:require [com.fulcrologic.statecharts.elements :refer
[state initial parallel final transition raise on-entry on-exit
data-model assign script history log]]
[com.fulcrologic.statecharts :as sc]
[com.fulcrologic.statecharts.chart :as chart]
[com.fulcrologic.statecharts.testing :as testing]
[com.fulcrologic.statecharts.data-model.operations :as ops]
[fulcro-spec.core :refer [specification assertions =>]]))
(specification
"history0"
(let [chart (chart/statechart
{:initial :a}
(state {:id :a} (transition {:target :h, :event :t1}))
(state {:id :b, :initial :b1}
(history {:id :h} (transition {:target :b2}))
(state {:id :b1})
(state {:id :b2} (transition {:event :t2, :target :b3}))
(state {:id :b3} (transition {:event :t3, :target :a}))))
env (testing/new-testing-env {:statechart chart} {})]
(testing/start! env)
(assertions (testing/in? env :a) => true)
(testing/run-events! env :t1)
(assertions (testing/in? env :b2) => true)
(testing/run-events! env :t2)
(assertions (testing/in? env :b3) => true)
(testing/run-events! env :t3)
(assertions (testing/in? env :a) => true)
(testing/run-events! env :t1)
(assertions (testing/in? env :b3) => true)))
(specification
"history1"
(let [chart (chart/statechart
{:initial :a}
(state {:id :a} (transition {:target :h, :event :t1}))
(state
{:id :b, :initial :b1}
(history {:id :h, :type :deep} (transition {:target :b1.2}))
(state
{:id :b1, :initial :b1.1}
(state {:id :b1.1})
(state {:id :b1.2} (transition {:event :t2, :target :b1.3}))
(state {:id :b1.3} (transition {:event :t3, :target :a})))))
env (testing/new-testing-env {:statechart chart} {})]
(testing/start! env)
(assertions (testing/in? env :a) => true)
(testing/run-events! env :t1)
(assertions (testing/in? env :b1.2) => true)
(testing/run-events! env :t2)
(assertions (testing/in? env :b1.3) => true)
(testing/run-events! env :t3)
(assertions (testing/in? env :a) => true)
(testing/run-events! env :t1)
(assertions (testing/in? env :b1.3) => true)))
(specification
"history2"
(let [chart
(chart/statechart
{:initial :a}
(state {:id :a} (transition {:target :h, :event :t1}))
(state
{:id :b, :initial :b1}
(history {:id :h, :type :shallow} (transition {:target :b1.2}))
(state
{:id :b1, :initial :b1.1}
(state {:id :b1.1})
(state {:id :b1.2} (transition {:event :t2, :target :b1.3}))
(state {:id :b1.3} (transition {:event :t3, :target :a})))))
env (testing/new-testing-env {:statechart chart} {})]
(testing/start! env)
(assertions (testing/in? env :a) => true)
(testing/run-events! env :t1)
(assertions (testing/in? env :b1.2) => true)
(testing/run-events! env :t2)
(assertions (testing/in? env :b1.3) => true)
(testing/run-events! env :t3)
(assertions (testing/in? env :a) => true)
(testing/run-events! env :t1)
(assertions (testing/in? env :b1.1) => true)))
(specification
"history3"
(let [chart
(chart/statechart
{:initial :a}
(state {:id :a}
(transition {:target :p, :event :t1})
(transition {:target :h, :event :t4}))
(parallel
{:id :p}
(history {:id :h, :type :deep} (transition {:target :b}))
(state {:id :b, :initial :b1}
(state {:id :b1} (transition {:target :b2, :event :t2}))
(state {:id :b2}))
(state {:id :c, :initial :c1}
(state {:id :c1} (transition {:target :c2, :event :t2}))
(state {:id :c2}))
(transition {:target :a, :event :t3})))
env (testing/new-testing-env {:statechart chart} {})]
(testing/start! env)
(assertions (testing/in? env :a) => true)
(testing/run-events! env :t1)
(assertions (testing/in? env :b1) => true (testing/in? env :c1) => true)
(testing/run-events! env :t2)
(assertions (testing/in? env :b2) => true (testing/in? env :c2) => true)
(testing/run-events! env :t3)
(assertions (testing/in? env :a) => true)
(testing/run-events! env :t4)
(assertions (testing/in? env :b2) => true (testing/in? env :c2) => true)))
(specification
"history4"
(let [chart
(chart/statechart
{:initial :a}
(state {:id :a}
(transition {:target :p, :event :t1})
(transition {:target :p, :event :t6})
(transition {:target :hp, :event :t9}))
(parallel
{:id :p}
(history {:id :hp, :type :deep} (transition {:target :b}))
(state
{:id :b, :initial :hb}
(history {:id :hb, :type :deep} (transition {:target :b1}))
(state
{:id :b1, :initial :b1.1}
(state {:id :b1.1} (transition {:target :b1.2, :event :t2}))
(state {:id :b1.2} (transition {:target :b2, :event :t3})))
(state {:id :b2, :initial :b2.1}
(state {:id :b2.1}
(transition {:target :b2.2, :event :t4}))
(state {:id :b2.2}
(transition {:target :a, :event :t5})
(transition {:target :a, :event :t8}))))
(state
{:id :c, :initial :hc}
(history {:id :hc, :type :shallow} (transition {:target :c1}))
(state
{:id :c1, :initial :c1.1}
(state {:id :c1.1} (transition {:target :c1.2, :event :t2}))
(state {:id :c1.2} (transition {:target :c2, :event :t3})))
(state {:id :c2, :initial :c2.1}
(state {:id :c2.1}
(transition {:target :c2.2, :event :t4})
(transition {:target :c2.2, :event :t7}))
(state {:id :c2.2})))))
env (testing/new-testing-env {:statechart chart} {})]
(testing/start! env)
(assertions (testing/in? env :a) => true)
(testing/run-events! env :t1)
(assertions (testing/in? env :b1.1) => true (testing/in? env :c1.1) => true)
(testing/run-events! env :t2)
(assertions (testing/in? env :b1.2) => true (testing/in? env :c1.2) => true)
(testing/run-events! env :t3)
(assertions (testing/in? env :b2.1) => true (testing/in? env :c2.1) => true)
(testing/run-events! env :t4)
(assertions (testing/in? env :b2.2) => true (testing/in? env :c2.2) => true)
(testing/run-events! env :t5)
(assertions (testing/in? env :a) => true)
(testing/run-events! env :t6)
(assertions (testing/in? env :b2.2) => true (testing/in? env :c2.1) => true)
(testing/run-events! env :t7)
(assertions (testing/in? env :b2.2) => true (testing/in? env :c2.2) => true)
(testing/run-events! env :t8)
(assertions (testing/in? env :a) => true)
(testing/run-events! env :t9)
(assertions (testing/in? env :b2.2)
=>
true
(testing/in? env :c2.2)
=>
true)))
(specification
"history4b"
(let [chart
(chart/statechart
{:initial :a}
(state {:id :a}
(transition {:target :p, :event :t1})
(transition {:target [:hb :hc], :event :t6})
(transition {:target :hp, :event :t9}))
(parallel
{:id :p}
(history {:id :hp, :type :deep} (transition {:target :b}))
(state
{:id :b, :initial :hb}
(history {:id :hb, :type :deep} (transition {:target :b1}))
(state
{:id :b1, :initial :b1.1}
(state {:id :b1.1} (transition {:target :b1.2, :event :t2}))
(state {:id :b1.2} (transition {:target :b2, :event :t3})))
(state {:id :b2, :initial :b2.1}
(state {:id :b2.1}
(transition {:target :b2.2, :event :t4}))
(state {:id :b2.2}
(transition {:target :a, :event :t5})
(transition {:target :a, :event :t8}))))
(state
{:id :c, :initial :hc}
(history {:id :hc, :type :shallow} (transition {:target :c1}))
(state
{:id :c1, :initial :c1.1}
(state {:id :c1.1} (transition {:target :c1.2, :event :t2}))
(state {:id :c1.2} (transition {:target :c2, :event :t3})))
(state {:id :c2, :initial :c2.1}
(state {:id :c2.1}
(transition {:target :c2.2, :event :t4})
(transition {:target :c2.2, :event :t7}))
(state {:id :c2.2})))))
env (testing/new-testing-env {:statechart chart} {})]
(testing/start! env)
(assertions (testing/in? env :a) => true)
(testing/run-events! env :t1)
(assertions (testing/in? env :b1.1) => true (testing/in? env :c1.1) => true)
(testing/run-events! env :t2)
(assertions (testing/in? env :b1.2) => true (testing/in? env :c1.2) => true)
(testing/run-events! env :t3)
(assertions (testing/in? env :b2.1) => true (testing/in? env :c2.1) => true)
(testing/run-events! env :t4)
(assertions (testing/in? env :b2.2) => true (testing/in? env :c2.2) => true)
(testing/run-events! env :t5)
(assertions (testing/in? env :a) => true)
(testing/run-events! env :t6)
(assertions (testing/in? env :b2.2) => true (testing/in? env :c2.1) => true)
(testing/run-events! env :t7)
(assertions (testing/in? env :b2.2) => true (testing/in? env :c2.2) => true)
(testing/run-events! env :t8)
(assertions (testing/in? env :a) => true)
(testing/run-events! env :t9)
(assertions (testing/in? env :b2.2)
=>
true
(testing/in? env :c2.2)
=>
true)))
(specification
"history5"
(let [chart
(chart/statechart
{:initial :a}
(parallel
{:id :a}
(history {:id :ha, :type :deep} (transition {:target :b}))
(parallel
{:id :b}
(parallel
{:id :c}
(parallel
{:id :d}
(parallel
{:id :e}
(state
{:id :i, :initial :i1}
(state {:id :i1} (transition {:target :i2, :event :t1}))
(state {:id :i2} (transition {:target :l, :event :t2})))
(state {:id :j}))
(state {:id :h}))
(state {:id :g}))
(state {:id :f, :initial :f1}
(state {:id :f1} (transition {:target :f2, :event :t1}))
(state {:id :f2})))
(state {:id :k}))
(state {:id :l} (transition {:target :ha, :event :t3})))
env (testing/new-testing-env {:statechart chart} {})]
(testing/start! env)
(assertions
(testing/in? env :i1) => true
(testing/in? env :j) => true
(testing/in? env :h) => true
(testing/in? env :g) => true
(testing/in? env :f1) => true
(testing/in? env :k) => true)
(testing/run-events! env :t1)
(assertions
(testing/in? env :i2) => true
(testing/in? env :j) => true
(testing/in? env :h) => true
(testing/in? env :g) => true
(testing/in? env :f2) => true
(testing/in? env :k) => true)
(testing/run-events! env :t2)
(assertions (testing/in? env :l) => true)
(testing/run-events! env :t3)
(assertions
(testing/in? env :i2) => true
(testing/in? env :j) => true
(testing/in? env :h) => true
(testing/in? env :g) => true
(testing/in? env :f2) => true
(testing/in? env :k) => true)))
(specification
"history6"
(let [chart (chart/statechart
{:initial :a}
(data-model
{:expr {:x 2}})
(state {:id :a}
(transition {:target :h, :event :t1}))
(state {:id :b, :initial :b1}
(on-entry {}
(assign {:location :x, :expr (fn [_ {:keys [x] :as env}] (* x 3))}))
(history {:id :h} (transition {:target :b2}))
(state {:id :b1})
(state {:id :b2}
(on-entry {}
(assign {:location :x, :expr (fn [_ {:keys [x]}] (* x 5))}))
(transition {:event :t2, :target :b3}))
(state {:id :b3}
(on-entry {}
(assign {:location :x, :expr (fn [_ {:keys [x]}] (* x 7))}))
(transition {:event :t3, :target :a}))
(transition
{:event :t4, :target :success, :cond (fn [_ {:keys [x]}] (= x 4410))})
(transition
{:event :t4, :target :really-fail, :cond (fn [_ {:keys [x]}] (= x 1470))})
(transition {:event :t4, :target :fail}))
(state {:id :success})
(state {:id :fail})
(state {:id :really-fail}))
env (testing/new-testing-env {:statechart chart :mocking-options {:run-unmocked? true}}
{})]
(testing/start! env)
(assertions (testing/in? env :a) => true)
(testing/run-events! env :t1)
(assertions (testing/in? env :b2) => true)
(testing/run-events! env :t2)
(assertions (testing/in? env :b3) => true)
(testing/run-events! env :t3)
(assertions (testing/in? env :a) => true)
(testing/run-events! env :t1)
(assertions (testing/in? env :b3) => true)
(testing/run-events! env :t4)
(assertions (testing/in? env :success) => true)))
|
|
bfabf815c8cc5a583cccdbb862e58e7f82298332ed4acbc2403c27b7a819135f | chaoxu/fancy-walks | B.hs | {-# OPTIONS_GHC -O2 #-}
import Data.List
import Data.Maybe
import Data.Char
import Data.Array
import Data.Int
import Data.Ratio
import Data.Bits
import Data.Function
import Data.Ord
import Control.Monad.State
import Control.Monad
import Control.Applicative
import Data.ByteString.Char8 (ByteString)
import qualified Data.ByteString.Char8 as BS
import Data.Set (Set)
import qualified Data.Set as Set
import Data.Map (Map)
import qualified Data.Map as Map
import Data.IntMap (IntMap)
import qualified Data.IntMap as IntMap
import Data.Sequence (Seq)
import qualified Data.Sequence as Seq
import Data.Tree
import Data.Graph
-- Picks the index pair x < y maximizing (a!!y - a!!x) * (m `div` a!!x);
-- yields the 1-based indices and that value, or "IMPOSSIBLE" when no pair
-- gives a positive result.
solve m a
| ans <= 0 = "IMPOSSIBLE"
| otherwise = show (ax + 1) ++ " " ++ show (ay + 1) ++ " " ++ show ans
where
((ans, _), ax, ay) = maximum [(go m (a !! x) (a !! y), x, y) | x <- [0..10], y <- [x+1..11]]
go m x y = ((y - x) * (m `div` x), -x)
parseInput = do
cas <- readInt
replicateM cas $ (,) <$> readInt <*> replicateM 12 readInt
where
readInt = state $ fromJust . BS.readInt . BS.dropWhile isSpace
readString = state $ BS.span (not . isSpace) . BS.dropWhile isSpace
main = do
input <- evalState parseInput <$> BS.getContents
forM_ (zip [1..] input) $ \(cas, (m,a)) -> do
putStrLn $ "Case #" ++ show cas ++ ": " ++ solve m a
| null | https://raw.githubusercontent.com/chaoxu/fancy-walks/952fcc345883181144131f839aa61e36f488998d/code.google.com/codejam/Code%20Jam%20Africa%20and%20Arabia%202011/Qualification%20Round/B.hs | haskell | # OPTIONS_GHC -O2 # |
import Data.List
import Data.Maybe
import Data.Char
import Data.Array
import Data.Int
import Data.Ratio
import Data.Bits
import Data.Function
import Data.Ord
import Control.Monad.State
import Control.Monad
import Control.Applicative
import Data.ByteString.Char8 (ByteString)
import qualified Data.ByteString.Char8 as BS
import Data.Set (Set)
import qualified Data.Set as Set
import Data.Map (Map)
import qualified Data.Map as Map
import Data.IntMap (IntMap)
import qualified Data.IntMap as IntMap
import Data.Sequence (Seq)
import qualified Data.Sequence as Seq
import Data.Tree
import Data.Graph
solve m a
| ans <= 0 = "IMPOSSIBLE"
| otherwise = show (ax + 1) ++ " " ++ show (ay + 1) ++ " " ++ show ans
where
((ans, _), ax, ay) = maximum [(go m (a !! x) (a !! y), x, y) | x <- [0..10], y <- [x+1..11]]
go m x y = ((y - x) * (m `div` x), -x)
parseInput = do
cas <- readInt
replicateM cas $ (,) <$> readInt <*> replicateM 12 readInt
where
readInt = state $ fromJust . BS.readInt . BS.dropWhile isSpace
readString = state $ BS.span (not . isSpace) . BS.dropWhile isSpace
main = do
input <- evalState parseInput <$> BS.getContents
forM_ (zip [1..] input) $ \(cas, (m,a)) -> do
putStrLn $ "Case #" ++ show cas ++ ": " ++ solve m a
|
08e26f27336f2ac5ac78f067448a90af1dc323302cb397cb26b3834a583e6a0b | TaylanUB/scheme-srfis | primitive.body.scm | Copyright ( C ) ( 2007 ) . All Rights Reserved .
Made an R7RS library by , Copyright ( C ) 2014 .
;;; Permission is hereby granted, free of charge, to any person obtaining a copy
;;; of this software and associated documentation files (the "Software"), to
deal in the Software without restriction , including without limitation the
;;; rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
sell copies of the Software , and to permit persons to whom the Software is
;;; furnished to do so, subject to the following conditions:
;;; The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software .
THE SOFTWARE IS PROVIDED " AS IS " , WITHOUT WARRANTY OF ANY KIND , EXPRESS OR
;;; IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
;;; FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
;;; AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY , WHETHER IN AN ACTION OF CONTRACT , TORT OR OTHERWISE , ARISING
;;; FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
;;; IN THE SOFTWARE.
(define-record-type <stream>
(make-stream promise)
stream?
(promise stream-promise stream-promise!))
(define-syntax stream-lazy
(syntax-rules ()
((stream-lazy expr)
(make-stream
(cons 'lazy (lambda () expr))))))
(define (stream-eager expr)
(make-stream
(cons 'eager expr)))
(define-syntax stream-delay
(syntax-rules ()
((stream-delay expr)
(stream-lazy (stream-eager expr)))))
(define (stream-force promise)
(let ((content (stream-promise promise)))
(case (car content)
((eager) (cdr content))
((lazy) (let* ((promise* ((cdr content)))
(content (stream-promise promise)))
(if (not (eqv? (car content) 'eager))
(begin (set-car! content (car (stream-promise promise*)))
(set-cdr! content (cdr (stream-promise promise*)))
(stream-promise! promise* content)))
(stream-force promise))))))
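;; Note (illustrative, not from the upstream file): after running the lazy
;; thunk, the promise is re-read because the thunk may itself have forced
;; this same promise; only if it is still not 'eager is the fresh result
;; copied back into the original cell, so every alias shares one value.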
(define stream-null (stream-delay (cons 'stream 'null)))
(define-record-type <stream-pare>
(make-stream-pare kar kdr)
stream-pare?
(kar stream-kar)
(kdr stream-kdr))
(define (stream-pair? obj)
(and (stream? obj) (stream-pare? (stream-force obj))))
(define (stream-null? obj)
(and (stream? obj)
(eqv? (stream-force obj)
(stream-force stream-null))))
(define-syntax stream-cons
(syntax-rules ()
((stream-cons obj strm)
(stream-eager (make-stream-pare (stream-delay obj) (stream-lazy strm))))))
(define (stream-car strm)
(cond ((not (stream? strm)) (error "non-stream" strm))
((stream-null? strm) (error "null stream" strm))
(else (stream-force (stream-kar (stream-force strm))))))
(define (stream-cdr strm)
(cond ((not (stream? strm)) (error "non-stream" strm))
((stream-null? strm) (error "null stream" strm))
(else (stream-kdr (stream-force strm)))))
(define-syntax stream-lambda
(syntax-rules ()
((stream-lambda formals body0 body1 ...)
(lambda formals (stream-lazy (let () body0 body1 ...))))))
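;; Illustrative usage sketch (not part of the upstream file): with the
;; primitives above both the element and the rest of a stream stay
;; unevaluated until forced, so an infinite stream is safe to define.
(define (integers-from n)
  (stream-cons n (integers-from (+ n 1))))
;; (stream-car (stream-cdr (integers-from 0))) builds only two cells and
;; returns 1.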
| null | https://raw.githubusercontent.com/TaylanUB/scheme-srfis/2d2b306e7a20a7155f639001a02b0870d5a3d3f7/srfi/41/primitive.body.scm | scheme | Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to
rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
IN THE SOFTWARE. | Copyright ( C ) ( 2007 ) . All Rights Reserved .
Made an R7RS library by , Copyright ( C ) 2014 .
deal in the Software without restriction , including without limitation the
sell copies of the Software , and to permit persons to whom the Software is
all copies or substantial portions of the Software .
THE SOFTWARE IS PROVIDED " AS IS " , WITHOUT WARRANTY OF ANY KIND , EXPRESS OR
LIABILITY , WHETHER IN AN ACTION OF CONTRACT , TORT OR OTHERWISE , ARISING
(define-record-type <stream>
(make-stream promise)
stream?
(promise stream-promise stream-promise!))
(define-syntax stream-lazy
(syntax-rules ()
((stream-lazy expr)
(make-stream
(cons 'lazy (lambda () expr))))))
(define (stream-eager expr)
(make-stream
(cons 'eager expr)))
(define-syntax stream-delay
(syntax-rules ()
((stream-delay expr)
(stream-lazy (stream-eager expr)))))
(define (stream-force promise)
(let ((content (stream-promise promise)))
(case (car content)
((eager) (cdr content))
((lazy) (let* ((promise* ((cdr content)))
(content (stream-promise promise)))
(if (not (eqv? (car content) 'eager))
(begin (set-car! content (car (stream-promise promise*)))
(set-cdr! content (cdr (stream-promise promise*)))
(stream-promise! promise* content)))
(stream-force promise))))))
(define stream-null (stream-delay (cons 'stream 'null)))
(define-record-type <stream-pare>
(make-stream-pare kar kdr)
stream-pare?
(kar stream-kar)
(kdr stream-kdr))
(define (stream-pair? obj)
(and (stream? obj) (stream-pare? (stream-force obj))))
(define (stream-null? obj)
(and (stream? obj)
(eqv? (stream-force obj)
(stream-force stream-null))))
(define-syntax stream-cons
(syntax-rules ()
((stream-cons obj strm)
(stream-eager (make-stream-pare (stream-delay obj) (stream-lazy strm))))))
(define (stream-car strm)
(cond ((not (stream? strm)) (error "non-stream" strm))
((stream-null? strm) (error "null stream" strm))
(else (stream-force (stream-kar (stream-force strm))))))
(define (stream-cdr strm)
(cond ((not (stream? strm)) (error "non-stream" strm))
((stream-null? strm) (error "null stream" strm))
(else (stream-kdr (stream-force strm)))))
(define-syntax stream-lambda
(syntax-rules ()
((stream-lambda formals body0 body1 ...)
(lambda formals (stream-lazy (let () body0 body1 ...))))))
|
a5024fd9dc1474b37069a6b8dc727a6ca34f975402e1e5edcbc9987c6efd0f17 | fetburner/Coq2SML | ccalgo.mli | (************************************************************************)
v * The Coq Proof Assistant / The Coq Development Team
< O _ _ _ , , * INRIA - CNRS - LIX - LRI - PPS - Copyright 1999 - 2014
\VV/ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
(* // * This file is distributed under the terms of the *)
(* * GNU Lesser General Public License Version 2.1 *)
(************************************************************************)
open Util
open Term
open Names
type cinfo =
{ci_constr: constructor; (* inductive type *)
ci_arity: int; (* # args *)
ci_nhyps: int} (* # projectable args *)
type term =
Symb of constr
| Product of sorts_family * sorts_family
| Eps of identifier
| Appli of term*term
  | Constructor of cinfo (* constructor arity + nhyps *)
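(* Illustrative note (not part of the upstream interface): application is
   curried, so a term such as (f x y) is encoded by nesting Appli, e.g.
   Appli (Appli (Symb f, Symb x), Symb y). *)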
val term_equal : term -> term -> bool
type patt_kind =
Normal
| Trivial of types
| Creates_variables
type ccpattern =
PApp of term * ccpattern list
| PVar of int
type pa_constructor =
{ cnode : int;
arity : int;
args : int list}
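(* Illustrative note (not part of the upstream interface): a pa_constructor
   describes a partially applied constructor: [cnode] is the node carrying
   the constructor, [args] the classes of the arguments applied so far, and
   [arity] how many arguments are still missing. *)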
module PacMap : Map.S with type key = pa_constructor
type forest
type state
type rule=
Congruence
| Axiom of constr * bool
| Injection of int * pa_constructor * int * pa_constructor * int
type from=
Goal
| Hyp of constr
| HeqG of constr
| HeqnH of constr*constr
type 'a eq = {lhs:int;rhs:int;rule:'a}
type equality = rule eq
type disequality = from eq
type explanation =
Discrimination of (int*pa_constructor*int*pa_constructor)
| Contradiction of disequality
| Incomplete
module Constrhash : Hashtbl.S with type key = constr
module Termhash : Hashtbl.S with type key = term
val constr_of_term : term -> constr
val debug : (Pp.std_ppcmds -> unit) -> Pp.std_ppcmds -> unit
val forest : state -> forest
val axioms : forest -> (term * term) Constrhash.t
val epsilons : forest -> pa_constructor list
val empty : int -> Proof_type.goal Tacmach.sigma -> state
val add_term : state -> term -> int
val add_equality : state -> constr -> term -> term -> unit
val add_disequality : state -> from -> term -> term -> unit
val add_quant : state -> identifier -> bool ->
int * patt_kind * ccpattern * patt_kind * ccpattern -> unit
val tail_pac : pa_constructor -> pa_constructor
val find : forest -> int -> int
val find_pac : forest -> int -> pa_constructor -> int
val term : forest -> int -> term
val get_constructor_info : forest -> int -> cinfo
val subterms : forest -> int -> int * int
val join_path : forest -> int -> int ->
((int * int) * equality) list * ((int * int) * equality) list
type quant_eq=
{qe_hyp_id: identifier;
qe_pol: bool;
qe_nvars:int;
qe_lhs: ccpattern;
qe_lhs_valid:patt_kind;
qe_rhs: ccpattern;
qe_rhs_valid:patt_kind}
type pa_fun=
{fsym:int;
fnargs:int}
type matching_problem
module PafMap: Map.S with type key = pa_fun
val make_fun_table : state -> Intset.t PafMap.t
val do_match : state ->
(quant_eq * int array) list ref -> matching_problem Stack.t -> unit
val init_pb_stack : state -> matching_problem Stack.t
val paf_of_patt : int Termhash.t -> ccpattern -> pa_fun
val find_instances : state -> (quant_eq * int array) list
val execute : bool -> state -> explanation option
type pa_constructor
module PacMap : Map . S with type key = pa_constructor
type term =
Symb of Term.constr
| Eps
| Appli of term * term
| Constructor of Names.constructor*int*int
type rule =
Congruence
| Axiom of Names.identifier
| Injection of int*int*int*int
type equality =
{ lhs : int ;
rhs : int ;
rule : rule }
module ST :
sig
type t
val empty : unit - > t
val enter : int - > int * int - > t - > unit
val query : int * int - > t - > int
val delete : int - > t - > unit
val delete_list : int list - > t - > unit
end
module UF :
sig
type t
exception of int * int * int * int * t
val empty : unit - > t
val find : t - > int - > int
val size : t - > int - > int
val get_constructor : t - > int - > Names.constructor
val pac_arity : t - > int - > int * int - > int
val mem_node_pac : t - > int - > int * int - > int
val add_pacs : t - > int - > pa_constructor PacMap.t - >
int list * equality list
val term : t - > int - > term
val subterms : t - > int - > int * int
val add : t - > term - > int
val union : t - > int - > int - > equality - > int list * equality list
val join_path : t - > int - > int - >
( ( int*int)*equality ) list *
( ( int*int)*equality ) list
end
: UF.t - > int list - > equality list
val process_rec : UF.t - > equality list - > int list
: UF.t - > unit
:
( Names.identifier * ( term * term ) ) list - > UF.t
val add_one_diseq : UF.t - > ( term * term ) - > int * int
val add_disaxioms :
UF.t - > ( Names.identifier * ( term * term ) ) list - >
( Names.identifier * ( int * int ) ) list
val check_equal : UF.t - > int * int - > bool
val find_contradiction : UF.t - >
( Names.identifier * ( int * int ) ) list - >
( Names.identifier * ( int * int ) )
module PacMap:Map.S with type key=pa_constructor
type term =
Symb of Term.constr
| Eps
| Appli of term * term
| Constructor of Names.constructor*int*int
type rule =
Congruence
| Axiom of Names.identifier
| Injection of int*int*int*int
type equality =
{lhs : int;
rhs : int;
rule : rule}
module ST :
sig
type t
val empty : unit -> t
val enter : int -> int * int -> t -> unit
val query : int * int -> t -> int
val delete : int -> t -> unit
val delete_list : int list -> t -> unit
end
module UF :
sig
type t
exception Discriminable of int * int * int * int * t
val empty : unit -> t
val find : t -> int -> int
val size : t -> int -> int
val get_constructor : t -> int -> Names.constructor
val pac_arity : t -> int -> int * int -> int
val mem_node_pac : t -> int -> int * int -> int
val add_pacs : t -> int -> pa_constructor PacMap.t ->
int list * equality list
val term : t -> int -> term
val subterms : t -> int -> int * int
val add : t -> term -> int
val union : t -> int -> int -> equality -> int list * equality list
val join_path : t -> int -> int ->
((int*int)*equality) list*
((int*int)*equality) list
end
val combine_rec : UF.t -> int list -> equality list
val process_rec : UF.t -> equality list -> int list
val cc : UF.t -> unit
val make_uf :
(Names.identifier * (term * term)) list -> UF.t
val add_one_diseq : UF.t -> (term * term) -> int * int
val add_disaxioms :
UF.t -> (Names.identifier * (term * term)) list ->
(Names.identifier * (int * int)) list
val check_equal : UF.t -> int * int -> bool
val find_contradiction : UF.t ->
(Names.identifier * (int * int)) list ->
(Names.identifier * (int * int))
*)
| null | https://raw.githubusercontent.com/fetburner/Coq2SML/322d613619edbb62edafa999bff24b1993f37612/coq-8.4pl4/plugins/cc/ccalgo.mli | ocaml | **********************************************************************
// * This file is distributed under the terms of the
* GNU Lesser General Public License Version 2.1
**********************************************************************
inductive type
# args
# projectable args | v * The Coq Proof Assistant / The Coq Development Team
< O _ _ _ , , * INRIA - CNRS - LIX - LRI - PPS - Copyright 1999 - 2014
\VV/ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
open Util
open Term
open Names
type cinfo =
type term =
Symb of constr
| Product of sorts_family * sorts_family
| Eps of identifier
| Appli of term*term
constructor arity + nhyps
val term_equal : term -> term -> bool
type patt_kind =
Normal
| Trivial of types
| Creates_variables
type ccpattern =
PApp of term * ccpattern list
| PVar of int
type pa_constructor =
{ cnode : int;
arity : int;
args : int list}
module PacMap : Map.S with type key = pa_constructor
type forest
type state
type rule=
Congruence
| Axiom of constr * bool
| Injection of int * pa_constructor * int * pa_constructor * int
type from=
Goal
| Hyp of constr
| HeqG of constr
| HeqnH of constr*constr
type 'a eq = {lhs:int;rhs:int;rule:'a}
type equality = rule eq
type disequality = from eq
type explanation =
Discrimination of (int*pa_constructor*int*pa_constructor)
| Contradiction of disequality
| Incomplete
module Constrhash : Hashtbl.S with type key = constr
module Termhash : Hashtbl.S with type key = term
val constr_of_term : term -> constr
val debug : (Pp.std_ppcmds -> unit) -> Pp.std_ppcmds -> unit
val forest : state -> forest
val axioms : forest -> (term * term) Constrhash.t
val epsilons : forest -> pa_constructor list
val empty : int -> Proof_type.goal Tacmach.sigma -> state
val add_term : state -> term -> int
val add_equality : state -> constr -> term -> term -> unit
val add_disequality : state -> from -> term -> term -> unit
val add_quant : state -> identifier -> bool ->
int * patt_kind * ccpattern * patt_kind * ccpattern -> unit
val tail_pac : pa_constructor -> pa_constructor
val find : forest -> int -> int
val find_pac : forest -> int -> pa_constructor -> int
val term : forest -> int -> term
val get_constructor_info : forest -> int -> cinfo
val subterms : forest -> int -> int * int
val join_path : forest -> int -> int ->
((int * int) * equality) list * ((int * int) * equality) list
type quant_eq=
{qe_hyp_id: identifier;
qe_pol: bool;
qe_nvars:int;
qe_lhs: ccpattern;
qe_lhs_valid:patt_kind;
qe_rhs: ccpattern;
qe_rhs_valid:patt_kind}
type pa_fun=
{fsym:int;
fnargs:int}
type matching_problem
module PafMap: Map.S with type key = pa_fun
val make_fun_table : state -> Intset.t PafMap.t
val do_match : state ->
(quant_eq * int array) list ref -> matching_problem Stack.t -> unit
val init_pb_stack : state -> matching_problem Stack.t
val paf_of_patt : int Termhash.t -> ccpattern -> pa_fun
val find_instances : state -> (quant_eq * int array) list
val execute : bool -> state -> explanation option
type pa_constructor
module PacMap : Map . S with type key = pa_constructor
type term =
Symb of Term.constr
| Eps
| Appli of term * term
| Constructor of Names.constructor*int*int
type rule =
Congruence
| Axiom of Names.identifier
| Injection of int*int*int*int
type equality =
{ lhs : int ;
rhs : int ;
rule : rule }
module ST :
sig
type t
val empty : unit - > t
val enter : int - > int * int - > t - > unit
val query : int * int - > t - > int
val delete : int - > t - > unit
val delete_list : int list - > t - > unit
end
module UF :
sig
type t
exception of int * int * int * int * t
val empty : unit - > t
val find : t - > int - > int
val size : t - > int - > int
val get_constructor : t - > int - > Names.constructor
val pac_arity : t - > int - > int * int - > int
val mem_node_pac : t - > int - > int * int - > int
val add_pacs : t - > int - > pa_constructor PacMap.t - >
int list * equality list
val term : t - > int - > term
val subterms : t - > int - > int * int
val add : t - > term - > int
val union : t - > int - > int - > equality - > int list * equality list
val join_path : t - > int - > int - >
( ( int*int)*equality ) list *
( ( int*int)*equality ) list
end
: UF.t - > int list - > equality list
val process_rec : UF.t - > equality list - > int list
: UF.t - > unit
:
( Names.identifier * ( term * term ) ) list - > UF.t
val add_one_diseq : UF.t - > ( term * term ) - > int * int
val add_disaxioms :
UF.t - > ( Names.identifier * ( term * term ) ) list - >
( Names.identifier * ( int * int ) ) list
val check_equal : UF.t - > int * int - > bool
val find_contradiction : UF.t - >
( Names.identifier * ( int * int ) ) list - >
( Names.identifier * ( int * int ) )
module PacMap:Map.S with type key=pa_constructor
type term =
Symb of Term.constr
| Eps
| Appli of term * term
| Constructor of Names.constructor*int*int
type rule =
Congruence
| Axiom of Names.identifier
| Injection of int*int*int*int
type equality =
{lhs : int;
rhs : int;
rule : rule}
module ST :
sig
type t
val empty : unit -> t
val enter : int -> int * int -> t -> unit
val query : int * int -> t -> int
val delete : int -> t -> unit
val delete_list : int list -> t -> unit
end
module UF :
sig
type t
exception Discriminable of int * int * int * int * t
val empty : unit -> t
val find : t -> int -> int
val size : t -> int -> int
val get_constructor : t -> int -> Names.constructor
val pac_arity : t -> int -> int * int -> int
val mem_node_pac : t -> int -> int * int -> int
val add_pacs : t -> int -> pa_constructor PacMap.t ->
int list * equality list
val term : t -> int -> term
val subterms : t -> int -> int * int
val add : t -> term -> int
val union : t -> int -> int -> equality -> int list * equality list
val join_path : t -> int -> int ->
((int*int)*equality) list*
((int*int)*equality) list
end
val combine_rec : UF.t -> int list -> equality list
val process_rec : UF.t -> equality list -> int list
val cc : UF.t -> unit
val make_uf :
(Names.identifier * (term * term)) list -> UF.t
val add_one_diseq : UF.t -> (term * term) -> int * int
val add_disaxioms :
UF.t -> (Names.identifier * (term * term)) list ->
(Names.identifier * (int * int)) list
val check_equal : UF.t -> int * int -> bool
val find_contradiction : UF.t ->
(Names.identifier * (int * int)) list ->
(Names.identifier * (int * int))
*)
|
47387a15fe516fede5d424f841224c1bf1293e7b539e0ea3ab7d7a99f715529e | softwarelanguageslab/maf | R5RS_scp1_flip2-5.scm | ; Changes:
* removed : 0
* added : 0
* swaps : 0
* negated predicates : 1
; * swapped branches: 0
; * calls to id fun: 0
(letrec ((make-flip (lambda ()
(let ((state 0))
(lambda ()
(if (= state 0) (set! state 1) (set! state 0))
state))))
(flip (make-flip)))
(if (= (flip) 1)
(if (= (flip) 0)
(if (<change> (= (flip) 1) (not (= (flip) 1)))
(= (flip) 0)
#f)
#f)
#f)) | null | https://raw.githubusercontent.com/softwarelanguageslab/maf/11acedf56b9bf0c8e55ddb6aea754b6766d8bb40/test/changes/scheme/generated/R5RS_scp1_flip2-5.scm | scheme | Changes:
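;; Illustrative reading (not part of the generated benchmark): each
;; (<change> old new) form pairs the original expression with its mutated
;; version, so the single "negated predicates: 1" entry in the header
;; corresponds to (= (flip) 1) being replaced by (not (= (flip) 1)).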
* swapped branches: 0
* calls to id fun: 0 | * removed : 0
* added : 0
* swaps : 0
* negated predicates : 1
(letrec ((make-flip (lambda ()
(let ((state 0))
(lambda ()
(if (= state 0) (set! state 1) (set! state 0))
state))))
(flip (make-flip)))
(if (= (flip) 1)
(if (= (flip) 0)
(if (<change> (= (flip) 1) (not (= (flip) 1)))
(= (flip) 0)
#f)
#f)
#f)) |
4de58734220aebd52373bd36aa7745248ff67e8954319f240418cfb26f41cc84 | aryx/fork-efuns | ebuffer.ml | (*s: core/ebuffer.ml *)
(*s: copyright header2 *)
(***********************************************************************)
(* *)
xlib for
(* *)
Fabrice Le Fessant , projet Para / SOR , INRIA Rocquencourt
(* *)
Copyright 1998 Institut National de Recherche en Informatique et
Automatique . Distributed only by permission .
(* *)
(***********************************************************************)
(*e: copyright header2 *)
open Common
open Efuns
(*s: type [[Ebuffer.t]] *)
type t = Efuns.buffer
(*e: type [[Ebuffer.t]] *)
(* this file is called ebuffer.ml because buffer.ml already exists in stdlib *)
(*s: constant [[Ebuffer.create_buf_hook]] *)
let create_buf_hook = Store.create_abstr "create_buf_hook"
(*e: constant [[Ebuffer.create_buf_hook]] *)
(*s: constant [[Ebuffer.modes_alist]] *)
let modes_alist = Store.create_abstr "modes_alist"
(*e: constant [[Ebuffer.modes_alist]] *)
(*s: function [[Ebuffer.create_syntax_table]] *)
let create_syntax_table () =
let table = Array.make 256 false
in
for i = Char.code 'a' to Char.code 'z' do
table.(i) <- true;
done;
for i = Char.code 'A' to Char.code 'Z' do
table.(i) <- true;
done;
for i = Char.code '0' to Char.code '9' do
table.(i) <- true;
done;
table
(*e: function [[Ebuffer.create_syntax_table]] *)
(*s: constant [[Ebuffer.default_syntax_table]] *)
let default_syntax_table = create_syntax_table ()
(*e: constant [[Ebuffer.default_syntax_table]] *)
(*s: function [[Ebuffer.get_name]] *)
let get_unique_name filename =
let basename = Filename.basename filename in
let name =
if basename = ""
then (Filename.basename (Filename.dirname filename)) ^ "/"
else basename
in
let i = ref 0 in
let compute_name () =
if !i =|= 0
then name
else Printf.sprintf "%s<%d>" name !i
in
try
while true do
let _ = Hashtbl.find (Globals.editor()).edt_buffers (compute_name ())
in
incr i
done;
assert false
with Not_found ->
compute_name ()
(*e: function [[Ebuffer.get_name]] *)
(*s: function [[Ebuffer.new_minor_mode]] *)
let new_minor_mode name hooks = {
min_name = name;
min_map = Keymap.create ();
min_hooks = hooks;
min_vars = Store.new_store ()
}
(*e: function [[Ebuffer.new_minor_mode]] *)
(*s: function [[Ebuffer.new_major_mode]] *)
let new_major_mode name hook_opt = {
maj_name = name;
maj_map = Keymap.create ();
maj_hooks = (match hook_opt with None -> [] | Some hook -> [hook]);
maj_vars = Store.new_store ();
}
(*e: function [[Ebuffer.new_major_mode]] *)
s : constant [ [ ] ]
let fondamental__mode = new_major_mode "Fondamental" None (* no hooks *)
e : constant [ [ ] ]
(*s: constant [[Ebuffer.tab_size]] *)
let tab_size = ref 9
(*e: constant [[Ebuffer.tab_size]] *)
(*s: function [[Ebuffer.create]] *)
let create name filename text local_map =
let name = get_unique_name name in
let buf =
{
buf_text = text;
buf_name = name;
buf_filename = filename;
buf_point = Text.new_point text;
buf_start = Text.new_point text;
buf_last_saved = Text.version text;
buf_modified = 0;
buf_map = local_map;
buf_syntax_table = default_syntax_table;
buf_map_partial = true;
buf_vars = Store.new_store ();
buf_major_mode = fondamental__mode;
buf_minor_modes = [];
buf_sync = false;
buf_mark = None;
buf_shared = 0;
buf_finalizers = [];
buf_history_pos = [||];
(*s: [[Ebuffer.create()]] buffer other fields setup *)
buf_charreprs = Array.init 256 (fun i -> String.make 1 (Char.chr i));
(*e: [[Ebuffer.create()]] buffer other fields setup *)
} in
(*s: [[Ebuffer.create()]] adjust editor global fields *)
Hashtbl.add (Globals.editor()).edt_buffers name buf;
(*e: [[Ebuffer.create()]] adjust editor global fields *)
(*s: [[Ebuffer.create()]] adjust charreprs *)
for i=0 to 25 do
let s = Bytes.make 2 '^' in
Bytes.set s 1 (Char.chr (97+i));
buf.buf_charreprs.(i) <- (Bytes.to_string s);
done;
(*x: [[Ebuffer.create()]] adjust charreprs *)
buf.buf_charreprs.(9) <- String.make !tab_size ' ';
(*e: [[Ebuffer.create()]] adjust charreprs *)
(*s: [[Ebuffer.create()]] run hooks *)
let hooks = try Var.get_global create_buf_hook with Not_found -> [] in
Hook.exec_hooks hooks buf;
(*e: [[Ebuffer.create()]] run hooks *)
buf
(*e: function [[Ebuffer.create]] *)
(*s: function [[Ebuffer.kill]] *)
let kill buf =
let edt = Globals.editor() in
Hashtbl.remove edt.edt_buffers buf.buf_name;
buf.buf_filename |> Option.iter (fun filename ->
Hashtbl.remove edt.edt_files filename
);
List.iter (fun f -> f () ) buf.buf_finalizers;
  (* TODO Gc.compact (); this cause some segfault under with *)
buf.buf_shared <- -1
(*e: function [[Ebuffer.kill]] *)
open Options
(*s: constant [[Ebuffer.save_buffer_hooks]] *)
let save_buffer_hooks = define_option ["save_buffer_hooks"] ""
(list_option string_option)
[ ]
(*e: constant [[Ebuffer.save_buffer_hooks]] *)
(*s: constant [[Ebuffer.saved_buffer_hooks]] *)
let saved_buffer_hooks = Store.create_abstr "saved_buffer_hooks"
(*e: constant [[Ebuffer.saved_buffer_hooks]] *)
(*s: function [[Ebuffer.save]] *)
let save buf =
Hook.exec_named_buf_hooks_with_abort !!save_buffer_hooks buf;
let filename =
match buf.buf_filename with
None -> raise Not_found
| Some name -> name
in
let outc = open_out filename in
Text.save buf.buf_text outc;
close_out outc;
buf.buf_last_saved <- Text.version buf.buf_text;
let hooks = try Var.get_var buf saved_buffer_hooks with Not_found -> [] in
Hook.exec_hooks hooks buf
(*e: function [[Ebuffer.save]] *)
(*s: function [[Ebuffer.read]] *)
let read filename local_map =
let edt = Globals.editor() in
let filename = Utils.normal_name edt.edt_dirname filename in
try
Hashtbl.find edt.edt_files filename
with Not_found ->
let text =
try
let inc = open_in filename in
let text = Text.read inc in
close_in inc;
text
with exn ->
Error.error_exn (spf "error reading file %s" filename) exn;
Text.create ""
in
let buf = create filename (Some filename) text local_map in
Hashtbl.add edt.edt_files filename buf;
buf
(*e: function [[Ebuffer.read]] *)
(*s: function [[Ebuffer.find_buffer_opt]] *)
let find_buffer_opt name =
try Some (Hashtbl.find (Globals.editor()).edt_buffers name)
with Not_found -> None
(*e: function [[Ebuffer.find_buffer_opt]] *)
(*s: constant [[Ebuffer.help_buffer_content]] *)
let help_buffer_content =
"Welcome to Efuns, a small demo editor written in Ocaml.
Fabrice Le Fessant
PARA/SOR Project
INRIA Rocquencourt
"
(*e: constant [[Ebuffer.help_buffer_content]] *)
s : function [ [ ] ]
let default name =
try
Hashtbl.find (Globals.editor()).edt_buffers name
with Not_found ->
let str =
if name = "*help*"
then help_buffer_content
else ""
in
create name None (Text.create str) (Keymap.create ())
e : function [ [ ] ]
(*s: function [[Ebuffer.compute_representation]] *)
let compute_representation buf n =
Text.compute_representation buf.buf_text buf.buf_charreprs n
(*e: function [[Ebuffer.compute_representation]] *)
(*s: exception [[Ebuffer.BufferAlreadyOpened]] *)
exception BufferAlreadyOpened
(*e: exception [[Ebuffer.BufferAlreadyOpened]] *)
(*s: function [[Ebuffer.change_name]] *)
let change_name buf filename =
let edt = Globals.editor() in
Hashtbl.remove edt.edt_buffers buf.buf_name;
buf.buf_filename |> Option.iter (fun filename ->
Hashtbl.remove edt.edt_files filename
);
let filename =
if Filename.is_relative filename
then Filename.concat edt.edt_dirname filename
else filename
in
if Utils.hashtbl_mem edt.edt_files filename
then raise BufferAlreadyOpened;
let filename = Utils.normal_name edt.edt_dirname filename in
let name = get_unique_name filename in
Hashtbl.add edt.edt_buffers name buf;
Hashtbl.add edt.edt_files filename buf;
buf.buf_filename <- Some filename;
buf.buf_name <- name
(*e: function [[Ebuffer.change_name]] *)
(*s: function [[Ebuffer.set_mark]] *)
let set_mark buf point =
let text = buf.buf_text in
buf.buf_modified <- buf.buf_modified + 1;
match buf.buf_mark with
| None ->
let mark = Text.dup_point text point in
buf.buf_mark <- Some mark
| Some mark ->
Text.goto_point text mark point
(*e: function [[Ebuffer.set_mark]] *)
(*s: function [[Ebuffer.get_mark]] *)
let rec get_mark buf point =
match buf.buf_mark with
| None ->
set_mark buf point;
get_mark buf point
| Some mark -> mark
(*e: function [[Ebuffer.get_mark]] *)
(*s: function [[Ebuffer.remove_mark]] *)
let remove_mark buf =
buf.buf_mark |> Option.iter (fun mark ->
buf.buf_mark <- None;
Text.remove_point buf.buf_text mark;
buf.buf_modified <- buf.buf_modified + 1
)
(*e: function [[Ebuffer.remove_mark]] *)
(*s: constant [[Ebuffer.modes_old]] *)
let modes_old = ref []
(*e: constant [[Ebuffer.modes_old]] *)
(*s: constant [[Ebuffer.regexp_alist]] *)
let regexp_alist = ref []
(*e: constant [[Ebuffer.regexp_alist]] *)
(*s: function [[Ebuffer.set_major_mode]] *)
let set_major_mode buf mode =
if !Globals.debug
then pr2 (spf "setting %s major mode" mode.maj_name);
buf.buf_modified <- buf.buf_modified + 1;
buf.buf_major_mode <- mode;
mode.maj_hooks |> List.iter (fun f ->
try f buf
with exn -> Error.error_exn "set_major_mode" exn
)
(*e: function [[Ebuffer.set_major_mode]] *)
(*s: function [[Ebuffer.set_minor_mode]] *)
let set_minor_mode buf mode =
buf.buf_minor_modes <- mode :: buf.buf_minor_modes;
buf.buf_modified <- buf.buf_modified + 1;
mode.min_hooks |> List.iter (fun f ->
try f buf
with exn -> Error.error_exn "set_minor_mode" exn
)
(*e: function [[Ebuffer.set_minor_mode]] *)
(*s: function [[Ebuffer.del_minor_mode]] *)
let del_minor_mode buf minor =
buf.buf_minor_modes <-
List.fold_right (fun mode list ->
if mode == minor then begin
buf.buf_modified <- buf.buf_modified + 1;
list
end else (mode :: list)
) buf.buf_minor_modes []
(*e: function [[Ebuffer.del_minor_mode]] *)
(*s: function [[Ebuffer.has_minor_mode]] *)
let has_minor_mode buf minor =
List.memq minor buf.buf_minor_modes
(*e: function [[Ebuffer.has_minor_mode]] *)
(*s: constant [[Ebuffer.suffix_reg]] *)
let suffix_reg = Str.regexp "\\(.*\\)<[0-9]+>$"
(*e: constant [[Ebuffer.suffix_reg]] *)
(*s: function [[Ebuffer.set_buffer_mode]] *)
let set_buffer_mode buf =
let buf_name =
match buf.buf_filename with
None ->
(try
if Str.string_match suffix_reg buf.buf_name 0
then Str.matched_group 1 buf.buf_name
else buf.buf_name
with exn ->
Error.error_exn "set_buffer_mode" exn;
buf.buf_name
)
| Some file_name -> file_name
in
let modes_alist = Var.get_var buf modes_alist in
(* must use != here, because modes_alist contain functional values *)
if (!modes_old != modes_alist) then begin
regexp_alist := modes_alist |> List.map (fun (file_reg, major) ->
Str.regexp file_reg, major
);
modes_old := modes_alist;
end;
try
!regexp_alist |> List.iter (fun (regexp, major) ->
if Str.string_match regexp buf_name 0
then
try
set_major_mode buf major;
raise Exit
with
| Exit -> raise Exit
| exn ->
Error.error_exn "set_buffer_mode" exn;
raise Exit
)
with Exit -> ()
(*e: function [[Ebuffer.set_buffer_mode]] *)
(*s: function [[Ebuffer.get_binding]] *)
let get_binding buf keylist =
let binding = ref Unbound in
try
(*s: [[Ebuffer.get_binding()]] minor mode key search *)
buf.buf_minor_modes |> List.iter (fun minor ->
let b = Keymap.get_binding minor.min_map keylist in
match b with
Prefix _map -> binding := b
| Function _f -> binding := b; raise Exit
| Unbound -> ()
);
(*e: [[Ebuffer.get_binding()]] minor mode key search *)
(*s: [[Ebuffer.get_binding()]] major mode key search *)
(let b = Keymap.get_binding buf.buf_major_mode.maj_map keylist in
match b with
Prefix _map -> binding := b
| Function _f -> binding := b; raise Exit
| Unbound -> ());
(*e: [[Ebuffer.get_binding()]] major mode key search *)
(let b = Keymap.get_binding buf.buf_map keylist in
match b with
| Prefix _map -> binding := b;
| Function _f -> binding := b; raise Exit
| Unbound -> ()
);
(*s: [[Ebuffer.get_binding()]] if partial map *)
if buf.buf_map_partial then
(let b = Keymap.get_binding (Globals.editor()).edt_map keylist in
match b with
| Prefix _map -> binding := b;
| Function _f -> binding := b; raise Exit
| Unbound -> ()
);
(*e: [[Ebuffer.get_binding()]] if partial map *)
!binding
with Exit -> !binding
(*e: function [[Ebuffer.get_binding]] *)
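(* Illustrative summary (not part of the upstream file): bindings are looked
   up with minor-mode maps first, then the major-mode map, then the
   buffer-local map, and finally the global map when [buf_map_partial] is
   true; the first Function found wins via Exit, while later Prefix results
   simply overwrite the pending prefix. *)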
(*s: function [[Ebuffer.message]] *)
(* todo: vs Message.message? *)
let message _buf m =
let name = "*Messages*" in
try
let buf = Hashtbl.find (Globals.editor()).edt_buffers name in
Text.insert_at_end buf.buf_text (m^"\n");
with Not_found ->
create name None (Text.create (m^"\n")) (Keymap.create ()) |> ignore
(*e: function [[Ebuffer.message]] *)
s : function [ [ ] ]
let fondamental_mode frame =
set_major_mode frame.frm_buffer fondamental__mode
[@@interactive]
e : function [ [ ] ]
(*s: toplevel [[Ebuffer]] starting hook *)
let _ =
Hook.add_start_hook (fun () ->
Var.set_global create_buf_hook [set_buffer_mode];
Var.set_global modes_alist []
)
(*e: toplevel [[Ebuffer]] starting hook *)
(*e: core/ebuffer.ml *)
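(* Illustrative sketch (not part of the upstream file): [set_buffer_mode]
   chooses a major mode by matching the buffer name against [modes_alist],
   a list of (regexp string, major mode) pairs, so a hypothetical mode
   could be registered from a start hook roughly as follows:

   let foo_mode = new_major_mode "Foo" None
   let _ =
     Hook.add_start_hook (fun () ->
       Var.set_global modes_alist
         ((".*\\.foo$", foo_mode) ::
          (try Var.get_global modes_alist with Not_found -> [])))
*)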
| null | https://raw.githubusercontent.com/aryx/fork-efuns/4db191f47ae35590725a7ba82b911fd94ff12040/core/ebuffer.ml | ocaml | s: core/ebuffer.ml
s: copyright header2
*********************************************************************
*********************************************************************
e: copyright header2
s: type [[Ebuffer.t]]
e: type [[Ebuffer.t]]
this file is called ebuffer.ml because buffer.ml already exists in stdlib
s: constant [[Ebuffer.create_buf_hook]]
e: constant [[Ebuffer.create_buf_hook]]
s: constant [[Ebuffer.modes_alist]]
e: constant [[Ebuffer.modes_alist]]
s: function [[Ebuffer.create_syntax_table]]
e: function [[Ebuffer.create_syntax_table]]
s: constant [[Ebuffer.default_syntax_table]]
e: constant [[Ebuffer.default_syntax_table]]
s: function [[Ebuffer.get_name]]
e: function [[Ebuffer.get_name]]
s: function [[Ebuffer.new_minor_mode]]
e: function [[Ebuffer.new_minor_mode]]
s: function [[Ebuffer.new_major_mode]]
e: function [[Ebuffer.new_major_mode]]
no hooks
s: constant [[Ebuffer.tab_size]]
e: constant [[Ebuffer.tab_size]]
s: function [[Ebuffer.create]]
s: [[Ebuffer.create()]] buffer other fields setup
e: [[Ebuffer.create()]] buffer other fields setup
s: [[Ebuffer.create()]] adjust editor global fields
e: [[Ebuffer.create()]] adjust editor global fields
s: [[Ebuffer.create()]] adjust charreprs
x: [[Ebuffer.create()]] adjust charreprs
e: [[Ebuffer.create()]] adjust charreprs
s: [[Ebuffer.create()]] run hooks
e: [[Ebuffer.create()]] run hooks
e: function [[Ebuffer.create]]
s: function [[Ebuffer.kill]]
e: function [[Ebuffer.kill]]
s: constant [[Ebuffer.save_buffer_hooks]]
e: constant [[Ebuffer.save_buffer_hooks]]
s: constant [[Ebuffer.saved_buffer_hooks]]
e: constant [[Ebuffer.saved_buffer_hooks]]
s: function [[Ebuffer.save]]
e: function [[Ebuffer.save]]
s: function [[Ebuffer.read]]
e: function [[Ebuffer.read]]
s: function [[Ebuffer.find_buffer_opt]]
e: function [[Ebuffer.find_buffer_opt]]
s: constant [[Ebuffer.help_buffer_content]]
e: constant [[Ebuffer.help_buffer_content]]
s: function [[Ebuffer.compute_representation]]
e: function [[Ebuffer.compute_representation]]
s: exception [[Ebuffer.BufferAlreadyOpened]]
e: exception [[Ebuffer.BufferAlreadyOpened]]
s: function [[Ebuffer.change_name]]
e: function [[Ebuffer.change_name]]
s: function [[Ebuffer.set_mark]]
e: function [[Ebuffer.set_mark]]
s: function [[Ebuffer.get_mark]]
e: function [[Ebuffer.get_mark]]
s: function [[Ebuffer.remove_mark]]
e: function [[Ebuffer.remove_mark]]
s: constant [[Ebuffer.modes_old]]
e: constant [[Ebuffer.modes_old]]
s: constant [[Ebuffer.regexp_alist]]
e: constant [[Ebuffer.regexp_alist]]
s: function [[Ebuffer.set_major_mode]]
e: function [[Ebuffer.set_major_mode]]
s: function [[Ebuffer.set_minor_mode]]
e: function [[Ebuffer.set_minor_mode]]
s: function [[Ebuffer.del_minor_mode]]
e: function [[Ebuffer.del_minor_mode]]
s: function [[Ebuffer.has_minor_mode]]
e: function [[Ebuffer.has_minor_mode]]
s: constant [[Ebuffer.suffix_reg]]
e: constant [[Ebuffer.suffix_reg]]
s: function [[Ebuffer.set_buffer_mode]]
must use != here, because modes_alist contain functional values
e: function [[Ebuffer.set_buffer_mode]]
s: function [[Ebuffer.get_binding]]
s: [[Ebuffer.get_binding()]] minor mode key search
e: [[Ebuffer.get_binding()]] minor mode key search
s: [[Ebuffer.get_binding()]] major mode key search
e: [[Ebuffer.get_binding()]] major mode key search
s: [[Ebuffer.get_binding()]] if partial map
e: [[Ebuffer.get_binding()]] if partial map
e: function [[Ebuffer.get_binding]]
s: function [[Ebuffer.message]]
todo: vs Message.message?
e: function [[Ebuffer.message]]
s: toplevel [[Ebuffer]] starting hook
e: toplevel [[Ebuffer]] starting hook
e: core/ebuffer.ml | xlib for
Fabrice Le Fessant , projet Para / SOR , INRIA Rocquencourt
Copyright 1998 Institut National de Recherche en Informatique et
Automatique . Distributed only by permission .
open Common
open Efuns
type t = Efuns.buffer
let create_buf_hook = Store.create_abstr "create_buf_hook"
let modes_alist = Store.create_abstr "modes_alist"
let create_syntax_table () =
let table = Array.make 256 false
in
for i = Char.code 'a' to Char.code 'z' do
table.(i) <- true;
done;
for i = Char.code 'A' to Char.code 'Z' do
table.(i) <- true;
done;
for i = Char.code '0' to Char.code '9' do
table.(i) <- true;
done;
table
let default_syntax_table = create_syntax_table ()
let get_unique_name filename =
let basename = Filename.basename filename in
let name =
if basename = ""
then (Filename.basename (Filename.dirname filename)) ^ "/"
else basename
in
let i = ref 0 in
let compute_name () =
if !i =|= 0
then name
else Printf.sprintf "%s<%d>" name !i
in
try
while true do
let _ = Hashtbl.find (Globals.editor()).edt_buffers (compute_name ())
in
incr i
done;
assert false
with Not_found ->
compute_name ()
let new_minor_mode name hooks = {
min_name = name;
min_map = Keymap.create ();
min_hooks = hooks;
min_vars = Store.new_store ()
}
let new_major_mode name hook_opt = {
maj_name = name;
maj_map = Keymap.create ();
maj_hooks = (match hook_opt with None -> [] | Some hook -> [hook]);
maj_vars = Store.new_store ();
}
s : constant [ [ ] ]
e : constant [ [ ] ]
let tab_size = ref 9
let create name filename text local_map =
let name = get_unique_name name in
let buf =
{
buf_text = text;
buf_name = name;
buf_filename = filename;
buf_point = Text.new_point text;
buf_start = Text.new_point text;
buf_last_saved = Text.version text;
buf_modified = 0;
buf_map = local_map;
buf_syntax_table = default_syntax_table;
buf_map_partial = true;
buf_vars = Store.new_store ();
buf_major_mode = fondamental__mode;
buf_minor_modes = [];
buf_sync = false;
buf_mark = None;
buf_shared = 0;
buf_finalizers = [];
buf_history_pos = [||];
buf_charreprs = Array.init 256 (fun i -> String.make 1 (Char.chr i));
} in
Hashtbl.add (Globals.editor()).edt_buffers name buf;
for i=0 to 25 do
let s = Bytes.make 2 '^' in
Bytes.set s 1 (Char.chr (97+i));
buf.buf_charreprs.(i) <- (Bytes.to_string s);
done;
buf.buf_charreprs.(9) <- String.make !tab_size ' ';
let hooks = try Var.get_global create_buf_hook with Not_found -> [] in
Hook.exec_hooks hooks buf;
buf
let kill buf =
let edt = Globals.editor() in
Hashtbl.remove edt.edt_buffers buf.buf_name;
buf.buf_filename |> Option.iter (fun filename ->
Hashtbl.remove edt.edt_files filename
);
List.iter (fun f -> f () ) buf.buf_finalizers;
TODO Gc.compact ( ) ; this cause some segfault under with
buf.buf_shared <- -1
open Options
let save_buffer_hooks = define_option ["save_buffer_hooks"] ""
(list_option string_option)
[ ]
let saved_buffer_hooks = Store.create_abstr "saved_buffer_hooks"
let save buf =
Hook.exec_named_buf_hooks_with_abort !!save_buffer_hooks buf;
let filename =
match buf.buf_filename with
None -> raise Not_found
| Some name -> name
in
let outc = open_out filename in
Text.save buf.buf_text outc;
close_out outc;
buf.buf_last_saved <- Text.version buf.buf_text;
let hooks = try Var.get_var buf saved_buffer_hooks with Not_found -> [] in
Hook.exec_hooks hooks buf
let read filename local_map =
let edt = Globals.editor() in
let filename = Utils.normal_name edt.edt_dirname filename in
try
Hashtbl.find edt.edt_files filename
with Not_found ->
let text =
try
let inc = open_in filename in
let text = Text.read inc in
close_in inc;
text
with exn ->
Error.error_exn (spf "error reading file %s" filename) exn;
Text.create ""
in
let buf = create filename (Some filename) text local_map in
Hashtbl.add edt.edt_files filename buf;
buf
let find_buffer_opt name =
try Some (Hashtbl.find (Globals.editor()).edt_buffers name)
with Not_found -> None
let help_buffer_content =
"Welcome to Efuns, a small demo editor written in Ocaml.
Fabrice Le Fessant
PARA/SOR Project
INRIA Rocquencourt
"
s : function [ [ ] ]
let default name =
try
Hashtbl.find (Globals.editor()).edt_buffers name
with Not_found ->
let str =
if name = "*help*"
then help_buffer_content
else ""
in
create name None (Text.create str) (Keymap.create ())
e : function [ [ ] ]
let compute_representation buf n =
Text.compute_representation buf.buf_text buf.buf_charreprs n
exception BufferAlreadyOpened
let change_name buf filename =
let edt = Globals.editor() in
Hashtbl.remove edt.edt_buffers buf.buf_name;
buf.buf_filename |> Option.iter (fun filename ->
Hashtbl.remove edt.edt_files filename
);
let filename =
if Filename.is_relative filename
then Filename.concat edt.edt_dirname filename
else filename
in
if Utils.hashtbl_mem edt.edt_files filename
then raise BufferAlreadyOpened;
let filename = Utils.normal_name edt.edt_dirname filename in
let name = get_unique_name filename in
Hashtbl.add edt.edt_buffers name buf;
Hashtbl.add edt.edt_files filename buf;
buf.buf_filename <- Some filename;
buf.buf_name <- name
let set_mark buf point =
let text = buf.buf_text in
buf.buf_modified <- buf.buf_modified + 1;
match buf.buf_mark with
| None ->
let mark = Text.dup_point text point in
buf.buf_mark <- Some mark
| Some mark ->
Text.goto_point text mark point
let rec get_mark buf point =
match buf.buf_mark with
| None ->
set_mark buf point;
get_mark buf point
| Some mark -> mark
let remove_mark buf =
buf.buf_mark |> Option.iter (fun mark ->
buf.buf_mark <- None;
Text.remove_point buf.buf_text mark;
buf.buf_modified <- buf.buf_modified + 1
)
let modes_old = ref []
let regexp_alist = ref []
let set_major_mode buf mode =
if !Globals.debug
then pr2 (spf "setting %s major mode" mode.maj_name);
buf.buf_modified <- buf.buf_modified + 1;
buf.buf_major_mode <- mode;
mode.maj_hooks |> List.iter (fun f ->
try f buf
with exn -> Error.error_exn "set_major_mode" exn
)
let set_minor_mode buf mode =
buf.buf_minor_modes <- mode :: buf.buf_minor_modes;
buf.buf_modified <- buf.buf_modified + 1;
mode.min_hooks |> List.iter (fun f ->
try f buf
with exn -> Error.error_exn "set_minor_mode" exn
)
let del_minor_mode buf minor =
buf.buf_minor_modes <-
List.fold_right (fun mode list ->
if mode == minor then begin
buf.buf_modified <- buf.buf_modified + 1;
list
end else (mode :: list)
) buf.buf_minor_modes []
let has_minor_mode buf minor =
List.memq minor buf.buf_minor_modes
let suffix_reg = Str.regexp "\\(.*\\)<[0-9]+>$"
let set_buffer_mode buf =
let buf_name =
match buf.buf_filename with
None ->
(try
if Str.string_match suffix_reg buf.buf_name 0
then Str.matched_group 1 buf.buf_name
else buf.buf_name
with exn ->
Error.error_exn "set_buffer_mode" exn;
buf.buf_name
)
| Some file_name -> file_name
in
let modes_alist = Var.get_var buf modes_alist in
if (!modes_old != modes_alist) then begin
regexp_alist := modes_alist |> List.map (fun (file_reg, major) ->
Str.regexp file_reg, major
);
modes_old := modes_alist;
end;
try
!regexp_alist |> List.iter (fun (regexp, major) ->
if Str.string_match regexp buf_name 0
then
try
set_major_mode buf major;
raise Exit
with
| Exit -> raise Exit
| exn ->
Error.error_exn "set_buffer_mode" exn;
raise Exit
)
with Exit -> ()
let get_binding buf keylist =
let binding = ref Unbound in
try
buf.buf_minor_modes |> List.iter (fun minor ->
let b = Keymap.get_binding minor.min_map keylist in
match b with
Prefix _map -> binding := b
| Function _f -> binding := b; raise Exit
| Unbound -> ()
);
(let b = Keymap.get_binding buf.buf_major_mode.maj_map keylist in
match b with
Prefix _map -> binding := b
| Function _f -> binding := b; raise Exit
| Unbound -> ());
(let b = Keymap.get_binding buf.buf_map keylist in
match b with
| Prefix _map -> binding := b;
| Function _f -> binding := b; raise Exit
| Unbound -> ()
);
if buf.buf_map_partial then
(let b = Keymap.get_binding (Globals.editor()).edt_map keylist in
match b with
| Prefix _map -> binding := b;
| Function _f -> binding := b; raise Exit
| Unbound -> ()
);
!binding
with Exit -> !binding
let message _buf m =
let name = "*Messages*" in
try
let buf = Hashtbl.find (Globals.editor()).edt_buffers name in
Text.insert_at_end buf.buf_text (m^"\n");
with Not_found ->
create name None (Text.create (m^"\n")) (Keymap.create ()) |> ignore
(*s: function [[]] *)
let fondamental_mode frame =
set_major_mode frame.frm_buffer fondamental__mode
[@@interactive]
(*e: function [[]] *)
let _ =
Hook.add_start_hook (fun () ->
Var.set_global create_buf_hook [set_buffer_mode];
Var.set_global modes_alist []
)
|
f3e29132319835ba6d66d1c0f3499d4655dab4a8e7470f06cadb7bdad2dfe44e | haskell/time | TimeZone.hs |
module Main where
import Data.Time
main :: IO ()
main = do
  zone <- getCurrentTimeZone
  putStrLn (timeZoneOffsetString zone)
| null | https://raw.githubusercontent.com/haskell/time/ab2c0c28b8b7a12bce12eedd357a73e39b00afc2/test/TimeZone.hs | haskell | module Main where
import Data.Time
main :: IO ()
main = do
zone <- getCurrentTimeZone
putStrLn (timeZoneOffsetString zone)
|
|
e5c28fd041038b24bcf471ff81925849bc54041c95bccbe36290311453b9c880 | glebec/haskell-programming-allen-moronuki | Debug.hs |
module Main where
import Control.Monad (forever)
import Network.Socket hiding (recv)
import Network.Socket.ByteString (recv, sendAll)

logAndEcho :: Socket -> IO ()
logAndEcho sock = forever $ do
  (soc, _) <- accept sock -- NB: blocking
  printAndKickback soc
  close soc
  where
    printAndKickback conn = do
      msg <- recv conn 1024
      print msg
      sendAll conn msg

main :: IO ()
main = withSocketsDo $ do
  addrInfos <- getAddrInfo
               (Just $ defaultHints {addrFlags = [AI_PASSIVE]})
               Nothing
               (Just "79") -- port
  let serverAddr = head addrInfos
  sock <- socket (addrFamily serverAddr) Stream defaultProtocol
  bind sock (addrAddress serverAddr)
  listen sock 1
  logAndEcho sock
  close sock
| null | https://raw.githubusercontent.com/glebec/haskell-programming-allen-moronuki/99bd232f523e426d18a5e096f1cf771228c55f52/31-final-project/fingerd/src/Debug.hs | haskell | port | module Main where
|
4a3ad3981fb28430cefd65c2e4ec8b71afea53d601df6c5ef5fa94b1680722ed | aws-beam/aws-erlang | aws_auditmanager.erl | %% WARNING: DO NOT EDIT, AUTO-GENERATED CODE!
%% See https://github.com/aws-beam/aws-codegen for more details.
%% @doc Welcome to the Audit Manager API reference.
%%
%% This guide is for developers who need detailed information about the Audit
%% Manager API operations, data types, and errors.
%%
%% Audit Manager is a service that provides automated evidence collection so
%% that you can continually audit your Amazon Web Services usage. You can use
%% it to assess the effectiveness of your controls, manage risk, and simplify
%% compliance.
%%
%% Audit Manager provides prebuilt frameworks that structure and automate
%% assessments for a given compliance standard. Frameworks include a prebuilt
%% collection of controls with descriptions and testing procedures. These
%% controls are grouped according to the requirements of the specified
%% compliance standard or regulation. You can also customize frameworks and
%% controls to support internal audits with specific requirements.
%%
%% Use the following links to get started with the Audit Manager API:
%%
%% <ul> <li> Actions: An alphabetical list of all Audit Manager API
%% operations.
%%
%% </li> <li> Data types: An alphabetical list of all Audit Manager data
%% types.
%%
%% </li> <li> Common parameters: Parameters that all operations can use.
%%
%% </li> <li> Common errors: Client and server errors that all operations can
%% return.
%%
%% </li> </ul> If you're new to Audit Manager, we recommend that you
%% review the Audit Manager User Guide.
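%%
%% As a rough usage sketch (not part of the generated code): a client map
%% built with the usual aws-erlang helper can be passed to any operation in
%% this module. The `aws_client:make_client/3' call and the exact return
%% shape below are assumptions to check against your aws-erlang version:
%%
%%   Client = aws_client:make_client(AccessKeyId, SecretAccessKey, <<"us-east-1">>),
%%   {ok, Status, _HttpResponse} = aws_auditmanager:get_account_status(Client).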
-module(aws_auditmanager).
-export([associate_assessment_report_evidence_folder/3,
associate_assessment_report_evidence_folder/4,
batch_associate_assessment_report_evidence/3,
batch_associate_assessment_report_evidence/4,
batch_create_delegation_by_assessment/3,
batch_create_delegation_by_assessment/4,
batch_delete_delegation_by_assessment/3,
batch_delete_delegation_by_assessment/4,
batch_disassociate_assessment_report_evidence/3,
batch_disassociate_assessment_report_evidence/4,
batch_import_evidence_to_assessment_control/5,
batch_import_evidence_to_assessment_control/6,
create_assessment/2,
create_assessment/3,
create_assessment_framework/2,
create_assessment_framework/3,
create_assessment_report/3,
create_assessment_report/4,
create_control/2,
create_control/3,
delete_assessment/3,
delete_assessment/4,
delete_assessment_framework/3,
delete_assessment_framework/4,
delete_assessment_framework_share/3,
delete_assessment_framework_share/4,
delete_assessment_report/4,
delete_assessment_report/5,
delete_control/3,
delete_control/4,
deregister_account/2,
deregister_account/3,
deregister_organization_admin_account/2,
deregister_organization_admin_account/3,
disassociate_assessment_report_evidence_folder/3,
disassociate_assessment_report_evidence_folder/4,
get_account_status/1,
get_account_status/3,
get_account_status/4,
get_assessment/2,
get_assessment/4,
get_assessment/5,
get_assessment_framework/2,
get_assessment_framework/4,
get_assessment_framework/5,
get_assessment_report_url/3,
get_assessment_report_url/5,
get_assessment_report_url/6,
get_change_logs/2,
get_change_logs/4,
get_change_logs/5,
get_control/2,
get_control/4,
get_control/5,
get_delegations/1,
get_delegations/3,
get_delegations/4,
get_evidence/5,
get_evidence/7,
get_evidence/8,
get_evidence_by_evidence_folder/4,
get_evidence_by_evidence_folder/6,
get_evidence_by_evidence_folder/7,
get_evidence_folder/4,
get_evidence_folder/6,
get_evidence_folder/7,
get_evidence_folders_by_assessment/2,
get_evidence_folders_by_assessment/4,
get_evidence_folders_by_assessment/5,
get_evidence_folders_by_assessment_control/4,
get_evidence_folders_by_assessment_control/6,
get_evidence_folders_by_assessment_control/7,
get_insights/1,
get_insights/3,
get_insights/4,
get_insights_by_assessment/2,
get_insights_by_assessment/4,
get_insights_by_assessment/5,
get_organization_admin_account/1,
get_organization_admin_account/3,
get_organization_admin_account/4,
get_services_in_scope/1,
get_services_in_scope/3,
get_services_in_scope/4,
get_settings/2,
get_settings/4,
get_settings/5,
list_assessment_control_insights_by_control_domain/3,
list_assessment_control_insights_by_control_domain/5,
list_assessment_control_insights_by_control_domain/6,
list_assessment_framework_share_requests/2,
list_assessment_framework_share_requests/4,
list_assessment_framework_share_requests/5,
list_assessment_frameworks/2,
list_assessment_frameworks/4,
list_assessment_frameworks/5,
list_assessment_reports/1,
list_assessment_reports/3,
list_assessment_reports/4,
list_assessments/1,
list_assessments/3,
list_assessments/4,
list_control_domain_insights/1,
list_control_domain_insights/3,
list_control_domain_insights/4,
list_control_domain_insights_by_assessment/2,
list_control_domain_insights_by_assessment/4,
list_control_domain_insights_by_assessment/5,
list_control_insights_by_control_domain/2,
list_control_insights_by_control_domain/4,
list_control_insights_by_control_domain/5,
list_controls/2,
list_controls/4,
list_controls/5,
list_keywords_for_data_source/2,
list_keywords_for_data_source/4,
list_keywords_for_data_source/5,
list_notifications/1,
list_notifications/3,
list_notifications/4,
list_tags_for_resource/2,
list_tags_for_resource/4,
list_tags_for_resource/5,
register_account/2,
register_account/3,
register_organization_admin_account/2,
register_organization_admin_account/3,
start_assessment_framework_share/3,
start_assessment_framework_share/4,
tag_resource/3,
tag_resource/4,
untag_resource/3,
untag_resource/4,
update_assessment/3,
update_assessment/4,
update_assessment_control/5,
update_assessment_control/6,
update_assessment_control_set_status/4,
update_assessment_control_set_status/5,
update_assessment_framework/3,
update_assessment_framework/4,
update_assessment_framework_share/3,
update_assessment_framework_share/4,
update_assessment_status/3,
update_assessment_status/4,
update_control/3,
update_control/4,
update_settings/2,
update_settings/3,
validate_assessment_report_integrity/2,
validate_assessment_report_integrity/3]).
-include_lib("hackney/include/hackney_lib.hrl").
%%====================================================================
%% API
%%====================================================================
%% @doc Associates an evidence folder to an assessment report in an Audit
%% Manager assessment.
associate_assessment_report_evidence_folder(Client, AssessmentId, Input) ->
associate_assessment_report_evidence_folder(Client, AssessmentId, Input, []).
associate_assessment_report_evidence_folder(Client, AssessmentId, Input0, Options0) ->
Method = put,
Path = ["/assessments/", aws_util:encode_uri(AssessmentId), "/associateToAssessmentReport"],
SuccessStatusCode = undefined,
Options = [{send_body_as_binary, false},
{receive_body_as_binary, false},
{append_sha256_content_hash, false}
| Options0],
Headers = [],
Input1 = Input0,
CustomHeaders = [],
Input2 = Input1,
Query_ = [],
Input = Input2,
request(Client, Method, Path, Query_, CustomHeaders ++ Headers, Input, Options, SuccessStatusCode).
%% @doc Associates a list of evidence to an assessment report in an Audit
%% Manager assessment.
batch_associate_assessment_report_evidence(Client, AssessmentId, Input) ->
batch_associate_assessment_report_evidence(Client, AssessmentId, Input, []).
batch_associate_assessment_report_evidence(Client, AssessmentId, Input0, Options0) ->
Method = put,
Path = ["/assessments/", aws_util:encode_uri(AssessmentId), "/batchAssociateToAssessmentReport"],
SuccessStatusCode = undefined,
Options = [{send_body_as_binary, false},
{receive_body_as_binary, false},
{append_sha256_content_hash, false}
| Options0],
Headers = [],
Input1 = Input0,
CustomHeaders = [],
Input2 = Input1,
Query_ = [],
Input = Input2,
request(Client, Method, Path, Query_, CustomHeaders ++ Headers, Input, Options, SuccessStatusCode).
%% @doc Creates a batch of delegations for an assessment in Audit Manager.
batch_create_delegation_by_assessment(Client, AssessmentId, Input) ->
batch_create_delegation_by_assessment(Client, AssessmentId, Input, []).
batch_create_delegation_by_assessment(Client, AssessmentId, Input0, Options0) ->
Method = post,
Path = ["/assessments/", aws_util:encode_uri(AssessmentId), "/delegations"],
SuccessStatusCode = undefined,
Options = [{send_body_as_binary, false},
{receive_body_as_binary, false},
{append_sha256_content_hash, false}
| Options0],
Headers = [],
Input1 = Input0,
CustomHeaders = [],
Input2 = Input1,
Query_ = [],
Input = Input2,
request(Client, Method, Path, Query_, CustomHeaders ++ Headers, Input, Options, SuccessStatusCode).
%% @doc Deletes a batch of delegations for an assessment in Audit Manager.
batch_delete_delegation_by_assessment(Client, AssessmentId, Input) ->
batch_delete_delegation_by_assessment(Client, AssessmentId, Input, []).
batch_delete_delegation_by_assessment(Client, AssessmentId, Input0, Options0) ->
Method = put,
Path = ["/assessments/", aws_util:encode_uri(AssessmentId), "/delegations"],
SuccessStatusCode = undefined,
Options = [{send_body_as_binary, false},
{receive_body_as_binary, false},
{append_sha256_content_hash, false}
| Options0],
Headers = [],
Input1 = Input0,
CustomHeaders = [],
Input2 = Input1,
Query_ = [],
Input = Input2,
request(Client, Method, Path, Query_, CustomHeaders ++ Headers, Input, Options, SuccessStatusCode).
%% @doc Disassociates a list of evidence from an assessment report in Audit
%% Manager.
batch_disassociate_assessment_report_evidence(Client, AssessmentId, Input) ->
batch_disassociate_assessment_report_evidence(Client, AssessmentId, Input, []).
batch_disassociate_assessment_report_evidence(Client, AssessmentId, Input0, Options0) ->
Method = put,
Path = ["/assessments/", aws_util:encode_uri(AssessmentId), "/batchDisassociateFromAssessmentReport"],
SuccessStatusCode = undefined,
Options = [{send_body_as_binary, false},
{receive_body_as_binary, false},
{append_sha256_content_hash, false}
| Options0],
Headers = [],
Input1 = Input0,
CustomHeaders = [],
Input2 = Input1,
Query_ = [],
Input = Input2,
request(Client, Method, Path, Query_, CustomHeaders ++ Headers, Input, Options, SuccessStatusCode).
%% @doc Uploads one or more pieces of evidence to a control in an Audit
%% Manager assessment.
%%
%% You can upload manual evidence from any Amazon Simple Storage Service
%% (Amazon S3) bucket by specifying the S3 URI of the evidence.
%%
%% You must upload manual evidence to your S3 bucket before you can upload it
%% to your assessment. For instructions, see CreateBucket and PutObject in
%% the Amazon Simple Storage Service API Reference.
%%
%% The following restrictions apply to this action:
%%
%% <ul> <li> Maximum size of an individual evidence file: 100 MB
%%
%% </li> <li> Number of daily manual evidence uploads per control: 100
%%
%% </li> <li> Supported file formats: See Supported file types for manual
%% evidence in the Audit Manager User Guide
%%
%% </li> </ul> For more information about Audit Manager service restrictions,
%% see Quotas and restrictions for Audit Manager.
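%%
%% A hedged sketch of one possible call; the input field names shown here
%% (`manualEvidence', `s3ResourcePath') are assumptions based on the AWS API
%% shapes and should be verified against the current service documentation:
%%
%%   Input = #{<<"manualEvidence">> =>
%%               [#{<<"s3ResourcePath">> => <<"s3://my-bucket/evidence.pdf">>}]},
%%   {ok, _Result, _Http} =
%%     aws_auditmanager:batch_import_evidence_to_assessment_control(
%%       Client, AssessmentId, ControlId, ControlSetId, Input).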
batch_import_evidence_to_assessment_control(Client, AssessmentId, ControlId, ControlSetId, Input) ->
batch_import_evidence_to_assessment_control(Client, AssessmentId, ControlId, ControlSetId, Input, []).
batch_import_evidence_to_assessment_control(Client, AssessmentId, ControlId, ControlSetId, Input0, Options0) ->
Method = post,
Path = ["/assessments/", aws_util:encode_uri(AssessmentId), "/controlSets/", aws_util:encode_uri(ControlSetId), "/controls/", aws_util:encode_uri(ControlId), "/evidence"],
SuccessStatusCode = undefined,
Options = [{send_body_as_binary, false},
{receive_body_as_binary, false},
{append_sha256_content_hash, false}
| Options0],
Headers = [],
Input1 = Input0,
CustomHeaders = [],
Input2 = Input1,
Query_ = [],
Input = Input2,
request(Client, Method, Path, Query_, CustomHeaders ++ Headers, Input, Options, SuccessStatusCode).
%% @doc Creates an assessment in Audit Manager.
create_assessment(Client, Input) ->
create_assessment(Client, Input, []).
create_assessment(Client, Input0, Options0) ->
Method = post,
Path = ["/assessments"],
SuccessStatusCode = undefined,
Options = [{send_body_as_binary, false},
{receive_body_as_binary, false},
{append_sha256_content_hash, false}
| Options0],
Headers = [],
Input1 = Input0,
CustomHeaders = [],
Input2 = Input1,
Query_ = [],
Input = Input2,
request(Client, Method, Path, Query_, CustomHeaders ++ Headers, Input, Options, SuccessStatusCode).
%% @doc Creates a custom framework in Audit Manager.
create_assessment_framework(Client, Input) ->
create_assessment_framework(Client, Input, []).
create_assessment_framework(Client, Input0, Options0) ->
Method = post,
Path = ["/assessmentFrameworks"],
SuccessStatusCode = undefined,
Options = [{send_body_as_binary, false},
{receive_body_as_binary, false},
{append_sha256_content_hash, false}
| Options0],
Headers = [],
Input1 = Input0,
CustomHeaders = [],
Input2 = Input1,
Query_ = [],
Input = Input2,
request(Client, Method, Path, Query_, CustomHeaders ++ Headers, Input, Options, SuccessStatusCode).
%% @doc Creates an assessment report for the specified assessment.
create_assessment_report(Client, AssessmentId, Input) ->
create_assessment_report(Client, AssessmentId, Input, []).
create_assessment_report(Client, AssessmentId, Input0, Options0) ->
Method = post,
Path = ["/assessments/", aws_util:encode_uri(AssessmentId), "/reports"],
SuccessStatusCode = undefined,
Options = [{send_body_as_binary, false},
{receive_body_as_binary, false},
{append_sha256_content_hash, false}
| Options0],
Headers = [],
Input1 = Input0,
CustomHeaders = [],
Input2 = Input1,
Query_ = [],
Input = Input2,
request(Client, Method, Path, Query_, CustomHeaders ++ Headers, Input, Options, SuccessStatusCode).
%% @doc Creates a new custom control in Audit Manager.
create_control(Client, Input) ->
create_control(Client, Input, []).
create_control(Client, Input0, Options0) ->
Method = post,
Path = ["/controls"],
SuccessStatusCode = undefined,
Options = [{send_body_as_binary, false},
{receive_body_as_binary, false},
{append_sha256_content_hash, false}
| Options0],
Headers = [],
Input1 = Input0,
CustomHeaders = [],
Input2 = Input1,
Query_ = [],
Input = Input2,
request(Client, Method, Path, Query_, CustomHeaders ++ Headers, Input, Options, SuccessStatusCode).
%% @doc Deletes an assessment in Audit Manager.
delete_assessment(Client, AssessmentId, Input) ->
delete_assessment(Client, AssessmentId, Input, []).
delete_assessment(Client, AssessmentId, Input0, Options0) ->
Method = delete,
Path = ["/assessments/", aws_util:encode_uri(AssessmentId), ""],
SuccessStatusCode = undefined,
Options = [{send_body_as_binary, false},
{receive_body_as_binary, false},
{append_sha256_content_hash, false}
| Options0],
Headers = [],
Input1 = Input0,
CustomHeaders = [],
Input2 = Input1,
Query_ = [],
Input = Input2,
request(Client, Method, Path, Query_, CustomHeaders ++ Headers, Input, Options, SuccessStatusCode).
%% @doc Deletes a custom framework in Audit Manager.
delete_assessment_framework(Client, FrameworkId, Input) ->
delete_assessment_framework(Client, FrameworkId, Input, []).
delete_assessment_framework(Client, FrameworkId, Input0, Options0) ->
Method = delete,
Path = ["/assessmentFrameworks/", aws_util:encode_uri(FrameworkId), ""],
SuccessStatusCode = undefined,
Options = [{send_body_as_binary, false},
{receive_body_as_binary, false},
{append_sha256_content_hash, false}
| Options0],
Headers = [],
Input1 = Input0,
CustomHeaders = [],
Input2 = Input1,
Query_ = [],
Input = Input2,
request(Client, Method, Path, Query_, CustomHeaders ++ Headers, Input, Options, SuccessStatusCode).
%% @doc Deletes a share request for a custom framework in Audit Manager.
delete_assessment_framework_share(Client, RequestId, Input) ->
delete_assessment_framework_share(Client, RequestId, Input, []).
delete_assessment_framework_share(Client, RequestId, Input0, Options0) ->
Method = delete,
Path = ["/assessmentFrameworkShareRequests/", aws_util:encode_uri(RequestId), ""],
SuccessStatusCode = undefined,
Options = [{send_body_as_binary, false},
{receive_body_as_binary, false},
{append_sha256_content_hash, false}
| Options0],
Headers = [],
Input1 = Input0,
CustomHeaders = [],
Input2 = Input1,
QueryMapping = [
{<<"requestType">>, <<"requestType">>}
],
{Query_, Input} = aws_request:build_headers(QueryMapping, Input2),
request(Client, Method, Path, Query_, CustomHeaders ++ Headers, Input, Options, SuccessStatusCode).
%% @doc Deletes an assessment report in Audit Manager.
%%
%% When you run the `DeleteAssessmentReport' operation, Audit Manager
%% attempts to delete the following data:
%%
%% <ol> <li> The specified assessment report that’s stored in your S3 bucket
%%
%% </li> <li> The associated metadata that’s stored in Audit Manager
%%
%% </li> </ol> If Audit Manager can’t access the assessment report in your S3
%% bucket, the report isn’t deleted. In this event, the
%% `DeleteAssessmentReport' operation doesn’t fail. Instead, it proceeds
%% to delete the associated metadata only. You must then delete the
%% assessment report from the S3 bucket yourself.
%%
%% This scenario happens when Audit Manager receives a `403 (Forbidden)'
%% or `404 (Not Found)' error from Amazon S3. To avoid this, make sure
%% that your S3 bucket is available, and that you configured the correct
%% permissions for Audit Manager to delete resources in your S3 bucket. For
%% an example permissions policy that you can use, see Assessment report
%% destination permissions in the Audit Manager User Guide. For information
%% about the issues that could cause a `403 (Forbidden)' or `404 (Not
%% Found') error from Amazon S3, see List of Error Codes in the Amazon
%% Simple Storage Service API Reference.
delete_assessment_report(Client, AssessmentId, AssessmentReportId, Input) ->
delete_assessment_report(Client, AssessmentId, AssessmentReportId, Input, []).
delete_assessment_report(Client, AssessmentId, AssessmentReportId, Input0, Options0) ->
Method = delete,
Path = ["/assessments/", aws_util:encode_uri(AssessmentId), "/reports/", aws_util:encode_uri(AssessmentReportId), ""],
SuccessStatusCode = undefined,
Options = [{send_body_as_binary, false},
{receive_body_as_binary, false},
{append_sha256_content_hash, false}
| Options0],
Headers = [],
Input1 = Input0,
CustomHeaders = [],
Input2 = Input1,
Query_ = [],
Input = Input2,
request(Client, Method, Path, Query_, CustomHeaders ++ Headers, Input, Options, SuccessStatusCode).
%% @doc Deletes a custom control in Audit Manager.
delete_control(Client, ControlId, Input) ->
delete_control(Client, ControlId, Input, []).
delete_control(Client, ControlId, Input0, Options0) ->
Method = delete,
Path = ["/controls/", aws_util:encode_uri(ControlId), ""],
SuccessStatusCode = undefined,
Options = [{send_body_as_binary, false},
{receive_body_as_binary, false},
{append_sha256_content_hash, false}
| Options0],
Headers = [],
Input1 = Input0,
CustomHeaders = [],
Input2 = Input1,
Query_ = [],
Input = Input2,
request(Client, Method, Path, Query_, CustomHeaders ++ Headers, Input, Options, SuccessStatusCode).
%% @doc Deregisters an account in Audit Manager.
%%
%% Before you deregister, you can use the UpdateSettings API operation to set
%% your preferred data retention policy. By default, Audit Manager retains
%% your data. If you want to delete your data, you can use the
%% `DeregistrationPolicy' attribute to request the deletion of your data.
%%
%% For more information about data retention, see Data Protection in the
%% Audit Manager User Guide.
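%%
%% A minimal sketch of that sequence, assuming the `deregistrationPolicy' and
%% `deleteResources' field names from the AWS API (verify before relying on
%% them):
%%
%%   {ok, _, _} = aws_auditmanager:update_settings(Client,
%%     #{<<"deregistrationPolicy">> => #{<<"deleteResources">> => <<"ALL">>}}),
%%   {ok, _, _} = aws_auditmanager:deregister_account(Client, #{}).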
deregister_account(Client, Input) ->
deregister_account(Client, Input, []).
deregister_account(Client, Input0, Options0) ->
Method = post,
Path = ["/account/deregisterAccount"],
SuccessStatusCode = undefined,
Options = [{send_body_as_binary, false},
{receive_body_as_binary, false},
{append_sha256_content_hash, false}
| Options0],
Headers = [],
Input1 = Input0,
CustomHeaders = [],
Input2 = Input1,
Query_ = [],
Input = Input2,
request(Client, Method, Path, Query_, CustomHeaders ++ Headers, Input, Options, SuccessStatusCode).
%% @doc Removes the specified Amazon Web Services account as a delegated
%% administrator for Audit Manager.
%%
%% When you remove a delegated administrator from your Audit Manager
%% settings, you continue to have access to the evidence that you previously
%% collected under that account. This is also the case when you deregister a
%% delegated administrator from Organizations. However, Audit Manager stops
%% collecting and attaching evidence to that delegated administrator account
%% moving forward.
%%
%% Keep in mind the following cleanup task if you use evidence finder:
%%
%% Before you use your management account to remove a delegated
%% administrator, make sure that the current delegated administrator account
%% signs in to Audit Manager and disables evidence finder first. Disabling
%% evidence finder automatically deletes the event data store that was
%% created in their account when they enabled evidence finder. If this task
%% isn’t completed, the event data store remains in their account. In this
%% case, we recommend that the original delegated administrator goes to
%% CloudTrail Lake and manually deletes the event data store.
%%
%% This cleanup task is necessary to ensure that you don't end up with
%% multiple event data stores. Audit Manager ignores an unused event data
%% store after you remove or change a delegated administrator account.
%% However, the unused event data store continues to incur storage costs from
%% CloudTrail Lake if you don't delete it.
%%
%% When you deregister a delegated administrator account for Audit Manager,
%% the data for that account isn’t deleted. If you want to delete resource
%% data for a delegated administrator account, you must perform that task
%% separately before you deregister the account. Either, you can do this in
%% the Audit Manager console. Or, you can use one of the delete API
%% operations that are provided by Audit Manager.
%%
%% To delete your Audit Manager resource data, see the following
%% instructions:
%%
%% <ul> <li> DeleteAssessment (see also: Deleting an assessment in the Audit
%% Manager User Guide)
%%
%% </li> <li> DeleteAssessmentFramework (see also: Deleting a custom
%% framework in the Audit Manager User Guide)
%%
%% </li> <li> DeleteAssessmentFrameworkShare (see also: Deleting a share
%% request in the Audit Manager User Guide)
%%
%% </li> <li> DeleteAssessmentReport (see also: Deleting an assessment report
%% in the Audit Manager User Guide)
%%
%% </li> <li> DeleteControl (see also: Deleting a custom control in the Audit
%% Manager User Guide)
%%
%% </li> </ul> At this time, Audit Manager doesn't provide an option to
%% delete evidence for a specific delegated administrator. Instead, when your
%% management account deregisters Audit Manager, we perform a cleanup for the
%% current delegated administrator account at the time of deregistration.
deregister_organization_admin_account(Client, Input) ->
deregister_organization_admin_account(Client, Input, []).
deregister_organization_admin_account(Client, Input0, Options0) ->
Method = post,
Path = ["/account/deregisterOrganizationAdminAccount"],
SuccessStatusCode = undefined,
Options = [{send_body_as_binary, false},
{receive_body_as_binary, false},
{append_sha256_content_hash, false}
| Options0],
Headers = [],
Input1 = Input0,
CustomHeaders = [],
Input2 = Input1,
Query_ = [],
Input = Input2,
request(Client, Method, Path, Query_, CustomHeaders ++ Headers, Input, Options, SuccessStatusCode).
%% @doc Disassociates an evidence folder from the specified assessment report
%% in Audit Manager.
disassociate_assessment_report_evidence_folder(Client, AssessmentId, Input) ->
disassociate_assessment_report_evidence_folder(Client, AssessmentId, Input, []).
disassociate_assessment_report_evidence_folder(Client, AssessmentId, Input0, Options0) ->
Method = put,
Path = ["/assessments/", aws_util:encode_uri(AssessmentId), "/disassociateFromAssessmentReport"],
SuccessStatusCode = undefined,
Options = [{send_body_as_binary, false},
{receive_body_as_binary, false},
{append_sha256_content_hash, false}
| Options0],
Headers = [],
Input1 = Input0,
CustomHeaders = [],
Input2 = Input1,
Query_ = [],
Input = Input2,
request(Client, Method, Path, Query_, CustomHeaders ++ Headers, Input, Options, SuccessStatusCode).
%% @doc Returns the registration status of an account in Audit Manager.
get_account_status(Client)
when is_map(Client) ->
get_account_status(Client, #{}, #{}).
get_account_status(Client, QueryMap, HeadersMap)
when is_map(Client), is_map(QueryMap), is_map(HeadersMap) ->
get_account_status(Client, QueryMap, HeadersMap, []).
get_account_status(Client, QueryMap, HeadersMap, Options0)
when is_map(Client), is_map(QueryMap), is_map(HeadersMap), is_list(Options0) ->
Path = ["/account/status"],
SuccessStatusCode = undefined,
Options = [{send_body_as_binary, false},
{receive_body_as_binary, false}
| Options0],
Headers = [],
Query_ = [],
request(Client, get, Path, Query_, Headers, undefined, Options, SuccessStatusCode).
%% @doc Returns an assessment from Audit Manager.
get_assessment(Client, AssessmentId)
when is_map(Client) ->
get_assessment(Client, AssessmentId, #{}, #{}).
get_assessment(Client, AssessmentId, QueryMap, HeadersMap)
when is_map(Client), is_map(QueryMap), is_map(HeadersMap) ->
get_assessment(Client, AssessmentId, QueryMap, HeadersMap, []).
get_assessment(Client, AssessmentId, QueryMap, HeadersMap, Options0)
when is_map(Client), is_map(QueryMap), is_map(HeadersMap), is_list(Options0) ->
Path = ["/assessments/", aws_util:encode_uri(AssessmentId), ""],
SuccessStatusCode = undefined,
Options = [{send_body_as_binary, false},
{receive_body_as_binary, false}
| Options0],
Headers = [],
Query_ = [],
request(Client, get, Path, Query_, Headers, undefined, Options, SuccessStatusCode).
%% @doc Returns a framework from Audit Manager.
get_assessment_framework(Client, FrameworkId)
when is_map(Client) ->
get_assessment_framework(Client, FrameworkId, #{}, #{}).
get_assessment_framework(Client, FrameworkId, QueryMap, HeadersMap)
when is_map(Client), is_map(QueryMap), is_map(HeadersMap) ->
get_assessment_framework(Client, FrameworkId, QueryMap, HeadersMap, []).
get_assessment_framework(Client, FrameworkId, QueryMap, HeadersMap, Options0)
when is_map(Client), is_map(QueryMap), is_map(HeadersMap), is_list(Options0) ->
Path = ["/assessmentFrameworks/", aws_util:encode_uri(FrameworkId), ""],
SuccessStatusCode = undefined,
Options = [{send_body_as_binary, false},
{receive_body_as_binary, false}
| Options0],
Headers = [],
Query_ = [],
request(Client, get, Path, Query_, Headers, undefined, Options, SuccessStatusCode).
%% @doc Returns the URL of an assessment report in Audit Manager.
get_assessment_report_url(Client, AssessmentId, AssessmentReportId)
when is_map(Client) ->
get_assessment_report_url(Client, AssessmentId, AssessmentReportId, #{}, #{}).
get_assessment_report_url(Client, AssessmentId, AssessmentReportId, QueryMap, HeadersMap)
when is_map(Client), is_map(QueryMap), is_map(HeadersMap) ->
get_assessment_report_url(Client, AssessmentId, AssessmentReportId, QueryMap, HeadersMap, []).
get_assessment_report_url(Client, AssessmentId, AssessmentReportId, QueryMap, HeadersMap, Options0)
when is_map(Client), is_map(QueryMap), is_map(HeadersMap), is_list(Options0) ->
Path = ["/assessments/", aws_util:encode_uri(AssessmentId), "/reports/", aws_util:encode_uri(AssessmentReportId), "/url"],
SuccessStatusCode = undefined,
Options = [{send_body_as_binary, false},
{receive_body_as_binary, false}
| Options0],
Headers = [],
Query_ = [],
request(Client, get, Path, Query_, Headers, undefined, Options, SuccessStatusCode).
%% @doc Returns a list of changelogs from Audit Manager.
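%%
%% Optional filters and pagination go through the `QueryMap' argument using
%% the binary keys read below; for example (binary values are an assumption,
%% they are passed through as query-string parameters):
%%
%%   QueryMap = #{<<"controlSetId">> => ControlSetId, <<"maxResults">> => <<"50">>},
%%   {ok, _ChangeLogs, _Http} =
%%     aws_auditmanager:get_change_logs(Client, AssessmentId, QueryMap, #{}).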
get_change_logs(Client, AssessmentId)
when is_map(Client) ->
get_change_logs(Client, AssessmentId, #{}, #{}).
get_change_logs(Client, AssessmentId, QueryMap, HeadersMap)
when is_map(Client), is_map(QueryMap), is_map(HeadersMap) ->
get_change_logs(Client, AssessmentId, QueryMap, HeadersMap, []).
get_change_logs(Client, AssessmentId, QueryMap, HeadersMap, Options0)
when is_map(Client), is_map(QueryMap), is_map(HeadersMap), is_list(Options0) ->
Path = ["/assessments/", aws_util:encode_uri(AssessmentId), "/changelogs"],
SuccessStatusCode = undefined,
Options = [{send_body_as_binary, false},
{receive_body_as_binary, false}
| Options0],
Headers = [],
Query0_ =
[
{<<"controlId">>, maps:get(<<"controlId">>, QueryMap, undefined)},
{<<"controlSetId">>, maps:get(<<"controlSetId">>, QueryMap, undefined)},
{<<"maxResults">>, maps:get(<<"maxResults">>, QueryMap, undefined)},
{<<"nextToken">>, maps:get(<<"nextToken">>, QueryMap, undefined)}
],
Query_ = [H || {_, V} = H <- Query0_, V =/= undefined],
request(Client, get, Path, Query_, Headers, undefined, Options, SuccessStatusCode).
%% @doc Returns a control from Audit Manager.
get_control(Client, ControlId)
when is_map(Client) ->
get_control(Client, ControlId, #{}, #{}).
get_control(Client, ControlId, QueryMap, HeadersMap)
when is_map(Client), is_map(QueryMap), is_map(HeadersMap) ->
get_control(Client, ControlId, QueryMap, HeadersMap, []).
get_control(Client, ControlId, QueryMap, HeadersMap, Options0)
when is_map(Client), is_map(QueryMap), is_map(HeadersMap), is_list(Options0) ->
Path = ["/controls/", aws_util:encode_uri(ControlId), ""],
SuccessStatusCode = undefined,
Options = [{send_body_as_binary, false},
{receive_body_as_binary, false}
| Options0],
Headers = [],
Query_ = [],
request(Client, get, Path, Query_, Headers, undefined, Options, SuccessStatusCode).
%% @doc Returns a list of delegations from an audit owner to a delegate.
get_delegations(Client)
when is_map(Client) ->
get_delegations(Client, #{}, #{}).
get_delegations(Client, QueryMap, HeadersMap)
when is_map(Client), is_map(QueryMap), is_map(HeadersMap) ->
get_delegations(Client, QueryMap, HeadersMap, []).
get_delegations(Client, QueryMap, HeadersMap, Options0)
when is_map(Client), is_map(QueryMap), is_map(HeadersMap), is_list(Options0) ->
Path = ["/delegations"],
SuccessStatusCode = undefined,
Options = [{send_body_as_binary, false},
{receive_body_as_binary, false}
| Options0],
Headers = [],
Query0_ =
[
{<<"maxResults">>, maps:get(<<"maxResults">>, QueryMap, undefined)},
{<<"nextToken">>, maps:get(<<"nextToken">>, QueryMap, undefined)}
],
Query_ = [H || {_, V} = H <- Query0_, V =/= undefined],
request(Client, get, Path, Query_, Headers, undefined, Options, SuccessStatusCode).
%% @doc Returns evidence from Audit Manager.
get_evidence(Client, AssessmentId, ControlSetId, EvidenceFolderId, EvidenceId)
when is_map(Client) ->
get_evidence(Client, AssessmentId, ControlSetId, EvidenceFolderId, EvidenceId, #{}, #{}).
get_evidence(Client, AssessmentId, ControlSetId, EvidenceFolderId, EvidenceId, QueryMap, HeadersMap)
when is_map(Client), is_map(QueryMap), is_map(HeadersMap) ->
get_evidence(Client, AssessmentId, ControlSetId, EvidenceFolderId, EvidenceId, QueryMap, HeadersMap, []).
get_evidence(Client, AssessmentId, ControlSetId, EvidenceFolderId, EvidenceId, QueryMap, HeadersMap, Options0)
when is_map(Client), is_map(QueryMap), is_map(HeadersMap), is_list(Options0) ->
Path = ["/assessments/", aws_util:encode_uri(AssessmentId), "/controlSets/", aws_util:encode_uri(ControlSetId), "/evidenceFolders/", aws_util:encode_uri(EvidenceFolderId), "/evidence/", aws_util:encode_uri(EvidenceId), ""],
SuccessStatusCode = undefined,
Options = [{send_body_as_binary, false},
{receive_body_as_binary, false}
| Options0],
Headers = [],
Query_ = [],
request(Client, get, Path, Query_, Headers, undefined, Options, SuccessStatusCode).
%% @doc Returns all evidence from a specified evidence folder in Audit
%% Manager.
get_evidence_by_evidence_folder(Client, AssessmentId, ControlSetId, EvidenceFolderId)
when is_map(Client) ->
get_evidence_by_evidence_folder(Client, AssessmentId, ControlSetId, EvidenceFolderId, #{}, #{}).
get_evidence_by_evidence_folder(Client, AssessmentId, ControlSetId, EvidenceFolderId, QueryMap, HeadersMap)
when is_map(Client), is_map(QueryMap), is_map(HeadersMap) ->
get_evidence_by_evidence_folder(Client, AssessmentId, ControlSetId, EvidenceFolderId, QueryMap, HeadersMap, []).
get_evidence_by_evidence_folder(Client, AssessmentId, ControlSetId, EvidenceFolderId, QueryMap, HeadersMap, Options0)
when is_map(Client), is_map(QueryMap), is_map(HeadersMap), is_list(Options0) ->
Path = ["/assessments/", aws_util:encode_uri(AssessmentId), "/controlSets/", aws_util:encode_uri(ControlSetId), "/evidenceFolders/", aws_util:encode_uri(EvidenceFolderId), "/evidence"],
SuccessStatusCode = undefined,
Options = [{send_body_as_binary, false},
{receive_body_as_binary, false}
| Options0],
Headers = [],
Query0_ =
[
{<<"maxResults">>, maps:get(<<"maxResults">>, QueryMap, undefined)},
{<<"nextToken">>, maps:get(<<"nextToken">>, QueryMap, undefined)}
],
Query_ = [H || {_, V} = H <- Query0_, V =/= undefined],
request(Client, get, Path, Query_, Headers, undefined, Options, SuccessStatusCode).
%% @doc Returns an evidence folder from the specified assessment in Audit
%% Manager.
get_evidence_folder(Client, AssessmentId, ControlSetId, EvidenceFolderId)
when is_map(Client) ->
get_evidence_folder(Client, AssessmentId, ControlSetId, EvidenceFolderId, #{}, #{}).
get_evidence_folder(Client, AssessmentId, ControlSetId, EvidenceFolderId, QueryMap, HeadersMap)
when is_map(Client), is_map(QueryMap), is_map(HeadersMap) ->
get_evidence_folder(Client, AssessmentId, ControlSetId, EvidenceFolderId, QueryMap, HeadersMap, []).
get_evidence_folder(Client, AssessmentId, ControlSetId, EvidenceFolderId, QueryMap, HeadersMap, Options0)
when is_map(Client), is_map(QueryMap), is_map(HeadersMap), is_list(Options0) ->
Path = ["/assessments/", aws_util:encode_uri(AssessmentId), "/controlSets/", aws_util:encode_uri(ControlSetId), "/evidenceFolders/", aws_util:encode_uri(EvidenceFolderId), ""],
SuccessStatusCode = undefined,
Options = [{send_body_as_binary, false},
{receive_body_as_binary, false}
| Options0],
Headers = [],
Query_ = [],
request(Client, get, Path, Query_, Headers, undefined, Options, SuccessStatusCode).
%% @doc Returns the evidence folders from a specified assessment in Audit
%% Manager.
get_evidence_folders_by_assessment(Client, AssessmentId)
when is_map(Client) ->
get_evidence_folders_by_assessment(Client, AssessmentId, #{}, #{}).
get_evidence_folders_by_assessment(Client, AssessmentId, QueryMap, HeadersMap)
when is_map(Client), is_map(QueryMap), is_map(HeadersMap) ->
get_evidence_folders_by_assessment(Client, AssessmentId, QueryMap, HeadersMap, []).
get_evidence_folders_by_assessment(Client, AssessmentId, QueryMap, HeadersMap, Options0)
when is_map(Client), is_map(QueryMap), is_map(HeadersMap), is_list(Options0) ->
Path = ["/assessments/", aws_util:encode_uri(AssessmentId), "/evidenceFolders"],
SuccessStatusCode = undefined,
Options = [{send_body_as_binary, false},
{receive_body_as_binary, false}
| Options0],
Headers = [],
Query0_ =
[
{<<"maxResults">>, maps:get(<<"maxResults">>, QueryMap, undefined)},
{<<"nextToken">>, maps:get(<<"nextToken">>, QueryMap, undefined)}
],
Query_ = [H || {_, V} = H <- Query0_, V =/= undefined],
request(Client, get, Path, Query_, Headers, undefined, Options, SuccessStatusCode).
%% @doc Returns a list of evidence folders that are associated with a
%% specified control in an Audit Manager assessment.
get_evidence_folders_by_assessment_control(Client, AssessmentId, ControlId, ControlSetId)
when is_map(Client) ->
get_evidence_folders_by_assessment_control(Client, AssessmentId, ControlId, ControlSetId, #{}, #{}).
get_evidence_folders_by_assessment_control(Client, AssessmentId, ControlId, ControlSetId, QueryMap, HeadersMap)
when is_map(Client), is_map(QueryMap), is_map(HeadersMap) ->
get_evidence_folders_by_assessment_control(Client, AssessmentId, ControlId, ControlSetId, QueryMap, HeadersMap, []).
get_evidence_folders_by_assessment_control(Client, AssessmentId, ControlId, ControlSetId, QueryMap, HeadersMap, Options0)
when is_map(Client), is_map(QueryMap), is_map(HeadersMap), is_list(Options0) ->
Path = ["/assessments/", aws_util:encode_uri(AssessmentId), "/evidenceFolders-by-assessment-control/", aws_util:encode_uri(ControlSetId), "/", aws_util:encode_uri(ControlId), ""],
SuccessStatusCode = undefined,
Options = [{send_body_as_binary, false},
{receive_body_as_binary, false}
| Options0],
Headers = [],
Query0_ =
[
{<<"maxResults">>, maps:get(<<"maxResults">>, QueryMap, undefined)},
{<<"nextToken">>, maps:get(<<"nextToken">>, QueryMap, undefined)}
],
Query_ = [H || {_, V} = H <- Query0_, V =/= undefined],
request(Client, get, Path, Query_, Headers, undefined, Options, SuccessStatusCode).
%% @doc Gets the latest analytics data for all your current active
%% assessments.
get_insights(Client)
when is_map(Client) ->
get_insights(Client, #{}, #{}).
get_insights(Client, QueryMap, HeadersMap)
when is_map(Client), is_map(QueryMap), is_map(HeadersMap) ->
get_insights(Client, QueryMap, HeadersMap, []).
get_insights(Client, QueryMap, HeadersMap, Options0)
when is_map(Client), is_map(QueryMap), is_map(HeadersMap), is_list(Options0) ->
Path = ["/insights"],
SuccessStatusCode = undefined,
Options = [{send_body_as_binary, false},
{receive_body_as_binary, false}
| Options0],
Headers = [],
Query_ = [],
request(Client, get, Path, Query_, Headers, undefined, Options, SuccessStatusCode).
%% @doc Gets the latest analytics data for a specific active assessment.
get_insights_by_assessment(Client, AssessmentId)
when is_map(Client) ->
get_insights_by_assessment(Client, AssessmentId, #{}, #{}).
get_insights_by_assessment(Client, AssessmentId, QueryMap, HeadersMap)
when is_map(Client), is_map(QueryMap), is_map(HeadersMap) ->
get_insights_by_assessment(Client, AssessmentId, QueryMap, HeadersMap, []).
get_insights_by_assessment(Client, AssessmentId, QueryMap, HeadersMap, Options0)
when is_map(Client), is_map(QueryMap), is_map(HeadersMap), is_list(Options0) ->
Path = ["/insights/assessments/", aws_util:encode_uri(AssessmentId), ""],
SuccessStatusCode = undefined,
Options = [{send_body_as_binary, false},
{receive_body_as_binary, false}
| Options0],
Headers = [],
Query_ = [],
request(Client, get, Path, Query_, Headers, undefined, Options, SuccessStatusCode).
%% @doc Returns the name of the delegated Amazon Web Services administrator
%% account for the organization.
get_organization_admin_account(Client)
when is_map(Client) ->
get_organization_admin_account(Client, #{}, #{}).
get_organization_admin_account(Client, QueryMap, HeadersMap)
when is_map(Client), is_map(QueryMap), is_map(HeadersMap) ->
get_organization_admin_account(Client, QueryMap, HeadersMap, []).
get_organization_admin_account(Client, QueryMap, HeadersMap, Options0)
when is_map(Client), is_map(QueryMap), is_map(HeadersMap), is_list(Options0) ->
Path = ["/account/organizationAdminAccount"],
SuccessStatusCode = undefined,
Options = [{send_body_as_binary, false},
{receive_body_as_binary, false}
| Options0],
Headers = [],
Query_ = [],
request(Client, get, Path, Query_, Headers, undefined, Options, SuccessStatusCode).
%% @doc Returns a list of all of the Amazon Web Services that you can choose
%% to include in your assessment.
%%
%% When you create an assessment, specify which of these services you want to
%% include to narrow the assessment's scope.
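%%
%% A brief sketch; the `serviceMetadata' result key is an assumption drawn
%% from the AWS API shapes. The returned entries are typically used to build
%% the `scope' of a later `create_assessment/2' call:
%%
%%   {ok, #{<<"serviceMetadata">> := Services}, _Http} =
%%     aws_auditmanager:get_services_in_scope(Client).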
get_services_in_scope(Client)
when is_map(Client) ->
get_services_in_scope(Client, #{}, #{}).
get_services_in_scope(Client, QueryMap, HeadersMap)
when is_map(Client), is_map(QueryMap), is_map(HeadersMap) ->
get_services_in_scope(Client, QueryMap, HeadersMap, []).
get_services_in_scope(Client, QueryMap, HeadersMap, Options0)
when is_map(Client), is_map(QueryMap), is_map(HeadersMap), is_list(Options0) ->
Path = ["/services"],
SuccessStatusCode = undefined,
Options = [{send_body_as_binary, false},
{receive_body_as_binary, false}
| Options0],
Headers = [],
Query_ = [],
request(Client, get, Path, Query_, Headers, undefined, Options, SuccessStatusCode).
%% @doc Returns the settings for the specified Amazon Web Services account.
get_settings(Client, Attribute)
when is_map(Client) ->
get_settings(Client, Attribute, #{}, #{}).
get_settings(Client, Attribute, QueryMap, HeadersMap)
when is_map(Client), is_map(QueryMap), is_map(HeadersMap) ->
get_settings(Client, Attribute, QueryMap, HeadersMap, []).
get_settings(Client, Attribute, QueryMap, HeadersMap, Options0)
when is_map(Client), is_map(QueryMap), is_map(HeadersMap), is_list(Options0) ->
Path = ["/settings/", aws_util:encode_uri(Attribute), ""],
SuccessStatusCode = undefined,
Options = [{send_body_as_binary, false},
{receive_body_as_binary, false}
| Options0],
Headers = [],
Query_ = [],
request(Client, get, Path, Query_, Headers, undefined, Options, SuccessStatusCode).
%% @doc Lists the latest analytics data for controls within a specific
%% control domain and a specific active assessment.
%%
%% Control insights are listed only if the control belongs to the control
%% domain and assessment that was specified. Moreover, the control must have
%% collected evidence on the `lastUpdated' date of
%% `controlInsightsByAssessment'. If neither of these conditions are met,
%% no data is listed for that control.
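%%
%% Usage sketch (both identifiers are sent as query-string parameters by the
%% request below):
%%
%%   {ok, _Insights, _Http} =
%%     aws_auditmanager:list_assessment_control_insights_by_control_domain(
%%       Client, AssessmentId, ControlDomainId).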
list_assessment_control_insights_by_control_domain(Client, AssessmentId, ControlDomainId)
when is_map(Client) ->
list_assessment_control_insights_by_control_domain(Client, AssessmentId, ControlDomainId, #{}, #{}).
list_assessment_control_insights_by_control_domain(Client, AssessmentId, ControlDomainId, QueryMap, HeadersMap)
when is_map(Client), is_map(QueryMap), is_map(HeadersMap) ->
list_assessment_control_insights_by_control_domain(Client, AssessmentId, ControlDomainId, QueryMap, HeadersMap, []).
list_assessment_control_insights_by_control_domain(Client, AssessmentId, ControlDomainId, QueryMap, HeadersMap, Options0)
when is_map(Client), is_map(QueryMap), is_map(HeadersMap), is_list(Options0) ->
Path = ["/insights/controls-by-assessment"],
SuccessStatusCode = undefined,
Options = [{send_body_as_binary, false},
{receive_body_as_binary, false}
| Options0],
Headers = [],
Query0_ =
[
{<<"assessmentId">>, AssessmentId},
{<<"controlDomainId">>, ControlDomainId},
{<<"maxResults">>, maps:get(<<"maxResults">>, QueryMap, undefined)},
{<<"nextToken">>, maps:get(<<"nextToken">>, QueryMap, undefined)}
],
Query_ = [H || {_, V} = H <- Query0_, V =/= undefined],
request(Client, get, Path, Query_, Headers, undefined, Options, SuccessStatusCode).
%% @doc Returns a list of sent or received share requests for custom
%% frameworks in Audit Manager.
list_assessment_framework_share_requests(Client, RequestType)
when is_map(Client) ->
list_assessment_framework_share_requests(Client, RequestType, #{}, #{}).
list_assessment_framework_share_requests(Client, RequestType, QueryMap, HeadersMap)
when is_map(Client), is_map(QueryMap), is_map(HeadersMap) ->
list_assessment_framework_share_requests(Client, RequestType, QueryMap, HeadersMap, []).
list_assessment_framework_share_requests(Client, RequestType, QueryMap, HeadersMap, Options0)
when is_map(Client), is_map(QueryMap), is_map(HeadersMap), is_list(Options0) ->
Path = ["/assessmentFrameworkShareRequests"],
SuccessStatusCode = undefined,
Options = [{send_body_as_binary, false},
{receive_body_as_binary, false}
| Options0],
Headers = [],
Query0_ =
[
{<<"maxResults">>, maps:get(<<"maxResults">>, QueryMap, undefined)},
{<<"nextToken">>, maps:get(<<"nextToken">>, QueryMap, undefined)},
{<<"requestType">>, RequestType}
],
Query_ = [H || {_, V} = H <- Query0_, V =/= undefined],
request(Client, get, Path, Query_, Headers, undefined, Options, SuccessStatusCode).
%% @doc Returns a list of the frameworks that are available in the Audit
%% Manager framework library.
list_assessment_frameworks(Client, FrameworkType)
when is_map(Client) ->
list_assessment_frameworks(Client, FrameworkType, #{}, #{}).
list_assessment_frameworks(Client, FrameworkType, QueryMap, HeadersMap)
when is_map(Client), is_map(QueryMap), is_map(HeadersMap) ->
list_assessment_frameworks(Client, FrameworkType, QueryMap, HeadersMap, []).
list_assessment_frameworks(Client, FrameworkType, QueryMap, HeadersMap, Options0)
when is_map(Client), is_map(QueryMap), is_map(HeadersMap), is_list(Options0) ->
Path = ["/assessmentFrameworks"],
SuccessStatusCode = undefined,
Options = [{send_body_as_binary, false},
{receive_body_as_binary, false}
| Options0],
Headers = [],
Query0_ =
[
{<<"frameworkType">>, FrameworkType},
{<<"maxResults">>, maps:get(<<"maxResults">>, QueryMap, undefined)},
{<<"nextToken">>, maps:get(<<"nextToken">>, QueryMap, undefined)}
],
Query_ = [H || {_, V} = H <- Query0_, V =/= undefined],
request(Client, get, Path, Query_, Headers, undefined, Options, SuccessStatusCode).
%% @doc Returns a list of assessment reports created in Audit Manager.
list_assessment_reports(Client)
when is_map(Client) ->
list_assessment_reports(Client, #{}, #{}).
list_assessment_reports(Client, QueryMap, HeadersMap)
when is_map(Client), is_map(QueryMap), is_map(HeadersMap) ->
list_assessment_reports(Client, QueryMap, HeadersMap, []).
list_assessment_reports(Client, QueryMap, HeadersMap, Options0)
when is_map(Client), is_map(QueryMap), is_map(HeadersMap), is_list(Options0) ->
Path = ["/assessmentReports"],
SuccessStatusCode = undefined,
Options = [{send_body_as_binary, false},
{receive_body_as_binary, false}
| Options0],
Headers = [],
Query0_ =
[
{<<"maxResults">>, maps:get(<<"maxResults">>, QueryMap, undefined)},
{<<"nextToken">>, maps:get(<<"nextToken">>, QueryMap, undefined)}
],
Query_ = [H || {_, V} = H <- Query0_, V =/= undefined],
request(Client, get, Path, Query_, Headers, undefined, Options, SuccessStatusCode).
%% @doc Returns a list of current and past assessments from Audit Manager.
list_assessments(Client)
when is_map(Client) ->
list_assessments(Client, #{}, #{}).
list_assessments(Client, QueryMap, HeadersMap)
when is_map(Client), is_map(QueryMap), is_map(HeadersMap) ->
list_assessments(Client, QueryMap, HeadersMap, []).
list_assessments(Client, QueryMap, HeadersMap, Options0)
when is_map(Client), is_map(QueryMap), is_map(HeadersMap), is_list(Options0) ->
Path = ["/assessments"],
SuccessStatusCode = undefined,
Options = [{send_body_as_binary, false},
{receive_body_as_binary, false}
| Options0],
Headers = [],
Query0_ =
[
{<<"maxResults">>, maps:get(<<"maxResults">>, QueryMap, undefined)},
{<<"nextToken">>, maps:get(<<"nextToken">>, QueryMap, undefined)},
{<<"status">>, maps:get(<<"status">>, QueryMap, undefined)}
],
Query_ = [H || {_, V} = H <- Query0_, V =/= undefined],
request(Client, get, Path, Query_, Headers, undefined, Options, SuccessStatusCode).
%% @doc Lists the latest analytics data for control domains across all of
%% your active assessments.
%%
%% A control domain is listed only if at least one of the controls within
%% that domain collected evidence on the `lastUpdated' date of
%% `controlDomainInsights'. If this condition isn’t met, no data is
%% listed for that control domain.
list_control_domain_insights(Client)
when is_map(Client) ->
list_control_domain_insights(Client, #{}, #{}).
list_control_domain_insights(Client, QueryMap, HeadersMap)
when is_map(Client), is_map(QueryMap), is_map(HeadersMap) ->
list_control_domain_insights(Client, QueryMap, HeadersMap, []).
list_control_domain_insights(Client, QueryMap, HeadersMap, Options0)
when is_map(Client), is_map(QueryMap), is_map(HeadersMap), is_list(Options0) ->
Path = ["/insights/control-domains"],
SuccessStatusCode = undefined,
Options = [{send_body_as_binary, false},
{receive_body_as_binary, false}
| Options0],
Headers = [],
Query0_ =
[
{<<"maxResults">>, maps:get(<<"maxResults">>, QueryMap, undefined)},
{<<"nextToken">>, maps:get(<<"nextToken">>, QueryMap, undefined)}
],
Query_ = [H || {_, V} = H <- Query0_, V =/= undefined],
request(Client, get, Path, Query_, Headers, undefined, Options, SuccessStatusCode).
%% @doc Lists analytics data for control domains within a specified active
%% assessment.
%%
%% A control domain is listed only if at least one of the controls within
%% that domain collected evidence on the `lastUpdated' date of
%% `controlDomainInsights'. If this condition isn’t met, no data is
%% listed for that domain.
list_control_domain_insights_by_assessment(Client, AssessmentId)
when is_map(Client) ->
list_control_domain_insights_by_assessment(Client, AssessmentId, #{}, #{}).
list_control_domain_insights_by_assessment(Client, AssessmentId, QueryMap, HeadersMap)
when is_map(Client), is_map(QueryMap), is_map(HeadersMap) ->
list_control_domain_insights_by_assessment(Client, AssessmentId, QueryMap, HeadersMap, []).
list_control_domain_insights_by_assessment(Client, AssessmentId, QueryMap, HeadersMap, Options0)
when is_map(Client), is_map(QueryMap), is_map(HeadersMap), is_list(Options0) ->
Path = ["/insights/control-domains-by-assessment"],
SuccessStatusCode = undefined,
Options = [{send_body_as_binary, false},
{receive_body_as_binary, false}
| Options0],
Headers = [],
Query0_ =
[
{<<"assessmentId">>, AssessmentId},
{<<"maxResults">>, maps:get(<<"maxResults">>, QueryMap, undefined)},
{<<"nextToken">>, maps:get(<<"nextToken">>, QueryMap, undefined)}
],
Query_ = [H || {_, V} = H <- Query0_, V =/= undefined],
request(Client, get, Path, Query_, Headers, undefined, Options, SuccessStatusCode).
%% @doc Lists the latest analytics data for controls within a specific
%% control domain across all active assessments.
%%
%% Control insights are listed only if the control belongs to the control
%% domain that was specified and the control collected evidence on the
%% `lastUpdated' date of `controlInsightsMetadata'. If neither of
%% these conditions are met, no data is listed for that control.
list_control_insights_by_control_domain(Client, ControlDomainId)
when is_map(Client) ->
list_control_insights_by_control_domain(Client, ControlDomainId, #{}, #{}).
list_control_insights_by_control_domain(Client, ControlDomainId, QueryMap, HeadersMap)
when is_map(Client), is_map(QueryMap), is_map(HeadersMap) ->
list_control_insights_by_control_domain(Client, ControlDomainId, QueryMap, HeadersMap, []).
list_control_insights_by_control_domain(Client, ControlDomainId, QueryMap, HeadersMap, Options0)
when is_map(Client), is_map(QueryMap), is_map(HeadersMap), is_list(Options0) ->
Path = ["/insights/controls"],
SuccessStatusCode = undefined,
Options = [{send_body_as_binary, false},
{receive_body_as_binary, false}
| Options0],
Headers = [],
Query0_ =
[
{<<"controlDomainId">>, ControlDomainId},
{<<"maxResults">>, maps:get(<<"maxResults">>, QueryMap, undefined)},
{<<"nextToken">>, maps:get(<<"nextToken">>, QueryMap, undefined)}
],
Query_ = [H || {_, V} = H <- Query0_, V =/= undefined],
request(Client, get, Path, Query_, Headers, undefined, Options, SuccessStatusCode).
%% @doc Returns a list of controls from Audit Manager.
list_controls(Client, ControlType)
when is_map(Client) ->
list_controls(Client, ControlType, #{}, #{}).
list_controls(Client, ControlType, QueryMap, HeadersMap)
when is_map(Client), is_map(QueryMap), is_map(HeadersMap) ->
list_controls(Client, ControlType, QueryMap, HeadersMap, []).
list_controls(Client, ControlType, QueryMap, HeadersMap, Options0)
when is_map(Client), is_map(QueryMap), is_map(HeadersMap), is_list(Options0) ->
Path = ["/controls"],
SuccessStatusCode = undefined,
Options = [{send_body_as_binary, false},
{receive_body_as_binary, false}
| Options0],
Headers = [],
Query0_ =
[
{<<"controlType">>, ControlType},
{<<"maxResults">>, maps:get(<<"maxResults">>, QueryMap, undefined)},
{<<"nextToken">>, maps:get(<<"nextToken">>, QueryMap, undefined)}
],
Query_ = [H || {_, V} = H <- Query0_, V =/= undefined],
request(Client, get, Path, Query_, Headers, undefined, Options, SuccessStatusCode).
%% @doc Returns a list of keywords that are pre-mapped to the specified
%% control data source.
list_keywords_for_data_source(Client, Source)
when is_map(Client) ->
list_keywords_for_data_source(Client, Source, #{}, #{}).
list_keywords_for_data_source(Client, Source, QueryMap, HeadersMap)
when is_map(Client), is_map(QueryMap), is_map(HeadersMap) ->
list_keywords_for_data_source(Client, Source, QueryMap, HeadersMap, []).
list_keywords_for_data_source(Client, Source, QueryMap, HeadersMap, Options0)
when is_map(Client), is_map(QueryMap), is_map(HeadersMap), is_list(Options0) ->
Path = ["/dataSourceKeywords"],
SuccessStatusCode = undefined,
Options = [{send_body_as_binary, false},
{receive_body_as_binary, false}
| Options0],
Headers = [],
Query0_ =
[
{<<"maxResults">>, maps:get(<<"maxResults">>, QueryMap, undefined)},
{<<"nextToken">>, maps:get(<<"nextToken">>, QueryMap, undefined)},
{<<"source">>, Source}
],
Query_ = [H || {_, V} = H <- Query0_, V =/= undefined],
request(Client, get, Path, Query_, Headers, undefined, Options, SuccessStatusCode).
%% @doc Returns a list of all Audit Manager notifications.
list_notifications(Client)
when is_map(Client) ->
list_notifications(Client, #{}, #{}).
list_notifications(Client, QueryMap, HeadersMap)
when is_map(Client), is_map(QueryMap), is_map(HeadersMap) ->
list_notifications(Client, QueryMap, HeadersMap, []).
list_notifications(Client, QueryMap, HeadersMap, Options0)
when is_map(Client), is_map(QueryMap), is_map(HeadersMap), is_list(Options0) ->
Path = ["/notifications"],
SuccessStatusCode = undefined,
Options = [{send_body_as_binary, false},
{receive_body_as_binary, false}
| Options0],
Headers = [],
Query0_ =
[
{<<"maxResults">>, maps:get(<<"maxResults">>, QueryMap, undefined)},
{<<"nextToken">>, maps:get(<<"nextToken">>, QueryMap, undefined)}
],
Query_ = [H || {_, V} = H <- Query0_, V =/= undefined],
request(Client, get, Path, Query_, Headers, undefined, Options, SuccessStatusCode).
%% @doc Returns a list of tags for the specified resource in Audit Manager.
list_tags_for_resource(Client, ResourceArn)
when is_map(Client) ->
list_tags_for_resource(Client, ResourceArn, #{}, #{}).
list_tags_for_resource(Client, ResourceArn, QueryMap, HeadersMap)
when is_map(Client), is_map(QueryMap), is_map(HeadersMap) ->
list_tags_for_resource(Client, ResourceArn, QueryMap, HeadersMap, []).
list_tags_for_resource(Client, ResourceArn, QueryMap, HeadersMap, Options0)
when is_map(Client), is_map(QueryMap), is_map(HeadersMap), is_list(Options0) ->
Path = ["/tags/", aws_util:encode_uri(ResourceArn), ""],
SuccessStatusCode = undefined,
Options = [{send_body_as_binary, false},
{receive_body_as_binary, false}
| Options0],
Headers = [],
Query_ = [],
request(Client, get, Path, Query_, Headers, undefined, Options, SuccessStatusCode).
%% @doc Enables Audit Manager for the specified Amazon Web Services account.
register_account(Client, Input) ->
register_account(Client, Input, []).
register_account(Client, Input0, Options0) ->
Method = post,
Path = ["/account/registerAccount"],
SuccessStatusCode = undefined,
Options = [{send_body_as_binary, false},
{receive_body_as_binary, false},
{append_sha256_content_hash, false}
| Options0],
Headers = [],
Input1 = Input0,
CustomHeaders = [],
Input2 = Input1,
Query_ = [],
Input = Input2,
request(Client, Method, Path, Query_, CustomHeaders ++ Headers, Input, Options, SuccessStatusCode).
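
%% Example (sketch): registering the current account. Both fields of the
%% RegisterAccount request (`kmsKey' and `delegatedAdminAccount') are
%% optional, so an empty input map is a valid call; the `status' response
%% key below is assumed from the RegisterAccount API shape.
%%
%%   {ok, Result, _Http} = aws_auditmanager:register_account(Client, #{}),
%%   maps:get(<<"status">>, Result, undefined).
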
%% @doc Enables an Amazon Web Services account within the organization as the
%% delegated administrator for Audit Manager.
register_organization_admin_account(Client, Input) ->
register_organization_admin_account(Client, Input, []).
register_organization_admin_account(Client, Input0, Options0) ->
Method = post,
Path = ["/account/registerOrganizationAdminAccount"],
SuccessStatusCode = undefined,
Options = [{send_body_as_binary, false},
{receive_body_as_binary, false},
{append_sha256_content_hash, false}
| Options0],
Headers = [],
Input1 = Input0,
CustomHeaders = [],
Input2 = Input1,
Query_ = [],
Input = Input2,
request(Client, Method, Path, Query_, CustomHeaders ++ Headers, Input, Options, SuccessStatusCode).
%% @doc Creates a share request for a custom framework in Audit Manager.
%%
%% The share request specifies a recipient and notifies them that a custom
%% framework is available. Recipients have 120 days to accept or decline the
%% request. If no action is taken, the share request expires.
%%
%% When you create a share request, Audit Manager stores a snapshot of your
%% custom framework in the US East (N. Virginia) Amazon Web Services Region.
%% Audit Manager also stores a backup of the same snapshot in the US West
%% (Oregon) Amazon Web Services Region.
%%
%% Audit Manager deletes the snapshot and the backup snapshot when one of the
%% following events occurs:
%%
%% <ul> <li> The sender revokes the share request.
%%
%% </li> <li> The recipient declines the share request.
%%
%% </li> <li> The recipient encounters an error and doesn't successfully
%% accept the share request.
%%
%% </li> <li> The share request expires before the recipient responds to the
%% request.
%%
%% </li> </ul> When a sender resends a share request, the snapshot is
%% replaced with an updated version that corresponds with the latest version
%% of the custom framework.
%%
%% When a recipient accepts a share request, the snapshot is replicated into
%% their Amazon Web Services account under the Amazon Web Services Region
%% that was specified in the share request.
%%
%% When you invoke the `StartAssessmentFrameworkShare' API, you are about
%% to share a custom framework with another Amazon Web Services account. You
%% may not share a custom framework that is derived from a standard framework
%% if the standard framework is designated as not eligible for sharing by
%% Amazon Web Services, unless you have obtained permission to do so from the
%% owner of the standard framework. To learn more about which standard
%% frameworks are eligible for sharing, see Framework sharing eligibility in
%% the Audit Manager User Guide.
start_assessment_framework_share(Client, FrameworkId, Input) ->
start_assessment_framework_share(Client, FrameworkId, Input, []).
start_assessment_framework_share(Client, FrameworkId, Input0, Options0) ->
Method = post,
Path = ["/assessmentFrameworks/", aws_util:encode_uri(FrameworkId), "/shareRequests"],
SuccessStatusCode = undefined,
Options = [{send_body_as_binary, false},
{receive_body_as_binary, false},
{append_sha256_content_hash, false}
| Options0],
Headers = [],
Input1 = Input0,
CustomHeaders = [],
Input2 = Input1,
Query_ = [],
Input = Input2,
request(Client, Method, Path, Query_, CustomHeaders ++ Headers, Input, Options, SuccessStatusCode).
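
%% Example (sketch): sharing a custom framework with another account. The
%% field names follow the StartAssessmentFrameworkShare request syntax; the
%% account ID, Region, and comment are placeholders.
%%
%%   Input = #{<<"destinationAccount">> => <<"111122223333">>,
%%             <<"destinationRegion">> => <<"us-east-1">>,
%%             <<"comment">> => <<"Framework for the Q3 audit">>},
%%   aws_auditmanager:start_assessment_framework_share(Client, FrameworkId, Input).
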
%% @doc Tags the specified resource in Audit Manager.
tag_resource(Client, ResourceArn, Input) ->
tag_resource(Client, ResourceArn, Input, []).
tag_resource(Client, ResourceArn, Input0, Options0) ->
Method = post,
Path = ["/tags/", aws_util:encode_uri(ResourceArn), ""],
SuccessStatusCode = undefined,
Options = [{send_body_as_binary, false},
{receive_body_as_binary, false},
{append_sha256_content_hash, false}
| Options0],
Headers = [],
Input1 = Input0,
CustomHeaders = [],
Input2 = Input1,
Query_ = [],
Input = Input2,
request(Client, Method, Path, Query_, CustomHeaders ++ Headers, Input, Options, SuccessStatusCode).
%% @doc Removes a tag from a resource in Audit Manager.
untag_resource(Client, ResourceArn, Input) ->
untag_resource(Client, ResourceArn, Input, []).
untag_resource(Client, ResourceArn, Input0, Options0) ->
Method = delete,
Path = ["/tags/", aws_util:encode_uri(ResourceArn), ""],
SuccessStatusCode = undefined,
Options = [{send_body_as_binary, false},
{receive_body_as_binary, false},
{append_sha256_content_hash, false}
| Options0],
Headers = [],
Input1 = Input0,
CustomHeaders = [],
Input2 = Input1,
QueryMapping = [
{<<"tagKeys">>, <<"tagKeys">>}
],
{Query_, Input} = aws_request:build_headers(QueryMapping, Input2),
request(Client, Method, Path, Query_, CustomHeaders ++ Headers, Input, Options, SuccessStatusCode).
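
%% Example (sketch): removing tags from a resource. The `tagKeys' entry of
%% the input map is lifted into the query string by the QueryMapping above;
%% how multi-valued query parameters are encoded is delegated to aws_request,
%% and the tag keys shown are placeholders.
%%
%%   Input = #{<<"tagKeys">> => [<<"project">>, <<"owner">>]},
%%   aws_auditmanager:untag_resource(Client, ResourceArn, Input).
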
%% @doc Edits an Audit Manager assessment.
update_assessment(Client, AssessmentId, Input) ->
update_assessment(Client, AssessmentId, Input, []).
update_assessment(Client, AssessmentId, Input0, Options0) ->
Method = put,
Path = ["/assessments/", aws_util:encode_uri(AssessmentId), ""],
SuccessStatusCode = undefined,
Options = [{send_body_as_binary, false},
{receive_body_as_binary, false},
{append_sha256_content_hash, false}
| Options0],
Headers = [],
Input1 = Input0,
CustomHeaders = [],
Input2 = Input1,
Query_ = [],
Input = Input2,
request(Client, Method, Path, Query_, CustomHeaders ++ Headers, Input, Options, SuccessStatusCode).
%% @doc Updates a control within an assessment in Audit Manager.
update_assessment_control(Client, AssessmentId, ControlId, ControlSetId, Input) ->
update_assessment_control(Client, AssessmentId, ControlId, ControlSetId, Input, []).
update_assessment_control(Client, AssessmentId, ControlId, ControlSetId, Input0, Options0) ->
Method = put,
Path = ["/assessments/", aws_util:encode_uri(AssessmentId), "/controlSets/", aws_util:encode_uri(ControlSetId), "/controls/", aws_util:encode_uri(ControlId), ""],
SuccessStatusCode = undefined,
Options = [{send_body_as_binary, false},
{receive_body_as_binary, false},
{append_sha256_content_hash, false}
| Options0],
Headers = [],
Input1 = Input0,
CustomHeaders = [],
Input2 = Input1,
Query_ = [],
Input = Input2,
request(Client, Method, Path, Query_, CustomHeaders ++ Headers, Input, Options, SuccessStatusCode).
%% @doc Updates the status of a control set in an Audit Manager assessment.
update_assessment_control_set_status(Client, AssessmentId, ControlSetId, Input) ->
update_assessment_control_set_status(Client, AssessmentId, ControlSetId, Input, []).
update_assessment_control_set_status(Client, AssessmentId, ControlSetId, Input0, Options0) ->
Method = put,
Path = ["/assessments/", aws_util:encode_uri(AssessmentId), "/controlSets/", aws_util:encode_uri(ControlSetId), "/status"],
SuccessStatusCode = undefined,
Options = [{send_body_as_binary, false},
{receive_body_as_binary, false},
{append_sha256_content_hash, false}
| Options0],
Headers = [],
Input1 = Input0,
CustomHeaders = [],
Input2 = Input1,
Query_ = [],
Input = Input2,
request(Client, Method, Path, Query_, CustomHeaders ++ Headers, Input, Options, SuccessStatusCode).
%% @doc Updates a custom framework in Audit Manager.
update_assessment_framework(Client, FrameworkId, Input) ->
update_assessment_framework(Client, FrameworkId, Input, []).
update_assessment_framework(Client, FrameworkId, Input0, Options0) ->
Method = put,
Path = ["/assessmentFrameworks/", aws_util:encode_uri(FrameworkId), ""],
SuccessStatusCode = undefined,
Options = [{send_body_as_binary, false},
{receive_body_as_binary, false},
{append_sha256_content_hash, false}
| Options0],
Headers = [],
Input1 = Input0,
CustomHeaders = [],
Input2 = Input1,
Query_ = [],
Input = Input2,
request(Client, Method, Path, Query_, CustomHeaders ++ Headers, Input, Options, SuccessStatusCode).
%% @doc Updates a share request for a custom framework in Audit Manager.
update_assessment_framework_share(Client, RequestId, Input) ->
update_assessment_framework_share(Client, RequestId, Input, []).
update_assessment_framework_share(Client, RequestId, Input0, Options0) ->
Method = put,
Path = ["/assessmentFrameworkShareRequests/", aws_util:encode_uri(RequestId), ""],
SuccessStatusCode = undefined,
Options = [{send_body_as_binary, false},
{receive_body_as_binary, false},
{append_sha256_content_hash, false}
| Options0],
Headers = [],
Input1 = Input0,
CustomHeaders = [],
Input2 = Input1,
Query_ = [],
Input = Input2,
request(Client, Method, Path, Query_, CustomHeaders ++ Headers, Input, Options, SuccessStatusCode).
%% @doc Updates the status of an assessment in Audit Manager.
update_assessment_status(Client, AssessmentId, Input) ->
update_assessment_status(Client, AssessmentId, Input, []).
update_assessment_status(Client, AssessmentId, Input0, Options0) ->
Method = put,
Path = ["/assessments/", aws_util:encode_uri(AssessmentId), "/status"],
SuccessStatusCode = undefined,
Options = [{send_body_as_binary, false},
{receive_body_as_binary, false},
{append_sha256_content_hash, false}
| Options0],
Headers = [],
Input1 = Input0,
CustomHeaders = [],
Input2 = Input1,
Query_ = [],
Input = Input2,
request(Client, Method, Path, Query_, CustomHeaders ++ Headers, Input, Options, SuccessStatusCode).
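
%% Example (sketch): activating an assessment. The `status' field and its
%% `ACTIVE'/`INACTIVE' values are assumed from the UpdateAssessmentStatus
%% request syntax; `AssessmentId' is a placeholder.
%%
%%   aws_auditmanager:update_assessment_status(Client, AssessmentId,
%%                                             #{<<"status">> => <<"ACTIVE">>}).
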
%% @doc Updates a custom control in Audit Manager.
update_control(Client, ControlId, Input) ->
update_control(Client, ControlId, Input, []).
update_control(Client, ControlId, Input0, Options0) ->
Method = put,
Path = ["/controls/", aws_util:encode_uri(ControlId), ""],
SuccessStatusCode = undefined,
Options = [{send_body_as_binary, false},
{receive_body_as_binary, false},
{append_sha256_content_hash, false}
| Options0],
Headers = [],
Input1 = Input0,
CustomHeaders = [],
Input2 = Input1,
Query_ = [],
Input = Input2,
request(Client, Method, Path, Query_, CustomHeaders ++ Headers, Input, Options, SuccessStatusCode).
%% @doc Updates Audit Manager settings for the current account.
update_settings(Client, Input) ->
update_settings(Client, Input, []).
update_settings(Client, Input0, Options0) ->
Method = put,
Path = ["/settings"],
SuccessStatusCode = undefined,
Options = [{send_body_as_binary, false},
{receive_body_as_binary, false},
{append_sha256_content_hash, false}
| Options0],
Headers = [],
Input1 = Input0,
CustomHeaders = [],
Input2 = Input1,
Query_ = [],
Input = Input2,
request(Client, Method, Path, Query_, CustomHeaders ++ Headers, Input, Options, SuccessStatusCode).
%% @doc Validates the integrity of an assessment report in Audit Manager.
validate_assessment_report_integrity(Client, Input) ->
validate_assessment_report_integrity(Client, Input, []).
validate_assessment_report_integrity(Client, Input0, Options0) ->
Method = post,
Path = ["/assessmentReports/integrity"],
SuccessStatusCode = undefined,
Options = [{send_body_as_binary, false},
{receive_body_as_binary, false},
{append_sha256_content_hash, false}
| Options0],
Headers = [],
Input1 = Input0,
CustomHeaders = [],
Input2 = Input1,
Query_ = [],
Input = Input2,
request(Client, Method, Path, Query_, CustomHeaders ++ Headers, Input, Options, SuccessStatusCode).
%%====================================================================
%% Internal functions
%%====================================================================
-spec request(aws_client:aws_client(), atom(), iolist(), list(),
list(), map() | undefined, list(), pos_integer() | undefined) ->
{ok, {integer(), list()}} |
{ok, Result, {integer(), list(), hackney:client()}} |
{error, Error, {integer(), list(), hackney:client()}} |
{error, term()} when
Result :: map(),
Error :: map().
request(Client, Method, Path, Query, Headers0, Input, Options, SuccessStatusCode) ->
RequestFun = fun() -> do_request(Client, Method, Path, Query, Headers0, Input, Options, SuccessStatusCode) end,
aws_request:request(RequestFun, Options).
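
%% Example (sketch): every exported operation funnels through request/8, so
%% callers can match on the return shapes documented in the -spec above.
%% The body keys depend on the operation; get_account_status/1 is used here
%% only as an illustration.
%%
%%   case aws_auditmanager:get_account_status(Client) of
%%       {ok, Body, {_Status, _Headers, _HackneyRef}} -> {ok, Body};
%%       {ok, {_Status, _Headers}} -> ok;
%%       {error, Reason, {_Status, _Headers, _HackneyRef}} -> {error, Reason};
%%       {error, Reason} -> {error, Reason}
%%   end.
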
do_request(Client, Method, Path, Query, Headers0, Input, Options, SuccessStatusCode) ->
Client1 = Client#{service => <<"auditmanager">>},
Host = build_host(<<"auditmanager">>, Client1),
URL0 = build_url(Host, Path, Client1),
URL = aws_request:add_query(URL0, Query),
AdditionalHeaders1 = [ {<<"Host">>, Host}
, {<<"Content-Type">>, <<"application/x-amz-json-1.1">>}
],
Payload =
case proplists:get_value(send_body_as_binary, Options) of
true ->
maps:get(<<"Body">>, Input, <<"">>);
false ->
encode_payload(Input)
end,
AdditionalHeaders = case proplists:get_value(append_sha256_content_hash, Options, false) of
true ->
add_checksum_hash_header(AdditionalHeaders1, Payload);
false ->
AdditionalHeaders1
end,
Headers1 = aws_request:add_headers(AdditionalHeaders, Headers0),
MethodBin = aws_request:method_to_binary(Method),
SignedHeaders = aws_request:sign_request(Client1, MethodBin, URL, Headers1, Payload),
Response = hackney:request(Method, URL, SignedHeaders, Payload, Options),
DecodeBody = not proplists:get_value(receive_body_as_binary, Options),
handle_response(Response, SuccessStatusCode, DecodeBody).
add_checksum_hash_header(Headers, Body) ->
[ {<<"X-Amz-CheckSum-SHA256">>, base64:encode(crypto:hash(sha256, Body))}
| Headers
].
handle_response({ok, StatusCode, ResponseHeaders}, SuccessStatusCode, _DecodeBody)
when StatusCode =:= 200;
StatusCode =:= 202;
StatusCode =:= 204;
StatusCode =:= 206;
StatusCode =:= SuccessStatusCode ->
{ok, {StatusCode, ResponseHeaders}};
handle_response({ok, StatusCode, ResponseHeaders}, _, _DecodeBody) ->
{error, {StatusCode, ResponseHeaders}};
handle_response({ok, StatusCode, ResponseHeaders, Client}, SuccessStatusCode, DecodeBody)
when StatusCode =:= 200;
StatusCode =:= 202;
StatusCode =:= 204;
StatusCode =:= 206;
StatusCode =:= SuccessStatusCode ->
case hackney:body(Client) of
{ok, <<>>} when StatusCode =:= 200;
StatusCode =:= SuccessStatusCode ->
{ok, #{}, {StatusCode, ResponseHeaders, Client}};
{ok, Body} ->
Result = case DecodeBody of
true ->
try
jsx:decode(Body)
catch
Error:Reason:Stack ->
erlang:raise(error, {body_decode_failed, Error, Reason, StatusCode, Body}, Stack)
end;
false -> #{<<"Body">> => Body}
end,
{ok, Result, {StatusCode, ResponseHeaders, Client}}
end;
handle_response({ok, StatusCode, _ResponseHeaders, _Client}, _, _DecodeBody)
when StatusCode =:= 503 ->
%% Retriable error if retries are enabled
{error, service_unavailable};
handle_response({ok, StatusCode, ResponseHeaders, Client}, _, _DecodeBody) ->
{ok, Body} = hackney:body(Client),
try
DecodedError = jsx:decode(Body),
{error, DecodedError, {StatusCode, ResponseHeaders, Client}}
catch
Error:Reason:Stack ->
erlang:raise(error, {body_decode_failed, Error, Reason, StatusCode, Body}, Stack)
end;
handle_response({error, Reason}, _, _DecodeBody) ->
{error, Reason}.
build_host(_EndpointPrefix, #{region := <<"local">>, endpoint := Endpoint}) ->
Endpoint;
build_host(_EndpointPrefix, #{region := <<"local">>}) ->
<<"localhost">>;
build_host(EndpointPrefix, #{region := Region, endpoint := Endpoint}) ->
aws_util:binary_join([EndpointPrefix, Region, Endpoint], <<".">>).
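
%% Examples (derived from the clauses above):
%%   build_host(<<"auditmanager">>, #{region => <<"local">>}) -> <<"localhost">>
%%   build_host(<<"auditmanager">>, #{region => <<"eu-west-1">>,
%%                                    endpoint => <<"amazonaws.com">>})
%%     -> <<"auditmanager.eu-west-1.amazonaws.com">>
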
build_url(Host, Path0, Client) ->
Proto = aws_client:proto(Client),
Path = erlang:iolist_to_binary(Path0),
Port = aws_client:port(Client),
aws_util:binary_join([Proto, <<"://">>, Host, <<":">>, Port, Path], <<"">>).
-spec encode_payload(undefined | map()) -> binary().
encode_payload(undefined) ->
<<>>;
encode_payload(Input) ->
jsx:encode(Input).
| null | https://raw.githubusercontent.com/aws-beam/aws-erlang/699287cee7dfc9dc8c08ced5f090dcc192c9cba8/src/aws_auditmanager.erl | erlang | WARNING: DO NOT EDIT, AUTO-GENERATED CODE!
@doc Welcome to the Audit Manager API reference.
This guide is for developers who need detailed information about the Audit
Manager API operations, data types, and errors.
Audit Manager is a service that provides automated evidence collection so
it to assess the effectiveness of your controls, manage risk, and simplify
compliance.
Audit Manager provides prebuilt frameworks that structure and automate
assessments for a given compliance standard. Frameworks include a prebuilt
collection of controls with descriptions and testing procedures. These
controls are grouped according to the requirements of the specified
compliance standard or regulation. You can also customize frameworks and
controls to support internal audits with specific requirements.
Use the following links to get started with the Audit Manager API:
<ul> <li> Actions: An alphabetical list of all Audit Manager API
operations.
</li> <li> Data types: An alphabetical list of all Audit Manager data
types.
</li> <li> Common parameters: Parameters that all operations can use.
</li> <li> Common errors: Client and server errors that all operations can
return.
</li> </ul> If you're new to Audit Manager, we recommend that you
review the Audit Manager User Guide.
====================================================================
API
====================================================================
@doc Associates an evidence folder to an assessment report in an Audit
Manager assessment.
@doc Associates a list of evidence to an assessment report in an Audit
Manager assessment.
@doc Creates a batch of delegations for an assessment in Audit Manager.
@doc Deletes a batch of delegations for an assessment in Audit Manager.
Manager.
Manager assessment.
You must upload manual evidence to your S3 bucket before you can upload it
The following restrictions apply to this action:
</li> <li> Supported file formats: See Supported file types for manual
evidence in the Audit Manager User Guide
</li> </ul> For more information about Audit Manager service restrictions,
see Quotas and restrictions for Audit Manager.
@doc Creates an assessment in Audit Manager.
@doc Creates a custom framework in Audit Manager.
@doc Creates an assessment report for the specified assessment.
@doc Creates a new custom control in Audit Manager.
@doc Deletes an assessment in Audit Manager.
@doc Deletes a custom framework in Audit Manager.
@doc Deletes a share request for a custom framework in Audit Manager.
@doc Deletes an assessment report in Audit Manager.
When you run the `DeleteAssessmentReport' operation, Audit Manager
attempts to delete the following data:
<ol> <li> The specified assessment report that’s stored in your S3 bucket
</li> <li> The associated metadata that’s stored in Audit Manager
</li> </ol> If Audit Manager can’t access the assessment report in your S3
bucket, the report isn’t deleted. In this event, the
`DeleteAssessmentReport' operation doesn’t fail. Instead, it proceeds
to delete the associated metadata only. You must then delete the
assessment report from the S3 bucket yourself.
that your S3 bucket is available, and that you configured the correct
permissions for Audit Manager to delete resources in your S3 bucket. For
an example permissions policy that you can use, see Assessment report
destination permissions in the Audit Manager User Guide. For information
Simple Storage Service API Reference.
@doc Deletes a custom control in Audit Manager.
your preferred data retention policy. By default, Audit Manager retains
your data. If you want to delete your data, you can use the
Audit Manager User Guide.
administrator for Audit Manager.
When you remove a delegated administrator from your Audit Manager
settings, you continue to have access to the evidence that you previously
collected under that account. This is also the case when you deregister a
delegated administrator from Organizations. However, Audit Manager stops
collecting and attaching evidence to that delegated administrator account
moving forward.
Keep in mind the following cleanup task if you use evidence finder:
Before you use your management account to remove a delegated
administrator, make sure that the current delegated administrator account
evidence finder automatically deletes the event data store that was
created in their account when they enabled evidence finder. If this task
isn’t completed, the event data store remains in their account. In this
case, we recommend that the original delegated administrator goes to
This cleanup task is necessary to ensure that you don't end up with
multiple event data stores. Audit Manager ignores an unused event data
store after you remove or change a delegated administrator account.
However, the unused event data store continues to incur storage costs from
When you deregister a delegated administrator account for Audit Manager,
the data for that account isn’t deleted. If you want to delete resource
data for a delegated administrator account, you must perform that task
separately before you deregister the account. Either, you can do this in
operations that are provided by Audit Manager.
To delete your Audit Manager resource data, see the following
instructions:
<ul> <li> DeleteAssessment (see also: Deleting an assessment in the Audit
Manager User Guide)
</li> <li> DeleteAssessmentFramework (see also: Deleting a custom
framework in the Audit Manager User Guide)
request in the Audit Manager User Guide)
</li> <li> DeleteAssessmentReport (see also: Deleting an assessment report
in the Audit Manager User Guide)
Manager User Guide)
</li> </ul> At this time, Audit Manager doesn't provide an option to
delete evidence for a specific delegated administrator. Instead, when your
management account deregisters Audit Manager, we perform a cleanup for the
current delegated administrator account at the time of deregistration.
in Audit Manager.
@doc Returns the registration status of an account in Audit Manager.
@doc Returns an assessment from Audit Manager.
@doc Returns a framework from Audit Manager.
@doc Returns the URL of an assessment report in Audit Manager.
@doc Returns a list of changelogs from Audit Manager.
@doc Returns a control from Audit Manager.
@doc Returns a list of delegations from an audit owner to a delegate.
@doc Returns evidence from Audit Manager.
@doc Returns all evidence from a specified evidence folder in Audit
Manager.
@doc Returns an evidence folder from the specified assessment in Audit
Manager.
@doc Returns the evidence folders from a specified assessment in Audit
Manager.
@doc Returns a list of evidence folders that are associated with a
specified control in an Audit Manager assessment.
@doc Gets the latest analytics data for all your current active
assessments.
@doc Gets the latest analytics data for a specific active assessment.
account for the organization.
to include in your assessment.
When you create an assessment, specify which of these services you want to
include to narrow the assessment's scope.
@doc Lists the latest analytics data for controls within a specific
control domain and a specific active assessment.
Control insights are listed only if the control belongs to the control
domain and assessment that was specified. Moreover, the control must have
collected evidence on the `lastUpdated' date of
`controlInsightsByAssessment'. If neither of these conditions are met,
no data is listed for that control.
@doc Returns a list of sent or received share requests for custom
frameworks in Audit Manager.
@doc Returns a list of the frameworks that are available in the Audit
Manager framework library.
@doc Returns a list of assessment reports created in Audit Manager.
@doc Returns a list of current and past assessments from Audit Manager.
@doc Lists the latest analytics data for control domains across all of
your active assessments.
that domain collected evidence on the `lastUpdated' date of
`controlDomainInsights'. If this condition isn’t met, no data is
listed for that control domain.
@doc Lists analytics data for control domains within a specified active
assessment.
that domain collected evidence on the `lastUpdated' date of
`controlDomainInsights'. If this condition isn’t met, no data is
listed for that domain.
@doc Lists the latest analytics data for controls within a specific
control domain across all active assessments.
Control insights are listed only if the control belongs to the control
domain that was specified and the control collected evidence on the
`lastUpdated' date of `controlInsightsMetadata'. If neither of
these conditions are met, no data is listed for that control.
@doc Returns a list of controls from Audit Manager.
@doc Returns a list of keywords that are pre-mapped to the specified
control data source.
@doc Returns a list of all Audit Manager notifications.
@doc Returns a list of tags for the specified resource in Audit Manager.
delegated administrator for Audit Manager.
@doc Creates a share request for a custom framework in Audit Manager.
The share request specifies a recipient and notifies them that a custom
request. If no action is taken, the share request expires.
When you create a share request, Audit Manager stores a snapshot of your
following events occurs:
<ul> <li> The sender revokes the share request.
</li> <li> The recipient declines the share request.
</li> <li> The recipient encounters an error and doesn't successfully
accept the share request.
</li> <li> The share request expires before the recipient responds to the
request.
</li> </ul> When a sender resends a share request, the snapshot is
replaced with an updated version that corresponds with the latest version
of the custom framework.
When a recipient accepts a share request, the snapshot is replicated into
that was specified in the share request.
When you invoke the `StartAssessmentFrameworkShare' API, you are about
may not share a custom framework that is derived from a standard framework
if the standard framework is designated as not eligible for sharing by
owner of the standard framework. To learn more about which standard
frameworks are eligible for sharing, see Framework sharing eligibility in
the Audit Manager User Guide.
@doc Tags the specified resource in Audit Manager.
@doc Removes a tag from a resource in Audit Manager.
@doc Edits an Audit Manager assessment.
@doc Updates a control within an assessment in Audit Manager.
@doc Updates the status of a control set in an Audit Manager assessment.
@doc Updates a custom framework in Audit Manager.
@doc Updates a share request for a custom framework in Audit Manager.
@doc Updates the status of an assessment in Audit Manager.
@doc Updates a custom control in Audit Manager.
@doc Updates Audit Manager settings for the current account.
@doc Validates the integrity of an assessment report in Audit Manager.
====================================================================
==================================================================== | See -beam/aws-codegen for more details .
that you can continually audit your Amazon Web Services usage . You can use
-module(aws_auditmanager).
-export([associate_assessment_report_evidence_folder/3,
associate_assessment_report_evidence_folder/4,
batch_associate_assessment_report_evidence/3,
batch_associate_assessment_report_evidence/4,
batch_create_delegation_by_assessment/3,
batch_create_delegation_by_assessment/4,
batch_delete_delegation_by_assessment/3,
batch_delete_delegation_by_assessment/4,
batch_disassociate_assessment_report_evidence/3,
batch_disassociate_assessment_report_evidence/4,
batch_import_evidence_to_assessment_control/5,
batch_import_evidence_to_assessment_control/6,
create_assessment/2,
create_assessment/3,
create_assessment_framework/2,
create_assessment_framework/3,
create_assessment_report/3,
create_assessment_report/4,
create_control/2,
create_control/3,
delete_assessment/3,
delete_assessment/4,
delete_assessment_framework/3,
delete_assessment_framework/4,
delete_assessment_framework_share/3,
delete_assessment_framework_share/4,
delete_assessment_report/4,
delete_assessment_report/5,
delete_control/3,
delete_control/4,
deregister_account/2,
deregister_account/3,
deregister_organization_admin_account/2,
deregister_organization_admin_account/3,
disassociate_assessment_report_evidence_folder/3,
disassociate_assessment_report_evidence_folder/4,
get_account_status/1,
get_account_status/3,
get_account_status/4,
get_assessment/2,
get_assessment/4,
get_assessment/5,
get_assessment_framework/2,
get_assessment_framework/4,
get_assessment_framework/5,
get_assessment_report_url/3,
get_assessment_report_url/5,
get_assessment_report_url/6,
get_change_logs/2,
get_change_logs/4,
get_change_logs/5,
get_control/2,
get_control/4,
get_control/5,
get_delegations/1,
get_delegations/3,
get_delegations/4,
get_evidence/5,
get_evidence/7,
get_evidence/8,
get_evidence_by_evidence_folder/4,
get_evidence_by_evidence_folder/6,
get_evidence_by_evidence_folder/7,
get_evidence_folder/4,
get_evidence_folder/6,
get_evidence_folder/7,
get_evidence_folders_by_assessment/2,
get_evidence_folders_by_assessment/4,
get_evidence_folders_by_assessment/5,
get_evidence_folders_by_assessment_control/4,
get_evidence_folders_by_assessment_control/6,
get_evidence_folders_by_assessment_control/7,
get_insights/1,
get_insights/3,
get_insights/4,
get_insights_by_assessment/2,
get_insights_by_assessment/4,
get_insights_by_assessment/5,
get_organization_admin_account/1,
get_organization_admin_account/3,
get_organization_admin_account/4,
get_services_in_scope/1,
get_services_in_scope/3,
get_services_in_scope/4,
get_settings/2,
get_settings/4,
get_settings/5,
list_assessment_control_insights_by_control_domain/3,
list_assessment_control_insights_by_control_domain/5,
list_assessment_control_insights_by_control_domain/6,
list_assessment_framework_share_requests/2,
list_assessment_framework_share_requests/4,
list_assessment_framework_share_requests/5,
list_assessment_frameworks/2,
list_assessment_frameworks/4,
list_assessment_frameworks/5,
list_assessment_reports/1,
list_assessment_reports/3,
list_assessment_reports/4,
list_assessments/1,
list_assessments/3,
list_assessments/4,
list_control_domain_insights/1,
list_control_domain_insights/3,
list_control_domain_insights/4,
list_control_domain_insights_by_assessment/2,
list_control_domain_insights_by_assessment/4,
list_control_domain_insights_by_assessment/5,
list_control_insights_by_control_domain/2,
list_control_insights_by_control_domain/4,
list_control_insights_by_control_domain/5,
list_controls/2,
list_controls/4,
list_controls/5,
list_keywords_for_data_source/2,
list_keywords_for_data_source/4,
list_keywords_for_data_source/5,
list_notifications/1,
list_notifications/3,
list_notifications/4,
list_tags_for_resource/2,
list_tags_for_resource/4,
list_tags_for_resource/5,
register_account/2,
register_account/3,
register_organization_admin_account/2,
register_organization_admin_account/3,
start_assessment_framework_share/3,
start_assessment_framework_share/4,
tag_resource/3,
tag_resource/4,
untag_resource/3,
untag_resource/4,
update_assessment/3,
update_assessment/4,
update_assessment_control/5,
update_assessment_control/6,
update_assessment_control_set_status/4,
update_assessment_control_set_status/5,
update_assessment_framework/3,
update_assessment_framework/4,
update_assessment_framework_share/3,
update_assessment_framework_share/4,
update_assessment_status/3,
update_assessment_status/4,
update_control/3,
update_control/4,
update_settings/2,
update_settings/3,
validate_assessment_report_integrity/2,
validate_assessment_report_integrity/3]).
-include_lib("hackney/include/hackney_lib.hrl").
associate_assessment_report_evidence_folder(Client, AssessmentId, Input) ->
associate_assessment_report_evidence_folder(Client, AssessmentId, Input, []).
associate_assessment_report_evidence_folder(Client, AssessmentId, Input0, Options0) ->
Method = put,
Path = ["/assessments/", aws_util:encode_uri(AssessmentId), "/associateToAssessmentReport"],
SuccessStatusCode = undefined,
Options = [{send_body_as_binary, false},
{receive_body_as_binary, false},
{append_sha256_content_hash, false}
| Options0],
Headers = [],
Input1 = Input0,
CustomHeaders = [],
Input2 = Input1,
Query_ = [],
Input = Input2,
request(Client, Method, Path, Query_, CustomHeaders ++ Headers, Input, Options, SuccessStatusCode).
batch_associate_assessment_report_evidence(Client, AssessmentId, Input) ->
batch_associate_assessment_report_evidence(Client, AssessmentId, Input, []).
batch_associate_assessment_report_evidence(Client, AssessmentId, Input0, Options0) ->
Method = put,
Path = ["/assessments/", aws_util:encode_uri(AssessmentId), "/batchAssociateToAssessmentReport"],
SuccessStatusCode = undefined,
Options = [{send_body_as_binary, false},
{receive_body_as_binary, false},
{append_sha256_content_hash, false}
| Options0],
Headers = [],
Input1 = Input0,
CustomHeaders = [],
Input2 = Input1,
Query_ = [],
Input = Input2,
request(Client, Method, Path, Query_, CustomHeaders ++ Headers, Input, Options, SuccessStatusCode).
batch_create_delegation_by_assessment(Client, AssessmentId, Input) ->
batch_create_delegation_by_assessment(Client, AssessmentId, Input, []).
batch_create_delegation_by_assessment(Client, AssessmentId, Input0, Options0) ->
Method = post,
Path = ["/assessments/", aws_util:encode_uri(AssessmentId), "/delegations"],
SuccessStatusCode = undefined,
Options = [{send_body_as_binary, false},
{receive_body_as_binary, false},
{append_sha256_content_hash, false}
| Options0],
Headers = [],
Input1 = Input0,
CustomHeaders = [],
Input2 = Input1,
Query_ = [],
Input = Input2,
request(Client, Method, Path, Query_, CustomHeaders ++ Headers, Input, Options, SuccessStatusCode).
batch_delete_delegation_by_assessment(Client, AssessmentId, Input) ->
batch_delete_delegation_by_assessment(Client, AssessmentId, Input, []).
batch_delete_delegation_by_assessment(Client, AssessmentId, Input0, Options0) ->
Method = put,
Path = ["/assessments/", aws_util:encode_uri(AssessmentId), "/delegations"],
SuccessStatusCode = undefined,
Options = [{send_body_as_binary, false},
{receive_body_as_binary, false},
{append_sha256_content_hash, false}
| Options0],
Headers = [],
Input1 = Input0,
CustomHeaders = [],
Input2 = Input1,
Query_ = [],
Input = Input2,
request(Client, Method, Path, Query_, CustomHeaders ++ Headers, Input, Options, SuccessStatusCode).
@doc Disassociates a list of evidence from an assessment report in Audit
batch_disassociate_assessment_report_evidence(Client, AssessmentId, Input) ->
batch_disassociate_assessment_report_evidence(Client, AssessmentId, Input, []).
batch_disassociate_assessment_report_evidence(Client, AssessmentId, Input0, Options0) ->
Method = put,
Path = ["/assessments/", aws_util:encode_uri(AssessmentId), "/batchDisassociateFromAssessmentReport"],
SuccessStatusCode = undefined,
Options = [{send_body_as_binary, false},
{receive_body_as_binary, false},
{append_sha256_content_hash, false}
| Options0],
Headers = [],
Input1 = Input0,
CustomHeaders = [],
Input2 = Input1,
Query_ = [],
Input = Input2,
request(Client, Method, Path, Query_, CustomHeaders ++ Headers, Input, Options, SuccessStatusCode).
@doc Uploads one or more pieces of evidence to a control in an Audit
You can upload manual evidence from any Amazon Simple Storage Service
( Amazon S3 ) bucket by specifying the S3 URI of the evidence .
to your assessment . For instructions , see CreateBucket and PutObject in
the Amazon Simple Storage Service API Reference .
< ul > < li > Maximum size of an individual evidence file : 100 MB
< /li > < li > Number of daily manual evidence uploads per control : 100
batch_import_evidence_to_assessment_control(Client, AssessmentId, ControlId, ControlSetId, Input) ->
batch_import_evidence_to_assessment_control(Client, AssessmentId, ControlId, ControlSetId, Input, []).
batch_import_evidence_to_assessment_control(Client, AssessmentId, ControlId, ControlSetId, Input0, Options0) ->
Method = post,
Path = ["/assessments/", aws_util:encode_uri(AssessmentId), "/controlSets/", aws_util:encode_uri(ControlSetId), "/controls/", aws_util:encode_uri(ControlId), "/evidence"],
SuccessStatusCode = undefined,
Options = [{send_body_as_binary, false},
{receive_body_as_binary, false},
{append_sha256_content_hash, false}
| Options0],
Headers = [],
Input1 = Input0,
CustomHeaders = [],
Input2 = Input1,
Query_ = [],
Input = Input2,
request(Client, Method, Path, Query_, CustomHeaders ++ Headers, Input, Options, SuccessStatusCode).
create_assessment(Client, Input) ->
create_assessment(Client, Input, []).
create_assessment(Client, Input0, Options0) ->
Method = post,
Path = ["/assessments"],
SuccessStatusCode = undefined,
Options = [{send_body_as_binary, false},
{receive_body_as_binary, false},
{append_sha256_content_hash, false}
| Options0],
Headers = [],
Input1 = Input0,
CustomHeaders = [],
Input2 = Input1,
Query_ = [],
Input = Input2,
request(Client, Method, Path, Query_, CustomHeaders ++ Headers, Input, Options, SuccessStatusCode).
create_assessment_framework(Client, Input) ->
create_assessment_framework(Client, Input, []).
create_assessment_framework(Client, Input0, Options0) ->
Method = post,
Path = ["/assessmentFrameworks"],
SuccessStatusCode = undefined,
Options = [{send_body_as_binary, false},
{receive_body_as_binary, false},
{append_sha256_content_hash, false}
| Options0],
Headers = [],
Input1 = Input0,
CustomHeaders = [],
Input2 = Input1,
Query_ = [],
Input = Input2,
request(Client, Method, Path, Query_, CustomHeaders ++ Headers, Input, Options, SuccessStatusCode).
create_assessment_report(Client, AssessmentId, Input) ->
create_assessment_report(Client, AssessmentId, Input, []).
create_assessment_report(Client, AssessmentId, Input0, Options0) ->
Method = post,
Path = ["/assessments/", aws_util:encode_uri(AssessmentId), "/reports"],
SuccessStatusCode = undefined,
Options = [{send_body_as_binary, false},
{receive_body_as_binary, false},
{append_sha256_content_hash, false}
| Options0],
Headers = [],
Input1 = Input0,
CustomHeaders = [],
Input2 = Input1,
Query_ = [],
Input = Input2,
request(Client, Method, Path, Query_, CustomHeaders ++ Headers, Input, Options, SuccessStatusCode).
create_control(Client, Input) ->
create_control(Client, Input, []).
create_control(Client, Input0, Options0) ->
Method = post,
Path = ["/controls"],
SuccessStatusCode = undefined,
Options = [{send_body_as_binary, false},
{receive_body_as_binary, false},
{append_sha256_content_hash, false}
| Options0],
Headers = [],
Input1 = Input0,
CustomHeaders = [],
Input2 = Input1,
Query_ = [],
Input = Input2,
request(Client, Method, Path, Query_, CustomHeaders ++ Headers, Input, Options, SuccessStatusCode).
delete_assessment(Client, AssessmentId, Input) ->
delete_assessment(Client, AssessmentId, Input, []).
delete_assessment(Client, AssessmentId, Input0, Options0) ->
Method = delete,
Path = ["/assessments/", aws_util:encode_uri(AssessmentId), ""],
SuccessStatusCode = undefined,
Options = [{send_body_as_binary, false},
{receive_body_as_binary, false},
{append_sha256_content_hash, false}
| Options0],
Headers = [],
Input1 = Input0,
CustomHeaders = [],
Input2 = Input1,
Query_ = [],
Input = Input2,
request(Client, Method, Path, Query_, CustomHeaders ++ Headers, Input, Options, SuccessStatusCode).
delete_assessment_framework(Client, FrameworkId, Input) ->
delete_assessment_framework(Client, FrameworkId, Input, []).
delete_assessment_framework(Client, FrameworkId, Input0, Options0) ->
Method = delete,
Path = ["/assessmentFrameworks/", aws_util:encode_uri(FrameworkId), ""],
SuccessStatusCode = undefined,
Options = [{send_body_as_binary, false},
{receive_body_as_binary, false},
{append_sha256_content_hash, false}
| Options0],
Headers = [],
Input1 = Input0,
CustomHeaders = [],
Input2 = Input1,
Query_ = [],
Input = Input2,
request(Client, Method, Path, Query_, CustomHeaders ++ Headers, Input, Options, SuccessStatusCode).
delete_assessment_framework_share(Client, RequestId, Input) ->
delete_assessment_framework_share(Client, RequestId, Input, []).
delete_assessment_framework_share(Client, RequestId, Input0, Options0) ->
Method = delete,
Path = ["/assessmentFrameworkShareRequests/", aws_util:encode_uri(RequestId), ""],
SuccessStatusCode = undefined,
Options = [{send_body_as_binary, false},
{receive_body_as_binary, false},
{append_sha256_content_hash, false}
| Options0],
Headers = [],
Input1 = Input0,
CustomHeaders = [],
Input2 = Input1,
QueryMapping = [
{<<"requestType">>, <<"requestType">>}
],
{Query_, Input} = aws_request:build_headers(QueryMapping, Input2),
request(Client, Method, Path, Query_, CustomHeaders ++ Headers, Input, Options, SuccessStatusCode).
This scenario happens when Audit Manager receives a ` 403 ( Forbidden ) '
or ` 404 ( Not Found ) ' error from Amazon S3 . To avoid this , make sure
about the issues that could cause a ` 403 ( Forbidden ) ' or ` 404 ( Not
Found ' ) error from Amazon S3 , see List of Error Codes in the Amazon
delete_assessment_report(Client, AssessmentId, AssessmentReportId, Input) ->
delete_assessment_report(Client, AssessmentId, AssessmentReportId, Input, []).
delete_assessment_report(Client, AssessmentId, AssessmentReportId, Input0, Options0) ->
Method = delete,
Path = ["/assessments/", aws_util:encode_uri(AssessmentId), "/reports/", aws_util:encode_uri(AssessmentReportId), ""],
SuccessStatusCode = undefined,
Options = [{send_body_as_binary, false},
{receive_body_as_binary, false},
{append_sha256_content_hash, false}
| Options0],
Headers = [],
Input1 = Input0,
CustomHeaders = [],
Input2 = Input1,
Query_ = [],
Input = Input2,
request(Client, Method, Path, Query_, CustomHeaders ++ Headers, Input, Options, SuccessStatusCode).
delete_control(Client, ControlId, Input) ->
delete_control(Client, ControlId, Input, []).
delete_control(Client, ControlId, Input0, Options0) ->
Method = delete,
Path = ["/controls/", aws_util:encode_uri(ControlId), ""],
SuccessStatusCode = undefined,
Options = [{send_body_as_binary, false},
{receive_body_as_binary, false},
{append_sha256_content_hash, false}
| Options0],
Headers = [],
Input1 = Input0,
CustomHeaders = [],
Input2 = Input1,
Query_ = [],
Input = Input2,
request(Client, Method, Path, Query_, CustomHeaders ++ Headers, Input, Options, SuccessStatusCode).
@doc Deregisters an account in Audit Manager .
Before you deregister , you can use the UpdateSettings API operation to set
` DeregistrationPolicy ' attribute to request the deletion of your data .
For more information about data retention , see Data Protection in the
deregister_account(Client, Input) ->
deregister_account(Client, Input, []).
deregister_account(Client, Input0, Options0) ->
Method = post,
Path = ["/account/deregisterAccount"],
SuccessStatusCode = undefined,
Options = [{send_body_as_binary, false},
{receive_body_as_binary, false},
{append_sha256_content_hash, false}
| Options0],
Headers = [],
Input1 = Input0,
CustomHeaders = [],
Input2 = Input1,
Query_ = [],
Input = Input2,
request(Client, Method, Path, Query_, CustomHeaders ++ Headers, Input, Options, SuccessStatusCode).
@doc Removes the specified Amazon Web Services account as a delegated
signs in to Audit Manager and disables evidence finder first . Disabling
CloudTrail Lake and manually deletes the event data store .
CloudTrail Lake if you do n't delete it .
the Audit Manager console . Or , you can use one of the delete API
< /li > < li > ( see also : Deleting a share
< /li > < li > DeleteControl ( see also : Deleting a custom control in the Audit
deregister_organization_admin_account(Client, Input) ->
deregister_organization_admin_account(Client, Input, []).
deregister_organization_admin_account(Client, Input0, Options0) ->
Method = post,
Path = ["/account/deregisterOrganizationAdminAccount"],
SuccessStatusCode = undefined,
Options = [{send_body_as_binary, false},
{receive_body_as_binary, false},
{append_sha256_content_hash, false}
| Options0],
Headers = [],
Input1 = Input0,
CustomHeaders = [],
Input2 = Input1,
Query_ = [],
Input = Input2,
request(Client, Method, Path, Query_, CustomHeaders ++ Headers, Input, Options, SuccessStatusCode).
@doc Disassociates an evidence folder from the specified assessment report
disassociate_assessment_report_evidence_folder(Client, AssessmentId, Input) ->
disassociate_assessment_report_evidence_folder(Client, AssessmentId, Input, []).
disassociate_assessment_report_evidence_folder(Client, AssessmentId, Input0, Options0) ->
Method = put,
Path = ["/assessments/", aws_util:encode_uri(AssessmentId), "/disassociateFromAssessmentReport"],
SuccessStatusCode = undefined,
Options = [{send_body_as_binary, false},
{receive_body_as_binary, false},
{append_sha256_content_hash, false}
| Options0],
Headers = [],
Input1 = Input0,
CustomHeaders = [],
Input2 = Input1,
Query_ = [],
Input = Input2,
request(Client, Method, Path, Query_, CustomHeaders ++ Headers, Input, Options, SuccessStatusCode).
get_account_status(Client)
when is_map(Client) ->
get_account_status(Client, #{}, #{}).
get_account_status(Client, QueryMap, HeadersMap)
when is_map(Client), is_map(QueryMap), is_map(HeadersMap) ->
get_account_status(Client, QueryMap, HeadersMap, []).
get_account_status(Client, QueryMap, HeadersMap, Options0)
when is_map(Client), is_map(QueryMap), is_map(HeadersMap), is_list(Options0) ->
Path = ["/account/status"],
SuccessStatusCode = undefined,
Options = [{send_body_as_binary, false},
{receive_body_as_binary, false}
| Options0],
Headers = [],
Query_ = [],
request(Client, get, Path, Query_, Headers, undefined, Options, SuccessStatusCode).
get_assessment(Client, AssessmentId)
when is_map(Client) ->
get_assessment(Client, AssessmentId, #{}, #{}).
get_assessment(Client, AssessmentId, QueryMap, HeadersMap)
when is_map(Client), is_map(QueryMap), is_map(HeadersMap) ->
get_assessment(Client, AssessmentId, QueryMap, HeadersMap, []).
get_assessment(Client, AssessmentId, QueryMap, HeadersMap, Options0)
when is_map(Client), is_map(QueryMap), is_map(HeadersMap), is_list(Options0) ->
Path = ["/assessments/", aws_util:encode_uri(AssessmentId), ""],
SuccessStatusCode = undefined,
Options = [{send_body_as_binary, false},
{receive_body_as_binary, false}
| Options0],
Headers = [],
Query_ = [],
request(Client, get, Path, Query_, Headers, undefined, Options, SuccessStatusCode).
get_assessment_framework(Client, FrameworkId)
when is_map(Client) ->
get_assessment_framework(Client, FrameworkId, #{}, #{}).
get_assessment_framework(Client, FrameworkId, QueryMap, HeadersMap)
when is_map(Client), is_map(QueryMap), is_map(HeadersMap) ->
get_assessment_framework(Client, FrameworkId, QueryMap, HeadersMap, []).
get_assessment_framework(Client, FrameworkId, QueryMap, HeadersMap, Options0)
when is_map(Client), is_map(QueryMap), is_map(HeadersMap), is_list(Options0) ->
Path = ["/assessmentFrameworks/", aws_util:encode_uri(FrameworkId), ""],
SuccessStatusCode = undefined,
Options = [{send_body_as_binary, false},
{receive_body_as_binary, false}
| Options0],
Headers = [],
Query_ = [],
request(Client, get, Path, Query_, Headers, undefined, Options, SuccessStatusCode).
get_assessment_report_url(Client, AssessmentId, AssessmentReportId)
when is_map(Client) ->
get_assessment_report_url(Client, AssessmentId, AssessmentReportId, #{}, #{}).
get_assessment_report_url(Client, AssessmentId, AssessmentReportId, QueryMap, HeadersMap)
when is_map(Client), is_map(QueryMap), is_map(HeadersMap) ->
get_assessment_report_url(Client, AssessmentId, AssessmentReportId, QueryMap, HeadersMap, []).
get_assessment_report_url(Client, AssessmentId, AssessmentReportId, QueryMap, HeadersMap, Options0)
when is_map(Client), is_map(QueryMap), is_map(HeadersMap), is_list(Options0) ->
Path = ["/assessments/", aws_util:encode_uri(AssessmentId), "/reports/", aws_util:encode_uri(AssessmentReportId), "/url"],
SuccessStatusCode = undefined,
Options = [{send_body_as_binary, false},
{receive_body_as_binary, false}
| Options0],
Headers = [],
Query_ = [],
request(Client, get, Path, Query_, Headers, undefined, Options, SuccessStatusCode).
get_change_logs(Client, AssessmentId)
when is_map(Client) ->
get_change_logs(Client, AssessmentId, #{}, #{}).
get_change_logs(Client, AssessmentId, QueryMap, HeadersMap)
when is_map(Client), is_map(QueryMap), is_map(HeadersMap) ->
get_change_logs(Client, AssessmentId, QueryMap, HeadersMap, []).
get_change_logs(Client, AssessmentId, QueryMap, HeadersMap, Options0)
when is_map(Client), is_map(QueryMap), is_map(HeadersMap), is_list(Options0) ->
Path = ["/assessments/", aws_util:encode_uri(AssessmentId), "/changelogs"],
SuccessStatusCode = undefined,
Options = [{send_body_as_binary, false},
{receive_body_as_binary, false}
| Options0],
Headers = [],
Query0_ =
[
{<<"controlId">>, maps:get(<<"controlId">>, QueryMap, undefined)},
{<<"controlSetId">>, maps:get(<<"controlSetId">>, QueryMap, undefined)},
{<<"maxResults">>, maps:get(<<"maxResults">>, QueryMap, undefined)},
{<<"nextToken">>, maps:get(<<"nextToken">>, QueryMap, undefined)}
],
Query_ = [H || {_, V} = H <- Query0_, V =/= undefined],
request(Client, get, Path, Query_, Headers, undefined, Options, SuccessStatusCode).
get_control(Client, ControlId)
when is_map(Client) ->
get_control(Client, ControlId, #{}, #{}).
get_control(Client, ControlId, QueryMap, HeadersMap)
when is_map(Client), is_map(QueryMap), is_map(HeadersMap) ->
get_control(Client, ControlId, QueryMap, HeadersMap, []).
get_control(Client, ControlId, QueryMap, HeadersMap, Options0)
when is_map(Client), is_map(QueryMap), is_map(HeadersMap), is_list(Options0) ->
Path = ["/controls/", aws_util:encode_uri(ControlId), ""],
SuccessStatusCode = undefined,
Options = [{send_body_as_binary, false},
{receive_body_as_binary, false}
| Options0],
Headers = [],
Query_ = [],
request(Client, get, Path, Query_, Headers, undefined, Options, SuccessStatusCode).
get_delegations(Client)
when is_map(Client) ->
get_delegations(Client, #{}, #{}).
get_delegations(Client, QueryMap, HeadersMap)
when is_map(Client), is_map(QueryMap), is_map(HeadersMap) ->
get_delegations(Client, QueryMap, HeadersMap, []).
get_delegations(Client, QueryMap, HeadersMap, Options0)
when is_map(Client), is_map(QueryMap), is_map(HeadersMap), is_list(Options0) ->
Path = ["/delegations"],
SuccessStatusCode = undefined,
Options = [{send_body_as_binary, false},
{receive_body_as_binary, false}
| Options0],
Headers = [],
Query0_ =
[
{<<"maxResults">>, maps:get(<<"maxResults">>, QueryMap, undefined)},
{<<"nextToken">>, maps:get(<<"nextToken">>, QueryMap, undefined)}
],
Query_ = [H || {_, V} = H <- Query0_, V =/= undefined],
request(Client, get, Path, Query_, Headers, undefined, Options, SuccessStatusCode).
get_evidence(Client, AssessmentId, ControlSetId, EvidenceFolderId, EvidenceId)
when is_map(Client) ->
get_evidence(Client, AssessmentId, ControlSetId, EvidenceFolderId, EvidenceId, #{}, #{}).
get_evidence(Client, AssessmentId, ControlSetId, EvidenceFolderId, EvidenceId, QueryMap, HeadersMap)
when is_map(Client), is_map(QueryMap), is_map(HeadersMap) ->
get_evidence(Client, AssessmentId, ControlSetId, EvidenceFolderId, EvidenceId, QueryMap, HeadersMap, []).
get_evidence(Client, AssessmentId, ControlSetId, EvidenceFolderId, EvidenceId, QueryMap, HeadersMap, Options0)
when is_map(Client), is_map(QueryMap), is_map(HeadersMap), is_list(Options0) ->
Path = ["/assessments/", aws_util:encode_uri(AssessmentId), "/controlSets/", aws_util:encode_uri(ControlSetId), "/evidenceFolders/", aws_util:encode_uri(EvidenceFolderId), "/evidence/", aws_util:encode_uri(EvidenceId), ""],
SuccessStatusCode = undefined,
Options = [{send_body_as_binary, false},
{receive_body_as_binary, false}
| Options0],
Headers = [],
Query_ = [],
request(Client, get, Path, Query_, Headers, undefined, Options, SuccessStatusCode).
get_evidence_by_evidence_folder(Client, AssessmentId, ControlSetId, EvidenceFolderId)
when is_map(Client) ->
get_evidence_by_evidence_folder(Client, AssessmentId, ControlSetId, EvidenceFolderId, #{}, #{}).
get_evidence_by_evidence_folder(Client, AssessmentId, ControlSetId, EvidenceFolderId, QueryMap, HeadersMap)
when is_map(Client), is_map(QueryMap), is_map(HeadersMap) ->
get_evidence_by_evidence_folder(Client, AssessmentId, ControlSetId, EvidenceFolderId, QueryMap, HeadersMap, []).
get_evidence_by_evidence_folder(Client, AssessmentId, ControlSetId, EvidenceFolderId, QueryMap, HeadersMap, Options0)
when is_map(Client), is_map(QueryMap), is_map(HeadersMap), is_list(Options0) ->
Path = ["/assessments/", aws_util:encode_uri(AssessmentId), "/controlSets/", aws_util:encode_uri(ControlSetId), "/evidenceFolders/", aws_util:encode_uri(EvidenceFolderId), "/evidence"],
SuccessStatusCode = undefined,
Options = [{send_body_as_binary, false},
{receive_body_as_binary, false}
| Options0],
Headers = [],
Query0_ =
[
{<<"maxResults">>, maps:get(<<"maxResults">>, QueryMap, undefined)},
{<<"nextToken">>, maps:get(<<"nextToken">>, QueryMap, undefined)}
],
Query_ = [H || {_, V} = H <- Query0_, V =/= undefined],
request(Client, get, Path, Query_, Headers, undefined, Options, SuccessStatusCode).
get_evidence_folder(Client, AssessmentId, ControlSetId, EvidenceFolderId)
when is_map(Client) ->
get_evidence_folder(Client, AssessmentId, ControlSetId, EvidenceFolderId, #{}, #{}).
get_evidence_folder(Client, AssessmentId, ControlSetId, EvidenceFolderId, QueryMap, HeadersMap)
when is_map(Client), is_map(QueryMap), is_map(HeadersMap) ->
get_evidence_folder(Client, AssessmentId, ControlSetId, EvidenceFolderId, QueryMap, HeadersMap, []).
get_evidence_folder(Client, AssessmentId, ControlSetId, EvidenceFolderId, QueryMap, HeadersMap, Options0)
when is_map(Client), is_map(QueryMap), is_map(HeadersMap), is_list(Options0) ->
Path = ["/assessments/", aws_util:encode_uri(AssessmentId), "/controlSets/", aws_util:encode_uri(ControlSetId), "/evidenceFolders/", aws_util:encode_uri(EvidenceFolderId), ""],
SuccessStatusCode = undefined,
Options = [{send_body_as_binary, false},
{receive_body_as_binary, false}
| Options0],
Headers = [],
Query_ = [],
request(Client, get, Path, Query_, Headers, undefined, Options, SuccessStatusCode).
get_evidence_folders_by_assessment(Client, AssessmentId)
when is_map(Client) ->
get_evidence_folders_by_assessment(Client, AssessmentId, #{}, #{}).
get_evidence_folders_by_assessment(Client, AssessmentId, QueryMap, HeadersMap)
when is_map(Client), is_map(QueryMap), is_map(HeadersMap) ->
get_evidence_folders_by_assessment(Client, AssessmentId, QueryMap, HeadersMap, []).
get_evidence_folders_by_assessment(Client, AssessmentId, QueryMap, HeadersMap, Options0)
when is_map(Client), is_map(QueryMap), is_map(HeadersMap), is_list(Options0) ->
Path = ["/assessments/", aws_util:encode_uri(AssessmentId), "/evidenceFolders"],
SuccessStatusCode = undefined,
Options = [{send_body_as_binary, false},
{receive_body_as_binary, false}
| Options0],
Headers = [],
Query0_ =
[
{<<"maxResults">>, maps:get(<<"maxResults">>, QueryMap, undefined)},
{<<"nextToken">>, maps:get(<<"nextToken">>, QueryMap, undefined)}
],
Query_ = [H || {_, V} = H <- Query0_, V =/= undefined],
request(Client, get, Path, Query_, Headers, undefined, Options, SuccessStatusCode).
get_evidence_folders_by_assessment_control(Client, AssessmentId, ControlId, ControlSetId)
when is_map(Client) ->
get_evidence_folders_by_assessment_control(Client, AssessmentId, ControlId, ControlSetId, #{}, #{}).
get_evidence_folders_by_assessment_control(Client, AssessmentId, ControlId, ControlSetId, QueryMap, HeadersMap)
when is_map(Client), is_map(QueryMap), is_map(HeadersMap) ->
get_evidence_folders_by_assessment_control(Client, AssessmentId, ControlId, ControlSetId, QueryMap, HeadersMap, []).
get_evidence_folders_by_assessment_control(Client, AssessmentId, ControlId, ControlSetId, QueryMap, HeadersMap, Options0)
when is_map(Client), is_map(QueryMap), is_map(HeadersMap), is_list(Options0) ->
Path = ["/assessments/", aws_util:encode_uri(AssessmentId), "/evidenceFolders-by-assessment-control/", aws_util:encode_uri(ControlSetId), "/", aws_util:encode_uri(ControlId), ""],
SuccessStatusCode = undefined,
Options = [{send_body_as_binary, false},
{receive_body_as_binary, false}
| Options0],
Headers = [],
Query0_ =
[
{<<"maxResults">>, maps:get(<<"maxResults">>, QueryMap, undefined)},
{<<"nextToken">>, maps:get(<<"nextToken">>, QueryMap, undefined)}
],
Query_ = [H || {_, V} = H <- Query0_, V =/= undefined],
request(Client, get, Path, Query_, Headers, undefined, Options, SuccessStatusCode).
get_insights(Client)
when is_map(Client) ->
get_insights(Client, #{}, #{}).
get_insights(Client, QueryMap, HeadersMap)
when is_map(Client), is_map(QueryMap), is_map(HeadersMap) ->
get_insights(Client, QueryMap, HeadersMap, []).
get_insights(Client, QueryMap, HeadersMap, Options0)
when is_map(Client), is_map(QueryMap), is_map(HeadersMap), is_list(Options0) ->
Path = ["/insights"],
SuccessStatusCode = undefined,
Options = [{send_body_as_binary, false},
{receive_body_as_binary, false}
| Options0],
Headers = [],
Query_ = [],
request(Client, get, Path, Query_, Headers, undefined, Options, SuccessStatusCode).
get_insights_by_assessment(Client, AssessmentId)
when is_map(Client) ->
get_insights_by_assessment(Client, AssessmentId, #{}, #{}).
get_insights_by_assessment(Client, AssessmentId, QueryMap, HeadersMap)
when is_map(Client), is_map(QueryMap), is_map(HeadersMap) ->
get_insights_by_assessment(Client, AssessmentId, QueryMap, HeadersMap, []).
get_insights_by_assessment(Client, AssessmentId, QueryMap, HeadersMap, Options0)
when is_map(Client), is_map(QueryMap), is_map(HeadersMap), is_list(Options0) ->
Path = ["/insights/assessments/", aws_util:encode_uri(AssessmentId), ""],
SuccessStatusCode = undefined,
Options = [{send_body_as_binary, false},
{receive_body_as_binary, false}
| Options0],
Headers = [],
Query_ = [],
request(Client, get, Path, Query_, Headers, undefined, Options, SuccessStatusCode).
%% @doc Returns the name of the delegated Amazon Web Services administrator
get_organization_admin_account(Client)
when is_map(Client) ->
get_organization_admin_account(Client, #{}, #{}).
get_organization_admin_account(Client, QueryMap, HeadersMap)
when is_map(Client), is_map(QueryMap), is_map(HeadersMap) ->
get_organization_admin_account(Client, QueryMap, HeadersMap, []).
get_organization_admin_account(Client, QueryMap, HeadersMap, Options0)
when is_map(Client), is_map(QueryMap), is_map(HeadersMap), is_list(Options0) ->
Path = ["/account/organizationAdminAccount"],
SuccessStatusCode = undefined,
Options = [{send_body_as_binary, false},
{receive_body_as_binary, false}
| Options0],
Headers = [],
Query_ = [],
request(Client, get, Path, Query_, Headers, undefined, Options, SuccessStatusCode).
%% @doc Returns a list of all of the Amazon Web Services that you can choose
get_services_in_scope(Client)
when is_map(Client) ->
get_services_in_scope(Client, #{}, #{}).
get_services_in_scope(Client, QueryMap, HeadersMap)
when is_map(Client), is_map(QueryMap), is_map(HeadersMap) ->
get_services_in_scope(Client, QueryMap, HeadersMap, []).
get_services_in_scope(Client, QueryMap, HeadersMap, Options0)
when is_map(Client), is_map(QueryMap), is_map(HeadersMap), is_list(Options0) ->
Path = ["/services"],
SuccessStatusCode = undefined,
Options = [{send_body_as_binary, false},
{receive_body_as_binary, false}
| Options0],
Headers = [],
Query_ = [],
request(Client, get, Path, Query_, Headers, undefined, Options, SuccessStatusCode).
%% @doc Returns the settings for the specified Amazon Web Services account.
get_settings(Client, Attribute)
when is_map(Client) ->
get_settings(Client, Attribute, #{}, #{}).
get_settings(Client, Attribute, QueryMap, HeadersMap)
when is_map(Client), is_map(QueryMap), is_map(HeadersMap) ->
get_settings(Client, Attribute, QueryMap, HeadersMap, []).
get_settings(Client, Attribute, QueryMap, HeadersMap, Options0)
when is_map(Client), is_map(QueryMap), is_map(HeadersMap), is_list(Options0) ->
Path = ["/settings/", aws_util:encode_uri(Attribute), ""],
SuccessStatusCode = undefined,
Options = [{send_body_as_binary, false},
{receive_body_as_binary, false}
| Options0],
Headers = [],
Query_ = [],
request(Client, get, Path, Query_, Headers, undefined, Options, SuccessStatusCode).
list_assessment_control_insights_by_control_domain(Client, AssessmentId, ControlDomainId)
when is_map(Client) ->
list_assessment_control_insights_by_control_domain(Client, AssessmentId, ControlDomainId, #{}, #{}).
list_assessment_control_insights_by_control_domain(Client, AssessmentId, ControlDomainId, QueryMap, HeadersMap)
when is_map(Client), is_map(QueryMap), is_map(HeadersMap) ->
list_assessment_control_insights_by_control_domain(Client, AssessmentId, ControlDomainId, QueryMap, HeadersMap, []).
list_assessment_control_insights_by_control_domain(Client, AssessmentId, ControlDomainId, QueryMap, HeadersMap, Options0)
when is_map(Client), is_map(QueryMap), is_map(HeadersMap), is_list(Options0) ->
Path = ["/insights/controls-by-assessment"],
SuccessStatusCode = undefined,
Options = [{send_body_as_binary, false},
{receive_body_as_binary, false}
| Options0],
Headers = [],
Query0_ =
[
{<<"assessmentId">>, AssessmentId},
{<<"controlDomainId">>, ControlDomainId},
{<<"maxResults">>, maps:get(<<"maxResults">>, QueryMap, undefined)},
{<<"nextToken">>, maps:get(<<"nextToken">>, QueryMap, undefined)}
],
Query_ = [H || {_, V} = H <- Query0_, V =/= undefined],
request(Client, get, Path, Query_, Headers, undefined, Options, SuccessStatusCode).
list_assessment_framework_share_requests(Client, RequestType)
when is_map(Client) ->
list_assessment_framework_share_requests(Client, RequestType, #{}, #{}).
list_assessment_framework_share_requests(Client, RequestType, QueryMap, HeadersMap)
when is_map(Client), is_map(QueryMap), is_map(HeadersMap) ->
list_assessment_framework_share_requests(Client, RequestType, QueryMap, HeadersMap, []).
list_assessment_framework_share_requests(Client, RequestType, QueryMap, HeadersMap, Options0)
when is_map(Client), is_map(QueryMap), is_map(HeadersMap), is_list(Options0) ->
Path = ["/assessmentFrameworkShareRequests"],
SuccessStatusCode = undefined,
Options = [{send_body_as_binary, false},
{receive_body_as_binary, false}
| Options0],
Headers = [],
Query0_ =
[
{<<"maxResults">>, maps:get(<<"maxResults">>, QueryMap, undefined)},
{<<"nextToken">>, maps:get(<<"nextToken">>, QueryMap, undefined)},
{<<"requestType">>, RequestType}
],
Query_ = [H || {_, V} = H <- Query0_, V =/= undefined],
request(Client, get, Path, Query_, Headers, undefined, Options, SuccessStatusCode).
list_assessment_frameworks(Client, FrameworkType)
when is_map(Client) ->
list_assessment_frameworks(Client, FrameworkType, #{}, #{}).
list_assessment_frameworks(Client, FrameworkType, QueryMap, HeadersMap)
when is_map(Client), is_map(QueryMap), is_map(HeadersMap) ->
list_assessment_frameworks(Client, FrameworkType, QueryMap, HeadersMap, []).
list_assessment_frameworks(Client, FrameworkType, QueryMap, HeadersMap, Options0)
when is_map(Client), is_map(QueryMap), is_map(HeadersMap), is_list(Options0) ->
Path = ["/assessmentFrameworks"],
SuccessStatusCode = undefined,
Options = [{send_body_as_binary, false},
{receive_body_as_binary, false}
| Options0],
Headers = [],
Query0_ =
[
{<<"frameworkType">>, FrameworkType},
{<<"maxResults">>, maps:get(<<"maxResults">>, QueryMap, undefined)},
{<<"nextToken">>, maps:get(<<"nextToken">>, QueryMap, undefined)}
],
Query_ = [H || {_, V} = H <- Query0_, V =/= undefined],
request(Client, get, Path, Query_, Headers, undefined, Options, SuccessStatusCode).
list_assessment_reports(Client)
when is_map(Client) ->
list_assessment_reports(Client, #{}, #{}).
list_assessment_reports(Client, QueryMap, HeadersMap)
when is_map(Client), is_map(QueryMap), is_map(HeadersMap) ->
list_assessment_reports(Client, QueryMap, HeadersMap, []).
list_assessment_reports(Client, QueryMap, HeadersMap, Options0)
when is_map(Client), is_map(QueryMap), is_map(HeadersMap), is_list(Options0) ->
Path = ["/assessmentReports"],
SuccessStatusCode = undefined,
Options = [{send_body_as_binary, false},
{receive_body_as_binary, false}
| Options0],
Headers = [],
Query0_ =
[
{<<"maxResults">>, maps:get(<<"maxResults">>, QueryMap, undefined)},
{<<"nextToken">>, maps:get(<<"nextToken">>, QueryMap, undefined)}
],
Query_ = [H || {_, V} = H <- Query0_, V =/= undefined],
request(Client, get, Path, Query_, Headers, undefined, Options, SuccessStatusCode).
list_assessments(Client)
when is_map(Client) ->
list_assessments(Client, #{}, #{}).
list_assessments(Client, QueryMap, HeadersMap)
when is_map(Client), is_map(QueryMap), is_map(HeadersMap) ->
list_assessments(Client, QueryMap, HeadersMap, []).
list_assessments(Client, QueryMap, HeadersMap, Options0)
when is_map(Client), is_map(QueryMap), is_map(HeadersMap), is_list(Options0) ->
Path = ["/assessments"],
SuccessStatusCode = undefined,
Options = [{send_body_as_binary, false},
{receive_body_as_binary, false}
| Options0],
Headers = [],
Query0_ =
[
{<<"maxResults">>, maps:get(<<"maxResults">>, QueryMap, undefined)},
{<<"nextToken">>, maps:get(<<"nextToken">>, QueryMap, undefined)},
{<<"status">>, maps:get(<<"status">>, QueryMap, undefined)}
],
Query_ = [H || {_, V} = H <- Query0_, V =/= undefined],
request(Client, get, Path, Query_, Headers, undefined, Options, SuccessStatusCode).
%% A control domain is listed only if at least one of the controls within
list_control_domain_insights(Client)
when is_map(Client) ->
list_control_domain_insights(Client, #{}, #{}).
list_control_domain_insights(Client, QueryMap, HeadersMap)
when is_map(Client), is_map(QueryMap), is_map(HeadersMap) ->
list_control_domain_insights(Client, QueryMap, HeadersMap, []).
list_control_domain_insights(Client, QueryMap, HeadersMap, Options0)
when is_map(Client), is_map(QueryMap), is_map(HeadersMap), is_list(Options0) ->
Path = ["/insights/control-domains"],
SuccessStatusCode = undefined,
Options = [{send_body_as_binary, false},
{receive_body_as_binary, false}
| Options0],
Headers = [],
Query0_ =
[
{<<"maxResults">>, maps:get(<<"maxResults">>, QueryMap, undefined)},
{<<"nextToken">>, maps:get(<<"nextToken">>, QueryMap, undefined)}
],
Query_ = [H || {_, V} = H <- Query0_, V =/= undefined],
request(Client, get, Path, Query_, Headers, undefined, Options, SuccessStatusCode).
%% A control domain is listed only if at least one of the controls within
list_control_domain_insights_by_assessment(Client, AssessmentId)
when is_map(Client) ->
list_control_domain_insights_by_assessment(Client, AssessmentId, #{}, #{}).
list_control_domain_insights_by_assessment(Client, AssessmentId, QueryMap, HeadersMap)
when is_map(Client), is_map(QueryMap), is_map(HeadersMap) ->
list_control_domain_insights_by_assessment(Client, AssessmentId, QueryMap, HeadersMap, []).
list_control_domain_insights_by_assessment(Client, AssessmentId, QueryMap, HeadersMap, Options0)
when is_map(Client), is_map(QueryMap), is_map(HeadersMap), is_list(Options0) ->
Path = ["/insights/control-domains-by-assessment"],
SuccessStatusCode = undefined,
Options = [{send_body_as_binary, false},
{receive_body_as_binary, false}
| Options0],
Headers = [],
Query0_ =
[
{<<"assessmentId">>, AssessmentId},
{<<"maxResults">>, maps:get(<<"maxResults">>, QueryMap, undefined)},
{<<"nextToken">>, maps:get(<<"nextToken">>, QueryMap, undefined)}
],
Query_ = [H || {_, V} = H <- Query0_, V =/= undefined],
request(Client, get, Path, Query_, Headers, undefined, Options, SuccessStatusCode).
list_control_insights_by_control_domain(Client, ControlDomainId)
when is_map(Client) ->
list_control_insights_by_control_domain(Client, ControlDomainId, #{}, #{}).
list_control_insights_by_control_domain(Client, ControlDomainId, QueryMap, HeadersMap)
when is_map(Client), is_map(QueryMap), is_map(HeadersMap) ->
list_control_insights_by_control_domain(Client, ControlDomainId, QueryMap, HeadersMap, []).
list_control_insights_by_control_domain(Client, ControlDomainId, QueryMap, HeadersMap, Options0)
when is_map(Client), is_map(QueryMap), is_map(HeadersMap), is_list(Options0) ->
Path = ["/insights/controls"],
SuccessStatusCode = undefined,
Options = [{send_body_as_binary, false},
{receive_body_as_binary, false}
| Options0],
Headers = [],
Query0_ =
[
{<<"controlDomainId">>, ControlDomainId},
{<<"maxResults">>, maps:get(<<"maxResults">>, QueryMap, undefined)},
{<<"nextToken">>, maps:get(<<"nextToken">>, QueryMap, undefined)}
],
Query_ = [H || {_, V} = H <- Query0_, V =/= undefined],
request(Client, get, Path, Query_, Headers, undefined, Options, SuccessStatusCode).
list_controls(Client, ControlType)
when is_map(Client) ->
list_controls(Client, ControlType, #{}, #{}).
list_controls(Client, ControlType, QueryMap, HeadersMap)
when is_map(Client), is_map(QueryMap), is_map(HeadersMap) ->
list_controls(Client, ControlType, QueryMap, HeadersMap, []).
list_controls(Client, ControlType, QueryMap, HeadersMap, Options0)
when is_map(Client), is_map(QueryMap), is_map(HeadersMap), is_list(Options0) ->
Path = ["/controls"],
SuccessStatusCode = undefined,
Options = [{send_body_as_binary, false},
{receive_body_as_binary, false}
| Options0],
Headers = [],
Query0_ =
[
{<<"controlType">>, ControlType},
{<<"maxResults">>, maps:get(<<"maxResults">>, QueryMap, undefined)},
{<<"nextToken">>, maps:get(<<"nextToken">>, QueryMap, undefined)}
],
Query_ = [H || {_, V} = H <- Query0_, V =/= undefined],
request(Client, get, Path, Query_, Headers, undefined, Options, SuccessStatusCode).
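%% Usage sketch (illustrative values only): the generated *_get wrappers pass optional
%% query parameters through QueryMap, so one page of standard controls can be fetched
%% with something like
%%   {ok, Page, _Http} = list_controls(Client, <<"Standard">>,
%%                                     #{<<"maxResults">> => 50}, #{}),
%%   NextToken = maps:get(<<"nextToken">>, Page, undefined).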
list_keywords_for_data_source(Client, Source)
when is_map(Client) ->
list_keywords_for_data_source(Client, Source, #{}, #{}).
list_keywords_for_data_source(Client, Source, QueryMap, HeadersMap)
when is_map(Client), is_map(QueryMap), is_map(HeadersMap) ->
list_keywords_for_data_source(Client, Source, QueryMap, HeadersMap, []).
list_keywords_for_data_source(Client, Source, QueryMap, HeadersMap, Options0)
when is_map(Client), is_map(QueryMap), is_map(HeadersMap), is_list(Options0) ->
Path = ["/dataSourceKeywords"],
SuccessStatusCode = undefined,
Options = [{send_body_as_binary, false},
{receive_body_as_binary, false}
| Options0],
Headers = [],
Query0_ =
[
{<<"maxResults">>, maps:get(<<"maxResults">>, QueryMap, undefined)},
{<<"nextToken">>, maps:get(<<"nextToken">>, QueryMap, undefined)},
{<<"source">>, Source}
],
Query_ = [H || {_, V} = H <- Query0_, V =/= undefined],
request(Client, get, Path, Query_, Headers, undefined, Options, SuccessStatusCode).
list_notifications(Client)
when is_map(Client) ->
list_notifications(Client, #{}, #{}).
list_notifications(Client, QueryMap, HeadersMap)
when is_map(Client), is_map(QueryMap), is_map(HeadersMap) ->
list_notifications(Client, QueryMap, HeadersMap, []).
list_notifications(Client, QueryMap, HeadersMap, Options0)
when is_map(Client), is_map(QueryMap), is_map(HeadersMap), is_list(Options0) ->
Path = ["/notifications"],
SuccessStatusCode = undefined,
Options = [{send_body_as_binary, false},
{receive_body_as_binary, false}
| Options0],
Headers = [],
Query0_ =
[
{<<"maxResults">>, maps:get(<<"maxResults">>, QueryMap, undefined)},
{<<"nextToken">>, maps:get(<<"nextToken">>, QueryMap, undefined)}
],
Query_ = [H || {_, V} = H <- Query0_, V =/= undefined],
request(Client, get, Path, Query_, Headers, undefined, Options, SuccessStatusCode).
list_tags_for_resource(Client, ResourceArn)
when is_map(Client) ->
list_tags_for_resource(Client, ResourceArn, #{}, #{}).
list_tags_for_resource(Client, ResourceArn, QueryMap, HeadersMap)
when is_map(Client), is_map(QueryMap), is_map(HeadersMap) ->
list_tags_for_resource(Client, ResourceArn, QueryMap, HeadersMap, []).
list_tags_for_resource(Client, ResourceArn, QueryMap, HeadersMap, Options0)
when is_map(Client), is_map(QueryMap), is_map(HeadersMap), is_list(Options0) ->
Path = ["/tags/", aws_util:encode_uri(ResourceArn), ""],
SuccessStatusCode = undefined,
Options = [{send_body_as_binary, false},
{receive_body_as_binary, false}
| Options0],
Headers = [],
Query_ = [],
request(Client, get, Path, Query_, Headers, undefined, Options, SuccessStatusCode).
%% @doc Enables Audit Manager for the specified Amazon Web Services account.
register_account(Client, Input) ->
register_account(Client, Input, []).
register_account(Client, Input0, Options0) ->
Method = post,
Path = ["/account/registerAccount"],
SuccessStatusCode = undefined,
Options = [{send_body_as_binary, false},
{receive_body_as_binary, false},
{append_sha256_content_hash, false}
| Options0],
Headers = [],
Input1 = Input0,
CustomHeaders = [],
Input2 = Input1,
Query_ = [],
Input = Input2,
request(Client, Method, Path, Query_, CustomHeaders ++ Headers, Input, Options, SuccessStatusCode).
%% @doc Enables an Amazon Web Services account within the organization as the
register_organization_admin_account(Client, Input) ->
register_organization_admin_account(Client, Input, []).
register_organization_admin_account(Client, Input0, Options0) ->
Method = post,
Path = ["/account/registerOrganizationAdminAccount"],
SuccessStatusCode = undefined,
Options = [{send_body_as_binary, false},
{receive_body_as_binary, false},
{append_sha256_content_hash, false}
| Options0],
Headers = [],
Input1 = Input0,
CustomHeaders = [],
Input2 = Input1,
Query_ = [],
Input = Input2,
request(Client, Method, Path, Query_, CustomHeaders ++ Headers, Input, Options, SuccessStatusCode).
%% framework is available. Recipients have 120 days to accept or decline the
%% custom framework in the US East (N. Virginia) Amazon Web Services Region.
%% Audit Manager also stores a backup of the same snapshot in the US West
%% (Oregon) Amazon Web Services Region.
%% Audit Manager deletes the snapshot and the backup snapshot when one of the
%% their Amazon Web Services account under the Amazon Web Services Region
%% to share a custom framework with another Amazon Web Services account. You
%% Amazon Web Services, unless you have obtained permission to do so from the
start_assessment_framework_share(Client, FrameworkId, Input) ->
start_assessment_framework_share(Client, FrameworkId, Input, []).
start_assessment_framework_share(Client, FrameworkId, Input0, Options0) ->
Method = post,
Path = ["/assessmentFrameworks/", aws_util:encode_uri(FrameworkId), "/shareRequests"],
SuccessStatusCode = undefined,
Options = [{send_body_as_binary, false},
{receive_body_as_binary, false},
{append_sha256_content_hash, false}
| Options0],
Headers = [],
Input1 = Input0,
CustomHeaders = [],
Input2 = Input1,
Query_ = [],
Input = Input2,
request(Client, Method, Path, Query_, CustomHeaders ++ Headers, Input, Options, SuccessStatusCode).
tag_resource(Client, ResourceArn, Input) ->
tag_resource(Client, ResourceArn, Input, []).
tag_resource(Client, ResourceArn, Input0, Options0) ->
Method = post,
Path = ["/tags/", aws_util:encode_uri(ResourceArn), ""],
SuccessStatusCode = undefined,
Options = [{send_body_as_binary, false},
{receive_body_as_binary, false},
{append_sha256_content_hash, false}
| Options0],
Headers = [],
Input1 = Input0,
CustomHeaders = [],
Input2 = Input1,
Query_ = [],
Input = Input2,
request(Client, Method, Path, Query_, CustomHeaders ++ Headers, Input, Options, SuccessStatusCode).
untag_resource(Client, ResourceArn, Input) ->
untag_resource(Client, ResourceArn, Input, []).
untag_resource(Client, ResourceArn, Input0, Options0) ->
Method = delete,
Path = ["/tags/", aws_util:encode_uri(ResourceArn), ""],
SuccessStatusCode = undefined,
Options = [{send_body_as_binary, false},
{receive_body_as_binary, false},
{append_sha256_content_hash, false}
| Options0],
Headers = [],
Input1 = Input0,
CustomHeaders = [],
Input2 = Input1,
QueryMapping = [
{<<"tagKeys">>, <<"tagKeys">>}
],
{Query_, Input} = aws_request:build_headers(QueryMapping, Input2),
request(Client, Method, Path, Query_, CustomHeaders ++ Headers, Input, Options, SuccessStatusCode).
update_assessment(Client, AssessmentId, Input) ->
update_assessment(Client, AssessmentId, Input, []).
update_assessment(Client, AssessmentId, Input0, Options0) ->
Method = put,
Path = ["/assessments/", aws_util:encode_uri(AssessmentId), ""],
SuccessStatusCode = undefined,
Options = [{send_body_as_binary, false},
{receive_body_as_binary, false},
{append_sha256_content_hash, false}
| Options0],
Headers = [],
Input1 = Input0,
CustomHeaders = [],
Input2 = Input1,
Query_ = [],
Input = Input2,
request(Client, Method, Path, Query_, CustomHeaders ++ Headers, Input, Options, SuccessStatusCode).
update_assessment_control(Client, AssessmentId, ControlId, ControlSetId, Input) ->
update_assessment_control(Client, AssessmentId, ControlId, ControlSetId, Input, []).
update_assessment_control(Client, AssessmentId, ControlId, ControlSetId, Input0, Options0) ->
Method = put,
Path = ["/assessments/", aws_util:encode_uri(AssessmentId), "/controlSets/", aws_util:encode_uri(ControlSetId), "/controls/", aws_util:encode_uri(ControlId), ""],
SuccessStatusCode = undefined,
Options = [{send_body_as_binary, false},
{receive_body_as_binary, false},
{append_sha256_content_hash, false}
| Options0],
Headers = [],
Input1 = Input0,
CustomHeaders = [],
Input2 = Input1,
Query_ = [],
Input = Input2,
request(Client, Method, Path, Query_, CustomHeaders ++ Headers, Input, Options, SuccessStatusCode).
update_assessment_control_set_status(Client, AssessmentId, ControlSetId, Input) ->
update_assessment_control_set_status(Client, AssessmentId, ControlSetId, Input, []).
update_assessment_control_set_status(Client, AssessmentId, ControlSetId, Input0, Options0) ->
Method = put,
Path = ["/assessments/", aws_util:encode_uri(AssessmentId), "/controlSets/", aws_util:encode_uri(ControlSetId), "/status"],
SuccessStatusCode = undefined,
Options = [{send_body_as_binary, false},
{receive_body_as_binary, false},
{append_sha256_content_hash, false}
| Options0],
Headers = [],
Input1 = Input0,
CustomHeaders = [],
Input2 = Input1,
Query_ = [],
Input = Input2,
request(Client, Method, Path, Query_, CustomHeaders ++ Headers, Input, Options, SuccessStatusCode).
update_assessment_framework(Client, FrameworkId, Input) ->
update_assessment_framework(Client, FrameworkId, Input, []).
update_assessment_framework(Client, FrameworkId, Input0, Options0) ->
Method = put,
Path = ["/assessmentFrameworks/", aws_util:encode_uri(FrameworkId), ""],
SuccessStatusCode = undefined,
Options = [{send_body_as_binary, false},
{receive_body_as_binary, false},
{append_sha256_content_hash, false}
| Options0],
Headers = [],
Input1 = Input0,
CustomHeaders = [],
Input2 = Input1,
Query_ = [],
Input = Input2,
request(Client, Method, Path, Query_, CustomHeaders ++ Headers, Input, Options, SuccessStatusCode).
update_assessment_framework_share(Client, RequestId, Input) ->
update_assessment_framework_share(Client, RequestId, Input, []).
update_assessment_framework_share(Client, RequestId, Input0, Options0) ->
Method = put,
Path = ["/assessmentFrameworkShareRequests/", aws_util:encode_uri(RequestId), ""],
SuccessStatusCode = undefined,
Options = [{send_body_as_binary, false},
{receive_body_as_binary, false},
{append_sha256_content_hash, false}
| Options0],
Headers = [],
Input1 = Input0,
CustomHeaders = [],
Input2 = Input1,
Query_ = [],
Input = Input2,
request(Client, Method, Path, Query_, CustomHeaders ++ Headers, Input, Options, SuccessStatusCode).
update_assessment_status(Client, AssessmentId, Input) ->
update_assessment_status(Client, AssessmentId, Input, []).
update_assessment_status(Client, AssessmentId, Input0, Options0) ->
Method = put,
Path = ["/assessments/", aws_util:encode_uri(AssessmentId), "/status"],
SuccessStatusCode = undefined,
Options = [{send_body_as_binary, false},
{receive_body_as_binary, false},
{append_sha256_content_hash, false}
| Options0],
Headers = [],
Input1 = Input0,
CustomHeaders = [],
Input2 = Input1,
Query_ = [],
Input = Input2,
request(Client, Method, Path, Query_, CustomHeaders ++ Headers, Input, Options, SuccessStatusCode).
update_control(Client, ControlId, Input) ->
update_control(Client, ControlId, Input, []).
update_control(Client, ControlId, Input0, Options0) ->
Method = put,
Path = ["/controls/", aws_util:encode_uri(ControlId), ""],
SuccessStatusCode = undefined,
Options = [{send_body_as_binary, false},
{receive_body_as_binary, false},
{append_sha256_content_hash, false}
| Options0],
Headers = [],
Input1 = Input0,
CustomHeaders = [],
Input2 = Input1,
Query_ = [],
Input = Input2,
request(Client, Method, Path, Query_, CustomHeaders ++ Headers, Input, Options, SuccessStatusCode).
update_settings(Client, Input) ->
update_settings(Client, Input, []).
update_settings(Client, Input0, Options0) ->
Method = put,
Path = ["/settings"],
SuccessStatusCode = undefined,
Options = [{send_body_as_binary, false},
{receive_body_as_binary, false},
{append_sha256_content_hash, false}
| Options0],
Headers = [],
Input1 = Input0,
CustomHeaders = [],
Input2 = Input1,
Query_ = [],
Input = Input2,
request(Client, Method, Path, Query_, CustomHeaders ++ Headers, Input, Options, SuccessStatusCode).
validate_assessment_report_integrity(Client, Input) ->
validate_assessment_report_integrity(Client, Input, []).
validate_assessment_report_integrity(Client, Input0, Options0) ->
Method = post,
Path = ["/assessmentReports/integrity"],
SuccessStatusCode = undefined,
Options = [{send_body_as_binary, false},
{receive_body_as_binary, false},
{append_sha256_content_hash, false}
| Options0],
Headers = [],
Input1 = Input0,
CustomHeaders = [],
Input2 = Input1,
Query_ = [],
Input = Input2,
request(Client, Method, Path, Query_, CustomHeaders ++ Headers, Input, Options, SuccessStatusCode).
%% Internal functions
-spec request(aws_client:aws_client(), atom(), iolist(), list(),
list(), map() | undefined, list(), pos_integer() | undefined) ->
{ok, {integer(), list()}} |
{ok, Result, {integer(), list(), hackney:client()}} |
{error, Error, {integer(), list(), hackney:client()}} |
{error, term()} when
Result :: map(),
Error :: map().
request(Client, Method, Path, Query, Headers0, Input, Options, SuccessStatusCode) ->
RequestFun = fun() -> do_request(Client, Method, Path, Query, Headers0, Input, Options, SuccessStatusCode) end,
aws_request:request(RequestFun, Options).
do_request(Client, Method, Path, Query, Headers0, Input, Options, SuccessStatusCode) ->
Client1 = Client#{service => <<"auditmanager">>},
Host = build_host(<<"auditmanager">>, Client1),
URL0 = build_url(Host, Path, Client1),
URL = aws_request:add_query(URL0, Query),
AdditionalHeaders1 = [ {<<"Host">>, Host}
, {<<"Content-Type">>, <<"application/x-amz-json-1.1">>}
],
Payload =
case proplists:get_value(send_body_as_binary, Options) of
true ->
maps:get(<<"Body">>, Input, <<"">>);
false ->
encode_payload(Input)
end,
AdditionalHeaders = case proplists:get_value(append_sha256_content_hash, Options, false) of
true ->
add_checksum_hash_header(AdditionalHeaders1, Payload);
false ->
AdditionalHeaders1
end,
Headers1 = aws_request:add_headers(AdditionalHeaders, Headers0),
MethodBin = aws_request:method_to_binary(Method),
SignedHeaders = aws_request:sign_request(Client1, MethodBin, URL, Headers1, Payload),
Response = hackney:request(Method, URL, SignedHeaders, Payload, Options),
DecodeBody = not proplists:get_value(receive_body_as_binary, Options),
handle_response(Response, SuccessStatusCode, DecodeBody).
add_checksum_hash_header(Headers, Body) ->
[ {<<"X-Amz-CheckSum-SHA256">>, base64:encode(crypto:hash(sha256, Body))}
| Headers
].
handle_response({ok, StatusCode, ResponseHeaders}, SuccessStatusCode, _DecodeBody)
when StatusCode =:= 200;
StatusCode =:= 202;
StatusCode =:= 204;
StatusCode =:= 206;
StatusCode =:= SuccessStatusCode ->
{ok, {StatusCode, ResponseHeaders}};
handle_response({ok, StatusCode, ResponseHeaders}, _, _DecodeBody) ->
{error, {StatusCode, ResponseHeaders}};
handle_response({ok, StatusCode, ResponseHeaders, Client}, SuccessStatusCode, DecodeBody)
when StatusCode =:= 200;
StatusCode =:= 202;
StatusCode =:= 204;
StatusCode =:= 206;
StatusCode =:= SuccessStatusCode ->
case hackney:body(Client) of
{ok, <<>>} when StatusCode =:= 200;
StatusCode =:= SuccessStatusCode ->
{ok, #{}, {StatusCode, ResponseHeaders, Client}};
{ok, Body} ->
Result = case DecodeBody of
true ->
try
jsx:decode(Body)
catch
Error:Reason:Stack ->
erlang:raise(error, {body_decode_failed, Error, Reason, StatusCode, Body}, Stack)
end;
false -> #{<<"Body">> => Body}
end,
{ok, Result, {StatusCode, ResponseHeaders, Client}}
end;
handle_response({ok, StatusCode, _ResponseHeaders, _Client}, _, _DecodeBody)
when StatusCode =:= 503 ->
    %% Retriable error if retries are enabled
{error, service_unavailable};
handle_response({ok, StatusCode, ResponseHeaders, Client}, _, _DecodeBody) ->
{ok, Body} = hackney:body(Client),
try
DecodedError = jsx:decode(Body),
{error, DecodedError, {StatusCode, ResponseHeaders, Client}}
catch
Error:Reason:Stack ->
erlang:raise(error, {body_decode_failed, Error, Reason, StatusCode, Body}, Stack)
end;
handle_response({error, Reason}, _, _DecodeBody) ->
{error, Reason}.
build_host(_EndpointPrefix, #{region := <<"local">>, endpoint := Endpoint}) ->
Endpoint;
build_host(_EndpointPrefix, #{region := <<"local">>}) ->
<<"localhost">>;
build_host(EndpointPrefix, #{region := Region, endpoint := Endpoint}) ->
aws_util:binary_join([EndpointPrefix, Region, Endpoint], <<".">>).
build_url(Host, Path0, Client) ->
Proto = aws_client:proto(Client),
Path = erlang:iolist_to_binary(Path0),
Port = aws_client:port(Client),
aws_util:binary_join([Proto, <<"://">>, Host, <<":">>, Port, Path], <<"">>).
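%% For example, with the default scheme and port this produces URLs of the form
%%   <<"https://auditmanager.us-east-1.amazonaws.com:443/insights">> (the region shown is illustrative).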
-spec encode_payload(undefined | map()) -> binary().
encode_payload(undefined) ->
<<>>;
encode_payload(Input) ->
jsx:encode(Input).
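%% Usage sketch (not part of the generated module): assuming a client map built
%% with aws_client:make_client/3 from the aws-erlang library, the wrappers above
%% are called directly, e.g.
%%   Client = aws_client:make_client(AccessKeyId, SecretAccessKey, <<"us-east-1">>),
%%   {ok, Insights, _Http} = get_insights(Client).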
|
db5202dd2786ea3f4d37edba3de25fe84369a947e5863924018c13b79f4c356b | zack-bitcoin/chalang | fractions.erl | -module(fractions).
-export([new/2,negate/1,add/2,sub/2,mul/2,divide/2,to_int/1,test/0, exponent/2, lt/2, gt/2, equal/2, is_fraction/1,sqrt/1]).
-record(f, {top = 0, bottom = 0}).
is_fraction(X) when not is_record(X, f) ->
false;
is_fraction({f, _, Y}) when not is_integer(Y) -> false;
is_fraction({f, Y, _}) when not is_integer(Y) -> false;
is_fraction({f, _, Y}) when Y == 0 -> false;
is_fraction({f, _, _}) -> true;
is_fraction(_) -> false.
sqrt({f, A, B}) ->
sqrt_helper({f, A, B}, {f, 1, 2}).
sqrt_helper(A, Guess) ->
B = sub(A, mul(Guess, Guess)),
    %% Assumed reconstruction of a binding lost in extraction: accept the guess
    %% once the residual is within 1.0e-8, i.e. correct to 8 decimal places.
    Bool = lt(B, {f, 1, 100000000}) andalso gt(B, {f, -1, 100000000}),
if
Bool -> Guess;
true ->
Sum = add(Guess, divide(A, Guess)),
Improved = divide(Sum, {f, 2, 1}),
sqrt_helper(A, Improved)
end.
to_frac(X) when is_integer(X) ->
new(X, 1);
to_frac({f, X, Y}) -> {f, X, Y}.
equal(A, B) ->
C = to_frac(A),
D = to_frac(B),
C#f.top * D#f.bottom == D#f.top * C#f.bottom.
gt(C, D) ->
A = to_frac(D),
B = to_frac(C),
A#f.top * B#f.bottom < B#f.top * A#f.bottom.
lt(C, D) ->
A = to_frac(C),
B = to_frac(D),
A#f.top * B#f.bottom < B#f.top * A#f.bottom.
new(T,B) -> #f{top = T, bottom = B}.
negate(B) ->
A = to_frac(B),
#f{top = -A#f.top, bottom = A#f.bottom}.
sub(A, B) -> add(A, negate(B)).
add(C, D) ->
A = to_frac(C),
B = to_frac(D),
simplify(#f{top = (A#f.top * B#f.bottom) + (A#f.bottom * B#f.top) , bottom = A#f.bottom * B#f.bottom}).
mul(C, D) ->
A = to_frac(C),
B = to_frac(D),
simplify(#f{top = A#f.top * B#f.top, bottom = A#f.bottom * B#f.bottom}).
divide(C, D) ->
A = to_frac(C),
B = to_frac(D),
simplify(#f{top = A#f.top * B#f.bottom, bottom = A#f.bottom * B#f.top}).
to_int(A) -> A#f.top div A#f.bottom.
simplify(F) -> simplify_lcd(simplify_size(F)).
simplify_lcd(F) ->
L = lcd(F#f.top, F#f.bottom),
#f{top = F#f.top div L, bottom = F#f.bottom div L}.
simplify_size(F) ->
IC = 4294967296,%this is higher than the highest value we can store in top or bottom.
    %IC = 281474976710656,
%X = F#f.bottom div IC,
    %Y = F#f.top div IC,
Z = if
((F#f.bottom > IC) and (F#f.top > IC)) -> IC;
true -> 1
end,
#f{top = F#f.top div Z, bottom = F#f.bottom div Z}.
exponent(F, N) ->
G = to_frac(F),
exponent2(G, N).
exponent2(_, 0) -> #f{top = 1, bottom = 1};
exponent2(F, 1) -> F;
exponent2(F, N) when N rem 2 == 0 ->
exponent2(mul(F, F), N div 2);
exponent2(F, N) -> mul(F, exponent2(F, N - 1)).
lcd(A, 0) -> A;
lcd(A, B) -> lcd(B, A rem B).
test() ->
A = new(1, 3),
B = new(2, 5),
C = mul(A, B),
C = new(2, 15),
B = divide(C, A),
9 = lcd(27, 9),
5 = lcd(25, 15),
success.
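%% Usage sketch (assumes the module is compiled as `fractions`):
%%   Half  = fractions:new(1, 2),
%%   Third = fractions:new(1, 3),
%%   true  = fractions:gt(Half, Third),
%%   {f, 5, 6} = fractions:add(Half, Third),
%%   0 = fractions:to_int(fractions:sub(Half, Half)).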
| null | https://raw.githubusercontent.com/zack-bitcoin/chalang/a728e6bb9a60ac6eca189ee7d6873a891825fa9a/src/fractions.erl | erlang | this is higher than the highest value we can store in top or bottom.
X = F#f.bottom div IC, | -module(fractions).
-export([new/2,negate/1,add/2,sub/2,mul/2,divide/2,to_int/1,test/0, exponent/2, lt/2, gt/2, equal/2, is_fraction/1,sqrt/1]).
-record(f, {top = 0, bottom = 0}).
is_fraction(X) when not is_record(X, f) ->
false;
is_fraction({f, _, Y}) when not is_integer(Y) -> false;
is_fraction({f, Y, _}) when not is_integer(Y) -> false;
is_fraction({f, _, Y}) when Y == 0 -> false;
is_fraction({f, _, _}) -> true;
is_fraction(_) -> false.
sqrt({f, A, B}) ->
sqrt_helper({f, A, B}, {f, 1, 2}).
sqrt_helper(A, Guess) ->
B = sub(A, mul(Guess, Guess)),
    %% Assumed reconstruction of a binding lost in extraction: accept the guess
    %% once the residual is within 1.0e-8, i.e. correct to 8 decimal places.
    Bool = lt(B, {f, 1, 100000000}) andalso gt(B, {f, -1, 100000000}),
if
Bool -> Guess;
true ->
Sum = add(Guess, divide(A, Guess)),
Improved = divide(Sum, {f, 2, 1}),
sqrt_helper(A, Improved)
end.
to_frac(X) when is_integer(X) ->
new(X, 1);
to_frac({f, X, Y}) -> {f, X, Y}.
equal(A, B) ->
C = to_frac(A),
D = to_frac(B),
C#f.top * D#f.bottom == D#f.top * C#f.bottom.
gt(C, D) ->
A = to_frac(D),
B = to_frac(C),
A#f.top * B#f.bottom < B#f.top * A#f.bottom.
lt(C, D) ->
A = to_frac(C),
B = to_frac(D),
A#f.top * B#f.bottom < B#f.top * A#f.bottom.
new(T,B) -> #f{top = T, bottom = B}.
negate(B) ->
A = to_frac(B),
#f{top = -A#f.top, bottom = A#f.bottom}.
sub(A, B) -> add(A, negate(B)).
add(C, D) ->
A = to_frac(C),
B = to_frac(D),
simplify(#f{top = (A#f.top * B#f.bottom) + (A#f.bottom * B#f.top) , bottom = A#f.bottom * B#f.bottom}).
mul(C, D) ->
A = to_frac(C),
B = to_frac(D),
simplify(#f{top = A#f.top * B#f.top, bottom = A#f.bottom * B#f.bottom}).
divide(C, D) ->
A = to_frac(C),
B = to_frac(D),
simplify(#f{top = A#f.top * B#f.bottom, bottom = A#f.bottom * B#f.top}).
to_int(A) -> A#f.top div A#f.bottom.
simplify(F) -> simplify_lcd(simplify_size(F)).
simplify_lcd(F) ->
L = lcd(F#f.top, F#f.bottom),
#f{top = F#f.top div L, bottom = F#f.bottom div L}.
simplify_size(F) ->
    %IC = 281474976710656,
    %Y = F#f.top div IC,
Z = if
((F#f.bottom > IC) and (F#f.top > IC)) -> IC;
true -> 1
end,
#f{top = F#f.top div Z, bottom = F#f.bottom div Z}.
exponent(F, N) ->
G = to_frac(F),
exponent2(G, N).
exponent2(_, 0) -> #f{top = 1, bottom = 1};
exponent2(F, 1) -> F;
exponent2(F, N) when N rem 2 == 0 ->
exponent2(mul(F, F), N div 2);
exponent2(F, N) -> mul(F, exponent2(F, N - 1)).
lcd(A, 0) -> A;
lcd(A, B) -> lcd(B, A rem B).
test() ->
A = new(1, 3),
B = new(2, 5),
C = mul(A, B),
C = new(2, 15),
B = divide(C, A),
9 = lcd(27, 9),
5 = lcd(25, 15),
success.
|
edd910a84bd2d07fdf42142c56c6f8ac617ca6c3405b0b2033908664e794d94e | darach/jch-erl | jch.erl | %% -------------------------------------------------------------------
%% Copyright (c) 2014 <darach at gmail dot com>
%%
%% Permission is hereby granted, free of charge, to any person obtaining a
%% copy of this software and associated documentation files (the
" Software " ) , to deal in the Software without restriction , including
%% without limitation the rights to use, copy, modify, merge, publish,
%% distribute, sublicense, and/or sell copies of the Software, and to permit
%% persons to whom the Software is furnished to do so, subject to the
%% following conditions:
%%
%% The above copyright notice and this permission notice shall be included
%% in all copies or substantial portions of the Software.
%%
THE SOFTWARE IS PROVIDED " AS IS " , WITHOUT WARRANTY OF ANY KIND , EXPRESS
%% OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
%% MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
%% NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
%% DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
%% OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
%% USE OR OTHER DEALINGS IN THE SOFTWARE.
%%
%% File: jch.erl. Jump Consistent Hashing
%%
%% NIF wrapper for the Jump Consistent Hash algorithm
%% developed at Google, Inc. Paper: "A Fast, Minimal Memory, Consistent Hash Algorithm".
%% This implementation uses the xorshift64* PRNG rather than the LCG PRNG in the paper
%% by default, but can be switched to the compatible algorithm by passing the 3rd argument
%% as atom 'orig'.
%%
%% -------------------------------------------------------------------
-module(jch).
-export([ch/2, ch/3]).
-on_load(init/0).
init() ->
SoName = filename:join(
case code:priv_dir(?MODULE) of
{error, bad_name} ->
Dir = code:which(?MODULE),
filename:join([filename:dirname(Dir),"..","priv"]);
Dir -> Dir
end, atom_to_list(?MODULE) ++ "_nif"),
erlang:load_nif(SoName, 0).
-spec ch(Key,Buckets) -> Hash when
Key :: integer(),
Buckets :: integer(),
Hash :: integer().
ch(Key, Buckets) ->
ch(Key, Buckets, xorshift64).
-spec ch(Key, Buckets, Type) -> Hash when
Key :: integer(),
Buckets :: integer(),
Type :: orig | xorshift64,
Hash :: integer().
ch(Key, Buckets, Type) when is_integer(Key) andalso (Key >= 0)
andalso is_integer(Buckets) andalso (Buckets > 0)
andalso ((Type == orig) or (Type == xorshift64)) ->
erlang:nif_error({nif_not_loaded, ?MODULE}).
-ifdef(TEST).
-include_lib("eunit/include/eunit.hrl").
ch_xorshift_test_() ->
Cases =
%% {Expect, Key, Buckets}
[{0, 0, 1},
{0, 3, 1},
{0, 0, 2},
{1, 2, 2},
{0, 4, 2},
{29, 1, 128},
{113, 129, 128},
{0, 0, 100000000},
{82916011, 128, 100000000},
{239467867, 128, 2147483648},
{78, 18446744073709551615, 128}
],
[?_assertEqual(Expect, jch:ch(K, B)) || {Expect, K, B} <- Cases].
ch_orig_test_() ->
Cases =
%% {Expect, Key, Buckets}
[{0, 0, 1},
{0, 3, 1},
{0, 0, 2},
{1, 4, 2},
{0, 7, 2},
{55, 1, 128},
{120, 129, 128},
{0, 0, 100000000},
{38172097, 128, 100000000},
{1644467860, 128, 2147483648},
{92, 18446744073709551615, 128}
],
[?_assertEqual(Expect, jch:ch(K, B, orig)) || {Expect, K, B} <- Cases].
%% -rs/blob/master/src/lib.rs#L30
ch_range_test() ->
test_ch_range(orig, 0),
test_ch_range(xorshift64, 0).
test_ch_range(_, 10000) -> ok;
test_ch_range(Algo, Key) ->
LastVal = ch(Key, 1),
test_ch_range(Algo, Key, LastVal, 1),
test_ch_range(Algo, Key + 1).
test_ch_range(_Algo, _Key, _LastVal, 100) -> ok;
test_ch_range(Algo, Key, LastVal, Buckets) ->
Val = ch(Key, Buckets, Algo),
    %% io:format("ch(~p, ~p, ~p) -> ~p~n", [Key, Buckets, Algo, Val]),
?assert((Val == LastVal) orelse (Val == Buckets - 1)),
test_ch_range(Algo, Key, Val, Buckets + 1).
-endif.
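%% Usage sketch: map a 64-bit key onto one of N buckets; assignments stay stable
%% as N grows (only about 1/N of the keys move when a bucket is added).
%%   Bucket     = jch:ch(123456789, 16),        %% xorshift64* variant (default)
%%   BucketOrig = jch:ch(123456789, 16, orig),  %% LCG variant from the paper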
| null | https://raw.githubusercontent.com/darach/jch-erl/4a6f9d35258f1ed5555792ffd4a3463ea835d1f2/src/jch.erl | erlang | -------------------------------------------------------------------
Permission is hereby granted, free of charge, to any person obtaining a
copy of this software and associated documentation files (the
without limitation the rights to use, copy, modify, merge, publish,
following conditions:
The above copyright notice and this permission notice shall be included
OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
USE OR OTHER DEALINGS IN THE SOFTWARE.
File: jch.erl. Jump Consistent Hashing
as atom 'orig'.
-------------------------------------------------------------------
{Expect, Key, Buckets}
{Expect, Key, Buckets}
-rs/blob/master/src/lib.rs#L30 | Copyright ( c ) 2014 < darach at gmail dot com >
" Software " ) , to deal in the Software without restriction , including
distribute , sublicense , and/or sell copies of the Software , and to permit
persons to whom the Software is furnished to do so , subject to the
in all copies or substantial portions of the Software .
THE SOFTWARE IS PROVIDED " AS IS " , WITHOUT WARRANTY OF ANY KIND , EXPRESS
DAMAGES OR OTHER LIABILITY , WHETHER IN AN ACTION OF CONTRACT , TORT OR
NIF wrapper for Jump Consistent Hash algorithm by and
developed at Google , Inc. Paper : " A Fast , Minimal Memory , Consistent Hash Algorithm .
This implementation uses the xorshift64 * PRNG rather than the LCG PRNG in the paper
by default , but can be switched to compatible algorithm by passing 3'rd argument
-module(jch).
-export([ch/2, ch/3]).
-on_load(init/0).
init() ->
SoName = filename:join(
case code:priv_dir(?MODULE) of
{error, bad_name} ->
Dir = code:which(?MODULE),
filename:join([filename:dirname(Dir),"..","priv"]);
Dir -> Dir
end, atom_to_list(?MODULE) ++ "_nif"),
erlang:load_nif(SoName, 0).
-spec ch(Key,Buckets) -> Hash when
Key :: integer(),
Buckets :: integer(),
Hash :: integer().
ch(Key, Buckets) ->
ch(Key, Buckets, xorshift64).
-spec ch(Key, Buckets, Type) -> Hash when
Key :: integer(),
Buckets :: integer(),
Type :: orig | xorshift64,
Hash :: integer().
ch(Key, Buckets, Type) when is_integer(Key) andalso (Key >= 0)
andalso is_integer(Buckets) andalso (Buckets > 0)
andalso ((Type == orig) or (Type == xorshift64)) ->
erlang:nif_error({nif_not_loaded, ?MODULE}).
-ifdef(TEST).
-include_lib("eunit/include/eunit.hrl").
ch_xorshift_test_() ->
Cases =
[{0, 0, 1},
{0, 3, 1},
{0, 0, 2},
{1, 2, 2},
{0, 4, 2},
{29, 1, 128},
{113, 129, 128},
{0, 0, 100000000},
{82916011, 128, 100000000},
{239467867, 128, 2147483648},
{78, 18446744073709551615, 128}
],
[?_assertEqual(Expect, jch:ch(K, B)) || {Expect, K, B} <- Cases].
ch_orig_test_() ->
Cases =
[{0, 0, 1},
{0, 3, 1},
{0, 0, 2},
{1, 4, 2},
{0, 7, 2},
{55, 1, 128},
{120, 129, 128},
{0, 0, 100000000},
{38172097, 128, 100000000},
{1644467860, 128, 2147483648},
{92, 18446744073709551615, 128}
],
[?_assertEqual(Expect, jch:ch(K, B, orig)) || {Expect, K, B} <- Cases].
ch_range_test() ->
test_ch_range(orig, 0),
test_ch_range(xorshift64, 0).
test_ch_range(_, 10000) -> ok;
test_ch_range(Algo, Key) ->
LastVal = ch(Key, 1),
test_ch_range(Algo, Key, LastVal, 1),
test_ch_range(Algo, Key + 1).
test_ch_range(_Algo, _Key, _LastVal, 100) -> ok;
test_ch_range(Algo, Key, LastVal, Buckets) ->
Val = ch(Key, Buckets, Algo),
io : format("ch(~p , ~p , ~p ) - > ~p ~ n " , [ Key , Buckets , Algo , ] ) ,
?assert((Val == LastVal) orelse (Val == Buckets - 1)),
test_ch_range(Algo, Key, Val, Buckets + 1).
-endif.
|
778b0926e1deee5e69b2612a420742fc7c6f88b5d5f288defe8718754c9645aa | rems-project/extract | BinRep.ml | type position = int * int
type field =
Opcode of int * position
| Ifield of string * position
| SplitIfield of string * position * position
| Reserved of position
type t = field list
let pos_to_string (x, y) =
if y = 1
then Printf.sprintf "[%d]" x
else Printf.sprintf "[%d:%d]" x (x+y-1)
(* Opcode are represented by their integer value, reserved bits by a
* series of '/' and ifields by their name. *)
let parse_field s p =
try Opcode (int_of_string s, p)
with _ ->
if Str.string_match (Str.regexp "^[ /]*$") s 0
then Reserved p
else Ifield (s, p)
(* Returns the position of the first bit of a field *)
let get_pos = function
| Opcode (_, (p, _)) | Ifield (_, (p, _))
| SplitIfield (_, (p, _), _) | Reserved (p, _) -> p
let compare_pos i i' = compare (get_pos i) (get_pos i')
let build fields positions =
let rec compute_size = function
| [] -> []
| [x] -> [(x, 32-x)]
| x :: y :: xs ->
assert(y>x);
(x, y-x) :: compute_size (y :: xs) in
let rec make_split_ifields = function
| Ifield (f, p) :: Ifield (f', p') :: l when f = f' ->
make_split_ifields (SplitIfield (f, p, p') :: l)
| SplitIfield (f, _, _) :: Ifield (f', _) :: _ when f = f' ->
failwith ("split ifield of more than two parts: "^f)
| x :: l -> x :: make_split_ifields l
| [] -> [] in
List.map2 parse_field fields (compute_size positions)
|> List.sort compare |> make_split_ifields
|> List.sort compare_pos
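(* Usage sketch with a hypothetical 32-bit layout: a numeric opcode "31" at bit 0,
   two ifields and a reserved tail; [build] pairs each label with its start bit
   and infers each width from the next position (the last field runs to bit 32). *)
let _example : t =
  build ["31"; "RT"; "RA"; "///"] [0; 6; 11; 16]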
| null | https://raw.githubusercontent.com/rems-project/extract/5d544e996f9ca7bb0b937d16f7739b0452654d24/src/BinRep.ml | ocaml | Opcode are represented by their integer value, reserved bits by a
* series of '/' and ifields by their name. | type position = int * int
type field =
Opcode of int * position
| Ifield of string * position
| SplitIfield of string * position * position
| Reserved of position
type t = field list
let pos_to_string (x, y) =
if y = 1
then Printf.sprintf "[%d]" x
else Printf.sprintf "[%d:%d]" x (x+y-1)
let parse_field s p =
try Opcode (int_of_string s, p)
with _ ->
if Str.string_match (Str.regexp "^[ /]*$") s 0
then Reserved p
else Ifield (s, p)
(* Returns the position of the first bit of a field *)
let get_pos = function
| Opcode (_, (p, _)) | Ifield (_, (p, _))
| SplitIfield (_, (p, _), _) | Reserved (p, _) -> p
let compare_pos i i' = compare (get_pos i) (get_pos i')
let build fields positions =
let rec compute_size = function
| [] -> []
| [x] -> [(x, 32-x)]
| x :: y :: xs ->
assert(y>x);
(x, y-x) :: compute_size (y :: xs) in
let rec make_split_ifields = function
| Ifield (f, p) :: Ifield (f', p') :: l when f = f' ->
make_split_ifields (SplitIfield (f, p, p') :: l)
| SplitIfield (f, _, _) :: Ifield (f', _) :: _ when f = f' ->
failwith ("split ifield of more than two parts: "^f)
| x :: l -> x :: make_split_ifields l
| [] -> [] in
List.map2 parse_field fields (compute_size positions)
|> List.sort compare |> make_split_ifields
|> List.sort compare_pos
|
fdebb155952584246a2cf61fd0a9a635df0b7873381c274998b3f4aea97b3f45 | garycrawford/lein-life | core.clj | (ns {{ns-name}}.controllers.home.core
(:require [{{ns-name}}.views.home :refer [home-view]]
[{{ns-name}}.responses :refer [model-view-200 model-view-404]]
[{{ns-name}}.platform.people-api.core :refer [get-people create-person get-person update-person delete-person]]
[ring.util.anti-forgery :refer [anti-forgery-field]]
[ring.util.response :refer [redirect-after-post]]))
(defn person:m->vm
"Converts a person model into a person view-model"
[model]
(select-keys model [:name :location :id]))
(defn people-list:m->vm
"Converts a poeple-list model into a person-list view-model"
[model]
{:people (map person:m->vm model)})
(defn add-anti-forgery
"Adds an anti-forgery token to a model map"
[model]
(merge model
{:anti-forgery-field (anti-forgery-field)}))
(defn person-response
"Builds a response based on the named template
and the person data associated with the id"
[{:keys [id]} template]
(if-let [person (get-person id)]
(model-view-200 {:model (add-anti-forgery (person:m->vm person))
:view (home-view template)})
(model-view-404 {:model {}
:view (home-view "not-found")})))
(defn home
[]
(let [people (get-people)
view-model (people-list:m->vm people)]
(model-view-200 {:model (add-anti-forgery view-model)
:view (home-view "introduction")})))
(defn create-person-post
[{:keys [name location]}]
(create-person {:name name :location location})
(redirect-after-post "/"))
(defn update-person-get
[params]
(person-response params "update-person"))
(defn update-person-post
[params]
(let [person (select-keys params [:id :name :location])
{:keys [updated]} (update-person person)]
(if updated
(redirect-after-post "/")
(model-view-404 {:model {}
:view (home-view "not-found")}))))
(defn delete-person-get
[params]
(person-response params "delete-person"))
(defn delete-person-post
[{:keys [id]}]
(let [{:keys [deleted]} (delete-person id)]
(if deleted
(redirect-after-post "/")
(model-view-404 {:model {}
:view (home-view "not-found")}))))
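;; Usage sketch (route shapes are illustrative; the generated project is assumed
;; to wire these handlers into a Compojure-style router):
;;   (GET  "/"                  []          (home))
;;   (POST "/people"            {p :params} (create-person-post p))
;;   (GET  "/people/:id/update" {p :params} (update-person-get p))
;;   (POST "/people/:id/delete" {p :params} (delete-person-post p))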
| null | https://raw.githubusercontent.com/garycrawford/lein-life/d55f3636950b58b02399ab32eb1f6d81ecfce823/src/leiningen/new/life/site/api/src/controllers/home/core.clj | clojure | (ns {{ns-name}}.controllers.home.core
(:require [{{ns-name}}.views.home :refer [home-view]]
[{{ns-name}}.responses :refer [model-view-200 model-view-404]]
[{{ns-name}}.platform.people-api.core :refer [get-people create-person get-person update-person delete-person]]
[ring.util.anti-forgery :refer [anti-forgery-field]]
[ring.util.response :refer [redirect-after-post]]))
(defn person:m->vm
"Converts a person model into a person view-model"
[model]
(select-keys model [:name :location :id]))
(defn people-list:m->vm
"Converts a poeple-list model into a person-list view-model"
[model]
{:people (map person:m->vm model)})
(defn add-anti-forgery
"Adds an anti-forgery token to a model map"
[model]
(merge model
{:anti-forgery-field (anti-forgery-field)}))
(defn person-response
"Builds a response based on the named template
and the person data associated with the id"
[{:keys [id]} template]
(if-let [person (get-person id)]
(model-view-200 {:model (add-anti-forgery (person:m->vm person))
:view (home-view template)})
(model-view-404 {:model {}
:view (home-view "not-found")})))
(defn home
[]
(let [people (get-people)
view-model (people-list:m->vm people)]
(model-view-200 {:model (add-anti-forgery view-model)
:view (home-view "introduction")})))
(defn create-person-post
[{:keys [name location]}]
(create-person {:name name :location location})
(redirect-after-post "/"))
(defn update-person-get
[params]
(person-response params "update-person"))
(defn update-person-post
[params]
(let [person (select-keys params [:id :name :location])
{:keys [updated]} (update-person person)]
(if updated
(redirect-after-post "/")
(model-view-404 {:model {}
:view (home-view "not-found")}))))
(defn delete-person-get
[params]
(person-response params "delete-person"))
(defn delete-person-post
[{:keys [id]}]
(let [{:keys [deleted]} (delete-person id)]
(if deleted
(redirect-after-post "/")
(model-view-404 {:model {}
:view (home-view "not-found")}))))
|
|
b99c422ab62b12125b3b3aa400cf499d13c4b86077a96b15e85acc710075b978 | onyx-platform/onyx | grouping.cljc | (ns onyx.peer.grouping
(:require [onyx.peer.operation :refer [resolve-fn]]
#?(:clj [clj-tuple :as t])))
(defn task-map->grouping-fn [task-map]
(if-let [group-key (:onyx/group-by-key task-map)]
(cond (keyword? group-key)
group-key
(sequential? group-key)
#(select-keys % group-key)
:else
#(get % group-key))
(if-let [group-fn (:onyx/group-by-fn task-map)]
(resolve-fn {:onyx/fn (:onyx/group-by-fn task-map)}))))
(defn compile-grouping-fn
"Compiles outgoing grouping task info into a task->group-fn map
for quick lookup and group fn calls"
[catalog egress-tasks]
(->> catalog
(map (juxt :onyx/name task-map->grouping-fn))
(filter (fn [[n f]]
(and f egress-tasks (egress-tasks n))))
(into #?(:cljs {}
:clj (t/hash-map)))))
(defn grouped-task? [task-map]
(boolean
(or (:onyx/group-by-key task-map)
(:onyx/group-by-fn task-map))))
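;; Usage sketch: a keyword :onyx/group-by-key compiles to the keyword itself, so
;; the group value is a plain lookup on the segment. Values below are illustrative.
(comment
  (let [task->f (compile-grouping-fn [{:onyx/name :out :onyx/group-by-key :user-id}]
                                     #{:out})]
    ((task->f :out) {:user-id 7}))) ;; => 7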
| null | https://raw.githubusercontent.com/onyx-platform/onyx/74f9ae58cdbcfcb1163464595f1e6ae6444c9782/src/onyx/peer/grouping.cljc | clojure | (ns onyx.peer.grouping
(:require [onyx.peer.operation :refer [resolve-fn]]
#?(:clj [clj-tuple :as t])))
(defn task-map->grouping-fn [task-map]
(if-let [group-key (:onyx/group-by-key task-map)]
(cond (keyword? group-key)
group-key
(sequential? group-key)
#(select-keys % group-key)
:else
#(get % group-key))
(if-let [group-fn (:onyx/group-by-fn task-map)]
(resolve-fn {:onyx/fn (:onyx/group-by-fn task-map)}))))
(defn compile-grouping-fn
"Compiles outgoing grouping task info into a task->group-fn map
for quick lookup and group fn calls"
[catalog egress-tasks]
(->> catalog
(map (juxt :onyx/name task-map->grouping-fn))
(filter (fn [[n f]]
(and f egress-tasks (egress-tasks n))))
(into #?(:cljs {}
:clj (t/hash-map)))))
(defn grouped-task? [task-map]
(boolean
(or (:onyx/group-by-key task-map)
(:onyx/group-by-fn task-map))))
|
|
5e3e4aaf4606edb5a146e0aab32ab3da0c738ddb56dbd46b8c0dda0d46776ee1 | c-cube/qcheck | QCheck_alcotest.mli |
(** {1 Alcotest backend for QCheck}

    We use environment variables for controlling QCheck here, since alcotest
    doesn't seem to provide a lot of flexibility.

    - [QCHECK_VERBOSE] if "1" or "true", will make tests verbose
    - [QCHECK_SEED] if an integer, will fix the seed
    - [QCHECK_LONG] is present, will trigger long tests

    @since 0.9
*)
val to_alcotest :
?colors:bool -> ?verbose:bool -> ?long:bool ->
?debug_shrink:(out_channel option) ->
?debug_shrink_list:(string list) ->
?rand:Random.State.t ->
QCheck2.Test.t -> unit Alcotest.test_case
(** Convert a qcheck test into an alcotest test.

    In addition to the environment variables mentioned above, you can control
    the behavior of QCheck tests using optional parameters that behave in the
    same way as the parameters of {!QCheck_base_runner.run_tests}.

    @since 0.9
    @since 0.9 parameters [verbose], [long], [rand]
    @since 0.19 parameters [colors], [debug_shrink], [debug_shrink_list]
*)
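(* Usage sketch (hypothetical test list [my_tests : QCheck2.Test.t list]):
     let suite = List.map (fun t -> QCheck_alcotest.to_alcotest t) my_tests in
     Alcotest.run "qcheck" [ ("props", suite) ]
*)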
| null | https://raw.githubusercontent.com/c-cube/qcheck/063c1d74795a24eb77fa661d218c4715382df566/src/alcotest/QCheck_alcotest.mli | ocaml |
* { 1 Alcotest backend for QCheck }
We use environment variables for controlling QCheck here , since alcotest
does n't seem to provide a lot of flexibility .
- [ QCHECK_VERBOSE ] if " 1 " or " true " , will make tests verbose
- [ QCHECK_SEED ] if an integer , will fix the seed
- [ QCHECK_LONG ] is present , will trigger long tests
@since 0.9
We use environment variables for controlling QCheck here, since alcotest
doesn't seem to provide a lot of flexibility.
- [QCHECK_VERBOSE] if "1" or "true", will make tests verbose
- [QCHECK_SEED] if an integer, will fix the seed
- [QCHECK_LONG] is present, will trigger long tests
@since 0.9
*)
val to_alcotest :
?colors:bool -> ?verbose:bool -> ?long:bool ->
?debug_shrink:(out_channel option) ->
?debug_shrink_list:(string list) ->
?rand:Random.State.t ->
QCheck2.Test.t -> unit Alcotest.test_case
* Convert a qcheck test into an alcotest test .
In addition to the environment variables mentioned above , you can control
the behavior of QCheck tests using optional parameters that behave in the
same way as the parameters of { ! QCheck_base_runner.run_tests } .
@since 0.9
@since 0.9 parameters [ verbose ] , [ long ] , [ rand ]
@since 0.19 parameters [ colors ] , [ debug_shrink ] , [ debug_shrink_list ]
In addition to the environment variables mentioned above, you can control
the behavior of QCheck tests using optional parameters that behave in the
same way as the parameters of {!QCheck_base_runner.run_tests}.
@since 0.9
@since 0.9 parameters [verbose], [long], [rand]
@since 0.19 parameters [colors], [debug_shrink], [debug_shrink_list]
*)
|
|
739c5350469aacfa370fcb8103d92ab5eda20532affc28cf849b9c5a5940d9f1 | karamellpelle/grid | PlatformObject.hs | grid is a game written in Haskell
-- Copyright (C) 2018
--
-- This file is part of grid.
--
-- grid is free software: you can redistribute it and/or modify
-- it under the terms of the GNU General Public License as published by
-- the Free Software Foundation, either version 3 of the License, or
-- (at your option) any later version.
--
-- grid is distributed in the hope that it will be useful,
-- but WITHOUT ANY WARRANTY; without even the implied warranty of
-- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-- GNU General Public License for more details.
--
-- You should have received a copy of the GNU General Public License
-- along with grid. If not, see </>.
--
module MEnv.Env.PlatformObject
(
-- tmp
module MEnv.Env.PlatformObject.GLFW,
--
) where
-- tmp
import MEnv.Env.PlatformObject.GLFW
--
| null | https://raw.githubusercontent.com/karamellpelle/grid/56729e63ed6404fd6cfd6d11e73fa358f03c386f/designer/source/MEnv/Env/PlatformObject.hs | haskell |
This file is part of grid.
grid is free software: you can redistribute it and/or modify
(at your option) any later version.
grid is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
along with grid. If not, see </>.
tmp
tmp
| grid is a game written in Haskell
module MEnv.Env.PlatformObject
(
module MEnv.Env.PlatformObject.GLFW,
) where
import MEnv.Env.PlatformObject.GLFW
|
c0e7cd826f186c73a26b2956df24d33488e806af443e6d7d391c0a050ba28fc2 | standardsemiconductor/lion | Com.hs |
{-# LANGUAGE LambdaCase #-} -- needed for the \case used in portPath below
import Control.Concurrent.Async ( concurrently_ )
import Control.Monad ( forever )
import Data.Functor ( (<&>) )
import System.Environment ( getArgs )
import System.Hardware.Serialport
import System.IO
main :: IO ()
main = com =<< portPath
where
portPath = getArgs <&> \case
[pathArg] -> pathArg
_ -> "/dev/ttyUSB0"
com :: String -> IO ()
com portPath = hWithSerial portPath serialPortSettings $ \hndl -> do
hSetBuffering stdin NoBuffering
hSetBuffering stdout NoBuffering
concurrently_ (readUart hndl) (writeUart hndl)
where
readUart hndl = forever $ putChar =<< hGetChar hndl
writeUart hndl = forever $ hPutChar hndl =<< getChar
serialPortSettings :: SerialPortSettings
serialPortSettings = defaultSerialSettings{ commSpeed = CS19200 } | null | https://raw.githubusercontent.com/standardsemiconductor/lion/7f00f6122f95986f46a5e1c25e4db6ca08709741/lion-soc/app/Com.hs | haskell |
import Control.Concurrent.Async ( concurrently_ )
import Control.Monad ( forever )
import Data.Functor ( (<&>) )
import System.Environment ( getArgs )
import System.Hardware.Serialport
import System.IO
main :: IO ()
main = com =<< portPath
where
portPath = getArgs <&> \case
[pathArg] -> pathArg
_ -> "/dev/ttyUSB0"
com :: String -> IO ()
com portPath = hWithSerial portPath serialPortSettings $ \hndl -> do
hSetBuffering stdin NoBuffering
hSetBuffering stdout NoBuffering
concurrently_ (readUart hndl) (writeUart hndl)
where
readUart hndl = forever $ putChar =<< hGetChar hndl
writeUart hndl = forever $ hPutChar hndl =<< getChar
serialPortSettings :: SerialPortSettings
serialPortSettings = defaultSerialSettings{ commSpeed = CS19200 } |
|
013e03eef0d0faed13eadca286dfdb9c7fa04ace49d51b11ce356eea72de2a6c | alexandergunnarson/quantum | dom.cljc | (ns quantum.untyped.ui.style.css.dom
(:require
[clojure.string :as str]
[quantum.untyped.core.fn
:refer [fn->]]
[quantum.untyped.core.spec :as us]
[quantum.untyped.core.type.predicates
:refer [val?]]
[quantum.untyped.ui.dom :as udom]))
#?(:cljs
(defn add-link! [link #_href-string?]
(let [elem (.createElement js/document "link")]
(set! (.-href elem) link)
(set! (.-rel elem) "stylesheet")
(set! (.-type elem) "text/css")
(.appendChild (.-head js/document) elem)
elem)))
#?(:cljs
(defn append-css! [css-str #_css-string?]
"Inserts stylesheet into document head"
{:from ""}
(let [elem (.createElement js/document "style")
text (.createTextNode js/document css-str)]
(.appendChild elem text)
(.appendChild (.-head js/document) elem)
elem)))
#?(:cljs
(defn replace-css-at! [id #_dom-id-string? css-str #_css-string?]
"Replaces CSS at a (possibly generated) style node."
(let [elem (udom/append-element!
(or (some-> (.getElementById js/document id) .-parentNode)
(.-head js/document))
"style"
id)
_ (us/validate elem (us/and val? (fn-> .-tagName str/lower-case (= "style"))))
text (.createTextNode js/document css-str)]
(while (.-firstChild elem)
(.removeChild elem (.-firstChild elem)))
(.appendChild elem text)
elem)))
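;; Illustrative usage sketch (the element id and CSS text are made up):
;;   (append-css! "body{margin:0}")
;;   (replace-css-at! "app-styles" ".button{color:red}")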
| null | https://raw.githubusercontent.com/alexandergunnarson/quantum/0c655af439734709566110949f9f2f482e468509/src-untyped/quantum/untyped/ui/style/css/dom.cljc | clojure | (ns quantum.untyped.ui.style.css.dom
(:require
[clojure.string :as str]
[quantum.untyped.core.fn
:refer [fn->]]
[quantum.untyped.core.spec :as us]
[quantum.untyped.core.type.predicates
:refer [val?]]
[quantum.untyped.ui.dom :as udom]))
#?(:cljs
(defn add-link! [link #_href-string?]
(let [elem (.createElement js/document "link")]
(set! (.-href elem) link)
(set! (.-rel elem) "stylesheet")
(set! (.-type elem) "text/css")
(.appendChild (.-head js/document) elem)
elem)))
#?(:cljs
(defn append-css! [css-str #_css-string?]
"Inserts stylesheet into document head"
{:from ""}
(let [elem (.createElement js/document "style")
text (.createTextNode js/document css-str)]
(.appendChild elem text)
(.appendChild (.-head js/document) elem)
elem)))
#?(:cljs
(defn replace-css-at! [id #_dom-id-string? css-str #_css-string?]
"Replaces CSS at a (possibly generated) style node."
(let [elem (udom/append-element!
(or (some-> (.getElementById js/document id) .-parentNode)
(.-head js/document))
"style"
id)
_ (us/validate elem (us/and val? (fn-> .-tagName str/lower-case (= "style"))))
text (.createTextNode js/document css-str)]
(while (.-firstChild elem)
(.removeChild elem (.-firstChild elem)))
(.appendChild elem text)
elem)))
|
|
3c22397ad1fbca5ea20459caee11d69c912b2901a778fe7638632dc4db2773ef | Drup/LILiS | calc.mli | (** Small library to evaluate simple arithmetic expressions. *)
(**
This library evaluates simple arithmetic expression over floats.
Regular operators (+,-,*,/,^) and some regular functions (sin, cos, tan, asin, acos, atan, log, log10, exp, sqrt) are implemented.
Arithmetic expressions can contain variables.
Here is an example of expression : [ 3*x+sin(2) ].
*)
(** Type of binary operators *)
type op2 = Plus | Minus | Times | Div | Pow
(** Type of unary operators *)
type op1 = Func of (float -> float) | MinusUn ;;
(** Type of tree which represent an arithmetic expression *)
type 'a t =
| Float of float
| Op2 of ('a t) * op2 * ('a t)
| Op1 of op1 * ('a t)
| Var of 'a
module Env : sig
type t
val add : string -> float -> t -> t
val mem : string -> t -> bool
val union : t -> t -> t
val of_list : (string * float) list -> t
val empty : t
val usual : t
end
(** Variable environment.
{!Env.usual} contains [pi] and [e].
*)
exception Unknown_variable of string
val eval : Env.t -> string t -> float
(** Evaluate a tree in the given environment.
@raise Unknown_variable if a variable is not defined in the environment.
*)
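(* Illustrative sketch of evaluating [3*x + sin 2] with [x] bound to [2.]
   (the tree below is hand-built from the constructors above):
   let e = Op2 (Op2 (Float 3., Times, Var "x"), Plus, Op1 (Func sin, Float 2.))
   let v = eval (Env.add "x" 2. Env.usual) e
*)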
val compress : Env.t -> string t -> string t
(** Compress a tree in the given environment, ie. evaluate everything that can be evaluated. *)
(** {3 Some other functions} *)
val eval_custom : ('a -> float) -> 'a t -> float
(** Evaluate a tree, the given function is used to evaluate variables. *)
val compress_custom : ('a -> float option) -> 'a t -> 'a t
(** Compress a tree using the given function, ie. evaluate everything that can be evaluated.
A variable is untouched if the function returns [None].
*)
val bind : ('a -> 'b t) -> 'a t -> 'b t
(** Replace each variables by a subtree. *)
val bind_opt : ('a -> 'a t option) -> 'a t -> 'a t
(** Replace some variables by a subtree. *)
val fold : ('a -> 'b -> 'b) -> 'a t -> 'b -> 'b
(** Depth first left to right traversal of the tree. *)
val map : ('a -> 'b) -> 'a t -> 'b t
(** Change variables representation using the given function. *)
val iter :
?var:('a -> unit) -> ?float:(float -> unit) ->
?op1:(op1 -> unit) -> ?op2:(op2 -> unit) -> 'a t -> unit
(** Iteration on everything. *)
val vars : 'a t -> (('a -> unit) -> unit)
(** Get the sequence of variables in the given tree.
Use with sequence or containers.
*)
val closure :
?env:Env.t ->
string t -> (string * 'a) list -> (('a -> float) -> float)
(** Compress the string in the optional env and return the resulting closure. *)
| null | https://raw.githubusercontent.com/Drup/LILiS/df63fbc3ee77b3378ae1ef27715828c3ad892475/calc/calc.mli | ocaml | * Small library to evaluate simple arithmetic expressions.
* Type of binary operators
* Type of unary operators
* Type of tree which represent an arithmetic expression
* Variable environment.
{! Env.usual } contains [ pi ] and [ e ] .
* Evaluate a tree in the given environment.
@raise Unkown_variable if a variable is not defined in the environment.
* Compress a tree in the given environment, ie. evaluate everything that can be evaluated.
* Evaluate a tree, the given function is used to evaluate variables.
* Compress a tree using the given function, ie. evaluate everything that can be evaluated.
A variable is untouched if the function returns [ None ].
* Replace each variables by a subtree.
* Replace some variables by a subtree.
* Change variables representation using the given function.
* Iteration on everything.
* Get the sequence of variables in the given tree.
Use with sequence or containers.
* Compress the string in the optional env and return the resulting closure. |
*
This library evaluates simple arithmetic expression over floats.
Regular operators (+,-,*,/,^) and some regular functions (sin, cos, tan, asin, acos, atan, log, log10, exp, sqrt) are implemented.
Arithmetic expressions can contain variables.
Here is an example of expression : [ 3*x+sin(2) ].
*)
type op2 = Plus | Minus | Times | Div | Pow
type op1 = Func of (float -> float) | MinusUn ;;
type 'a t =
| Float of float
| Op2 of ('a t) * op2 * ('a t)
| Op1 of op1 * ('a t)
| Var of 'a
module Env : sig
type t
val add : string -> float -> t -> t
val mem : string -> t -> bool
val union : t -> t -> t
val of_list : (string * float) list -> t
val empty : t
val usual : t
end
exception Unknown_variable of string
val eval : Env.t -> string t -> float
val compress : Env.t -> string t -> string t
* {3 Some other functions}
val eval_custom : ('a -> float) -> 'a t -> float
val compress_custom : ('a -> float option) -> 'a t -> 'a t
val bind : ('a -> 'b t) -> 'a t -> 'b t
val bind_opt : ('a -> 'a t option) -> 'a t -> 'a t
val fold : ('a -> 'b -> 'b) -> 'a t -> 'b -> 'b
* Depth first left to right traversal of the tree.
val map : ('a -> 'b) -> 'a t -> 'b t
val iter :
?var:('a -> unit) -> ?float:(float -> unit) ->
?op1:(op1 -> unit) -> ?op2:(op2 -> unit) -> 'a t -> unit
val vars : 'a t -> (('a -> unit) -> unit)
val closure :
?env:Env.t ->
string t -> (string * 'a) list -> (('a -> float) -> float)
|
4cb3d3f924e19ab0982d1f98097de0536dcb82e97c2bb644d243d3d1a9b42704 | kowainik/github-graphql | GitHub.hs | |
Copyright: (c) 2020-2021 Kowainik
SPDX-License-Identifier: MPL-2.0
Maintainer: Kowainik <>
GraphQL bindings to GitHub API.
-}
module GitHub
( -- * Querying GitHub
module GitHub.Query
-- * Top-level queries
, module GitHub.Repository
-- * Queries connections
-- ** Issues
, module GitHub.Issue
, module GitHub.Order
-- ** PullRequests
, module GitHub.PullRequests
-- ** Milestones
, module GitHub.Milestone
-- ** Issue or PR labels
, module GitHub.Label
-- ** GitHub users
, module GitHub.User
-- * Connection
, module GitHub.Connection
-- * Connections fields
-- ** Interfaces
, module GitHub.Author
, module GitHub.Title
-- * Low-level AST fields
, module GitHub.GraphQL
-- * General tools to work with API
-- ** Using lenses to change fields
, module GitHub.Lens
-- * General types
, module GitHub.Id
-- * General utils
, module GitHub.Json
, one
) where
import GitHub.Author
import GitHub.Connection
import GitHub.GraphQL
import GitHub.Id
import GitHub.Issue
import GitHub.Json
import GitHub.Label
import GitHub.Lens
import GitHub.Milestone
import GitHub.Order
import GitHub.PullRequests
import GitHub.Query
import GitHub.Repository
import GitHub.Title
import GitHub.User
import GitHub.Common (one)
| null | https://raw.githubusercontent.com/kowainik/github-graphql/8400190ad443a0454d10efa417b9bef9d434e893/src/GitHub.hs | haskell | * Top-level queries
* Queries connections
** Issues
** Milestones
** Issue or PR labels
* Connection
* Connections fields
** Interfaces
* Low-level AST fields
* General tools to work with API
** Using lenses to change fields
* General types
* General utils | |
Copyright: (c) 2020-2021 Kowainik
SPDX-License-Identifier: MPL-2.0
Maintainer: Kowainik <>
GraphQL bindings to GitHub API.
-}
module GitHub
( -- * Querying GitHub
module GitHub.Query
, module GitHub.Repository
, module GitHub.Issue
, module GitHub.Order
-- ** PullRequests
, module GitHub.PullRequests
, module GitHub.Milestone
, module GitHub.Label
-- ** GitHub users
, module GitHub.User
, module GitHub.Connection
, module GitHub.Author
, module GitHub.Title
, module GitHub.GraphQL
, module GitHub.Lens
, module GitHub.Id
, module GitHub.Json
, one
) where
import GitHub.Author
import GitHub.Connection
import GitHub.GraphQL
import GitHub.Id
import GitHub.Issue
import GitHub.Json
import GitHub.Label
import GitHub.Lens
import GitHub.Milestone
import GitHub.Order
import GitHub.PullRequests
import GitHub.Query
import GitHub.Repository
import GitHub.Title
import GitHub.User
import GitHub.Common (one)
|
922d0639c96955492d5f839b9aee6f31165decd942eb34e4e5c45b492e26f28f | smallhadroncollider/brok | Attoparsec.hs | # LANGUAGE NoImplicitPrelude #
{-# LANGUAGE OverloadedStrings #-}
module Brok.Parser.Attoparsec where
import ClassyPrelude
import Data.Attoparsec.Text
lexeme :: Parser a -> Parser a
lexeme p = skipSpace *> p <* skipSpace
tchar :: Char -> Parser Text
tchar ch = singleton <$> char ch
chopt :: Char -> Parser Text
chopt ch = option "" (tchar ch)
manyChars :: Parser Char -> Parser Text
manyChars p = pack <$> many1 p
concat3 :: (Monoid a) => a -> a -> a -> a
concat3 t1 t2 t3 = concat [t1, t2, t3]
concat5 :: (Monoid a) => a -> a -> a -> a -> a -> a
concat5 t1 t2 t3 t4 t5 = concat [t1, t2, t3, t4, t5]
surround :: Char -> Char -> Parser Text -> Parser Text
surround open close parser = concat3 <$> tchar open <*> parser <*> tchar close
| null | https://raw.githubusercontent.com/smallhadroncollider/brok/bf62288d913af5fc694e683cc247f66426025400/src/Brok/Parser/Attoparsec.hs | haskell | # LANGUAGE OverloadedStrings # | # LANGUAGE NoImplicitPrelude #
module Brok.Parser.Attoparsec where
import ClassyPrelude
import Data.Attoparsec.Text
lexeme :: Parser a -> Parser a
lexeme p = skipSpace *> p <* skipSpace
tchar :: Char -> Parser Text
tchar ch = singleton <$> char ch
chopt :: Char -> Parser Text
chopt ch = option "" (tchar ch)
manyChars :: Parser Char -> Parser Text
manyChars p = pack <$> many1 p
concat3 :: (Monoid a) => a -> a -> a -> a
concat3 t1 t2 t3 = concat [t1, t2, t3]
concat5 :: (Monoid a) => a -> a -> a -> a -> a -> a
concat5 t1 t2 t3 t4 t5 = concat [t1, t2, t3, t4, t5]
surround :: Char -> Char -> Parser Text -> Parser Text
surround open close parser = concat3 <$> tchar open <*> parser <*> tchar close
|
f601a96c4d0092e38c9ab03d25e8f49b18a8bb17193788399ce4a0e6344fc188 | fishcakez/sbroker | sbroker_queue.erl | %%-------------------------------------------------------------------
%%
%% Copyright (c) 2015, <>
%%
%% This file is provided to you under the Apache License,
%% Version 2.0 (the "License"); you may not use this file
%% except in compliance with the License. You may obtain
%% a copy of the License at
%%
%% -2.0
%%
%% Unless required by applicable law or agreed to in writing,
%% software distributed under the License is distributed on an
%% "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
%% KIND, either express or implied. See the License for the
%% specific language governing permissions and limitations
%% under the License.
%%
%%-------------------------------------------------------------------
%% @doc Behaviour for implementing queues for `sbroker' and `sregulator'.
%%
%% A custom queue must implement the `sbroker_queue' behaviour. The first
%% callback is `init/3', which starts the queue:
%% ```
%% -callback init(InternalQueue :: internal_queue(), Time :: integer(),
%% Args :: any()) ->
%% {State :: any(), TimeoutTime :: integer() | infinity}.
%% '''
%% `InternalQueue' is the internal queue of requests, it is a `queue:queue()'
%% with items of the form `{SendTime, From, Value, Reference}'. `SendTime' is
%% the approximate time the request was sent in `native' time units and is
%% always less than or equal to `Time'. `From' is a 2-tuple containing the
%% senders pid and a response tag. `SendTime' and `From' can be used with
%% `drop/3' to drop a request. `Value' is any term, `Reference' is the monitor
%% reference of the sender.
%%
%% `Time' is the time, in `native' time units, of the queue at creation. Some
%% other callbacks will receive the current time of the queue as the second last
%% argument. It is monotonically increasing, so subsequent calls will have the
%% same or a greater time.
%%
%% `Args' is the arguments for the queue. It can be any term.
%%
%% `State' is the state of the queue and used in the next call.
%%
%% `TimeoutTime' represents the next time a queue wishes to call
%% `handle_timeout/2' to drop items. If a message is not received the timeout
%% should occur at or after `TimeoutTime'. The time must be greater than or
%% equal to `Time'. If a queue does not require a timeout then `TimeoutTime'
%% should be `infinity'. The value may be ignored or unavailable in other
%% callbacks if the queue is empty.
%%
%% When inserting a request into the queue, `handle_in/5':
%% ```
%% -callback handle_in(SendTime :: integer(),
%% From :: {Sender :: pid(), Tag :: any()}, Value :: any(),
%% Time :: integer(), State :: any()) ->
%% {NState :: any(), TimeoutTime :: integer() | infinity}.
%% '''
%% The variables are equivalent to those in `init/3', with `NState' being the
%% new state.
%%
%% When removing a request from the queue, `handle_out/2':
%% ```
%% -callback handle_out(Time :: integer(), State :: any()) ->
%% {SendTime :: integer(), From :: {Sender :: pid(), Tag :: any()},
%% Value :: any(), Ref :: reference, NState :: any(),
%% TimeoutTime :: integer() | infinity} |
%% {empty, NState :: any()}.
%% '''
%%
%% The variables are equivalent to those in `init/3', with `NState' being the
%% new state. This callback either returns a single request, added in the
%% `InternalQueue' from `init/3' or enqueued with `handle_in/5'. If the queue is
%% empty an `empty' tuple is returned.
%%
%% When a timeout occurs, `handle_timeout/2':
%% ```
%% -callback handle_timeout(Time :: integer(), State :: any()) ->
%% {NState :: any(), TimeoutTime :: integer() | infinity}.
%% '''
%% The variables are equivalent to those in `init/3', with `NState' being the
%% new state.
%%
%% When cancelling requests, `handle_cancel/3':
%% ```
%% -callback handle_cancel(Tag :: any(), Time :: integer(), State :: any()) ->
%% {Reply :: false | pos_integer(), NState :: any(),
%% TimeoutTime :: integer() | infinity}.
%% '''
%% `Tag' is a response tag, which is part of the `From' tuple passed via
%% `InternalQueue' in `init/3' or directly in `handle_in/5'. There may be
%% multiple requests with the same tag and all should be removed.
%%
%% If no requests are cancelled the `Reply' is `false', otherwise it is the
%% number of cancelled requests.
%%
%% The other variables are equivalent to those in `init/3', with `NState' being
%% the new state.
%%
%% When handling a message, `handle_info/3':
%% ```
%% -callback handle_info(Msg :: any(), Time :: integer(), State :: any()) ->
%% {NState :: any(), TimeoutTime :: integer() | infinity}.
%% '''
%% `Msg' is the message, and may be intended for another queue.
%%
%% The other variables are equivalent to those in `init/3', with `NState' being
%% the new state.
%%
%% When changing the state due to a code change, `code_change/4':
%% ```
%% -callback code_change(OldVsn :: any(), Time :: integer(), State :: any(),
%% Extra :: any()) ->
%% {NState :: any(), TimeoutTime :: integer() | infinity}.
%% '''
%% On an upgrade `OldVsn' is the version the state was created with and on a
%% downgrade is the same form except `{down, OldVsn}'. `OldVsn' is defined by
%% the vsn attribute(s) of the old version of the callback module. If no such
%% attribute is defined, the version is the checksum of the BEAM file. `Extra'
%% is from `{advanced, Extra}' in the update instructions.
%%
%% The other variables are equivalent to those in `init/3', with `NState' being
%% the new state.
%%
%% When changing the configuration of a queue, `config_change/4':
%% ```
%% -callback config_change(Args :: any(), Time :: integer(), State :: any()) ->
%% {NState :: any(), TimeoutTime :: integer() | infinity}.
%% '''
%% The variables are equivalent to those in `init/3', with `NState' being the
%% new state.
%%
%% When returning the number of queued requests, `len/1':
%% ```
%% -callback len(State :: any()) -> Len :: non_neg_integer().
%% '''
%% `State' is the current state of the queue and `Len' is the number of queued
%% requests. This callback must be idempotent and so not drop any requests.
%%
%% When returning the send time of the oldest request in the queue,
%% `send_time/1':
%% ```
%% -callback send_time(State :: any()) -> SendTime :: integer() | empty.
%% '''
%% `State' is the current state of the queue and `SendTime' is the send time of
%% the oldest request, if no requests then `empty'. This callback must be
%% idempotent and so not drop any requests.
%%
%% When cleaning up the queue, `terminate/2':
%% ```
%% -callback terminate(Reason :: sbroker_handlers:reason(), State :: any()) ->
%% InternalQueue :: internal_queue().
%% '''
%% `Reason' is `stop' if the queue is being shutdown, `change' if the queue is
%% being replaced by another queue, `{bad_return_value, Return}' if a previous
%% callback returned an invalid term or `{Class, Reason, Stack}' if a previous
%% callback raised an exception.
%%
%% `State' is the current state of the queue.
%%
%% `InternalQueue' is the same as `init/3' and is passed to the next queue if
%% `Reason' is `change'.
%%
%% The process controlling the queue may not be terminating with the queue and
%% so `terminate/2' should do any clean up required.
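%%
%% A minimal sketch of a callback module (illustrative only, not part of this
%% application; a real queue would normally also enforce a maximum length and
%% drop requests with `sbroker_queue:drop/3'):
%% ```
%% -module(my_fifo_queue).
%% -behaviour(sbroker_queue).
%% -export([init/3, handle_in/5, handle_out/2, handle_timeout/2,
%%          handle_cancel/3, handle_info/3, code_change/4, config_change/3,
%%          len/1, send_time/1, terminate/2]).
%%
%% init(Q, _Time, _Args) -> {Q, infinity}.
%%
%% handle_in(SendTime, {Pid, _} = From, Value, _Time, Q) ->
%%     Ref = monitor(process, Pid),
%%     {queue:in({SendTime, From, Value, Ref}, Q), infinity}.
%%
%% handle_out(_Time, Q) ->
%%     case queue:out(Q) of
%%         {empty, NQ} -> {empty, NQ};
%%         {{value, {SendTime, From, Value, Ref}}, NQ} ->
%%             {SendTime, From, Value, Ref, NQ, infinity}
%%     end.
%%
%% handle_timeout(_Time, Q) -> {Q, infinity}.
%%
%% handle_cancel(Tag, _Time, Q) ->
%%     Keep = fun({_, {_, Tag2}, _, Ref}) when Tag2 =:= Tag ->
%%                    demonitor(Ref, [flush]), false;
%%               (_) -> true
%%            end,
%%     NQ = queue:filter(Keep, Q),
%%     case queue:len(Q) - queue:len(NQ) of
%%         0 -> {false, NQ, infinity};
%%         N -> {N, NQ, infinity}
%%     end.
%%
%% handle_info({'DOWN', Ref, _, _, _}, _Time, Q) ->
%%     {queue:filter(fun({_, _, _, Ref2}) -> Ref2 =/= Ref end, Q), infinity};
%% handle_info(_Msg, _Time, Q) -> {Q, infinity}.
%%
%% code_change(_OldVsn, _Time, Q, _Extra) -> {Q, infinity}.
%% config_change(_Args, _Time, Q) -> {Q, infinity}.
%% len(Q) -> queue:len(Q).
%% send_time(Q) ->
%%     case queue:peek(Q) of
%%         empty -> empty;
%%         {value, {SendTime, _, _, _}} -> SendTime
%%     end.
%% terminate(_Reason, Q) -> Q.
%% '''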
-module(sbroker_queue).
-behaviour(sbroker_handlers).
%% public api
-export([drop/3]).
%% sbroker_handlers api
-export([initial_state/0]).
-export([init/5]).
-export([code_change/6]).
-export([config_change/5]).
-export([terminate/3]).
%% types
-type internal_queue() ::
queue:queue({integer(), {pid(), any()}, any(), reference()}).
-export_type([internal_queue/0]).
-callback init(Q :: internal_queue(), Time :: integer(), Args :: any()) ->
{State :: any(), TimeoutTime :: integer() | infinity}.
-callback handle_in(SendTime :: integer(),
From :: {Sender :: pid(), Tag :: any()}, Value :: any(),
Time :: integer(), State :: any()) ->
{NState :: any(), TimeoutTime :: integer() | infinity}.
-callback handle_out(Time :: integer(), State :: any()) ->
{SendTime :: integer(), From :: {pid(), Tag :: any()}, Value :: any(),
Ref :: reference(), NState :: any(), TimeoutTime :: integer() | infinity} |
{empty, NState :: any()}.
-callback handle_timeout(Time :: integer(), State :: any()) ->
{NState :: any(), TimeoutTime :: integer() | infinity}.
-callback handle_cancel(Tag :: any(), Time :: integer(), State :: any()) ->
{Reply :: false | pos_integer(), NState :: any(),
TimeoutTime :: integer() | infinity}.
-callback handle_info(Msg :: any(), Time :: integer(), State :: any()) ->
{NState :: any(), TimeoutTime :: integer() | infinity}.
-callback code_change(OldVsn :: any(), Time :: integer(), State :: any(),
Extra :: any()) ->
{NState :: any(), TimeoutTime :: integer() | infinity}.
-callback config_change(Args :: any(), Time :: integer(), State :: any()) ->
{NState :: any(), TimeoutTime :: integer() | infinity}.
-callback len(State :: any()) -> Len :: non_neg_integer().
-callback send_time(State :: any()) -> SendTime :: integer() | empty.
-callback terminate(Reason :: sbroker_handlers:reason(), State :: any()) ->
Q :: internal_queue().
%% public api
%% @doc Drop a request from `From', sent at `SendTime' from the queue.
%%
%% Call `drop/3' when dropping a request from a queue.
-spec drop(From, SendTime, Time) -> ok when
From :: {pid(), Tag :: any()},
SendTime :: integer(),
Time :: integer().
drop(From, SendTime, Time) ->
_ = gen:reply(From, {drop, Time-SendTime}),
ok.
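%% For example, a queue callback that decides to shed its oldest request would
%% typically (sketch) remove the item from its internal queue, call
%% `demonitor(Ref, [flush])' and then `sbroker_queue:drop(From, SendTime, Time)'.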
%% sbroker_handlers api
%% @private
-spec initial_state() -> Q when
Q :: internal_queue().
initial_state() ->
queue:new().
%% @private
-spec init(Module, Q, Send, Time, Args) -> {State, TimeoutTime} when
Module :: module(),
Q :: internal_queue(),
Send :: integer(),
Time :: integer(),
Args :: any(),
State :: any(),
TimeoutTime :: integer() | infinity.
init(Mod, Q, _, Now, Args) ->
Mod:init(Q, Now, Args).
%% @private
-spec code_change(Module, OldVsn, Send, Time, State, Extra) ->
{NState, TimeoutTime} when
Module :: module(),
OldVsn :: any(),
Send :: integer(),
Time :: integer(),
State :: any(),
Extra :: any(),
NState :: any(),
TimeoutTime :: integer() | infinity.
code_change(Mod, OldVsn, _, Time, State, Extra) ->
Mod:code_change(OldVsn, Time, State, Extra).
%% @private
-spec config_change(Module, Args, Send, Time, State) ->
{NState, TimeoutTime} when
Module :: module(),
Args :: any(),
Send :: integer(),
Time :: integer(),
State :: any(),
NState :: any(),
TimeoutTime :: integer() | infinity.
config_change(Mod, Args, _, Now, State) ->
Mod:config_change(Args, Now, State).
%% @private
-spec terminate(Module, Reason, State) -> Q when
Module :: module(),
Reason :: sbroker_handlers:reason(),
State :: any(),
Q :: internal_queue().
terminate(Mod, Reason, State) ->
Q = Mod:terminate(Reason, State),
case queue:is_queue(Q) of
true -> Q;
false -> exit({bad_return_value, Q})
end.
| null | https://raw.githubusercontent.com/fishcakez/sbroker/10f7e3970d0a296fbf08b1d1a94c88979a7deb5e/src/sbroker_queue.erl | erlang | -------------------------------------------------------------------
Version 2.0 (the "License"); you may not use this file
except in compliance with the License. You may obtain
a copy of the License at
-2.0
Unless required by applicable law or agreed to in writing,
KIND, either express or implied. See the License for the
specific language governing permissions and limitations
under the License.
-------------------------------------------------------------------
@doc Behaviour for implementing queues for `sbroker' and `sregulator'.
callback is `init/3', which starts the queue:
```
'''
the approximate time the request was sent in `native' time units and is
senders pid and a response tag. `SendTime' and `From' can be used with
`drop/3' to drop a request. `Value' is any term, `Reference' is the monitor
reference of the sender.
`Time' is the time, in `native' time units, of the queue at creation. Some
argument. It is monotically increasing, so subsequent calls will have the
same or a greater time.
should be `infinity'. The value may be ignored or unavailable in other
callbacks if the queue is empty.
```
-callback handle_in(SendTime :: integer(),
From :: {Sender :: pid(), Tag :: any()}, Value :: any(),
'''
new state.
```
{SendTime :: integer(), From :: {Sender :: pid(), Tag :: any()},
'''
new state. This callback either returns a single request, added in the
empty an `empty' tuple is returned.
```
'''
new state.
When cancelling requests, `handle_cancel/3':
```
'''
`Tag' is a response tag, which is part of the `From' tuple passed via
multiple requests with the same tag and all should be removed.
If no requests are cancelled the `Reply' is `false', otherwise it is the
number of cancelled requests.
the new state.
When handling a message, `handle_info/3':
```
'''
`Msg' is the message, and may be intended for another queue.
the new state.
When changing the state due to a code change, `code_change/4':
```
Extra :: any()) ->
'''
the vsn attribute(s) of the old version of the callback module. If no such
is from `{advanced, Extra}' in the update instructions.
the new state.
When changing the configuration of a queue, `config_change/4':
```
'''
new state.
When returning the number of queued requests, `len/1':
```
-callback len(State :: any()) -> Len :: non_neg_integer().
'''
requests. This callback must be idempotent and so not drop any requests.
When returning the send time of the oldest request in the queue,
`send_time/1':
```
-callback send_time(State :: any()) -> SendTime :: integer() | empty.
'''
the oldest request, if not requests then `empty'. This callback must be
idempotent and so not drop any requests.
When cleaning up the queue, `terminate/2':
```
'''
`Reason' is `stop' if the queue is being shutdown, `change' if the queue is
being replaced by another queue, `{bad_return_value, Return}' if a previous
callback returned an invalid term or `{Class, Reason, Stack}' if a previous
callback raised an exception.
`Reason' is `change'.
The process controlling the queue may not be terminating with the queue and
so `terminate/2' should do any clean up required.
public api
sbroker_handlers api
types
public api
Call `drop/3' when dropping a request from a queue.
sbroker_handlers api | Copyright ( c ) 2015 , < >
This file is provided to you under the Apache License ,
software distributed under the License is distributed on an
" AS IS " BASIS , WITHOUT WARRANTIES OR CONDITIONS OF ANY
A custom queue must implement the ` sbroker_queue ' behaviour . The first
-callback init(InternalQueue : : internal_queue ( ) , Time : : integer ( ) ,
: : any ( ) ) - >
{ State : : any ( ) , TimeoutTime : : integer ( ) | infinity } .
` InternalQueue ' is the internal queue of requests , it is a ` queue : queue ( ) '
with items of the form ` { SendTime , From , Value , Reference } ' . ` SendTime ' is
always less than or equal to ` Time' . `From ' is the a 2 - tuple containing the
other callbacks will receive the current time of the queue as the second last
` ' is the arguments for the queue . It can be any term .
` State ' is the state of the queue and used in the next call .
` TimeoutTime ' represents the next time a queue wishes to call
` ' to drop items . If a message is not received the timeout
should occur at or after ` TimeoutTime ' . The time must be greater than or
equal to ` Time ' . If a queue does not require a timeout then ` TimeoutTime '
When inserting a request into the queue , ` ' :
Time : : integer ( ) , State : : any ( ) ) - >
{ NState : : any ( ) , TimeoutTime : : integer ( ) | infinity } .
The variables are equivalent to those in ` init/3 ' , with ` NState ' being the
When removing a request from the queue , ` handle_out/2 ' :
-callback handle_out(Time : : integer ( ) , State : : any ( ) ) - >
Value : : any ( ) , Ref : : reference , NState : : any ( ) ,
TimeoutTime : : integer ( ) | infinity } |
{ empty , NState : : any ( ) } .
The variables are equivalent to those in ` init/3 ' , with ` NState ' being the
` InternalQueue ' from ` init/3 ' or enqueued with ` ' . If the queue is
When a timeout occurs , ` ' :
-callback handle_timeout(Time : : integer ( ) , State : : any ( ) ) - >
{ NState : : any ( ) , TimeoutTime : : integer ( ) | infinity } .
The variables are equivalent to those in ` init/3 ' , with ` NState ' being the
-callback handle_cancel(Tag : : any ( ) , Time : : integer ( ) , State : : any ( ) ) - >
{ Reply : : false | ( ) , NState : : any ( ) ,
TimeoutTime : : integer ( ) | infinity } .
` InternalQueue ' in ` init/3 ' or directly in ` ' . There may be
The other variables are equivalent to those in ` init/3 ' , with ` NState ' being
-callback handle_info(Msg : : any ( ) , Time : : integer ( ) , State : : any ( ) ) - >
{ NState : : any ( ) , TimeoutTime : : integer ( ) | infinity } .
The other variables are equivalent to those in ` init/3 ' , with ` NState ' being
-callback code_change(OldVsn : : any ( ) , Time : : integer ( ) , State : : any ( ) ,
{ NState : : any ( ) , TimeoutTime : : integer ( ) | infinity } .
On an upgrade ` OldVsn ' is version the state was created with and on an
downgrade is the same form except ` { down , OldVsn } ' . ` OldVsn ' is defined by
attribute is defined , the version is the checksum of the BEAM file . ` Extra '
The other variables are equivalent to those in ` init/3 ' , with ` NState ' being
-callback config_change(Args : : any ( ) , Time : : integer ( ) , State : : any ( ) ) - >
{ NState : : any ( ) , TimeoutTime : : integer ( ) | infinity } .
The variables are equivalent to those in ` init/3 ' , with ` NState ' being the
` State ' is the current state of the queue and ` Len ' is the number of queued
` State ' is the current state of the queue and ` SendTime ' is the send time of
-callback terminate(Reason : : sbroker_handlers : reason ( ) , State : : any ( ) ) - >
InternalQueue : : internal_queue ( ) .
` State ' is the current state of the queue .
` InternalQueue ' is the same as ` init/3 ' and is passed to the next queue if
-module(sbroker_queue).
-behaviour(sbroker_handlers).
-export([drop/3]).
-export([initial_state/0]).
-export([init/5]).
-export([code_change/6]).
-export([config_change/5]).
-export([terminate/3]).
-type internal_queue() ::
queue:queue({integer(), {pid(), any()}, any(), reference()}).
-export_type([internal_queue/0]).
-callback init(Q :: internal_queue(), Time :: integer(), Args :: any()) ->
{State :: any(), TimeoutTime :: integer() | infinity}.
-callback handle_in(SendTime :: integer(),
From :: {Sender :: pid(), Tag :: any()}, Value :: any(),
Time :: integer(), State :: any()) ->
{NState :: any(), TimeoutTime :: integer() | infinity}.
-callback handle_out(Time :: integer(), State :: any()) ->
{SendTime :: integer(), From :: {pid(), Tag :: any()}, Value :: any(),
Ref :: reference(), NState :: any(), TimeoutTime :: integer() | infinity} |
{empty, NState :: any()}.
-callback handle_timeout(Time :: integer(), State :: any()) ->
{NState :: any(), TimeoutTime :: integer() | infinity}.
-callback handle_cancel(Tag :: any(), Time :: integer(), State :: any()) ->
{Reply :: false | pos_integer(), NState :: any(),
TimeoutTime :: integer() | infinity}.
-callback handle_info(Msg :: any(), Time :: integer(), State :: any()) ->
{NState :: any(), TimeoutTime :: integer() | infinity}.
-callback code_change(OldVsn :: any(), Time :: integer(), State :: any(),
Extra :: any()) ->
{NState :: any(), TimeoutTime :: integer() | infinity}.
-callback config_change(Args :: any(), Time :: integer(), State :: any()) ->
{NState :: any(), TimeoutTime :: integer() | infinity}.
-callback len(State :: any()) -> Len :: non_neg_integer().
-callback send_time(State :: any()) -> SendTime :: integer() | empty.
-callback terminate(Reason :: sbroker_handlers:reason(), State :: any()) ->
Q :: internal_queue().
@doc Drop a request from ` From ' , sent at ` SendTime ' from the queue .
-spec drop(From, SendTime, Time) -> ok when
From :: {pid(), Tag :: any()},
SendTime :: integer(),
Time :: integer().
drop(From, SendTime, Time) ->
_ = gen:reply(From, {drop, Time-SendTime}),
ok.
@private
-spec initial_state() -> Q when
Q :: internal_queue().
initial_state() ->
queue:new().
@private
-spec init(Module, Q, Send, Time, Args) -> {State, TimeoutTime} when
Module :: module(),
Q :: internal_queue(),
Send :: integer(),
Time :: integer(),
Args :: any(),
State :: any(),
TimeoutTime :: integer() | infinity.
init(Mod, Q, _, Now, Args) ->
Mod:init(Q, Now, Args).
@private
-spec code_change(Module, OldVsn, Send, Time, State, Extra) ->
{NState, TimeoutTime} when
Module :: module(),
OldVsn :: any(),
Send :: integer(),
Time :: integer(),
State :: any(),
Extra :: any(),
NState :: any(),
TimeoutTime :: integer() | infinity.
code_change(Mod, OldVsn, _, Time, State, Extra) ->
Mod:code_change(OldVsn, Time, State, Extra).
@private
-spec config_change(Module, Args, Send, Time, State) ->
{NState, TimeoutTime} when
Module :: module(),
Args :: any(),
Send :: integer(),
Time :: integer(),
State :: any(),
NState :: any(),
TimeoutTime :: integer() | infinity.
config_change(Mod, Args, _, Now, State) ->
Mod:config_change(Args, Now, State).
@private
-spec terminate(Module, Reason, State) -> Q when
Module :: module(),
Reason :: sbroker_handlers:reason(),
State :: any(),
Q :: internal_queue().
terminate(Mod, Reason, State) ->
Q = Mod:terminate(Reason, State),
case queue:is_queue(Q) of
true -> Q;
false -> exit({bad_return_value, Q})
end.
|
0e4ee768c9daafa8cbe0483d73eb039f1858d0ea002673979dca6a19417cead2 | brendanhay/amazonka | GeoMatchStatement.hs | # LANGUAGE DeriveGeneric #
# LANGUAGE DuplicateRecordFields #
# LANGUAGE NamedFieldPuns #
{-# LANGUAGE OverloadedStrings #-}
# LANGUAGE RecordWildCards #
{-# LANGUAGE StrictData #-}
# LANGUAGE NoImplicitPrelude #
# OPTIONS_GHC -fno - warn - unused - imports #
# OPTIONS_GHC -fno - warn - unused - matches #
Derived from AWS service descriptions , licensed under Apache 2.0 .
-- |
-- Module : Amazonka.WAFV2.Types.GeoMatchStatement
-- Copyright : (c) 2013-2023
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : <brendan.g.hay+>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
module Amazonka.WAFV2.Types.GeoMatchStatement where
import qualified Amazonka.Core as Core
import qualified Amazonka.Core.Lens.Internal as Lens
import qualified Amazonka.Data as Data
import qualified Amazonka.Prelude as Prelude
import Amazonka.WAFV2.Types.CountryCode
import Amazonka.WAFV2.Types.ForwardedIPConfig
-- | A rule statement that labels web requests by country and region and that
-- matches against web requests based on country code. A geo match rule
-- labels every request that it inspects regardless of whether it finds a
-- match.
--
-- - To manage requests only by country, you can use this statement by
-- itself and specify the countries that you want to match against in
-- the @CountryCodes@ array.
--
-- - Otherwise, configure your geo match rule with Count action so that
-- it only labels requests. Then, add one or more label match rules to
-- run after the geo match rule and configure them to match against the
-- geographic labels and handle the requests as needed.
--
-- WAF labels requests using the alpha-2 country and region codes from the
-- International Organization for Standardization (ISO) 3166 standard. WAF
-- determines the codes using either the IP address in the web request
-- origin or, if you specify it, the address in the geo match
--
-- If you use the web request origin, the label formats are
-- @awswaf:clientip:geo:region:\<ISO country code>-\<ISO region code>@ and
-- @awswaf:clientip:geo:country:\<ISO country code>@.
--
-- If you use a forwarded IP address, the label formats are
-- @awswaf:forwardedip:geo:region:\<ISO country code>-\<ISO region code>@
-- and @awswaf:forwardedip:geo:country:\<ISO country code>@.
--
-- For additional details, see
-- <-rule-statement-type-geo-match.html Geographic match rule statement>
-- in the
-- <-chapter.html WAF Developer Guide>.
--
-- /See:/ 'newGeoMatchStatement' smart constructor.
data GeoMatchStatement = GeoMatchStatement'
{ -- | An array of two-character country codes that you want to match against,
-- for example, @[ \"US\", \"CN\" ]@, from the alpha-2 country ISO codes of
-- the ISO 3166 international standard.
--
-- When you use a geo match statement just for the region and country
-- labels that it adds to requests, you still have to supply a country code
-- for the rule to evaluate. In this case, you configure the rule to only
-- count matching requests, but it will still generate logging and count
-- metrics for any matches. You can reduce the logging and metrics that the
-- rule produces by specifying a country that\'s unlikely to be a source of
-- traffic to your site.
countryCodes :: Prelude.Maybe (Prelude.NonEmpty CountryCode),
-- | The configuration for inspecting IP addresses in an HTTP header that you
-- specify, instead of using the IP address that\'s reported by the web
-- request origin. Commonly, this is the X-Forwarded-For (XFF) header, but
-- you can specify any header name.
--
-- If the specified header isn\'t present in the request, WAF doesn\'t
-- apply the rule to the web request at all.
forwardedIPConfig :: Prelude.Maybe ForwardedIPConfig
}
deriving (Prelude.Eq, Prelude.Read, Prelude.Show, Prelude.Generic)
-- |
-- Create a value of 'GeoMatchStatement' with all optional fields omitted.
--
-- Use <-lens generic-lens> or <optics> to modify other optional fields.
--
-- The following record fields are available, with the corresponding lenses provided
-- for backwards compatibility:
--
-- 'countryCodes', 'geoMatchStatement_countryCodes' - An array of two-character country codes that you want to match against,
-- for example, @[ \"US\", \"CN\" ]@, from the alpha-2 country ISO codes of
-- the ISO 3166 international standard.
--
-- When you use a geo match statement just for the region and country
-- labels that it adds to requests, you still have to supply a country code
-- for the rule to evaluate. In this case, you configure the rule to only
-- count matching requests, but it will still generate logging and count
-- metrics for any matches. You can reduce the logging and metrics that the
-- rule produces by specifying a country that\'s unlikely to be a source of
-- traffic to your site.
--
-- 'forwardedIPConfig', 'geoMatchStatement_forwardedIPConfig' - The configuration for inspecting IP addresses in an HTTP header that you
-- specify, instead of using the IP address that\'s reported by the web
-- request origin. Commonly, this is the X-Forwarded-For (XFF) header, but
-- you can specify any header name.
--
-- If the specified header isn\'t present in the request, WAF doesn\'t
-- apply the rule to the web request at all.
newGeoMatchStatement ::
GeoMatchStatement
newGeoMatchStatement =
GeoMatchStatement'
{ countryCodes = Prelude.Nothing,
forwardedIPConfig = Prelude.Nothing
}
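-- A minimal construction sketch (the @CountryCode_US@ pattern and the
-- 'Prelude.NonEmpty' literal are assumptions for illustration, not taken from
-- this module):
--
-- > usOnly :: GeoMatchStatement
-- > usOnly =
-- >   newGeoMatchStatement
-- >     {countryCodes = Just (CountryCode_US Prelude.:| [])}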
-- | An array of two-character country codes that you want to match against,
-- for example, @[ \"US\", \"CN\" ]@, from the alpha-2 country ISO codes of
-- the ISO 3166 international standard.
--
-- When you use a geo match statement just for the region and country
-- labels that it adds to requests, you still have to supply a country code
-- for the rule to evaluate. In this case, you configure the rule to only
-- count matching requests, but it will still generate logging and count
-- metrics for any matches. You can reduce the logging and metrics that the
-- rule produces by specifying a country that\'s unlikely to be a source of
-- traffic to your site.
geoMatchStatement_countryCodes :: Lens.Lens' GeoMatchStatement (Prelude.Maybe (Prelude.NonEmpty CountryCode))
geoMatchStatement_countryCodes = Lens.lens (\GeoMatchStatement' {countryCodes} -> countryCodes) (\s@GeoMatchStatement' {} a -> s {countryCodes = a} :: GeoMatchStatement) Prelude.. Lens.mapping Lens.coerced
-- | The configuration for inspecting IP addresses in an HTTP header that you
-- specify, instead of using the IP address that\'s reported by the web
-- request origin. Commonly, this is the X-Forwarded-For (XFF) header, but
-- you can specify any header name.
--
-- If the specified header isn\'t present in the request, WAF doesn\'t
-- apply the rule to the web request at all.
geoMatchStatement_forwardedIPConfig :: Lens.Lens' GeoMatchStatement (Prelude.Maybe ForwardedIPConfig)
geoMatchStatement_forwardedIPConfig = Lens.lens (\GeoMatchStatement' {forwardedIPConfig} -> forwardedIPConfig) (\s@GeoMatchStatement' {} a -> s {forwardedIPConfig = a} :: GeoMatchStatement)
instance Data.FromJSON GeoMatchStatement where
parseJSON =
Data.withObject
"GeoMatchStatement"
( \x ->
GeoMatchStatement'
Prelude.<$> (x Data..:? "CountryCodes")
Prelude.<*> (x Data..:? "ForwardedIPConfig")
)
instance Prelude.Hashable GeoMatchStatement where
hashWithSalt _salt GeoMatchStatement' {..} =
_salt `Prelude.hashWithSalt` countryCodes
`Prelude.hashWithSalt` forwardedIPConfig
instance Prelude.NFData GeoMatchStatement where
rnf GeoMatchStatement' {..} =
Prelude.rnf countryCodes
`Prelude.seq` Prelude.rnf forwardedIPConfig
instance Data.ToJSON GeoMatchStatement where
toJSON GeoMatchStatement' {..} =
Data.object
( Prelude.catMaybes
[ ("CountryCodes" Data..=) Prelude.<$> countryCodes,
("ForwardedIPConfig" Data..=)
Prelude.<$> forwardedIPConfig
]
)
| null | https://raw.githubusercontent.com/brendanhay/amazonka/09f52b75d2cfdff221b439280d3279d22690d6a6/lib/services/amazonka-wafv2/gen/Amazonka/WAFV2/Types/GeoMatchStatement.hs | haskell | # LANGUAGE OverloadedStrings #
# LANGUAGE StrictData #
|
Module : Amazonka.WAFV2.Types.GeoMatchStatement
Stability : auto-generated
| A rule statement that labels web requests by country and region and that
matches against web requests based on country code. A geo match rule
labels every request that it inspects regardless of whether it finds a
match.
- To manage requests only by country, you can use this statement by
itself and specify the countries that you want to match against in
the @CountryCodes@ array.
- Otherwise, configure your geo match rule with Count action so that
run after the geo match rule and configure them to match against the
geographic labels and handle the requests as needed.
determines the codes using either the IP address in the web request
origin or, if you specify it, the address in the geo match
If you use the web request origin, the label formats are
If you use a forwarded IP address, the label formats are
@awswaf:forwardedip:geo:region:\<ISO country code>-\<ISO region code>@
For additional details, see
<-rule-statement-type-geo-match.html Geographic match rule statement>
in the
<-chapter.html WAF Developer Guide>.
/See:/ 'newGeoMatchStatement' smart constructor.
the ISO 3166 international standard.
When you use a geo match statement just for the region and country
labels that it adds to requests, you still have to supply a country code
for the rule to evaluate. In this case, you configure the rule to only
count matching requests, but it will still generate logging and count
metrics for any matches. You can reduce the logging and metrics that the
rule produces by specifying a country that\'s unlikely to be a source of
traffic to your site.
| The configuration for inspecting IP addresses in an HTTP header that you
specify, instead of using the IP address that\'s reported by the web
you can specify any header name.
apply the rule to the web request at all.
|
Create a value of 'GeoMatchStatement' with all optional fields omitted.
The following record fields are available, with the corresponding lenses provided
for backwards compatibility:
the ISO 3166 international standard.
When you use a geo match statement just for the region and country
labels that it adds to requests, you still have to supply a country code
for the rule to evaluate. In this case, you configure the rule to only
count matching requests, but it will still generate logging and count
metrics for any matches. You can reduce the logging and metrics that the
rule produces by specifying a country that\'s unlikely to be a source of
traffic to your site.
specify, instead of using the IP address that\'s reported by the web
you can specify any header name.
apply the rule to the web request at all.
the ISO 3166 international standard.
When you use a geo match statement just for the region and country
labels that it adds to requests, you still have to supply a country code
for the rule to evaluate. In this case, you configure the rule to only
count matching requests, but it will still generate logging and count
metrics for any matches. You can reduce the logging and metrics that the
rule produces by specifying a country that\'s unlikely to be a source of
traffic to your site.
| The configuration for inspecting IP addresses in an HTTP header that you
specify, instead of using the IP address that\'s reported by the web
you can specify any header name.
apply the rule to the web request at all. | # LANGUAGE DeriveGeneric #
# LANGUAGE DuplicateRecordFields #
# LANGUAGE NamedFieldPuns #
# LANGUAGE RecordWildCards #
# LANGUAGE NoImplicitPrelude #
# OPTIONS_GHC -fno - warn - unused - imports #
# OPTIONS_GHC -fno - warn - unused - matches #
Derived from AWS service descriptions , licensed under Apache 2.0 .
Copyright : ( c ) 2013 - 2023
License : Mozilla Public License , v. 2.0 .
Maintainer : < brendan.g.hay+ >
Portability : non - portable ( GHC extensions )
module Amazonka.WAFV2.Types.GeoMatchStatement where
import qualified Amazonka.Core as Core
import qualified Amazonka.Core.Lens.Internal as Lens
import qualified Amazonka.Data as Data
import qualified Amazonka.Prelude as Prelude
import Amazonka.WAFV2.Types.CountryCode
import Amazonka.WAFV2.Types.ForwardedIPConfig
it only labels requests . Then , add one or more label match rules to
WAF labels requests using the alpha-2 country and region codes from the
International Organization for Standardization ( ISO ) 3166 standard . WAF
: clientip : geo : region:\<ISO country code>-\<ISO region code>@ and
: clientip : geo : country:\<ISO country code>@.
and : forwardedip : geo : country:\<ISO country code>@.
data GeoMatchStatement = GeoMatchStatement'
| An array of two - character country codes that you want to match against ,
for example , @ [ \"US\ " , \"CN\ " ] @ , from the alpha-2 country ISO codes of
countryCodes :: Prelude.Maybe (Prelude.NonEmpty CountryCode),
request origin . Commonly , this is the X - Forwarded - For ( XFF ) header , but
If the specified header isn\'t present in the request , WAF doesn\'t
forwardedIPConfig :: Prelude.Maybe ForwardedIPConfig
}
deriving (Prelude.Eq, Prelude.Read, Prelude.Show, Prelude.Generic)
Use < -lens generic - lens > or < optics > to modify other optional fields .
' countryCodes ' , ' geoMatchStatement_countryCodes ' - An array of two - character country codes that you want to match against ,
for example , @ [ \"US\ " , \"CN\ " ] @ , from the alpha-2 country ISO codes of
' forwardedIPConfig ' , ' geoMatchStatement_forwardedIPConfig ' - The configuration for inspecting IP addresses in an HTTP header that you
request origin . Commonly , this is the X - Forwarded - For ( XFF ) header , but
If the specified header isn\'t present in the request , WAF doesn\'t
newGeoMatchStatement ::
GeoMatchStatement
newGeoMatchStatement =
GeoMatchStatement'
{ countryCodes = Prelude.Nothing,
forwardedIPConfig = Prelude.Nothing
}
| An array of two - character country codes that you want to match against ,
for example , @ [ \"US\ " , \"CN\ " ] @ , from the alpha-2 country ISO codes of
geoMatchStatement_countryCodes :: Lens.Lens' GeoMatchStatement (Prelude.Maybe (Prelude.NonEmpty CountryCode))
geoMatchStatement_countryCodes = Lens.lens (\GeoMatchStatement' {countryCodes} -> countryCodes) (\s@GeoMatchStatement' {} a -> s {countryCodes = a} :: GeoMatchStatement) Prelude.. Lens.mapping Lens.coerced
request origin . Commonly , this is the X - Forwarded - For ( XFF ) header , but
If the specified header isn\'t present in the request , WAF doesn\'t
geoMatchStatement_forwardedIPConfig :: Lens.Lens' GeoMatchStatement (Prelude.Maybe ForwardedIPConfig)
geoMatchStatement_forwardedIPConfig = Lens.lens (\GeoMatchStatement' {forwardedIPConfig} -> forwardedIPConfig) (\s@GeoMatchStatement' {} a -> s {forwardedIPConfig = a} :: GeoMatchStatement)
instance Data.FromJSON GeoMatchStatement where
parseJSON =
Data.withObject
"GeoMatchStatement"
( \x ->
GeoMatchStatement'
Prelude.<$> (x Data..:? "CountryCodes")
Prelude.<*> (x Data..:? "ForwardedIPConfig")
)
instance Prelude.Hashable GeoMatchStatement where
hashWithSalt _salt GeoMatchStatement' {..} =
_salt `Prelude.hashWithSalt` countryCodes
`Prelude.hashWithSalt` forwardedIPConfig
instance Prelude.NFData GeoMatchStatement where
rnf GeoMatchStatement' {..} =
Prelude.rnf countryCodes
`Prelude.seq` Prelude.rnf forwardedIPConfig
instance Data.ToJSON GeoMatchStatement where
toJSON GeoMatchStatement' {..} =
Data.object
( Prelude.catMaybes
[ ("CountryCodes" Data..=) Prelude.<$> countryCodes,
("ForwardedIPConfig" Data..=)
Prelude.<$> forwardedIPConfig
]
)
|
02fc8f16ce48da8feb198de5d8cd8c8fef546eec91fc7faa3a5ff93e1301afa7 | acieroid/scala-am | unfringe.scm | (define (unfringe-1 l)
(cond ((null? l) '())
((null? (cdr l)) (list (car l)))
(else (list (car l)
(unfringe-1 (cdr l))))))
(define (unfringe-2 l)
(define (pair l)
(cond ((null? l) '())
((null? (cdr l)) (list l))
(else (cons (list (car l) (cadr l))
(pair (cddr l))))))
(let loop ((l l))
(if (or (null? l)
(null? (cdr l)))
l
(loop (pair l)))))
(and (equal? (unfringe-1 '(1 2 3 4 5 6 7 8 9)) '(1 (2 (3 (4 (5 (6 (7 (8 (9))))))))))
(equal? (unfringe-2 '(1 2 3 4 5 6 7 8 9)) '(((((1 2) (3 4)) ((5 6) (7 8))) (((9))))))) | null | https://raw.githubusercontent.com/acieroid/scala-am/13ef3befbfc664b77f31f56847c30d60f4ee7dfe/test/R5RS/scp1/unfringe.scm | scheme | (define (unfringe-1 l)
(cond ((null? l) '())
((null? (cdr l)) (list (car l)))
(else (list (car l)
(unfringe-1 (cdr l))))))
(define (unfringe-2 l)
(define (pair l)
(cond ((null? l) '())
((null? (cdr l)) (list l))
(else (cons (list (car l) (cadr l))
(pair (cddr l))))))
(let loop ((l l))
(if (or (null? l)
(null? (cdr l)))
l
(loop (pair l)))))
(and (equal? (unfringe-1 '(1 2 3 4 5 6 7 8 9)) '(1 (2 (3 (4 (5 (6 (7 (8 (9))))))))))
(equal? (unfringe-2 '(1 2 3 4 5 6 7 8 9)) '(((((1 2) (3 4)) ((5 6) (7 8))) (((9))))))) |
|
9c5a6c8b1edc4286df33403941d0fbef243f50e65971178e100ff26af5757cfe | alex-hhh/data-frame | csv.rkt | #lang racket/base
csv.rkt -- read and write data frames to CVS files
;;
;; This file is part of data-frame -- -hhh/data-frame
;; Copyright (c) 2018, 2021 <>
;;
;; This program is free software: you can redistribute it and/or modify it
;; under the terms of the GNU Lesser General Public License as published by
;; the Free Software Foundation, either version 3 of the License, or (at your
;; option) any later version.
;;
;; This program is distributed in the hope that it will be useful, but WITHOUT
;; ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
;; FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public
;; License for more details.
;;
;; You should have received a copy of the GNU Lesser General Public License
;; along with this program. If not, see </>.
(require racket/contract
racket/format
racket/list
racket/string
"df.rkt"
"series.rkt")
;;............................................................ write-csv ....
;; Quote the string STR, as per CSV rules: the string is enclosed in quotes
;; and any quotes inside the string are doubled.
(define (quote-string str)
(string-append "\"" (string-replace str "\"" "\"\"") "\""))
;; Write in CSV format the data frame DF to the output port OUTP. If SERIES,
;; if non-null, denote the series to be written. If null, all the series are
;; written out in an unspecified order. Rows between START and STOP are
;; written out.
(define (write-csv df outp series #:start start #:stop stop)
(define first? #t)
(define columns (if (null? series) (df-series-names df) series))
(for ([header (in-list columns)])
(if first?
(set! first? #f)
(write-string "," outp))
(write-string (quote-string header) outp))
(newline outp)
(df-for-each
df
columns
(lambda (val)
(define first? #t)
(for ([col (in-list columns)]
[item (in-list val)])
(if first?
(set! first? #f)
(write-string "," outp))
(define oitem
(cond
((df-is-na? df col item) "") ; this is not very fast...
((string? item) (quote-string item))
((real? item)
(~a
(if (exact-integer? item)
item
(exact->inexact item))))
;; otherwise we write in a way that we might be able to read it
;; back... this would work for transparent structs...
(#t (quote-string (~s item)))))
(write-string oitem outp))
(newline outp))
#:start start #:stop stop))
;; Write the data frame DF to OUTP which is either an output port or a string,
;; in which case it is assumed to be a file name. The series to be written
;; out can be specified as the SERIES list. If SERIES is empty, all series
;; are written out as columns in an unspecified order. START and STOP denote
;; the beginning and end rows to be written out, by default all rows are
;; written out.
(define (df-write/csv df outp #:start (start 0) #:stop (stop (df-row-count df)) . series)
(if (path-string? outp)
(call-with-output-file outp
#:mode 'text #:exists 'truncate/replace
(lambda (o)
(write-csv df o series #:start start #:stop stop)))
(write-csv df outp series #:start start #:stop stop)))
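;; Illustrative usage sketch (assumes `df' is a data frame with series
;; "timestamp" and "speed"):
;;
;;   (df-write/csv df "ride.csv" "timestamp" "speed")
;;   (call-with-output-string (lambda (out) (df-write/csv df out)))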
;;............................................................. read-csv ....
(define (->cell data maybe-number? contains-whitespace?)
(define as-string (list->string (reverse data)))
(if maybe-number?
(let ([v (if contains-whitespace? (string-trim as-string) as-string)])
(or (string->number v 10) v))
as-string))
;; Return #t if the character c is possibly part of a number...
(define (number-constituent? c)
(or (char-numeric? c)
(char-punctuation? c)
(char-whitespace? c)
(equal? c #\e)
(equal? c #\E)
(equal? c #\+) ; note that - is punctuation, but + is not...
(equal? c #\i)
(equal? c #\I)))
;; NOTE: returns a list of characters in reverse order
(define (slurp-string in)
(let loop ((current '())
(maybe-number? #t)
(contains-whitespace? #f))
(let ((c (read-char in)))
(cond ((eof-object? c)
(values current maybe-number? contains-whitespace?))
;; NOTE: currently, a return or newline will terminate a string,
;; but it is unclear if this is the right thing to do...
((equal? c #\newline)
;; Recognize #\newline + #\return combinations
(when (equal? (peek-char in) #\return) (read-char in))
(values current maybe-number? contains-whitespace?))
((equal? c #\return)
;; Recognize #\return + #\newline combinations
(when (equal? (peek-char in) #\newline) (read-char in))
(values current maybe-number? contains-whitespace?))
((equal? c #\")
;; Quote ends the string, but only if it is not immediately
;; followed by another quote -- this allows having quotes in
;; strings.
(if (equal? (peek-char in) #\")
(begin
(read-char in) ; consume the next char
(loop (cons c current) maybe-number? contains-whitespace?))
(values current maybe-number? contains-whitespace?)))
(#t
(loop (cons c current)
(and maybe-number? (number-constituent? c))
(or contains-whitespace? (char-whitespace? c))))))))
;; Parse a LINE from a CSV file and return the list of "cells" in it as
;; strings or numbers; also returns the number of cells in the list.  Takes
;; special care that comma characters "," inside strings are correctly
;; handled. Also double quotes inside strings are unquoted.
;;
;; NOTE: cells are produced in reverse order!
(define (parse-line in quoted-numbers?)
(let loop ((current null)
(whitespace-run null)
(row null)
(cell-count 0)
(maybe-number? #t)
(contains-whitespace? #f))
(let ((c (read-char in)))
(cond ((eof-object? c)
(values
(cons (->cell current maybe-number? contains-whitespace?) row)
(add1 cell-count)))
((equal? c #\newline)
;; Recognize #\newline + #\return combinations
(when (equal? (peek-char in) #\return) (read-char in))
(values
(cons (->cell current maybe-number? contains-whitespace?) row)
(add1 cell-count)))
((equal? c #\return)
;; Recognize #\return + #\newline combinations
(when (equal? (peek-char in) #\newline) (read-char in))
(values
(cons (->cell current maybe-number? contains-whitespace?) row)
(add1 cell-count)))
((equal? c #\,)
;; NOTE: will discard last whitespace-run
(loop null
null
(cons (->cell current maybe-number? contains-whitespace?) row)
(add1 cell-count)
#t
#f))
((char-whitespace? c)
(if (null? current)
;; Discard whitespace at the start of the string
(loop current '() row cell-count maybe-number? contains-whitespace?)
(loop current
(cons c whitespace-run)
row
cell-count
maybe-number?
contains-whitespace?)))
((equal? c #\")
(define-values (s m w) (slurp-string in))
(loop (append s whitespace-run current)
'()
row
cell-count
(and quoted-numbers? maybe-number? m)
(or contains-whitespace? w)))
(#t
(loop (cons c (append whitespace-run current))
'()
row
cell-count
(and maybe-number? (number-constituent? c))
(or contains-whitespace? (not (null? whitespace-run)))))))))
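;; For illustration, assuming the sample input below (the line itself is
;; made up), the two returned values are the reversed cell list and the
;; cell count:
;;
;;   (parse-line (open-input-string "1,\"a,b\",3") #t)
;;   ;; => (values '(3 "a,b" 1) 3)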
;; Read a data frame from the INPUT port, by decoding CSV input.  If HEADERS?
;; is true, the first row in INPUT becomes the names of the columns,
;; otherwise, the columns will be named "col1", "col2", etc.  The first row
;; defines the number of columns: if subsequent rows have fewer cells, they
;; are padded with #f, if they have more, they are silently truncated.  NA
;; determines the string that constitutes the "not available" value.
(define (read-csv input headers? na qn?)
(define df (make-data-frame))
(define series #f)
(define na? (if (procedure? na) na (lambda (v) (equal? v na))))
(define (decode cell) (if (na? cell) #f cell))
(unless (eof-object? (peek-char input))
(define-values (first-row-cells series-count) (parse-line input qn?))
(if headers?
(let ((index 1)
(seen-header-names '()))
(set! series
(for/list ([h (reverse first-row-cells)])
;; Gracefully handle series with empty header names
(let ((name (~a (decode h))))
(unless name
(set! name (~a "col" index))
(set! index (add1 index)))
(let loop ([suffix 1]
[seen? (member name seen-header-names)])
(when seen?
(let ([candidate (format "~a (~a)" name suffix)])
(if (member candidate seen-header-names)
(loop (add1 suffix) #t)
(set! name candidate)))))
(set! seen-header-names (cons name seen-header-names))
(make-series name #:capacity 100)))))
(begin
(set! series (for/list ([idx (in-range series-count)])
(make-series (format "col~a" idx) #:capacity 100)))
(for ([s (in-list series)] [v (in-list (reverse first-row-cells))])
(series-push-back s (decode v)))))
(set! series (reverse series))
(let row-loop ()
(unless (eof-object? (peek-char input))
(let-values ([(cells cell-count) (parse-line input qn?)])
;; Normally, a CSV file should have the same number of slots in each
;; line, if there are more slots than series, we discard extra ones,
;; if there is a shortfall, we add #f to the remaining series.
(for ([series (in-list series)]
[cell (in-list (cond ((= series-count cell-count) cells)
((< series-count cell-count)
;; too many cells, problem is they are at
;; the front (remember that cells are in
;; reverse order)
(drop cells (- cell-count series-count)))
(#t
;; Too few cells, we need to pad them out
;; at the front.
(append (make-list (- series-count cell-count) #f) cells))))])
(series-push-back series (decode cell))))
(row-loop)))
(for ((s (in-list series)))
(df-add-series! df s)))
df)
;; Read CSV data in a data frame from the INP which is either a port or a
;; string, in which case it is assumed to be a file name.  If HEADERS? is
;; true, the first row in INPUT becomes the names of the columns, otherwise,
;; the columns will be named "col1", "col2", etc.  The first row defines the
;; number of columns: if subsequent rows have fewer cells, they are padded
;; with #f, if they have more, they are silently truncated.  NA represents the
;; cell value to be replaced by the NA value in the data frame, by default
;; only empty cells are NA values, but this allows specifying an additional
;; string to represent NA values (some CSV exporters use "-" as the not
;; available value).
(define (df-read/csv inp #:headers? (headers? #t) #:na (na "") #:quoted-numbers? (qn? #f))
(if (path-string? inp)
;; not 'text: we might read MAC text files on a Windows machine!
(call-with-input-file inp #:mode 'text
(lambda (i) (read-csv i headers? na qn?)))
(read-csv inp headers? na qn?)))
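;; A minimal usage sketch, assuming a CSV file named "data.csv" with a
;; header row (the file name and the "-" NA marker are placeholders):
(module+ example
  (define df (df-read/csv "data.csv" #:headers? #t #:na "-"))
  (df-write/csv df (current-output-port)))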
;;............................................................. provides ....
(provide/contract
(df-write/csv (->* (data-frame? (or/c path-string? output-port?))
(#:start exact-nonnegative-integer? #:stop exact-nonnegative-integer?)
#:rest (listof string?)
any/c))
(df-read/csv (->* ((or/c path-string? input-port?))
(#:headers? boolean?
#:na (or/c any/c (-> any/c boolean?))
#:quoted-numbers? boolean?)
data-frame?)))
| null | https://raw.githubusercontent.com/alex-hhh/data-frame/b32142ae8ccd206262f1968cfb141926b2c9d8e7/private/csv.rkt | racket |
This file is part of data-frame -- https://github.com/alex-hhh/data-frame
This program is free software: you can redistribute it and/or modify it
under the terms of the GNU Lesser General Public License as published by
option) any later version.
This program is distributed in the hope that it will be useful, but WITHOUT
without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public
License for more details.
along with this program. If not, see <http://www.gnu.org/licenses/>.
............................................................ write-csv ....
and any quotes inside the string are doubled.
if non-null, denote the series to be written. If null, all the series are
written out in an unspecified order. Rows between START and STOP are
written out.
this is not very fast...
otherwise we write in a way that we might be able to read it
back... this would work for transparent structs...
in which case it is assumed to be a file name. The series to be written
out can be specified as the SERIES list. If SERIES is empty, all series
the beginning and end rows to be written out, by default all rows are
written out.
............................................................. read-csv ....
Return #t if the character c is possibly part of a number...
note that - is punctuation, but + is not...
NOTE: returns a list of characters in reverse order
NOTE: currently, a return or newline will terminate a string,
but it is unclear if this is the right thing to do...
Recognize #\return + #\newline combinations
Quote ends the string, but only if it is not immediately
followed by another quote -- this allows having quotes in
strings.
consume the next char
strings or numbers also returns the number of cells in the list. Takes
special care that comma characters "," inside strings are correctly
handled. Also double quotes inside strings are unquoted.
NOTE: cells are produced in reverse order!
Recognize #\return + #\newline combinations
NOTE: will discard last whitespace-run
Discard whitespace at the start of the string
defines the number of columns: if subsequent rows have fewer cells, they
determines the string that constitutes the "not available" value.
Gracefully handle series with empty header names
Normally, a CSV file should have the same number of slots in each
line, if there are more slots than series, we discard extra ones,
if there is a shortfall, we add #f to the remaining series.
too many cells, problem is they are at
the front (remember that cells are in
reverse order)
Too few cells, we need to pad them out
at the front.
string, in which case it is assumed to be a file name. IF HEADERS? is
number of columns: if subsequent rows have fewer cells, they are padded
available value).
............................................................. provides .... | #lang racket/base
;; csv.rkt -- read and write data frames to CSV files
;; Copyright (c) 2018, 2021 < >
;; the Free Software Foundation, either version 3 of the License, or (at your
;; You should have received a copy of the GNU Lesser General Public License
(require racket/contract
racket/format
racket/list
racket/string
"df.rkt"
"series.rkt")
;; Quote the string STR, as per CSV rules: the string is enclosed in quotes
(define (quote-string str)
(string-append "\"" (string-replace str "\"" "\"\"") "\""))
;; Write in CSV format the data frame DF to the output port OUTP.  If SERIES,
(define (write-csv df outp series #:start start #:stop stop)
(define first? #t)
(define columns (if (null? series) (df-series-names df) series))
(for ([header (in-list columns)])
(if first?
(set! first? #f)
(write-string "," outp))
(write-string (quote-string header) outp))
(newline outp)
(df-for-each
df
columns
(lambda (val)
(define first? #t)
(for ([col (in-list columns)]
[item (in-list val)])
(if first?
(set! first? #f)
(write-string "," outp))
(define oitem
(cond
((string? item) (quote-string item))
((real? item)
(~a
(if (exact-integer? item)
item
(exact->inexact item))))
(#t (quote-string (~s item)))))
(write-string oitem outp))
(newline outp))
#:start start #:stop stop))
;; Write the data frame DF to OUTP which is either an output port or a string,
;; are written out as columns in an unspecified order.  START and STOP denote
(define (df-write/csv df outp #:start (start 0) #:stop (stop (df-row-count df)) . series)
(if (path-string? outp)
(call-with-output-file outp
#:mode 'text #:exists 'truncate/replace
(lambda (o)
(write-csv df o series #:start start #:stop stop)))
(write-csv df outp series #:start start #:stop stop)))
(define (->cell data maybe-number? contains-whitespace?)
(define as-string (list->string (reverse data)))
(if maybe-number?
(let ([v (if contains-whitespace? (string-trim as-string) as-string)])
(or (string->number v 10) v))
as-string))
(define (number-constituent? c)
(or (char-numeric? c)
(char-punctuation? c)
(char-whitespace? c)
(equal? c #\e)
(equal? c #\E)
(equal? c #\i)
(equal? c #\I)))
(define (slurp-string in)
(let loop ((current '())
(maybe-number? #t)
(contains-whitespace? #f))
(let ((c (read-char in)))
(cond ((eof-object? c)
(values current maybe-number? contains-whitespace?))
((equal? c #\newline)
;; Recognize #\newline + #\return combinations
(when (equal? (peek-char in) #\return) (read-char in))
(values current maybe-number? contains-whitespace?))
((equal? c #\return)
(when (equal? (peek-char in) #\newline) (read-char in))
(values current maybe-number? contains-whitespace?))
((equal? c #\")
(if (equal? (peek-char in) #\")
(begin
(loop (cons c current) maybe-number? contains-whitespace?))
(values current maybe-number? contains-whitespace?)))
(#t
(loop (cons c current)
(and maybe-number? (number-constituent? c))
(or contains-whitespace? (char-whitespace? c))))))))
;; Parse a LINE from a CSV file and return the list of "cells" in it as
(define (parse-line in quoted-numbers?)
(let loop ((current null)
(whitespace-run null)
(row null)
(cell-count 0)
(maybe-number? #t)
(contains-whitespace? #f))
(let ((c (read-char in)))
(cond ((eof-object? c)
(values
(cons (->cell current maybe-number? contains-whitespace?) row)
(add1 cell-count)))
((equal? c #\newline)
;; Recognize #\newline + #\return combinations
(when (equal? (peek-char in) #\return) (read-char in))
(values
(cons (->cell current maybe-number? contains-whitespace?) row)
(add1 cell-count)))
((equal? c #\return)
(when (equal? (peek-char in) #\newline) (read-char in))
(values
(cons (->cell current maybe-number? contains-whitespace?) row)
(add1 cell-count)))
((equal? c #\,)
(loop null
null
(cons (->cell current maybe-number? contains-whitespace?) row)
(add1 cell-count)
#t
#f))
((char-whitespace? c)
(if (null? current)
(loop current '() row cell-count maybe-number? contains-whitespace?)
(loop current
(cons c whitespace-run)
row
cell-count
maybe-number?
contains-whitespace?)))
((equal? c #\")
(define-values (s m w) (slurp-string in))
(loop (append s whitespace-run current)
'()
row
cell-count
(and quoted-numbers? maybe-number? m)
(or contains-whitespace? w)))
(#t
(loop (cons c (append whitespace-run current))
'()
row
cell-count
(and maybe-number? (number-constituent? c))
(or contains-whitespace? (not (null? whitespace-run)))))))))
;; Read a data frame from the INPUT port, by decoding CSV input.  If HEADERS?
;; is true, the first row in INPUT becomes the names of the columns,
;; otherwise, the columns will be named "col1", "col2", etc.  The first row
;; are padded with #f, if they have more, they are silently truncated.  NA
(define (read-csv input headers? na qn?)
(define df (make-data-frame))
(define series #f)
(define na? (if (procedure? na) na (lambda (v) (equal? v na))))
(define (decode cell) (if (na? cell) #f cell))
(unless (eof-object? (peek-char input))
(define-values (first-row-cells series-count) (parse-line input qn?))
(if headers?
(let ((index 1)
(seen-header-names '()))
(set! series
(for/list ([h (reverse first-row-cells)])
(let ((name (~a (decode h))))
(unless name
(set! name (~a "col" index))
(set! index (add1 index)))
(let loop ([suffix 1]
[seen? (member name seen-header-names)])
(when seen?
(let ([candidate (format "~a (~a)" name suffix)])
(if (member candidate seen-header-names)
(loop (add1 suffix) #t)
(set! name candidate)))))
(set! seen-header-names (cons name seen-header-names))
(make-series name #:capacity 100)))))
(begin
(set! series (for/list ([idx (in-range series-count)])
(make-series (format "col~a" idx) #:capacity 100)))
(for ([s (in-list series)] [v (in-list (reverse first-row-cells))])
(series-push-back s (decode v)))))
(set! series (reverse series))
(let row-loop ()
(unless (eof-object? (peek-char input))
(let-values ([(cells cell-count) (parse-line input qn?)])
(for ([series (in-list series)]
[cell (in-list (cond ((= series-count cell-count) cells)
((< series-count cell-count)
(drop cells (- cell-count series-count)))
(#t
(append (make-list (- series-count cell-count) #f) cells))))])
(series-push-back series (decode cell))))
(row-loop)))
(for ((s (in-list series)))
(df-add-series! df s)))
df)
;; Read CSV data in a data frame from the INP which is either a port or a
;; true, the first row in INPUT becomes the names of the columns, otherwise,
;; the columns will be named "col1", "col2", etc.  The first row defines the
;; with #f, if they have more, they are silently truncated.  NA represents the
;; cell value to be replaced by the NA value in the data frame, by default
;; only empty cells are NA values, but this allows specifying an additional
;; string to represent NA values (some CSV exporters use "-" as the not
(define (df-read/csv inp #:headers? (headers? #t) #:na (na "") #:quoted-numbers? (qn? #f))
(if (path-string? inp)
;; not 'text: we might read MAC text files on a Windows machine!
(call-with-input-file inp #:mode 'text
(lambda (i) (read-csv i headers? na qn?)))
(read-csv inp headers? na qn?)))
(provide/contract
(df-write/csv (->* (data-frame? (or/c path-string? output-port?))
(#:start exact-nonnegative-integer? #:stop exact-nonnegative-integer?)
#:rest (listof string?)
any/c))
(df-read/csv (->* ((or/c path-string? input-port?))
(#:headers? boolean?
#:na (or/c any/c (-> any/c boolean?))
#:quoted-numbers? boolean?)
data-frame?)))
|
b5f1e13d6ef1e7d15f1fdbabe2d4c93ae9a779baaf7ab48a2fee8c7b0a4fb3e1 | Kappa-Dev/KappaTools | widget_export.mli | module Html = Tyxml_js.Html5
type handler =
{ suffix : string;
label: string;
export : string -> unit }
type configuration =
{ id : string ;
handlers : handler list;
show : bool React.signal }
val content:
configuration ->
[< Html_types.div_content_fun > `Form `Table ] Html.elt
val export_png: ?svg_style_id:string -> svg_div_id:string -> unit -> handler
val export_json: serialize_json:(unit -> string) -> handler
val export_svg: ?svg_style_id:string -> svg_div_id:string -> unit -> handler
val export_data_label: configuration -> string
val onload: configuration -> unit
val inline_content: configuration -> [> `Button | `Div | `PCDATA ] Html.elt list
| null | https://raw.githubusercontent.com/Kappa-Dev/KappaTools/fbbfb3e62f9b80b0fb95675a4c1c28c1bd658bfd/gui/widget_export.mli | ocaml | module Html = Tyxml_js.Html5
type handler =
{ suffix : string;
label: string;
export : string -> unit }
type configuration =
{ id : string ;
handlers : handler list;
show : bool React.signal }
val content:
configuration ->
[< Html_types.div_content_fun > `Form `Table ] Html.elt
val export_png: ?svg_style_id:string -> svg_div_id:string -> unit -> handler
val export_json: serialize_json:(unit -> string) -> handler
val export_svg: ?svg_style_id:string -> svg_div_id:string -> unit -> handler
val export_data_label: configuration -> string
val onload: configuration -> unit
val inline_content: configuration -> [> `Button | `Div | `PCDATA ] Html.elt list
|
|
322f73faeff204d37e54aa7e5b1636fd21d7bbade2838ae37ceaffeb256014ae | hasufell/hsfm | MyPrelude.hs | -
HSFM, a filemanager written in Haskell.
Copyright (C) 2016 Julian Ospald
This program is free software; you can redistribute it and/or
modify it under the terms of the GNU General Public License
version 2 as published by the Free Software Foundation.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program; if not, write to the Free Software
Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
--}
module HSFM.Utils.MyPrelude where
import Data.List
-- |Turns any list into a list of the same length with the values
-- being the indices.
-- E.g.: "abdasd" -> [0,1,2,3,4,5]
listIndices :: [a] -> [Int]
listIndices = findIndices (const True)
| null | https://raw.githubusercontent.com/hasufell/hsfm/322c766ae534fb21e3427d2845011123ddb90952/src/HSFM/Utils/MyPrelude.hs | haskell | }
|Turns any list into a list of the same length with the values
being the indices. | -
HSFM , a written in Haskell .
Copyright ( C ) 2016
This program is free software ; you can redistribute it and/or
modify it under the terms of the GNU General Public License
version 2 as published by the Free Software Foundation .
This program is distributed in the hope that it will be useful ,
but WITHOUT ANY WARRANTY ; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE . See the
GNU General Public License for more details .
You should have received a copy of the GNU General Public License
along with this program ; if not , write to the Free Software
Foundation , Inc. , 51 Franklin Street , Fifth Floor , Boston , MA 02110 - 1301 , USA .
-
HSFM, a filemanager written in Haskell.
Copyright (C) 2016 Julian Ospald
This program is free software; you can redistribute it and/or
modify it under the terms of the GNU General Public License
version 2 as published by the Free Software Foundation.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program; if not, write to the Free Software
Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
module HSFM.Utils.MyPrelude where
import Data.List
E.g. : " abdasd " - > [ 0,1,2,3,4,5 ]
listIndices :: [a] -> [Int]
listIndices = findIndices (const True)
|
9d26ff47a3cf6c9dbf76e03ed480b2d7a5f8eca82cf6e7957c4fb7458f603838 | ocaml-ppx/ocamlformat | variants.ml | type t =
[ (* xx *) `(* yy *) A (* zz *)
| (* xx *) `B (* zz *)
| `(* yy *) C (* zz *) ]
let (* xx *) `(* yy *) A (* zz *) = x
let (* xx *) `B (* zz *) = x
let `(* yy *) C (* zz *) = x
let _ = (* xx *) `(* yy *) A (* zz *)
let _ = (* xx *) `B (* zz *)
let _ = `(* yy *) C (* zz *)
| null | https://raw.githubusercontent.com/ocaml-ppx/ocamlformat/11e124aac33d74762d6f76fe466708b7be504e92/test/passing/tests/variants.ml | ocaml | xx
yy
zz
xx
zz
yy
zz
xx
yy
zz
xx
zz
yy
zz
xx
yy
zz
xx
zz
yy
zz | type t =
|
955091f082804ec9432ad9ab230ac5f29181b316b0e3709893805d4975e6f041 | dbuenzli/remat | api_gen_repo.ml | ---------------------------------------------------------------------------
Copyright (c) 2015 Daniel C. Bünzli. All rights reserved.
Distributed under the BSD3 license, see license at the end of the file.
%%NAME%% release %%VERSION%%
---------------------------------------------------------------------------*)
open Bos
let repo_href = Path.Rel.file "repo.json"
let v g =
let repo = Api_gen.repo g in
let index_ids = Api_gen.repo_index_ids g in
let indexes = List.(rev (rev_map (Api_gen_index.repo_index g) index_ids)) in
Dapi.Repo.v
~version:D.version
~name:(Ddescr.Repo.name repo)
~publisher:(Ddescr.Repo.publisher repo)
~ui_locales:(Ddescr.Repo.ui_locales repo)
~indexes
?search_href:(Ddescr.Repo.search_href repo)
()
(*---------------------------------------------------------------------------
---------------------------------------------------------------------------
Copyright (c) 2015 Daniel C. Bünzli.
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions
are met:
1. Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above
copyright notice, this list of conditions and the following
disclaimer in the documentation and/or other materials provided
with the distribution.
3. Neither the name of Daniel C. Bünzli nor the names of
contributors may be used to endorse or promote products derived
from this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
---------------------------------------------------------------------------*)
| null | https://raw.githubusercontent.com/dbuenzli/remat/28d572e77bbd1ad46bbfde87c0ba8bd0ab99ed28/src-remat/api_gen_repo.ml | ocaml | ---------------------------------------------------------------------------
Copyright ( c ) 2015 . All rights reserved .
Distributed under the BSD3 license , see license at the end of the file .
% % NAME%% release % % ---------------------------------------------------------------------------
Copyright (c) 2015 Daniel C. Bünzli. All rights reserved.
Distributed under the BSD3 license, see license at the end of the file.
%%NAME%% release %%VERSION%%
---------------------------------------------------------------------------*)
open Bos
let repo_href = Path.Rel.file "repo.json"
let v g =
let repo = Api_gen.repo g in
let index_ids = Api_gen.repo_index_ids g in
let indexes = List.(rev (rev_map (Api_gen_index.repo_index g) index_ids)) in
Dapi.Repo.v
~version:D.version
~name:(Ddescr.Repo.name repo)
~publisher:(Ddescr.Repo.publisher repo)
~ui_locales:(Ddescr.Repo.ui_locales repo)
~indexes
?search_href:(Ddescr.Repo.search_href repo)
()
---------------------------------------------------------------------------
Copyright ( c ) 2015 .
All rights reserved .
Redistribution and use in source and binary forms , with or without
modification , are permitted provided that the following conditions
are met :
1 . Redistributions of source code must retain the above copyright
notice , this list of conditions and the following disclaimer .
2 . Redistributions in binary form must reproduce the above
copyright notice , this list of conditions and the following
disclaimer in the documentation and/or other materials provided
with the distribution .
3 . Neither the name of nor the names of
contributors may be used to endorse or promote products derived
from this software without specific prior written permission .
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
" AS IS " AND ANY EXPRESS OR IMPLIED WARRANTIES , INCLUDING , BUT NOT
LIMITED TO , THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
A PARTICULAR PURPOSE ARE DISCLAIMED . IN NO EVENT SHALL THE COPYRIGHT
OWNER OR FOR ANY DIRECT , INDIRECT , INCIDENTAL ,
SPECIAL , EXEMPLARY , OR CONSEQUENTIAL DAMAGES ( INCLUDING , BUT NOT
LIMITED TO , PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES ; LOSS OF USE ,
DATA , OR PROFITS ; OR BUSINESS INTERRUPTION ) HOWEVER CAUSED AND ON ANY
THEORY OF LIABILITY , WHETHER IN CONTRACT , STRICT LIABILITY , OR TORT
( INCLUDING NEGLIGENCE OR OTHERWISE ) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE , EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE .
---------------------------------------------------------------------------
Copyright (c) 2015 Daniel C. Bünzli.
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions
are met:
1. Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above
copyright notice, this list of conditions and the following
disclaimer in the documentation and/or other materials provided
with the distribution.
3. Neither the name of Daniel C. Bünzli nor the names of
contributors may be used to endorse or promote products derived
from this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
---------------------------------------------------------------------------*)
|
|
b12b8c3e51109f2388881a05f79eb6eaef861e70247e69b2b696c72bc995b834 | meh/clj-sockets | unix.clj | ;; Copyleft (ɔ) meh. -
;;
;; This file is part of clj-sockets - https://github.com/meh/clj-sockets
;;
;; clj-sockets is free software: you can redistribute it and/or modify it under
;; the terms of the Lesser GNU General Public License as published by the Free
;; Software Foundation, either version 3 of the License, or (at your option)
;; any later version.
;;
;; clj-sockets is distributed in the hope that it will be useful, but WITHOUT
;; ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
;; FITNESS FOR A PARTICULAR PURPOSE. See the Lesser GNU General Public License
;; for more details.
;;
;; You should have received a copy of the Lesser GNU General Public License
;; along with clj-sockets. If not, see <http://www.gnu.org/licenses/>.
(ns sockets.unix
(:refer-clojure :exclude [send set get])
(:require [sockets
[native :as native]
[fd :as fd]
[address :as address]
[socket :as socket :refer :all :rename {Socket Socket*}]])
(:import [sockets.address UNIXAddress]
[com.sun.jna Memory]))
(defonce ^:private options
{})
(defn option? [name]
(contains? options name))
(deftype Socket [fd side mode]
Socket*
(fd [this]
fd)
(set [this option]
(if (fd/option? option)
(fd/set fd option)
(set this option true)))
(set [this option data]
(assert (option? option)))
(unset [this option]
(if (fd/option? option)
(fd/unset fd option)
(set this option false)))
(get [this option]
(if (fd/option? option)
(fd/get fd option)
(assert (option? option))))
Stateful
(recv [this size]
(let [ptr (Memory. size)]
(.getByteBuffer ptr 0 (native/recv fd ptr size 0))))
(send [this data]
(assert (satisfies? Sendable data))
(let [[data length] (sendable data)]
(native/send fd data length 0))))
(defn mode [socket]
(.mode socket))
(defn client? [socket]
(assert (instance? Socket socket))
(= (.side socket) :client))
(defn server? [socket]
(assert (instance? Socket socket))
(= (.side socket) :server))
(defn ^:private socket [side mode]
(Socket. (native/socket
(native/domain :unix)
(native/mode mode)
(native/protocol :ip))
side
mode))
(defn ^:private connect [socket addr]
(let [sockaddr (.native addr)]
(native/connect (fd socket) sockaddr (.size sockaddr))))
(defn client
([path-or-addr]
(client path-or-addr :stream))
([path-or-addr mode]
(if (instance? UNIXAddress path-or-addr)
(doto (socket :client mode) (connect path-or-addr))
(client (address/make path-or-addr) mode))))
(defn ^:private bind [this addr]
(let [sockaddr (.native addr)]
(native/bind (fd this) sockaddr (.size sockaddr))))
(defn ^:private listen
([this]
(listen this 4096))
([this backlog]
(native/listen (fd this) fd backlog)))
(defn accept [socket]
(assert (instance? Socket socket))
(Socket. (native/accept (fd socket) nil nil) :client (.mode socket)))
(defn server
([path-or-addr]
(server path-or-addr :stream))
([path-or-addr mode]
(if (instance? UNIXAddress path-or-addr)
(doto (socket :server mode) (bind path-or-addr) (listen))
(server (address/make path-or-addr) mode))))
| null | https://raw.githubusercontent.com/meh/clj-sockets/ad83aa4af10118fd77981a20e219f7539001c185/src/sockets/unix.clj | clojure | Copyleft (ɔ) meh. -
This file is part of clj-sockets - -sockets
clj-sockets is free software: you can redistribute it and/or modify it under
any later version.
clj-sockets is distributed in the hope that it will be useful, but WITHOUT
without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE. See the Lesser GNU General Public License
for more details.
along with clj-sockets If not, see </>. | the terms of the Lesser GNU General Public License as published by the Free
Software Foundation , either version 3 of the License , or ( at your option )
You should have received a copy of the Lesser GNU General Public License
(ns sockets.unix
(:refer-clojure :exclude [send set get])
(:require [sockets
[native :as native]
[fd :as fd]
[address :as address]
[socket :as socket :refer :all :rename {Socket Socket*}]])
(:import [sockets.address UNIXAddress]
[com.sun.jna Memory]))
(defonce ^:private options
{})
(defn option? [name]
(contains? options name))
(deftype Socket [fd side mode]
Socket*
(fd [this]
fd)
(set [this option]
(if (fd/option? option)
(fd/set fd option)
(set this option true)))
(set [this option data]
(assert (option? option)))
(unset [this option]
(if (fd/option? option)
(fd/unset fd option)
(set this option false)))
(get [this option]
(if (fd/option? option)
(fd/get fd option)
(assert (option? option))))
Stateful
(recv [this size]
(let [ptr (Memory. size)]
(.getByteBuffer ptr 0 (native/recv fd ptr size 0))))
(send [this data]
(assert (satisfies? Sendable data))
(let [[data length] (sendable data)]
(native/send fd data length 0))))
(defn mode [socket]
(.mode socket))
(defn client? [socket]
(assert (instance? Socket socket))
(= (.side socket) :client))
(defn server? [socket]
(assert (instance? Socket socket))
(= (.side socket) :server))
(defn ^:private socket [side mode]
(Socket. (native/socket
(native/domain :unix)
(native/mode mode)
(native/protocol :ip))
side
mode))
(defn ^:private connect [socket addr]
(let [sockaddr (.native addr)]
(native/connect (fd socket) sockaddr (.size sockaddr))))
(defn client
([path-or-addr]
(client path-or-addr :stream))
([path-or-addr mode]
(if (instance? UNIXAddress path-or-addr)
(doto (socket :client mode) (connect path-or-addr))
(client (address/make path-or-addr) mode))))
(defn ^:private bind [this addr]
(let [sockaddr (.native addr)]
(native/bind (fd this) sockaddr (.size sockaddr))))
(defn ^:private listen
([this]
(listen this 4096))
([this backlog]
(native/listen (fd this) fd backlog)))
(defn accept [socket]
(assert (instance? Socket socket))
(Socket. (native/accept (fd socket) nil nil) :client (.mode socket)))
(defn server
([path-or-addr]
(server path-or-addr :stream))
([path-or-addr mode]
(if (instance? UNIXAddress path-or-addr)
(doto (socket :server mode) (bind path-or-addr) (listen))
(server (address/make path-or-addr) mode))))
|
b261c8a8af6009e9e8698030b77a3e775d8150be110bb6b8dad1fbdd11eca4b3 | tiensonqin/lymchat | ring.clj | (ns api.middlewares.ring
"A standard set of commonly used ring middleware"
(:require [api.middlewares.auth :refer [wrap-jwt-auth wrap-authorization]]
[api.schema.human :refer [human-explain]]
[api.services.slack :refer [error]]
[api.util :refer [doc app-key-exists? get-platform-by-app-key prod-or-stage? stage? development? production?]]
[environ-plus.core :refer [env]]
[plumbing.core :refer :all]
[ring.middleware.json :as json]
[ring.middleware.params :as params]
[ring.middleware.reload :refer [wrap-reload]]
[ring.middleware.cors :as cors]
[schema.core :as s]
[taoensso.timbre :as t]
[manifold.deferred :as d]))
(defn wrap-full-url [f]
(fn [request]
(f (assoc-in request [:custom :url]
(str
(if (production?) "https://" "http://")
(get-in request [:headers "host"])
(:uri request))))))
(defn custom-wrap-cors [handler]
(if (production?)
handler
(let [access-control (cors/normalize-config [:access-control-allow-origin [#".*"]
:access-control-allow-methods [:get :put :post :delete :options :patch]
:access-control-allow-credentials "true"])]
(fn [request]
(if (and (cors/preflight? request) (cors/allow-request? request access-control))
(let [blank-response {:status 200
:headers {}
:body "preflight complete"}]
(cors/add-access-control request access-control blank-response))
(if (cors/origin request)
(if (cors/allow-request? request access-control)
(d/let-flow [response (handler request)]
(cors/add-access-control request access-control response)))
(handler request)))))))
(defn wrap-user-id-body-if-post-request [f]
(fn [request]
(if (and
(re-find #"^/v[\d]+" (:uri request))
(not (re-find #"^/v[\d]+/auth" (:uri request)))
(not (app-key-exists? (get-in request [:custom :user-id])))
(= :post (:request-method request)))
(let [user-id (get-in request [:custom :user-id])
req (if (seq? (:body request))
(assoc request :body
(map #(assoc % :user_id user-id) (:body request)))
(assoc-in request [:body :user_id] user-id))]
(f req))
(f request))))
(defn human-errors [m]
(zipmap
(keys m)
(map
#(if (map? %)
(human-errors %)
(if (instance? schema.utils.ValidationError %)
(human-explain %)
%))
(vals m))))
(defn wrap-exception [f]
(fn [request]
(try (f request)
(catch clojure.lang.ExceptionInfo e
(let [{:keys [type error schema data code]} (ex-data e)]
(prn {:request request
:error e})
(cond
(= :coercion-error type)
(do (error {:code :coercion-exception
:data data
:schema schema
:error error
:request request})
(cond
(and (instance? schema.utils.ValidationError e)
(nil? data))
{:status 400 :body {:message "Body empty."}}
:else
(try
(let [errors (-> (s/check schema data)
human-errors
(merge (select-keys (:validation doc) (keys error))))]
(when (development?)
(println "Validation error: ")
(clojure.pprint/pprint {:error error
:data data
:errors errors}))
{:status 400 :body {:message errors}})
(catch Exception e
(let [errors (-> (s/check schema data)
(merge (select-keys (:validation doc) (keys error))))]
{:status 400 :body {:message errors}})))))
(= :input-invalid type)
(let [error-text (get-in doc code)]
(if (and code error-text)
{:status 400 :body {:message error-text}}
{:status 500 :body "input invalid!"}))
:else (do
(error e {:code :validation-exception
:request request})
(throw e)))))
;; fix schema.utils.ValidationError could not serialize
(catch com.fasterxml.jackson.core.JsonGenerationException e
(error e {:code :schema.utils.ValidationError-serialize
:request request})
{:status 400
:body {:message "input illegal"}})
(catch Exception e
(prn {:exception e})
(t/error e)
{:status 500
:body {:message "Exception caught"}}))))
(defn keywordize-middleware [handler]
(fn [req]
(handler
(-> req
(update-in [:query-params] keywordize-map)
(update-in [:params] keywordize-map)))))
(defn debug-middleware [f]
(fn [request]
(prn "Debug: -------------" request)
(f request)))
(defn ring-middleware [handler]
(-> handler
wrap-reload
wrap-exception
json/wrap-json-response
wrap-user-id-body-if-post-request
(wrap-authorization (:db env))
(wrap-jwt-auth (:jwt-secret env))
wrap-full-url
(json/wrap-json-body {:keywords? true})
keywordize-middleware
(custom-wrap-cors)
params/wrap-params))
| null | https://raw.githubusercontent.com/tiensonqin/lymchat/824026607d30c12bc50afb06f677d1fa95ff1f2f/api/src/api/middlewares/ring.clj | clojure | (ns api.middlewares.ring
"A standard set of commonly used ring middleware"
(:require [api.middlewares.auth :refer [wrap-jwt-auth wrap-authorization]]
[api.schema.human :refer [human-explain]]
[api.services.slack :refer [error]]
[api.util :refer [doc app-key-exists? get-platform-by-app-key prod-or-stage? stage? development? production?]]
[environ-plus.core :refer [env]]
[plumbing.core :refer :all]
[ring.middleware.json :as json]
[ring.middleware.params :as params]
[ring.middleware.reload :refer [wrap-reload]]
[ring.middleware.cors :as cors]
[schema.core :as s]
[taoensso.timbre :as t]
[manifold.deferred :as d]))
(defn wrap-full-url [f]
(fn [request]
(f (assoc-in request [:custom :url]
(str
(if (production?) "https://" "http://")
(get-in request [:headers "host"])
(:uri request))))))
(defn custom-wrap-cors [handler]
(if (production?)
handler
(let [access-control (cors/normalize-config [:access-control-allow-origin [#".*"]
:access-control-allow-methods [:get :put :post :delete :options :patch]
:access-control-allow-credentials "true"])]
(fn [request]
(if (and (cors/preflight? request) (cors/allow-request? request access-control))
(let [blank-response {:status 200
:headers {}
:body "preflight complete"}]
(cors/add-access-control request access-control blank-response))
(if (cors/origin request)
(if (cors/allow-request? request access-control)
(d/let-flow [response (handler request)]
(cors/add-access-control request access-control response)))
(handler request)))))))
(defn wrap-user-id-body-if-post-request [f]
(fn [request]
(if (and
(re-find #"^/v[\d]+" (:uri request))
(not (re-find #"^/v[\d]+/auth" (:uri request)))
(not (app-key-exists? (get-in request [:custom :user-id])))
(= :post (:request-method request)))
(let [user-id (get-in request [:custom :user-id])
req (if (seq? (:body request))
(assoc request :body
(map #(assoc % :user_id user-id) (:body request)))
(assoc-in request [:body :user_id] user-id))]
(f req))
(f request))))
(defn human-errors [m]
(zipmap
(keys m)
(map
#(if (map? %)
(human-errors %)
(if (instance? schema.utils.ValidationError %)
(human-explain %)
%))
(vals m))))
(defn wrap-exception [f]
(fn [request]
(try (f request)
(catch clojure.lang.ExceptionInfo e
(let [{:keys [type error schema data code]} (ex-data e)]
(prn {:request request
:error e})
(cond
(= :coercion-error type)
(do (error {:code :coercion-exception
:data data
:schema schema
:error error
:request request})
(cond
(and (instance? schema.utils.ValidationError e)
(nil? data))
{:status 400 :body {:message "Body empty."}}
:else
(try
(let [errors (-> (s/check schema data)
human-errors
(merge (select-keys (:validation doc) (keys error))))]
(when (development?)
(println "Validation error: ")
(clojure.pprint/pprint {:error error
:data data
:errors errors}))
{:status 400 :body {:message errors}})
(catch Exception e
(let [errors (-> (s/check schema data)
(merge (select-keys (:validation doc) (keys error))))]
{:status 400 :body {:message errors}})))))
(= :input-invalid type)
(let [error-text (get-in doc code)]
(if (and code error-text)
{:status 400 :body {:message error-text}}
{:status 500 :body "input invalid!"}))
:else (do
(error e {:code :validation-exception
:request request})
(throw e)))))
;; fix schema.utils.ValidationError could not serialize
(catch com.fasterxml.jackson.core.JsonGenerationException e
(error e {:code :schema.utils.ValidationError-serialize
:request request})
{:status 400
:body {:message "input illegal"}})
(catch Exception e
(prn {:exception e})
(t/error e)
{:status 500
:body {:message "Exception caught"}}))))
(defn keywordize-middleware [handler]
(fn [req]
(handler
(-> req
(update-in [:query-params] keywordize-map)
(update-in [:params] keywordize-map)))))
(defn debug-middleware [f]
(fn [request]
(prn "Debug: -------------" request)
(f request)))
(defn ring-middleware [handler]
(-> handler
wrap-reload
wrap-exception
json/wrap-json-response
wrap-user-id-body-if-post-request
(wrap-authorization (:db env))
(wrap-jwt-auth (:jwt-secret env))
wrap-full-url
(json/wrap-json-body {:keywords? true})
keywordize-middleware
(custom-wrap-cors)
params/wrap-params))
|
|
816971adf3dcaa7ddd254af7f2f65efd660a5cbbd6a2dac9d63884664df5b299 | FdelMazo/cl-aristid | cl-aristid.lisp | (in-package #:cl-aristid)
(setq *random-state* (make-random-state t))
(defun -> (old new &optional prob)
#'(lambda (seq)
(if (<= (random 1.00) prob)
(substitute new old seq)
seq)))
(defmacro defrule (sym -> replace &key (prob 1.00))
`(-> ',sym ',replace ,prob))
(defun aristid (&key (angle 0) (len 0) (nodraw nil) (color ""))
#'(lambda (canvas)
(dotimes (n len)
(setq canvas (canvas-move canvas))
(if (null nodraw)
(draw-point canvas color)))
(setq canvas (turn-angle canvas angle))
canvas))
(defmacro defaristid (name &rest body)
`(defun ,name (canvas)
(funcall (aristid ,@body) canvas)))
(defun [ (canvas)
(setq canvas (push-stack canvas)))
(defun ] (canvas)
(setq canvas (pop-stack canvas)))
(defun draw (fractal gen &key (background ""))
(with-open-file (f (format nil "~A_~3,'0d.svg" (fractal-name fractal) gen)
:direction :output :if-exists :supersede)
(cl-svg:stream-out f (draw-fractal fractal gen background))))
| null | https://raw.githubusercontent.com/FdelMazo/cl-aristid/d26c642212548165a4ab4aee39644b231ff6e797/cl-aristid.lisp | lisp | (in-package #:cl-aristid)
(setq *random-state* (make-random-state t))
(defun -> (old new &optional prob)
#'(lambda (seq)
(if (<= (random 1.00) prob)
(substitute new old seq)
seq)))
(defmacro defrule (sym -> replace &key (prob 1.00))
`(-> ',sym ',replace ,prob))
(defun aristid (&key (angle 0) (len 0) (nodraw nil) (color ""))
#'(lambda (canvas)
(dotimes (n len)
(setq canvas (canvas-move canvas))
(if (null nodraw)
(draw-point canvas color)))
(setq canvas (turn-angle canvas angle))
canvas))
(defmacro defaristid (name &rest body)
`(defun ,name (canvas)
(funcall (aristid ,@body) canvas)))
(defun [ (canvas)
(setq canvas (push-stack canvas)))
(defun ] (canvas)
(setq canvas (pop-stack canvas)))
(defun draw (fractal gen &key (background ""))
(with-open-file (f (format nil "~A_~3,'0d.svg" (fractal-name fractal) gen)
:direction :output :if-exists :supersede)
(cl-svg:stream-out f (draw-fractal fractal gen background))))
|
|
a8fa5895e361476acc69fb2ae974d866e96f5107e1564fd905c6613106e08904 | shortishly/tansu | tansu_api.erl | Copyright ( c ) 2016 < >
%%
Licensed under the Apache License , Version 2.0 ( the " License " ) ;
%% you may not use this file except in compliance with the License.
%% You may obtain a copy of the License at
%%
%% -2.0
%%
%% Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an " AS IS " BASIS ,
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
-module(tansu_api).
-export([info/0]).
-export([kv_delete/1]).
-export([kv_get/1]).
-export([kv_get_children_of/1]).
-export([kv_set/2]).
-export([kv_set/3]).
-export([kv_subscribe/1]).
-export([kv_test_and_delete/2]).
-export([kv_test_and_set/3]).
-export([kv_test_and_set/4]).
-export([kv_unsubscribe/1]).
-define(CATEGORY, user).
info() ->
tansu_consensus:info().
kv_delete(Key) ->
tansu_consensus:ckv_delete(?CATEGORY, Key).
kv_get(Key) ->
tansu_consensus:ckv_get(?CATEGORY, Key).
kv_get_children_of(Parent) ->
maps:fold(
fun
({?CATEGORY, Child}, {Data, Metadata}, A) ->
A#{Child => {Data, Metadata}};
(_, _, A) ->
A
end,
#{},
tansu_consensus:ckv_get_children_of(?CATEGORY, Parent)).
kv_set(Key, Value) ->
kv_set(Key, Value, #{}).
kv_set(Key, Value, Options) ->
tansu_consensus:ckv_set(?CATEGORY, Key, Value, Options).
kv_test_and_delete(Key, ExistingValue) ->
tansu_consensus:ckv_test_and_delete(?CATEGORY, Key, ExistingValue).
kv_test_and_set(Key, ExistingValue, NewValue) ->
kv_test_and_set(Key, ExistingValue, NewValue, #{}).
kv_test_and_set(Key, ExistingValue, NewValue, Options) ->
tansu_consensus:ckv_test_and_set(?CATEGORY, Key, ExistingValue, NewValue, Options).
kv_subscribe(Key) ->
tansu_sm:subscribe(?CATEGORY, Key).
kv_unsubscribe(Key) ->
tansu_sm:unsubscribe(?CATEGORY, Key).
| null | https://raw.githubusercontent.com/shortishly/tansu/154811fff81855419de9af380c81c7ae14e435d0/src/tansu_api.erl | erlang |
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
-2.0
Unless required by applicable law or agreed to in writing, software
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License. | Copyright ( c ) 2016 < >
Licensed under the Apache License , Version 2.0 ( the " License " ) ;
distributed under the License is distributed on an " AS IS " BASIS ,
-module(tansu_api).
-export([info/0]).
-export([kv_delete/1]).
-export([kv_get/1]).
-export([kv_get_children_of/1]).
-export([kv_set/2]).
-export([kv_set/3]).
-export([kv_subscribe/1]).
-export([kv_test_and_delete/2]).
-export([kv_test_and_set/3]).
-export([kv_test_and_set/4]).
-export([kv_unsubscribe/1]).
-define(CATEGORY, user).
info() ->
tansu_consensus:info().
kv_delete(Key) ->
tansu_consensus:ckv_delete(?CATEGORY, Key).
kv_get(Key) ->
tansu_consensus:ckv_get(?CATEGORY, Key).
kv_get_children_of(Parent) ->
maps:fold(
fun
({?CATEGORY, Child}, {Data, Metadata}, A) ->
A#{Child => {Data, Metadata}};
(_, _, A) ->
A
end,
#{},
tansu_consensus:ckv_get_children_of(?CATEGORY, Parent)).
kv_set(Key, Value) ->
kv_set(Key, Value, #{}).
kv_set(Key, Value, Options) ->
tansu_consensus:ckv_set(?CATEGORY, Key, Value, Options).
kv_test_and_delete(Key, ExistingValue) ->
tansu_consensus:ckv_test_and_delete(?CATEGORY, Key, ExistingValue).
kv_test_and_set(Key, ExistingValue, NewValue) ->
kv_test_and_set(Key, ExistingValue, NewValue, #{}).
kv_test_and_set(Key, ExistingValue, NewValue, Options) ->
tansu_consensus:ckv_test_and_set(?CATEGORY, Key, ExistingValue, NewValue, Options).
kv_subscribe(Key) ->
tansu_sm:subscribe(?CATEGORY, Key).
kv_unsubscribe(Key) ->
tansu_sm:unsubscribe(?CATEGORY, Key).
|
b9158da971c703eec2e5a200119a9d0925a453d6ead09b9d7247c18383f02d11 | OCamlPro/typerex-lldb | LLDBOCamlCode.ml | (**************************************************************************)
(* *)
(* OCamlPro TypeRex *)
(* *)
Copyright OCamlPro 2011 - 2016 . All rights reserved .
(* This file is distributed under the terms of the GPL v3.0 *)
( GNU Public Licence version 3.0 ) .
(* *)
(* Contact: <> (/) *)
(* *)
THE SOFTWARE IS PROVIDED " AS IS " , WITHOUT WARRANTY OF ANY KIND ,
(* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES *)
(* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND *)
NONINFRINGEMENT . IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS
BE LIABLE FOR ANY CLAIM , DAMAGES OR OTHER LIABILITY , WHETHER IN AN
(* ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN *)
(* CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE *)
(* SOFTWARE. *)
(**************************************************************************)
(* LLDB API *)
open LLDBEnums
open LLDBOCaml
(* ocp-lldb modules *)
open LLDBTypes
open LLDBGlobals
let get_code_info target =
let process = SBTarget.getProcess target in
let caml_code_fragments_table =
LLDBUtils.symbol_address target "caml_code_fragments_table" in
let size = LLDBUtils.getMem32 process caml_code_fragments_table in
let _capacity = LLDBUtils.getMem32 process
(Int64.add caml_code_fragments_table 4L) in
let contents = LLDBUtils.getMem64 process
(Int64.add caml_code_fragments_table 8L) in
Printf.printf "size = %d\n" size;
let code_fragments = Array.init size (fun i ->
let cf = LLDBUtils.getMem64 process
(Int64.add contents (Int64.of_int (i*8))) in
let code_start = LLDBUtils.getMem64 process cf in
let code_end = LLDBUtils.getMem64 process (Int64.add cf 8L) in
{ code_start; code_end }
) in
code_fragments
(*
(* Here, we only find statically linked modules. In fact, we could probably
use the code_fragments to find dynamically allocated modules. *)
let get_modules = LLDBUtils.get_cached (fun target ->
let modules = ref [] in
let syms = SBTarget.findSymbols target "caml_program"
ESymbolTypeCode in
let syms = SBSymbolContextList.to_array syms in
Array.iter (fun sc ->
let sym = SBSymbolContext.getSymbol sc in
let _name = SBSymbol.getName sym in
(* Printf.printf "name=%S\n%!" name; *)
let insts = SBSymbol.getInstructions sym target in
let insts = SBInstructionList.to_array insts in
Array.iteri (fun i ins ->
let mne = SBInstruction.getMnemonic ins target in
let ope = SBInstruction.getOperands ins target in
Printf.printf " i : % S % S\n% ! " ;
match mne with
| "callq" ->
let addr = LLDBUtils.int64_of_string ope in
let print_symbol sym =
let name = SBSymbol.getName sym in
(* Printf.printf "sym = %S\n%!" name; *)
let mod_name =
let len = String.length name in
if len > 10 &&
name.[0] = 'c' &&
name.[1] = 'a' &&
name.[2] = 'm' &&
name.[3] = 'l' then
if Filename.check_suffix name "__entry" then
String.sub name 4 (len - 11)
else
if Filename.check_suffix name "__code_begin" then
String.sub name 4 (len - 16)
else
name
else name
in
modules := {
mod_name;
mod_addr = addr;
mod_symbol = name;
} :: !modules
in
let symaddr = SBTarget.resolveLoadAddress target addr in
let sym = SBAddress.getSymbol symaddr in
if SBSymbol.isValid sym then
print_symbol sym
else
(* Find the executable module so we can do a lookup
inside it *)
let exe_file_spec = SBFileSpec.createByName
(SBFileSpec.getFilename (SBTarget.getExecutable target)) true in
let modul =
SBTarget.findModule target exe_file_spec in
(* // Take a file virtual address and resolve it to a
section offset // address that can be used to do a
symbol lookup by address *)
let addr = SBModule.resolveFileAddress modul addr in
let success = SBAddress.isValid addr &&
SBSection.isValid (SBAddress.getSection addr) in
if success then begin
(* // We can resolve a section offset address in the
module // and only ask for what we need. You can
logical or together // bits from the
SymbolContextItem enumeration found in //
lldb-enumeration.h to request only what you
want. Here we // are asking for everything. // //
NOTE: the less you ask for, the less LLDB will parse
as // LLDB does partial parsing on just about
everything. *)
let sc = SBModule.resolveSymbolContextForAddress
modul addr eSymbolContextEverything in
let sym = SBSymbolContext.getSymbol sc in
print_symbol sym;
end;
()
| _ -> ()
) insts
) syms;
let modules = List.rev !modules in
Array.of_list modules)
*)
(* Assumptions:
   * OCaml modules are compiled from .ml files.
*)
let get_compilation_units = LLDBUtils.get_cached (fun target ->
let cus_by_name = ref StringMap.empty in
let exe_file_spec = SBFileSpec.createByName
(SBFileSpec.getFilename (SBTarget.getExecutable target)) true in
let m = SBTarget.findModule target exe_file_spec in
let n = SBModule.getNumCompileUnits m in
let cus = Array.init n (fun i ->
SBModule.getCompileUnitAtIndex m i
) in
Printf.printf "%d compilation units in this component\n%!" n;
let cus = Array.mapi (fun i cu ->
(* Printf.printf "%3d -> %s\n%!" i (SBCompileUnit.to_string cu); *)
let f = SBCompileUnit.getFileSpec cu in
let cu_basename = SBFileSpec.getFilename f in
let cu_dirname = SBFileSpec.getDirectory f in
if !verbose then begin
Printf.printf " %s ... %s\n" cu_dirname cu_basename;
end;
let nl = SBCompileUnit.getNumLineEntries cu in
let les = Array.init nl (fun j ->
SBCompileUnit.getLineEntryAtIndex cu j
) in
let symbols = ref StringMap.empty in
Array.iteri (fun j le ->
let fs = SBLineEntry.getFileSpec le in
let fsname = SBFileSpec.getFilename fs in
(* Check that this line belongs to this file, and not to some code
inlined from another file: *)
if cu_basename = fsname then
let line = SBLineEntry.getLine le in
let col = SBLineEntry.getColumn le in
let addr = SBLineEntry.getStartAddress le in
let sym = SBAddress.getSymbol addr in
match
try Some (SBSymbol.getName sym) with _ -> None
with
| None -> ()
| Some name ->
let f = SBAddress.getFunction addr in
(* let name2 = try SBFunction.getName f with _ -> "" in *)
if !verbose then begin
Printf.printf "%s:%d:%d %Ld -> %s\n" fsname line col
(SBAddress.getLoadAddress addr target) name ;
end;
try
let _ref_line = StringMap.find name !symbols in
(* Just keep the first one !
   if !ref_line > line then ref_line := line *)
()
with Not_found ->
symbols := StringMap.add name (ref line) !symbols
) les;
let cu_symbols = StringMap.map (fun r -> !r) !symbols in
let cu_descr = SBCompileUnit.to_string cu in
let cu_modname =
if Filename.check_suffix cu_basename ".ml" then begin
let modname = String.capitalize (
Filename.chop_extension cu_basename) in
Some modname
end else None in
let cu ={
cu_modname;
cu_descr;
cu_basename;
cu_dirname;
cu_symbols;
} in
begin
match cu_modname with
None -> ()
| Some cu_modname ->
cus_by_name := StringMap.add cu_modname cu !cus_by_name
end;
cu
) cus
in
{
ima_cus = cus;
ima_cus_by_name = !cus_by_name;
}
)
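
(* Hypothetical usage sketch, not part of the original file: once the
   compilation units have been computed, [ima_cus_by_name] can be used to look
   up a module by its OCaml name and list the source line recorded for each of
   its symbols.  [_print_module_symbols] is an illustrative name only. *)
let _print_module_symbols target modname =
  let ima = get_compilation_units target in
  try
    let cu = StringMap.find modname ima.ima_cus_by_name in
    Printf.printf "%s (%s/%s)\n%!" modname cu.cu_dirname cu.cu_basename;
    StringMap.iter (fun sym line ->
      Printf.printf "  %s at line %d\n%!" sym line) cu.cu_symbols
  with Not_found ->
    Printf.printf "No compilation unit found for module %s\n%!" modname
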
| null | https://raw.githubusercontent.com/OCamlPro/typerex-lldb/3be12b69f30127bbf8a5dd483a6bfbbd6045ba0e/tools/ocp-lldb/LLDBOCamlCode.ml | ocaml | ************************************************************************
OCamlPro TypeRex
This file is distributed under the terms of the GPL v3.0
Contact: <> (/)
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
************************************************************************
LLDB API
ocp-lldb modules
(* Here, we only find statically linked modules. In fact, we could probably
use the code_fragments to find dynamically allocated modules.
Printf.printf "name=%S\n%!" name;
Printf.printf "sym = %S\n%!" name;
Find the executable module so we can do a lookup
inside it
// Take a file virtual address and resolve it to a
section offset // address that can be used to do a
symbol lookup by address
// We can resolve a section offset address in the
module // and only ask for what we need. You can
logical or together // bits from the
SymbolContextItem enumeration found in //
lldb-enumeration.h to request only what you
want. Here we // are asking for everything. // //
NOTE: the less you ask for, the less LLDB will parse
as // LLDB does partial parsing on just about
everything.
Printf.printf "%3d -> %s\n%!" i (SBCompileUnit.to_string cu);
Check that this line belongs to this file, and not to some code
inlined from another file: | Copyright OCamlPro 2011 - 2016 . All rights reserved .
( GNU Public Licence version 3.0 ) .
THE SOFTWARE IS PROVIDED " AS IS " , WITHOUT WARRANTY OF ANY KIND ,
NONINFRINGEMENT . IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS
BE LIABLE FOR ANY CLAIM , DAMAGES OR OTHER LIABILITY , WHETHER IN AN
open LLDBEnums
open LLDBOCaml
open LLDBTypes
open LLDBGlobals
let get_code_info target =
let process = SBTarget.getProcess target in
let caml_code_fragments_table =
LLDBUtils.symbol_address target "caml_code_fragments_table" in
let size = LLDBUtils.getMem32 process caml_code_fragments_table in
let _capacity = LLDBUtils.getMem32 process
(Int64.add caml_code_fragments_table 4L) in
let contents = LLDBUtils.getMem64 process
(Int64.add caml_code_fragments_table 8L) in
Printf.printf "size = %d\n" size;
let code_fragments = Array.init size (fun i ->
let cf = LLDBUtils.getMem64 process
(Int64.add contents (Int64.of_int (i*8))) in
let code_start = LLDBUtils.getMem64 process cf in
let code_end = LLDBUtils.getMem64 process (Int64.add cf 8L) in
{ code_start; code_end }
) in
code_fragments
let get_modules = LLDBUtils.get_cached (fun target ->
let modules = ref [] in
let syms = SBTarget.findSymbols target "caml_program"
ESymbolTypeCode in
let syms = SBSymbolContextList.to_array syms in
Array.iter (fun sc ->
let sym = SBSymbolContext.getSymbol sc in
let _name = SBSymbol.getName sym in
let insts = SBSymbol.getInstructions sym target in
let insts = SBInstructionList.to_array insts in
Array.iteri (fun i ins ->
let mne = SBInstruction.getMnemonic ins target in
let ope = SBInstruction.getOperands ins target in
Printf.printf " i : % S % S\n% ! " ;
match mne with
| "callq" ->
let addr = LLDBUtils.int64_of_string ope in
let print_symbol sym =
let name = SBSymbol.getName sym in
let mod_name =
let len = String.length name in
if len > 10 &&
name.[0] = 'c' &&
name.[1] = 'a' &&
name.[2] = 'm' &&
name.[3] = 'l' then
if Filename.check_suffix name "__entry" then
String.sub name 4 (len - 11)
else
if Filename.check_suffix name "__code_begin" then
String.sub name 4 (len - 16)
else
name
else name
in
modules := {
mod_name;
mod_addr = addr;
mod_symbol = name;
} :: !modules
in
let symaddr = SBTarget.resolveLoadAddress target addr in
let sym = SBAddress.getSymbol symaddr in
if SBSymbol.isValid sym then
print_symbol sym
else
let exe_file_spec = SBFileSpec.createByName
(SBFileSpec.getFilename (SBTarget.getExecutable target)) true in
let modul =
SBTarget.findModule target exe_file_spec in
let addr = SBModule.resolveFileAddress modul addr in
let success = SBAddress.isValid addr &&
SBSection.isValid (SBAddress.getSection addr) in
if success then begin
let sc = SBModule.resolveSymbolContextForAddress
modul addr eSymbolContextEverything in
let sym = SBSymbolContext.getSymbol sc in
print_symbol sym;
end;
()
| _ -> ()
) insts
) syms;
let modules = List.rev !modules in
Array.of_list modules)
*)
Assumptions :
* OCaml modules are compiled from .ml files .
* OCaml modules are compiled from .ml files.
*)
let get_compilation_units = LLDBUtils.get_cached (fun target ->
let cus_by_name = ref StringMap.empty in
let exe_file_spec = SBFileSpec.createByName
(SBFileSpec.getFilename (SBTarget.getExecutable target)) true in
let m = SBTarget.findModule target exe_file_spec in
let n = SBModule.getNumCompileUnits m in
let cus = Array.init n (fun i ->
SBModule.getCompileUnitAtIndex m i
) in
Printf.printf "%d compilation units in this component\n%!" n;
let cus = Array.mapi (fun i cu ->
let f = SBCompileUnit.getFileSpec cu in
let cu_basename = SBFileSpec.getFilename f in
let cu_dirname = SBFileSpec.getDirectory f in
if !verbose then begin
Printf.printf " %s ... %s\n" cu_dirname cu_basename;
end;
let nl = SBCompileUnit.getNumLineEntries cu in
let les = Array.init nl (fun j ->
SBCompileUnit.getLineEntryAtIndex cu j
) in
let symbols = ref StringMap.empty in
Array.iteri (fun j le ->
let fs = SBLineEntry.getFileSpec le in
let fsname = SBFileSpec.getFilename fs in
if cu_basename = fsname then
let line = SBLineEntry.getLine le in
let col = SBLineEntry.getColumn le in
let addr = SBLineEntry.getStartAddress le in
let sym = SBAddress.getSymbol addr in
match
try Some (SBSymbol.getName sym) with _ -> None
with
| None -> ()
| Some name ->
let f = SBAddress.getFunction addr in
let = try SBFunction.getName f with _ - > " " in
let name2 = try SBFunction.getName f with _ -> "" in *)
if !verbose then begin
Printf.printf "%s:%d:%d %Ld -> %s\n" fsname line col
(SBAddress.getLoadAddress addr target) name ;
end;
try
let _ref_line = StringMap.find name !symbols in
Just keep the first one !
if ! ref_line > line then ref_line : = line
if !ref_line > line then ref_line := line *)
()
with Not_found ->
symbols := StringMap.add name (ref line) !symbols
) les;
let cu_symbols = StringMap.map (fun r -> !r) !symbols in
let cu_descr = SBCompileUnit.to_string cu in
let cu_modname =
if Filename.check_suffix cu_basename ".ml" then begin
let modname = String.capitalize (
Filename.chop_extension cu_basename) in
Some modname
end else None in
let cu ={
cu_modname;
cu_descr;
cu_basename;
cu_dirname;
cu_symbols;
} in
begin
match cu_modname with
None -> ()
| Some cu_modname ->
cus_by_name := StringMap.add cu_modname cu !cus_by_name
end;
cu
) cus
in
{
ima_cus = cus;
ima_cus_by_name = !cus_by_name;
}
)
|
ce4d8214e2ca5e97c8343468e414bd60e17f0461d4c3ac933104b417624646ae | hemmi/coq2scala | topconstr.ml | (************************************************************************)
(*  v      *   The Coq Proof Assistant  /  The Coq Development Team     *)
(* <O___,, *   INRIA - CNRS - LIX - LRI - PPS - Copyright 1999-2012     *)
(*   \VV/  **************************************************************)
(* // * This file is distributed under the terms of the *)
(* * GNU Lesser General Public License Version 2.1 *)
(************************************************************************)
(*i*)
open Pp
open Util
open Names
open Nameops
open Libnames
open Glob_term
open Term
open Mod_subst
(*i*)
(**********************************************************************)
(* This is the subtype of glob_constr allowed in syntactic extensions *)

(* For AList: first constr is iterator, second is terminator;
   first id is where each argument of the list has to be substituted
   in iterator and snd id is alternative name just for printing;
   boolean is associativity *)
type aconstr =
(* Part common to glob_constr and cases_pattern *)
| ARef of global_reference
| AVar of identifier
| AApp of aconstr * aconstr list
| AList of identifier * identifier * aconstr * aconstr * bool
(* Part only in glob_constr *)
| ALambda of name * aconstr * aconstr
| AProd of name * aconstr * aconstr
| ABinderList of identifier * identifier * aconstr * aconstr
| ALetIn of name * aconstr * aconstr
| ACases of case_style * aconstr option *
(aconstr * (name * (inductive * int * name list) option)) list *
(cases_pattern list * aconstr) list
| ALetTuple of name list * (name * aconstr option) * aconstr * aconstr
| AIf of aconstr * (name * aconstr option) * aconstr * aconstr
| ARec of fix_kind * identifier array *
(name * aconstr option * aconstr) list array * aconstr array *
aconstr array
| ASort of glob_sort
| AHole of Evd.hole_kind
| APatVar of patvar
| ACast of aconstr * aconstr cast_type
type scope_name = string
type tmp_scope_name = scope_name
type subscopes = tmp_scope_name option * scope_name list
type notation_var_instance_type =
| NtnTypeConstr | NtnTypeConstrList | NtnTypeBinderList
type notation_var_internalization_type =
| NtnInternTypeConstr | NtnInternTypeBinder | NtnInternTypeIdent
type interpretation =
(identifier * (subscopes * notation_var_instance_type)) list * aconstr
(**********************************************************************)
(* Re-interpret a notation as a glob_constr, taking care of binders *)
let name_to_ident = function
| Anonymous -> error "This expression should be a simple identifier."
| Name id -> id
let to_id g e id = let e,na = g e (Name id) in e,name_to_ident na
let rec cases_pattern_fold_map loc g e = function
| PatVar (_,na) ->
let e',na' = g e na in e', PatVar (loc,na')
| PatCstr (_,cstr,patl,na) ->
let e',na' = g e na in
let e',patl' = list_fold_map (cases_pattern_fold_map loc g) e patl in
e', PatCstr (loc,cstr,patl',na')
let rec subst_glob_vars l = function
| GVar (_,id) as r -> (try List.assoc id l with Not_found -> r)
| GProd (loc,Name id,bk,t,c) ->
let id =
try match List.assoc id l with GVar(_,id') -> id' | _ -> id
with Not_found -> id in
GProd (loc,Name id,bk,subst_glob_vars l t,subst_glob_vars l c)
| GLambda (loc,Name id,bk,t,c) ->
let id =
try match List.assoc id l with GVar(_,id') -> id' | _ -> id
with Not_found -> id in
GLambda (loc,Name id,bk,subst_glob_vars l t,subst_glob_vars l c)
| r -> map_glob_constr (subst_glob_vars l) r (* assume: id is not binding *)
let ldots_var = id_of_string ".."
let glob_constr_of_aconstr_with_binders loc g f e = function
| AVar id -> GVar (loc,id)
| AApp (a,args) -> GApp (loc,f e a, List.map (f e) args)
| AList (x,y,iter,tail,swap) ->
let t = f e tail in let it = f e iter in
let innerl = (ldots_var,t)::(if swap then [] else [x,GVar(loc,y)]) in
let inner = GApp (loc,GVar (loc,ldots_var),[subst_glob_vars innerl it]) in
let outerl = (ldots_var,inner)::(if swap then [x,GVar(loc,y)] else []) in
subst_glob_vars outerl it
| ABinderList (x,y,iter,tail) ->
let t = f e tail in let it = f e iter in
let innerl = [(ldots_var,t);(x,GVar(loc,y))] in
let inner = GApp (loc,GVar (loc,ldots_var),[subst_glob_vars innerl it]) in
let outerl = [(ldots_var,inner)] in
subst_glob_vars outerl it
| ALambda (na,ty,c) ->
let e',na = g e na in GLambda (loc,na,Explicit,f e ty,f e' c)
| AProd (na,ty,c) ->
let e',na = g e na in GProd (loc,na,Explicit,f e ty,f e' c)
| ALetIn (na,b,c) ->
let e',na = g e na in GLetIn (loc,na,f e b,f e' c)
| ACases (sty,rtntypopt,tml,eqnl) ->
let e',tml' = List.fold_right (fun (tm,(na,t)) (e',tml') ->
let e',t' = match t with
| None -> e',None
| Some (ind,npar,nal) ->
let e',nal' = List.fold_right (fun na (e',nal) ->
let e',na' = g e' na in e',na'::nal) nal (e',[]) in
e',Some (loc,ind,npar,nal') in
let e',na' = g e' na in
(e',(f e tm,(na',t'))::tml')) tml (e,[]) in
let fold (idl,e) na = let (e,na) = g e na in ((name_cons na idl,e),na) in
let eqnl' = List.map (fun (patl,rhs) ->
let ((idl,e),patl) =
list_fold_map (cases_pattern_fold_map loc fold) ([],e) patl in
(loc,idl,patl,f e rhs)) eqnl in
GCases (loc,sty,Option.map (f e') rtntypopt,tml',eqnl')
| ALetTuple (nal,(na,po),b,c) ->
let e',nal = list_fold_map g e nal in
let e'',na = g e na in
GLetTuple (loc,nal,(na,Option.map (f e'') po),f e b,f e' c)
| AIf (c,(na,po),b1,b2) ->
let e',na = g e na in
GIf (loc,f e c,(na,Option.map (f e') po),f e b1,f e b2)
| ARec (fk,idl,dll,tl,bl) ->
let e,dll = array_fold_map (list_fold_map (fun e (na,oc,b) ->
let e,na = g e na in
(e,(na,Explicit,Option.map (f e) oc,f e b)))) e dll in
let e',idl = array_fold_map (to_id g) e idl in
GRec (loc,fk,idl,dll,Array.map (f e) tl,Array.map (f e') bl)
| ACast (c,k) -> GCast (loc,f e c,
match k with
| CastConv (k,t) -> CastConv (k,f e t)
| CastCoerce -> CastCoerce)
| ASort x -> GSort (loc,x)
| AHole x -> GHole (loc,x)
| APatVar n -> GPatVar (loc,(false,n))
| ARef x -> GRef (loc,x)
let rec glob_constr_of_aconstr loc x =
let rec aux () x =
glob_constr_of_aconstr_with_binders loc (fun () id -> ((),id)) aux () x
in aux () x
(****************************************************************************)
(* Translating a glob_constr into a notation, interpreting recursive patterns *)
let add_id r id = r := (id :: pi1 !r, pi2 !r, pi3 !r)
let add_name r = function Anonymous -> () | Name id -> add_id r id
let split_at_recursive_part c =
let sub = ref None in
let rec aux = function
| GApp (loc0,GVar(loc,v),c::l) when v = ldots_var ->
if !sub <> None then
(* Not narrowed enough to find only one recursive part *)
raise Not_found
else
(sub := Some c;
if l = [] then GVar (loc,ldots_var)
else GApp (loc0,GVar (loc,ldots_var),l))
| c -> map_glob_constr aux c in
let outer_iterator = aux c in
match !sub with
| None -> (* No recursive pattern found *) raise Not_found
| Some c ->
match outer_iterator with
| GVar (_,v) when v = ldots_var -> (* Not enough context *) raise Not_found
| _ -> outer_iterator, c
let on_true_do b f c = if b then (f c; b) else b
let compare_glob_constr f add t1 t2 = match t1,t2 with
| GRef (_,r1), GRef (_,r2) -> eq_gr r1 r2
| GVar (_,v1), GVar (_,v2) -> on_true_do (v1 = v2) add (Name v1)
| GApp (_,f1,l1), GApp (_,f2,l2) -> f f1 f2 & list_for_all2eq f l1 l2
| GLambda (_,na1,bk1,ty1,c1), GLambda (_,na2,bk2,ty2,c2) when na1 = na2 && bk1 = bk2 -> on_true_do (f ty1 ty2 & f c1 c2) add na1
| GProd (_,na1,bk1,ty1,c1), GProd (_,na2,bk2,ty2,c2) when na1 = na2 && bk1 = bk2 ->
on_true_do (f ty1 ty2 & f c1 c2) add na1
| GHole _, GHole _ -> true
| GSort (_,s1), GSort (_,s2) -> s1 = s2
| GLetIn (_,na1,b1,c1), GLetIn (_,na2,b2,c2) when na1 = na2 ->
on_true_do (f b1 b2 & f c1 c2) add na1
| (GCases _ | GRec _
| GPatVar _ | GEvar _ | GLetTuple _ | GIf _ | GCast _),_
| _,(GCases _ | GRec _
| GPatVar _ | GEvar _ | GLetTuple _ | GIf _ | GCast _)
-> error "Unsupported construction in recursive notations."
| (GRef _ | GVar _ | GApp _ | GLambda _ | GProd _
| GHole _ | GSort _ | GLetIn _), _
-> false
let rec eq_glob_constr t1 t2 = compare_glob_constr eq_glob_constr (fun _ -> ()) t1 t2
let subtract_loc loc1 loc2 = make_loc (fst (unloc loc1),fst (unloc loc2)-1)
let check_is_hole id = function GHole _ -> () | t ->
user_err_loc (loc_of_glob_constr t,"",
strbrk "In recursive notation with binders, " ++ pr_id id ++
strbrk " is expected to come without type.")
let compare_recursive_parts found f (iterator,subc) =
let diff = ref None in
let terminator = ref None in
let rec aux c1 c2 = match c1,c2 with
| GVar(_,v), term when v = ldots_var ->
(* We found the pattern *)
assert (!terminator = None); terminator := Some term;
true
| GApp (_,GVar(_,v),l1), GApp (_,term,l2) when v = ldots_var ->
(* We found the pattern, but there are extra arguments *)
(* (this allows e.g. alternative (recursive) notation of application) *)
assert (!terminator = None); terminator := Some term;
list_for_all2eq aux l1 l2
| GVar (_,x), GVar (_,y) when x<>y ->
(* We found the position where it differs *)
let lassoc = (!terminator <> None) in
let x,y = if lassoc then y,x else x,y in
!diff = None && (diff := Some (x,y,Some lassoc); true)
| GLambda (_,Name x,_,t_x,c), GLambda (_,Name y,_,t_y,term)
| GProd (_,Name x,_,t_x,c), GProd (_,Name y,_,t_y,term) ->
(* We found a binding position where it differs *)
check_is_hole x t_x;
check_is_hole y t_y;
!diff = None && (diff := Some (x,y,None); aux c term)
| _ ->
compare_glob_constr aux (add_name found) c1 c2 in
if aux iterator subc then
match !diff with
| None ->
let loc1 = loc_of_glob_constr iterator in
let loc2 = loc_of_glob_constr (Option.get !terminator) in
(* Here, we would need a loc made of several parts ... *)
user_err_loc (subtract_loc loc1 loc2,"",
str "Both ends of the recursive pattern are the same.")
| Some (x,y,Some lassoc) ->
let newfound = (pi1 !found, (x,y) :: pi2 !found, pi3 !found) in
let iterator =
f (if lassoc then subst_glob_vars [y,GVar(dummy_loc,x)] iterator
else iterator) in
(* found have been collected by compare_constr *)
found := newfound;
AList (x,y,iterator,f (Option.get !terminator),lassoc)
| Some (x,y,None) ->
let newfound = (pi1 !found, pi2 !found, (x,y) :: pi3 !found) in
let iterator = f iterator in
(* found have been collected by compare_constr *)
found := newfound;
ABinderList (x,y,iterator,f (Option.get !terminator))
else
raise Not_found
let aconstr_and_vars_of_glob_constr a =
let found = ref ([],[],[]) in
let rec aux c =
let keepfound = !found in
(* n^2 complexity but small and done only once per notation *)
try compare_recursive_parts found aux' (split_at_recursive_part c)
with Not_found ->
found := keepfound;
match c with
| GApp (_,GVar (loc,f),[c]) when f = ldots_var ->
(* Fall on the second part of the recursive pattern w/o having
   found the first part *)
user_err_loc (loc,"",
str "Cannot find where the recursive pattern starts.")
| c ->
aux' c
and aux' = function
| GVar (_,id) -> add_id found id; AVar id
| GApp (_,g,args) -> AApp (aux g, List.map aux args)
| GLambda (_,na,bk,ty,c) -> add_name found na; ALambda (na,aux ty,aux c)
| GProd (_,na,bk,ty,c) -> add_name found na; AProd (na,aux ty,aux c)
| GLetIn (_,na,b,c) -> add_name found na; ALetIn (na,aux b,aux c)
| GCases (_,sty,rtntypopt,tml,eqnl) ->
let f (_,idl,pat,rhs) = List.iter (add_id found) idl; (pat,aux rhs) in
ACases (sty,Option.map aux rtntypopt,
List.map (fun (tm,(na,x)) ->
add_name found na;
Option.iter
(fun (_,_,_,nl) -> List.iter (add_name found) nl) x;
(aux tm,(na,Option.map (fun (_,ind,n,nal) -> (ind,n,nal)) x))) tml,
List.map f eqnl)
| GLetTuple (loc,nal,(na,po),b,c) ->
add_name found na;
List.iter (add_name found) nal;
ALetTuple (nal,(na,Option.map aux po),aux b,aux c)
| GIf (loc,c,(na,po),b1,b2) ->
add_name found na;
AIf (aux c,(na,Option.map aux po),aux b1,aux b2)
| GRec (_,fk,idl,dll,tl,bl) ->
Array.iter (add_id found) idl;
let dll = Array.map (List.map (fun (na,bk,oc,b) ->
if bk <> Explicit then
error "Binders marked as implicit not allowed in notations.";
add_name found na; (na,Option.map aux oc,aux b))) dll in
ARec (fk,idl,dll,Array.map aux tl,Array.map aux bl)
| GCast (_,c,k) -> ACast (aux c,
match k with CastConv (k,t) -> CastConv (k,aux t)
| CastCoerce -> CastCoerce)
| GSort (_,s) -> ASort s
| GHole (_,w) -> AHole w
| GRef (_,r) -> ARef r
| GPatVar (_,(_,n)) -> APatVar n
| GEvar _ ->
error "Existential variables not allowed in notations."
in
let t = aux a in
(* Side effect *)
t, !found
let rec list_rev_mem_assoc x = function
| [] -> false
| (_,x')::l -> x = x' || list_rev_mem_assoc x l
let check_variables vars recvars (found,foundrec,foundrecbinding) =
let useless_vars = List.map snd recvars in
let vars = List.filter (fun (y,_) -> not (List.mem y useless_vars)) vars in
let check_recvar x =
if List.mem x found then
errorlabstrm "" (pr_id x ++
strbrk " should only be used in the recursive part of a pattern.") in
List.iter (fun (x,y) -> check_recvar x; check_recvar y)
(foundrec@foundrecbinding);
let check_bound x =
if not (List.mem x found) then
if List.mem_assoc x foundrec or List.mem_assoc x foundrecbinding
or list_rev_mem_assoc x foundrec or list_rev_mem_assoc x foundrecbinding
then
error ((string_of_id x)^" should not be bound in a recursive pattern of the right-hand side.")
else
error ((string_of_id x)^" is unbound in the right-hand side.") in
let check_pair s x y where =
if not (List.mem (x,y) where) then
errorlabstrm "" (strbrk "in the right-hand side, " ++ pr_id x ++
str " and " ++ pr_id y ++ strbrk " should appear in " ++ str s ++
str " position as part of a recursive pattern.") in
let check_type (x,typ) =
match typ with
| NtnInternTypeConstr ->
begin
try check_pair "term" x (List.assoc x recvars) foundrec
with Not_found -> check_bound x
end
| NtnInternTypeBinder ->
begin
try check_pair "binding" x (List.assoc x recvars) foundrecbinding
with Not_found -> check_bound x
end
| NtnInternTypeIdent -> check_bound x in
List.iter check_type vars
let aconstr_of_glob_constr vars recvars a =
let a,found = aconstr_and_vars_of_glob_constr a in
check_variables vars recvars found;
a
(* Substitution of kernel names, avoiding a list of bound identifiers *)
let aconstr_of_constr avoiding t =
aconstr_of_glob_constr [] [] (Detyping.detype false avoiding [] t)
let rec subst_pat subst pat =
match pat with
| PatVar _ -> pat
| PatCstr (loc,((kn,i),j),cpl,n) ->
let kn' = subst_ind subst kn
and cpl' = list_smartmap (subst_pat subst) cpl in
if kn' == kn && cpl' == cpl then pat else
PatCstr (loc,((kn',i),j),cpl',n)
let rec subst_aconstr subst bound raw =
match raw with
| ARef ref ->
let ref',t = subst_global subst ref in
if ref' == ref then raw else
aconstr_of_constr bound t
| AVar _ -> raw
| AApp (r,rl) ->
let r' = subst_aconstr subst bound r
and rl' = list_smartmap (subst_aconstr subst bound) rl in
if r' == r && rl' == rl then raw else
AApp(r',rl')
| AList (id1,id2,r1,r2,b) ->
let r1' = subst_aconstr subst bound r1
and r2' = subst_aconstr subst bound r2 in
if r1' == r1 && r2' == r2 then raw else
AList (id1,id2,r1',r2',b)
| ALambda (n,r1,r2) ->
let r1' = subst_aconstr subst bound r1
and r2' = subst_aconstr subst bound r2 in
if r1' == r1 && r2' == r2 then raw else
ALambda (n,r1',r2')
| AProd (n,r1,r2) ->
let r1' = subst_aconstr subst bound r1
and r2' = subst_aconstr subst bound r2 in
if r1' == r1 && r2' == r2 then raw else
AProd (n,r1',r2')
| ABinderList (id1,id2,r1,r2) ->
let r1' = subst_aconstr subst bound r1
and r2' = subst_aconstr subst bound r2 in
if r1' == r1 && r2' == r2 then raw else
ABinderList (id1,id2,r1',r2')
| ALetIn (n,r1,r2) ->
let r1' = subst_aconstr subst bound r1
and r2' = subst_aconstr subst bound r2 in
if r1' == r1 && r2' == r2 then raw else
ALetIn (n,r1',r2')
| ACases (sty,rtntypopt,rl,branches) ->
let rtntypopt' = Option.smartmap (subst_aconstr subst bound) rtntypopt
and rl' = list_smartmap
(fun (a,(n,signopt) as x) ->
let a' = subst_aconstr subst bound a in
let signopt' = Option.map (fun ((indkn,i),n,nal as z) ->
let indkn' = subst_ind subst indkn in
if indkn == indkn' then z else ((indkn',i),n,nal)) signopt in
if a' == a && signopt' == signopt then x else (a',(n,signopt')))
rl
and branches' = list_smartmap
(fun (cpl,r as branch) ->
let cpl' = list_smartmap (subst_pat subst) cpl
and r' = subst_aconstr subst bound r in
if cpl' == cpl && r' == r then branch else
(cpl',r'))
branches
in
if rtntypopt' == rtntypopt && rtntypopt == rtntypopt' &
rl' == rl && branches' == branches then raw else
ACases (sty,rtntypopt',rl',branches')
| ALetTuple (nal,(na,po),b,c) ->
let po' = Option.smartmap (subst_aconstr subst bound) po
and b' = subst_aconstr subst bound b
and c' = subst_aconstr subst bound c in
if po' == po && b' == b && c' == c then raw else
ALetTuple (nal,(na,po'),b',c')
| AIf (c,(na,po),b1,b2) ->
let po' = Option.smartmap (subst_aconstr subst bound) po
and b1' = subst_aconstr subst bound b1
and b2' = subst_aconstr subst bound b2
and c' = subst_aconstr subst bound c in
if po' == po && b1' == b1 && b2' == b2 && c' == c then raw else
AIf (c',(na,po'),b1',b2')
| ARec (fk,idl,dll,tl,bl) ->
let dll' =
array_smartmap (list_smartmap (fun (na,oc,b as x) ->
let oc' = Option.smartmap (subst_aconstr subst bound) oc in
let b' = subst_aconstr subst bound b in
if oc' == oc && b' == b then x else (na,oc',b'))) dll in
let tl' = array_smartmap (subst_aconstr subst bound) tl in
let bl' = array_smartmap (subst_aconstr subst bound) bl in
if dll' == dll && tl' == tl && bl' == bl then raw else
ARec (fk,idl,dll',tl',bl')
| APatVar _ | ASort _ -> raw
| AHole (Evd.ImplicitArg (ref,i,b)) ->
let ref',t = subst_global subst ref in
if ref' == ref then raw else
AHole (Evd.InternalHole)
| AHole (Evd.BinderType _ | Evd.QuestionMark _ | Evd.CasesType
| Evd.InternalHole | Evd.TomatchTypeParameter _ | Evd.GoalEvar
| Evd.ImpossibleCase | Evd.MatchingVar _) -> raw
| ACast (r1,k) ->
match k with
CastConv (k, r2) ->
let r1' = subst_aconstr subst bound r1
and r2' = subst_aconstr subst bound r2 in
if r1' == r1 && r2' == r2 then raw else
ACast (r1',CastConv (k,r2'))
| CastCoerce ->
let r1' = subst_aconstr subst bound r1 in
if r1' == r1 then raw else
ACast (r1',CastCoerce)
let subst_interpretation subst (metas,pat) =
let bound = List.map fst metas in
(metas,subst_aconstr subst bound pat)
(* Pattern-matching glob_constr and aconstr *)
let abstract_return_type_context pi mklam tml rtno =
Option.map (fun rtn ->
let nal =
List.flatten (List.map (fun (_,(na,t)) ->
match t with Some x -> (pi x)@[na] | None -> [na]) tml) in
List.fold_right mklam nal rtn)
rtno
let abstract_return_type_context_glob_constr =
abstract_return_type_context (fun (_,_,_,nal) -> nal)
(fun na c -> GLambda(dummy_loc,na,Explicit,GHole(dummy_loc,Evd.InternalHole),c))
let abstract_return_type_context_aconstr =
abstract_return_type_context pi3
(fun na c -> ALambda(na,AHole Evd.InternalHole,c))
exception No_match
let rec alpha_var id1 id2 = function
| (i1,i2)::_ when i1=id1 -> i2 = id2
| (i1,i2)::_ when i2=id2 -> i1 = id1
| _::idl -> alpha_var id1 id2 idl
| [] -> id1 = id2
let alpha_eq_val (x,y) = x = y
let bind_env alp (sigma,sigmalist,sigmabinders as fullsigma) var v =
try
let vvar = List.assoc var sigma in
if alpha_eq_val (v,vvar) then fullsigma
else raise No_match
with Not_found ->
(* Check that no capture of binding variables occur *)
if List.exists (fun (id,_) ->occur_glob_constr id v) alp then raise No_match;
(* TODO: handle the case of multiple occs in different scopes *)
((var,v)::sigma,sigmalist,sigmabinders)
let bind_binder (sigma,sigmalist,sigmabinders) x bl =
(sigma,sigmalist,(x,List.rev bl)::sigmabinders)
let match_fix_kind fk1 fk2 =
match (fk1,fk2) with
| GCoFix n1, GCoFix n2 -> n1 = n2
| GFix (nl1,n1), GFix (nl2,n2) ->
n1 = n2 &&
array_for_all2 (fun (n1,_) (n2,_) -> n2 = None || n1 = n2) nl1 nl2
| _ -> false
let match_opt f sigma t1 t2 = match (t1,t2) with
| None, None -> sigma
| Some t1, Some t2 -> f sigma t1 t2
| _ -> raise No_match
let match_names metas (alp,sigma) na1 na2 = match (na1,na2) with
| (_,Name id2) when List.mem id2 (fst metas) ->
let rhs = match na1 with
| Name id1 -> GVar (dummy_loc,id1)
| Anonymous -> GHole (dummy_loc,Evd.InternalHole) in
alp, bind_env alp sigma id2 rhs
| (Name id1,Name id2) -> (id1,id2)::alp,sigma
| (Anonymous,Anonymous) -> alp,sigma
| _ -> raise No_match
let rec match_cases_pattern_binders metas acc pat1 pat2 =
match (pat1,pat2) with
| PatVar (_,na1), PatVar (_,na2) -> match_names metas acc na1 na2
| PatCstr (_,c1,patl1,na1), PatCstr (_,c2,patl2,na2)
when c1 = c2 & List.length patl1 = List.length patl2 ->
List.fold_left2 (match_cases_pattern_binders metas)
(match_names metas acc na1 na2) patl1 patl2
| _ -> raise No_match
let glue_letin_with_decls = true
let rec match_iterated_binders islambda decls = function
| GLambda (_,na,bk,t,b) when islambda ->
match_iterated_binders islambda ((na,bk,None,t)::decls) b
| GProd (_,(Name _ as na),bk,t,b) when not islambda ->
match_iterated_binders islambda ((na,bk,None,t)::decls) b
| GLetIn (loc,na,c,b) when glue_letin_with_decls ->
match_iterated_binders islambda
((na,Explicit (*?*), Some c,GHole(loc,Evd.BinderType na))::decls) b
| b -> (decls,b)
let remove_sigma x (sigmavar,sigmalist,sigmabinders) =
(List.remove_assoc x sigmavar,sigmalist,sigmabinders)
let rec match_abinderlist_with_app match_fun metas sigma rest x iter termin =
let rec aux sigma acc rest =
try
let sigma = match_fun (ldots_var::fst metas,snd metas) sigma rest iter in
let rest = List.assoc ldots_var (pi1 sigma) in
let b = match List.assoc x (pi3 sigma) with [b] -> b | _ ->assert false in
let sigma = remove_sigma x (remove_sigma ldots_var sigma) in
aux sigma (b::acc) rest
with No_match when acc <> [] ->
acc, match_fun metas sigma rest termin in
let bl,sigma = aux sigma [] rest in
bind_binder sigma x bl
let match_alist match_fun metas sigma rest x iter termin lassoc =
let rec aux sigma acc rest =
try
let sigma = match_fun (ldots_var::fst metas,snd metas) sigma rest iter in
let rest = List.assoc ldots_var (pi1 sigma) in
let t = List.assoc x (pi1 sigma) in
let sigma = remove_sigma x (remove_sigma ldots_var sigma) in
aux sigma (t::acc) rest
with No_match when acc <> [] ->
acc, match_fun metas sigma rest termin in
let l,sigma = aux sigma [] rest in
(pi1 sigma, (x,if lassoc then l else List.rev l)::pi2 sigma, pi3 sigma)
let does_not_come_from_already_eta_expanded_var =
(* This is hack to avoid looping on a rule with rhs of the form *)
(* "?f (fun ?x => ?g)" since otherwise, matching "F H" expands in *)
(* "F (fun x => H x)" and "H x" is recursively matched against the same *)
(* rule, giving "H (fun x' => x x')" and so on. *)
(* Ideally, we would need the type of the expression to know which of *)
(* the arguments applied to it can be eta-expanded without looping. *)
(* The following test is then an approximation of what can be done *)
(* optimally (whether other looping situations can occur remains to be *)
(* checked). *)
function GVar _ -> false | _ -> true
let rec match_ inner u alp (tmetas,blmetas as metas) sigma a1 a2 =
match (a1,a2) with
(* Matching notation variable *)
| r1, AVar id2 when List.mem id2 tmetas -> bind_env alp sigma id2 r1
(* Matching recursive notations for terms *)
| r1, AList (x,_,iter,termin,lassoc) ->
match_alist (match_hd u alp) metas sigma r1 x iter termin lassoc
(* Matching recursive notations for binders: ad hoc cases supporting let-in *)
| GLambda (_,na1,bk,t1,b1), ABinderList (x,_,ALambda (Name id2,_,b2),termin)->
let (decls,b) = match_iterated_binders true [(na1,bk,None,t1)] b1 in
(* TODO: address the possibility that termin is a Lambda itself *)
match_in u alp metas (bind_binder sigma x decls) b termin
| GProd (_,na1,bk,t1,b1), ABinderList (x,_,AProd (Name id2,_,b2),termin)
when na1 <> Anonymous ->
let (decls,b) = match_iterated_binders false [(na1,bk,None,t1)] b1 in
(* TODO: address the possibility that termin is a Prod itself *)
match_in u alp metas (bind_binder sigma x decls) b termin
(* Matching recursive notations for binders: general case *)
| r, ABinderList (x,_,iter,termin) ->
match_abinderlist_with_app (match_hd u alp) metas sigma r x iter termin
(* Matching individual binders as part of a recursive pattern *)
| GLambda (_,na,bk,t,b1), ALambda (Name id,_,b2) when List.mem id blmetas ->
match_in u alp metas (bind_binder sigma id [(na,bk,None,t)]) b1 b2
| GProd (_,na,bk,t,b1), AProd (Name id,_,b2)
when List.mem id blmetas & na <> Anonymous ->
match_in u alp metas (bind_binder sigma id [(na,bk,None,t)]) b1 b2
(* Matching compositionally *)
| GVar (_,id1), AVar id2 when alpha_var id1 id2 alp -> sigma
| GRef (_,r1), ARef r2 when (eq_gr r1 r2) -> sigma
| GPatVar (_,(_,n1)), APatVar n2 when n1=n2 -> sigma
| GApp (loc,f1,l1), AApp (f2,l2) ->
let n1 = List.length l1 and n2 = List.length l2 in
let f1,l1,f2,l2 =
if n1 < n2 then
let l21,l22 = list_chop (n2-n1) l2 in f1,l1, AApp (f2,l21), l22
else if n1 > n2 then
let l11,l12 = list_chop (n1-n2) l1 in GApp (loc,f1,l11),l12, f2,l2
else f1,l1, f2, l2 in
let may_use_eta = does_not_come_from_already_eta_expanded_var f1 in
List.fold_left2 (match_ may_use_eta u alp metas)
(match_in u alp metas sigma f1 f2) l1 l2
| GLambda (_,na1,_,t1,b1), ALambda (na2,t2,b2) ->
match_binders u alp metas na1 na2 (match_in u alp metas sigma t1 t2) b1 b2
| GProd (_,na1,_,t1,b1), AProd (na2,t2,b2) ->
match_binders u alp metas na1 na2 (match_in u alp metas sigma t1 t2) b1 b2
| GLetIn (_,na1,t1,b1), ALetIn (na2,t2,b2) ->
match_binders u alp metas na1 na2 (match_in u alp metas sigma t1 t2) b1 b2
| GCases (_,sty1,rtno1,tml1,eqnl1), ACases (sty2,rtno2,tml2,eqnl2)
when sty1 = sty2
& List.length tml1 = List.length tml2
& List.length eqnl1 = List.length eqnl2 ->
let rtno1' = abstract_return_type_context_glob_constr tml1 rtno1 in
let rtno2' = abstract_return_type_context_aconstr tml2 rtno2 in
let sigma =
try Option.fold_left2 (match_in u alp metas) sigma rtno1' rtno2'
with Option.Heterogeneous -> raise No_match
in
let sigma = List.fold_left2
(fun s (tm1,_) (tm2,_) ->
match_in u alp metas s tm1 tm2) sigma tml1 tml2 in
List.fold_left2 (match_equations u alp metas) sigma eqnl1 eqnl2
| GLetTuple (_,nal1,(na1,to1),b1,c1), ALetTuple (nal2,(na2,to2),b2,c2)
when List.length nal1 = List.length nal2 ->
let sigma = match_opt (match_binders u alp metas na1 na2) sigma to1 to2 in
let sigma = match_in u alp metas sigma b1 b2 in
let (alp,sigma) =
List.fold_left2 (match_names metas) (alp,sigma) nal1 nal2 in
match_in u alp metas sigma c1 c2
| GIf (_,a1,(na1,to1),b1,c1), AIf (a2,(na2,to2),b2,c2) ->
let sigma = match_opt (match_binders u alp metas na1 na2) sigma to1 to2 in
List.fold_left2 (match_in u alp metas) sigma [a1;b1;c1] [a2;b2;c2]
| GRec (_,fk1,idl1,dll1,tl1,bl1), ARec (fk2,idl2,dll2,tl2,bl2)
when match_fix_kind fk1 fk2 & Array.length idl1 = Array.length idl2 &
array_for_all2 (fun l1 l2 -> List.length l1 = List.length l2) dll1 dll2
->
let alp,sigma = array_fold_left2
(List.fold_left2 (fun (alp,sigma) (na1,_,oc1,b1) (na2,oc2,b2) ->
let sigma =
match_in u alp metas
(match_opt (match_in u alp metas) sigma oc1 oc2) b1 b2
in match_names metas (alp,sigma) na1 na2)) (alp,sigma) dll1 dll2 in
let sigma = array_fold_left2 (match_in u alp metas) sigma tl1 tl2 in
let alp,sigma = array_fold_right2 (fun id1 id2 alsig ->
match_names metas alsig (Name id1) (Name id2)) idl1 idl2 (alp,sigma) in
array_fold_left2 (match_in u alp metas) sigma bl1 bl2
| GCast(_,c1, CastConv(_,t1)), ACast(c2, CastConv (_,t2)) ->
match_in u alp metas (match_in u alp metas sigma c1 c2) t1 t2
| GCast(_,c1, CastCoerce), ACast(c2, CastCoerce) ->
match_in u alp metas sigma c1 c2
| GSort (_,GType _), ASort (GType None) when not u -> sigma
| GSort (_,s1), ASort s2 when s1 = s2 -> sigma
(* Don't hide Metas, they bind in ltac *)
| a, AHole _ -> sigma
(* On the fly eta-expansion so as to use notations of the form
   "exists x, P x" for "ex P"; expects type not given because don't know
   otherwise how to ensure it corresponds to a well-typed eta-expansion;
   ensure at least one constructor is consumed to avoid looping *)
| b1, ALambda (Name id,AHole _,b2) when inner ->
let id' = Namegen.next_ident_away id (free_glob_vars b1) in
match_in u alp metas (bind_binder sigma id
[(Name id',Explicit,None,GHole(dummy_loc,Evd.BinderType (Name id')))])
(mkGApp dummy_loc b1 (GVar (dummy_loc,id'))) b2
| (GRec _ | GEvar _), _
| _,_ -> raise No_match
and match_in u = match_ true u
and match_hd u = match_ false u
and match_binders u alp metas na1 na2 sigma b1 b2 =
let (alp,sigma) = match_names metas (alp,sigma) na1 na2 in
match_in u alp metas sigma b1 b2
and match_equations u alp metas sigma (_,_,patl1,rhs1) (patl2,rhs2) =
(* patl1 and patl2 have the same length because they respectively
   correspond to some tml1 and tml2 that have the same length *)
let (alp,sigma) =
List.fold_left2 (match_cases_pattern_binders metas)
(alp,sigma) patl1 patl2 in
match_in u alp metas sigma rhs1 rhs2
let match_aconstr u c (metas,pat) =
let vars = list_split_by (fun (_,(_,x)) -> x <> NtnTypeBinderList) metas in
let vars = (List.map fst (fst vars), List.map fst (snd vars)) in
let terms,termlists,binders = match_ false u [] vars ([],[],[]) c pat in
(* Reorder canonically the substitution *)
let find x =
try List.assoc x terms
with Not_found ->
(* Happens for binders bound to Anonymous *)
(* Find a better way to propagate Anonymous... *)
GVar (dummy_loc,x) in
List.fold_right (fun (x,(scl,typ)) (terms',termlists',binders') ->
match typ with
| NtnTypeConstr ->
((find x, scl)::terms',termlists',binders')
| NtnTypeConstrList ->
(terms',(List.assoc x termlists,scl)::termlists',binders')
| NtnTypeBinderList ->
(terms',termlists',(List.assoc x binders,scl)::binders'))
metas ([],[],[])
(* Matching cases pattern *)
let bind_env_cases_pattern (sigma,sigmalist,x as fullsigma) var v =
try
let vvar = List.assoc var sigma in
if v=vvar then fullsigma else raise No_match
with Not_found ->
(* TODO: handle the case of multiple occs in different scopes *)
(var,v)::sigma,sigmalist,x
let rec match_cases_pattern metas sigma a1 a2 = match (a1,a2) with
| r1, AVar id2 when List.mem id2 metas -> bind_env_cases_pattern sigma id2 r1
| PatVar (_,Anonymous), AHole _ -> sigma
| PatCstr (loc,(ind,_ as r1),[],_), ARef (ConstructRef r2) when r1 = r2 ->
sigma
| PatCstr (loc,(ind,_ as r1),args1,_), AApp (ARef (ConstructRef r2),l2)
when r1 = r2 ->
let nparams = Inductive.inductive_params (Global.lookup_inductive ind) in
if List.length l2 <> nparams + List.length args1
then
(* TODO: revert partially applied notations of the form
   "Notation P := (@pair)." *)
raise No_match
else
let (p2,args2) = list_chop nparams l2 in
(* All parameters must be _ *)
List.iter (function AHole _ -> () | _ -> raise No_match) p2;
List.fold_left2 (match_cases_pattern metas) sigma args1 args2
| r1, AList (x,_,iter,termin,lassoc) ->
match_alist (fun (metas,_) -> match_cases_pattern metas)
(metas,[]) (pi1 sigma,pi2 sigma,()) r1 x iter termin lassoc
| _ -> raise No_match
let match_aconstr_cases_pattern c (metas,pat) =
let vars = List.map fst metas in
let terms,termlists,() = match_cases_pattern vars ([],[],()) c pat in
(* Reorder canonically the substitution *)
List.fold_right (fun (x,(scl,typ)) (terms',termlists') ->
match typ with
| NtnTypeConstr -> ((List.assoc x terms, scl)::terms',termlists')
| NtnTypeConstrList -> (terms',(List.assoc x termlists,scl)::termlists')
| NtnTypeBinderList -> assert false)
metas ([],[])
(**********************************************************************)
(*s Concrete syntax for terms *)
type notation = string
type explicitation = ExplByPos of int * identifier option | ExplByName of identifier
type binder_kind = Default of binding_kind | Generalized of binding_kind * binding_kind * bool
type abstraction_kind = AbsLambda | AbsPi
type proj_flag = int option (* [Some n] = proj of the n-th visible argument *)
type prim_token = Numeral of Bigint.bigint | String of string
type cases_pattern_expr =
| CPatAlias of loc * cases_pattern_expr * identifier
| CPatCstr of loc * reference * cases_pattern_expr list
| CPatCstrExpl of loc * reference * cases_pattern_expr list
| CPatAtom of loc * reference option
| CPatOr of loc * cases_pattern_expr list
| CPatNotation of loc * notation * cases_pattern_notation_substitution
| CPatPrim of loc * prim_token
| CPatRecord of Util.loc * (reference * cases_pattern_expr) list
| CPatDelimiters of loc * string * cases_pattern_expr
and cases_pattern_notation_substitution =
cases_pattern_expr list *     (** for constr subterms *)
cases_pattern_expr list list (** for recursive notations *)
type constr_expr =
| CRef of reference
| CFix of loc * identifier located * fix_expr list
| CCoFix of loc * identifier located * cofix_expr list
| CArrow of loc * constr_expr * constr_expr
| CProdN of loc * (name located list * binder_kind * constr_expr) list * constr_expr
| CLambdaN of loc * (name located list * binder_kind * constr_expr) list * constr_expr
| CLetIn of loc * name located * constr_expr * constr_expr
| CAppExpl of loc * (proj_flag * reference) * constr_expr list
| CApp of loc * (proj_flag * constr_expr) *
(constr_expr * explicitation located option) list
| CRecord of loc * constr_expr option * (reference * constr_expr) list
| CCases of loc * case_style * constr_expr option *
(constr_expr * (name located option * constr_expr option)) list *
(loc * cases_pattern_expr list located list * constr_expr) list
| CLetTuple of loc * name located list * (name located option * constr_expr option) *
constr_expr * constr_expr
| CIf of loc * constr_expr * (name located option * constr_expr option)
* constr_expr * constr_expr
| CHole of loc * Evd.hole_kind option
| CPatVar of loc * (bool * patvar)
| CEvar of loc * existential_key * constr_expr list option
| CSort of loc * glob_sort
| CCast of loc * constr_expr * constr_expr cast_type
| CNotation of loc * notation * constr_notation_substitution
| CGeneralization of loc * binding_kind * abstraction_kind option * constr_expr
| CPrim of loc * prim_token
| CDelimiters of loc * string * constr_expr
and fix_expr =
identifier located * (identifier located option * recursion_order_expr) * local_binder list * constr_expr * constr_expr
and cofix_expr =
identifier located * local_binder list * constr_expr * constr_expr
and recursion_order_expr =
| CStructRec
| CWfRec of constr_expr
| CMeasureRec of constr_expr * constr_expr option (* measure, relation *)
and local_binder =
| LocalRawDef of name located * constr_expr
| LocalRawAssum of name located list * binder_kind * constr_expr
and constr_notation_substitution =
constr_expr list *      (* for constr subterms *)
constr_expr list list * (* for recursive notations *)
local_binder list list (* for binders subexpressions *)
type typeclass_constraint = name located * binding_kind * constr_expr
and typeclass_context = typeclass_constraint list
type constr_pattern_expr = constr_expr
(***********************)
(* For binders parsing *)
let default_binder_kind = Default Explicit
let names_of_local_assums bl =
List.flatten (List.map (function LocalRawAssum(l,_,_)->l|_->[]) bl)
let names_of_local_binders bl =
List.flatten (List.map (function LocalRawAssum(l,_,_)->l|LocalRawDef(l,_)->[l]) bl)
(**********************************************************************)
(* Miscellaneous *)
let error_invalid_pattern_notation loc =
user_err_loc (loc,"",str "Invalid notation for pattern.")
(**********************************************************************)
(* Functions on constr_expr *)
let constr_loc = function
| CRef (Ident (loc,_)) -> loc
| CRef (Qualid (loc,_)) -> loc
| CFix (loc,_,_) -> loc
| CCoFix (loc,_,_) -> loc
| CArrow (loc,_,_) -> loc
| CProdN (loc,_,_) -> loc
| CLambdaN (loc,_,_) -> loc
| CLetIn (loc,_,_,_) -> loc
| CAppExpl (loc,_,_) -> loc
| CApp (loc,_,_) -> loc
| CRecord (loc,_,_) -> loc
| CCases (loc,_,_,_,_) -> loc
| CLetTuple (loc,_,_,_,_) -> loc
| CIf (loc,_,_,_,_) -> loc
| CHole (loc, _) -> loc
| CPatVar (loc,_) -> loc
| CEvar (loc,_,_) -> loc
| CSort (loc,_) -> loc
| CCast (loc,_,_) -> loc
| CNotation (loc,_,_) -> loc
| CGeneralization (loc,_,_,_) -> loc
| CPrim (loc,_) -> loc
| CDelimiters (loc,_,_) -> loc
let cases_pattern_expr_loc = function
| CPatAlias (loc,_,_) -> loc
| CPatCstr (loc,_,_) -> loc
| CPatCstrExpl (loc,_,_) -> loc
| CPatAtom (loc,_) -> loc
| CPatOr (loc,_) -> loc
| CPatNotation (loc,_,_) -> loc
| CPatRecord (loc, _) -> loc
| CPatPrim (loc,_) -> loc
| CPatDelimiters (loc,_,_) -> loc
let local_binder_loc = function
| LocalRawAssum ((loc,_)::_,_,t)
| LocalRawDef ((loc,_),t) -> join_loc loc (constr_loc t)
| LocalRawAssum ([],_,_) -> assert false
let local_binders_loc bll =
if bll = [] then dummy_loc else
join_loc (local_binder_loc (List.hd bll)) (local_binder_loc (list_last bll))
let ids_of_cases_indtype =
let add_var ids = function CRef (Ident (_,id)) -> id::ids | _ -> ids in
let rec vars_of = function
(* We deal only with the regular cases *)
| CApp (_,_,l) -> List.fold_left add_var [] (List.map fst l)
| CNotation (_,_,(l,[],[]))
(* assume the ntn is applicative and does not instantiate the head !! *)
| CAppExpl (_,_,l) -> List.fold_left add_var [] l
| CDelimiters(_,_,c) -> vars_of c
| _ -> [] in
vars_of
let ids_of_cases_tomatch tms =
List.fold_right
(fun (_,(ona,indnal)) l ->
Option.fold_right (fun t -> (@) (ids_of_cases_indtype t))
indnal (Option.fold_right (down_located name_cons) ona l))
tms []
let is_constructor id =
try ignore (Nametab.locate_extended (qualid_of_ident id)); true
with Not_found -> true
let rec cases_pattern_fold_names f a = function
| CPatRecord (_, l) ->
List.fold_left (fun acc (r, cp) -> cases_pattern_fold_names f acc cp) a l
| CPatAlias (_,pat,id) -> f id a
| CPatCstr (_,_,patl) | CPatCstrExpl (_,_,patl) | CPatOr (_,patl) ->
List.fold_left (cases_pattern_fold_names f) a patl
| CPatNotation (_,_,(patl,patll)) ->
List.fold_left (cases_pattern_fold_names f) a (patl@List.flatten patll)
| CPatDelimiters (_,_,pat) -> cases_pattern_fold_names f a pat
| CPatAtom (_,Some (Ident (_,id))) when not (is_constructor id) -> f id a
| CPatPrim _ | CPatAtom _ -> a
let ids_of_pattern_list =
List.fold_left
(located_fold_left
(List.fold_left (cases_pattern_fold_names Idset.add)))
Idset.empty
let rec fold_constr_expr_binders g f n acc b = function
| (nal,bk,t)::l ->
let nal = snd (List.split nal) in
let n' = List.fold_right (name_fold g) nal n in
f n (fold_constr_expr_binders g f n' acc b l) t
| [] ->
f n acc b
let rec fold_local_binders g f n acc b = function
| LocalRawAssum (nal,bk,t)::l ->
let nal = snd (List.split nal) in
let n' = List.fold_right (name_fold g) nal n in
f n (fold_local_binders g f n' acc b l) t
| LocalRawDef ((_,na),t)::l ->
f n (fold_local_binders g f (name_fold g na n) acc b l) t
| [] ->
f n acc b
let fold_constr_expr_with_binders g f n acc = function
| CArrow (loc,a,b) -> f n (f n acc a) b
| CAppExpl (loc,(_,_),l) -> List.fold_left (f n) acc l
| CApp (loc,(_,t),l) -> List.fold_left (f n) (f n acc t) (List.map fst l)
| CProdN (_,l,b) | CLambdaN (_,l,b) -> fold_constr_expr_binders g f n acc b l
| CLetIn (_,na,a,b) -> fold_constr_expr_binders g f n acc b [[na],default_binder_kind,a]
| CCast (loc,a,CastConv(_,b)) -> f n (f n acc a) b
| CCast (loc,a,CastCoerce) -> f n acc a
| CNotation (_,_,(l,ll,bll)) ->
(* The following is an approximation: we don't know exactly if
an ident is binding nor to which subterms bindings apply *)
let acc = List.fold_left (f n) acc (l@List.flatten ll) in
List.fold_left (fun acc bl -> fold_local_binders g f n acc (CHole (dummy_loc,None)) bl) acc bll
| CGeneralization (_,_,_,c) -> f n acc c
| CDelimiters (loc,_,a) -> f n acc a
| CHole _ | CEvar _ | CPatVar _ | CSort _ | CPrim _ | CRef _ ->
acc
| CRecord (loc,_,l) -> List.fold_left (fun acc (id, c) -> f n acc c) acc l
| CCases (loc,sty,rtnpo,al,bl) ->
let ids = ids_of_cases_tomatch al in
let acc = Option.fold_left (f (List.fold_right g ids n)) acc rtnpo in
let acc = List.fold_left (f n) acc (List.map fst al) in
List.fold_right (fun (loc,patl,rhs) acc ->
let ids = ids_of_pattern_list patl in
f (Idset.fold g ids n) acc rhs) bl acc
| CLetTuple (loc,nal,(ona,po),b,c) ->
let n' = List.fold_right (down_located (name_fold g)) nal n in
f (Option.fold_right (down_located (name_fold g)) ona n') (f n acc b) c
| CIf (_,c,(ona,po),b1,b2) ->
let acc = f n (f n (f n acc b1) b2) c in
Option.fold_left
(f (Option.fold_right (down_located (name_fold g)) ona n)) acc po
| CFix (loc,_,l) ->
let n' = List.fold_right (fun ((_,id),_,_,_,_) -> g id) l n in
List.fold_right (fun (_,(_,o),lb,t,c) acc ->
fold_local_binders g f n'
(fold_local_binders g f n acc t lb) c lb) l acc
| CCoFix (loc,_,_) ->
Pp.warning "Capture check in multiple binders not done"; acc
let free_vars_of_constr_expr c =
let rec aux bdvars l = function
| CRef (Ident (_,id)) -> if List.mem id bdvars then l else Idset.add id l
| c -> fold_constr_expr_with_binders (fun a l -> a::l) aux bdvars l c
in aux [] Idset.empty c
let occur_var_constr_expr id c = Idset.mem id (free_vars_of_constr_expr c)
let mkIdentC id = CRef (Ident (dummy_loc, id))
let mkRefC r = CRef r
let mkCastC (a,k) = CCast (dummy_loc,a,k)
let mkLambdaC (idl,bk,a,b) = CLambdaN (dummy_loc,[idl,bk,a],b)
let mkLetInC (id,a,b) = CLetIn (dummy_loc,id,a,b)
let mkProdC (idl,bk,a,b) = CProdN (dummy_loc,[idl,bk,a],b)
let mkAppC (f,l) =
let l = List.map (fun x -> (x,None)) l in
match f with
| CApp (_,g,l') -> CApp (dummy_loc, g, l' @ l)
| _ -> CApp (dummy_loc, (None, f), l)
let rec mkCProdN loc bll c =
match bll with
| LocalRawAssum ((loc1,_)::_ as idl,bk,t) :: bll ->
CProdN (loc,[idl,bk,t],mkCProdN (join_loc loc1 loc) bll c)
| LocalRawDef ((loc1,_) as id,b) :: bll ->
CLetIn (loc,id,b,mkCProdN (join_loc loc1 loc) bll c)
| [] -> c
| LocalRawAssum ([],_,_) :: bll -> mkCProdN loc bll c
let rec mkCLambdaN loc bll c =
match bll with
| LocalRawAssum ((loc1,_)::_ as idl,bk,t) :: bll ->
CLambdaN (loc,[idl,bk,t],mkCLambdaN (join_loc loc1 loc) bll c)
| LocalRawDef ((loc1,_) as id,b) :: bll ->
CLetIn (loc,id,b,mkCLambdaN (join_loc loc1 loc) bll c)
| [] -> c
| LocalRawAssum ([],_,_) :: bll -> mkCLambdaN loc bll c
let rec abstract_constr_expr c = function
| [] -> c
| LocalRawDef (x,b)::bl -> mkLetInC(x,b,abstract_constr_expr c bl)
| LocalRawAssum (idl,bk,t)::bl ->
List.fold_right (fun x b -> mkLambdaC([x],bk,t,b)) idl
(abstract_constr_expr c bl)
let rec prod_constr_expr c = function
| [] -> c
| LocalRawDef (x,b)::bl -> mkLetInC(x,b,prod_constr_expr c bl)
| LocalRawAssum (idl,bk,t)::bl ->
List.fold_right (fun x b -> mkProdC([x],bk,t,b)) idl
(prod_constr_expr c bl)
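
(* Illustrative sketch, not part of the original file: abstracting a body over
   one explicit binder "(x : t)" with [abstract_constr_expr] yields the same
   term as a direct [mkLambdaC]; [t] and [body] stand for arbitrary
   constr_expr values and [_abstract_example] is a hypothetical name. *)
let _abstract_example t body =
  let x = (dummy_loc, Name (id_of_string "x")) in
  abstract_constr_expr body [LocalRawAssum ([x], default_binder_kind, t)]
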
let coerce_reference_to_id = function
| Ident (_,id) -> id
| Qualid (loc,_) ->
user_err_loc (loc, "coerce_reference_to_id",
str "This expression should be a simple identifier.")
let coerce_to_id = function
| CRef (Ident (loc,id)) -> (loc,id)
| a -> user_err_loc
(constr_loc a,"coerce_to_id",
str "This expression should be a simple identifier.")
let coerce_to_name = function
| CRef (Ident (loc,id)) -> (loc,Name id)
| CHole (loc,_) -> (loc,Anonymous)
| a -> user_err_loc
(constr_loc a,"coerce_to_name",
str "This expression should be a name.")
(* Interpret the index of a recursion order annotation *)
let split_at_annot bl na =
let names = List.map snd (names_of_local_assums bl) in
match na with
| None ->
if names = [] then error "A fixpoint needs at least one parameter."
else [], bl
| Some (loc, id) ->
let rec aux acc = function
| LocalRawAssum (bls, k, t) as x :: rest ->
let l, r = list_split_when (fun (loc, na) -> na = Name id) bls in
if r = [] then aux (x :: acc) rest
else
(List.rev (if l = [] then acc else LocalRawAssum (l, k, t) :: acc),
LocalRawAssum (r, k, t) :: rest)
| LocalRawDef _ as x :: rest -> aux (x :: acc) rest
| [] ->
user_err_loc(loc,"",
str "No parameter named " ++ Nameops.pr_id id ++ str".")
in aux [] bl
(* Used in correctness and interface *)
let map_binder g e nal = List.fold_right (down_located (name_fold g)) nal e
let map_binders f g e bl =
(* TODO: avoid variable capture in [t] by some [na] in [List.tl nal] *)
let h (e,bl) (nal,bk,t) = (map_binder g e nal,(nal,bk,f e t)::bl) in
let (e,rbl) = List.fold_left h (e,[]) bl in
(e, List.rev rbl)
let map_local_binders f g e bl =
(* TODO: avoid variable capture in [t] by some [na] in [List.tl nal] *)
let h (e,bl) = function
LocalRawAssum(nal,k,ty) ->
(map_binder g e nal, LocalRawAssum(nal,k,f e ty)::bl)
| LocalRawDef((loc,na),ty) ->
(name_fold g na e, LocalRawDef((loc,na),f e ty)::bl) in
let (e,rbl) = List.fold_left h (e,[]) bl in
(e, List.rev rbl)
let map_constr_expr_with_binders g f e = function
| CArrow (loc,a,b) -> CArrow (loc,f e a,f e b)
| CAppExpl (loc,r,l) -> CAppExpl (loc,r,List.map (f e) l)
| CApp (loc,(p,a),l) ->
CApp (loc,(p,f e a),List.map (fun (a,i) -> (f e a,i)) l)
| CProdN (loc,bl,b) ->
let (e,bl) = map_binders f g e bl in CProdN (loc,bl,f e b)
| CLambdaN (loc,bl,b) ->
let (e,bl) = map_binders f g e bl in CLambdaN (loc,bl,f e b)
| CLetIn (loc,na,a,b) -> CLetIn (loc,na,f e a,f (name_fold g (snd na) e) b)
| CCast (loc,a,CastConv (k,b)) -> CCast (loc,f e a,CastConv(k, f e b))
| CCast (loc,a,CastCoerce) -> CCast (loc,f e a,CastCoerce)
| CNotation (loc,n,(l,ll,bll)) ->
(* This is an approximation because we don't know what binds what *)
CNotation (loc,n,(List.map (f e) l,List.map (List.map (f e)) ll,
List.map (fun bl -> snd (map_local_binders f g e bl)) bll))
| CGeneralization (loc,b,a,c) -> CGeneralization (loc,b,a,f e c)
| CDelimiters (loc,s,a) -> CDelimiters (loc,s,f e a)
| CHole _ | CEvar _ | CPatVar _ | CSort _
| CPrim _ | CRef _ as x -> x
| CRecord (loc,p,l) -> CRecord (loc,p,List.map (fun (id, c) -> (id, f e c)) l)
| CCases (loc,sty,rtnpo,a,bl) ->
(* TODO: apply g on the binding variables in pat... *)
let bl = List.map (fun (loc,pat,rhs) -> (loc,pat,f e rhs)) bl in
let ids = ids_of_cases_tomatch a in
let po = Option.map (f (List.fold_right g ids e)) rtnpo in
CCases (loc, sty, po, List.map (fun (tm,x) -> (f e tm,x)) a,bl)
| CLetTuple (loc,nal,(ona,po),b,c) ->
let e' = List.fold_right (down_located (name_fold g)) nal e in
let e'' = Option.fold_right (down_located (name_fold g)) ona e in
CLetTuple (loc,nal,(ona,Option.map (f e'') po),f e b,f e' c)
| CIf (loc,c,(ona,po),b1,b2) ->
let e' = Option.fold_right (down_located (name_fold g)) ona e in
CIf (loc,f e c,(ona,Option.map (f e') po),f e b1,f e b2)
| CFix (loc,id,dl) ->
CFix (loc,id,List.map (fun (id,n,bl,t,d) ->
let (e',bl') = map_local_binders f g e bl in
let t' = f e' t in
(* Note: fix names should be inserted before the arguments... *)
let e'' = List.fold_left (fun e ((_,id),_,_,_,_) -> g id e) e' dl in
let d' = f e'' d in
(id,n,bl',t',d')) dl)
| CCoFix (loc,id,dl) ->
CCoFix (loc,id,List.map (fun (id,bl,t,d) ->
let (e',bl') = map_local_binders f g e bl in
let t' = f e' t in
let e'' = List.fold_left (fun e ((_,id),_,_,_) -> g id e) e' dl in
let d' = f e'' d in
(id,bl',t',d')) dl)
(* Used in ... *)
let rec replace_vars_constr_expr l = function
| CRef (Ident (loc,id)) as x ->
(try CRef (Ident (loc,List.assoc id l)) with Not_found -> x)
| c -> map_constr_expr_with_binders List.remove_assoc
replace_vars_constr_expr l c
(**********************************************************************)
(* Concrete syntax for modules and modules types *)
type with_declaration_ast =
| CWith_Module of identifier list located * qualid located
| CWith_Definition of identifier list located * constr_expr
type module_ast =
| CMident of qualid located
| CMapply of loc * module_ast * module_ast
| CMwith of loc * module_ast * with_declaration_ast
(* Returns the ranges of locs of the notation that are not occupied by args *)
(* and which are then occupied by proper symbols of the notation (or spaces) *)
let locs_of_notation loc locs ntn =
let (bl,el) = Util.unloc loc in
let locs = List.map Util.unloc locs in
let rec aux pos = function
| [] -> if pos = el then [] else [(pos,el-1)]
| (ba,ea)::l ->if pos = ba then aux ea l else (pos,ba-1)::aux ea l
in aux bl (Sort.list (fun l1 l2 -> fst l1 < fst l2) locs)
let ntn_loc loc (args,argslist,binderslist) =
locs_of_notation loc
(List.map constr_loc (args@List.flatten argslist)@
List.map local_binders_loc binderslist)
let patntn_loc loc (args,argslist) =
locs_of_notation loc
(List.map cases_pattern_expr_loc (args@List.flatten argslist))
| null | https://raw.githubusercontent.com/hemmi/coq2scala/d10f441c18146933a99bf2088116bd213ac3648d/coq-8.4pl2-old/interp/topconstr.ml | ocaml | **********************************************************************
// * This file is distributed under the terms of the
* GNU Lesser General Public License Version 2.1
**********************************************************************
i
i
********************************************************************
Part common to glob_constr and cases_pattern
Part only in glob_constr
********************************************************************
Re-interpret a notation as a glob_constr, taking care of binders
assume: id is not binding
**************************************************************************
Translating a glob_constr into a notation, interpreting recursive patterns
No recursive pattern found
Not enough context
We found the pattern
We found the pattern, but there are extra arguments
(this allows e.g. alternative (recursive) notation of application)
We found the position where it differs
We found a binding position where it differs
Here, we would need a loc made of several parts ...
found have been collected by compare_constr
found have been collected by compare_constr
n^2 complexity but small and done only once per notation
Side effect
Substitution of kernel names, avoiding a list of bound identifiers
Pattern-matching glob_constr and aconstr
Check that no capture of binding variables occur
TODO: handle the case of multiple occs in different scopes
?
This is hack to avoid looping on a rule with rhs of the form
"?f (fun ?x => ?g)" since otherwise, matching "F H" expands in
"F (fun x => H x)" and "H x" is recursively matched against the same
rule, giving "H (fun x' => x x')" and so on.
Ideally, we would need the type of the expression to know which of
the arguments applied to it can be eta-expanded without looping.
The following test is then an approximation of what can be done
optimally (whether other looping situations can occur remains to be
checked).
Matching notation variable
Matching recursive notations for terms
Matching recursive notations for binders: ad hoc cases supporting let-in
TODO: address the possibility that termin is a Lambda itself
TODO: address the possibility that termin is a Prod itself
Matching recursive notations for binders: general case
Matching individual binders as part of a recursive pattern
Matching compositionally
Reorder canonically the substitution
Happens for binders bound to Anonymous
Find a better way to propagate Anonymous...
Matching cases pattern
TODO: handle the case of multiple occs in different scopes
All parameters must be _
Reorder canonically the substitution
********************************************************************
s Concrete syntax for terms
[Some n] = proj of the n-th visible argument
* for recursive notations
measure, relation
for recursive notations
for binders subexpressions
*********************
For binders parsing
********************************************************************
Miscellaneous
********************************************************************
Functions on constr_expr
We deal only with the regular cases
assume the ntn is applicative and does not instantiate the head !!
The following is an approximation: we don't know exactly if
an ident is binding nor to which subterms bindings apply
Interpret the index of a recursion order annotation
Used in correctness and interface
TODO: avoid variable capture in [t] by some [na] in [List.tl nal]
TODO: avoid variable capture in [t] by some [na] in [List.tl nal]
This is an approximation because we don't know what binds what
TODO: apply g on the binding variables in pat...
Note: fix names should be inserted before the arguments...
********************************************************************
Concrete syntax for modules and modules types
Returns the ranges of locs of the notation that are not occupied by args
and which are then occupied by proper symbols of the notation (or spaces) | v * The Coq Proof Assistant / The Coq Development Team
< O _ _ _ , , * INRIA - CNRS - LIX - LRI - PPS - Copyright 1999 - 2012
\VV/ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
open Pp
open Util
open Names
open Nameops
open Libnames
open Glob_term
open Term
open Mod_subst
This is the subtype of glob_constr allowed in syntactic extensions
For AList : first constr is iterator , second is terminator ;
first i d is where each argument of the list has to be substituted
in iterator and snd i d is alternative name just for printing ;
boolean is associativity
first id is where each argument of the list has to be substituted
in iterator and snd id is alternative name just for printing;
boolean is associativity *)
type aconstr =
| ARef of global_reference
| AVar of identifier
| AApp of aconstr * aconstr list
| AList of identifier * identifier * aconstr * aconstr * bool
| ALambda of name * aconstr * aconstr
| AProd of name * aconstr * aconstr
| ABinderList of identifier * identifier * aconstr * aconstr
| ALetIn of name * aconstr * aconstr
| ACases of case_style * aconstr option *
(aconstr * (name * (inductive * int * name list) option)) list *
(cases_pattern list * aconstr) list
| ALetTuple of name list * (name * aconstr option) * aconstr * aconstr
| AIf of aconstr * (name * aconstr option) * aconstr * aconstr
| ARec of fix_kind * identifier array *
(name * aconstr option * aconstr) list array * aconstr array *
aconstr array
| ASort of glob_sort
| AHole of Evd.hole_kind
| APatVar of patvar
| ACast of aconstr * aconstr cast_type
type scope_name = string
type tmp_scope_name = scope_name
type subscopes = tmp_scope_name option * scope_name list
type notation_var_instance_type =
| NtnTypeConstr | NtnTypeConstrList | NtnTypeBinderList
type notation_var_internalization_type =
| NtnInternTypeConstr | NtnInternTypeBinder | NtnInternTypeIdent
type interpretation =
(identifier * (subscopes * notation_var_instance_type)) list * aconstr
let name_to_ident = function
| Anonymous -> error "This expression should be a simple identifier."
| Name id -> id
let to_id g e id = let e,na = g e (Name id) in e,name_to_ident na
let rec cases_pattern_fold_map loc g e = function
| PatVar (_,na) ->
let e',na' = g e na in e', PatVar (loc,na')
| PatCstr (_,cstr,patl,na) ->
let e',na' = g e na in
let e',patl' = list_fold_map (cases_pattern_fold_map loc g) e patl in
e', PatCstr (loc,cstr,patl',na')
let rec subst_glob_vars l = function
| GVar (_,id) as r -> (try List.assoc id l with Not_found -> r)
| GProd (loc,Name id,bk,t,c) ->
let id =
try match List.assoc id l with GVar(_,id') -> id' | _ -> id
with Not_found -> id in
GProd (loc,Name id,bk,subst_glob_vars l t,subst_glob_vars l c)
| GLambda (loc,Name id,bk,t,c) ->
let id =
try match List.assoc id l with GVar(_,id') -> id' | _ -> id
with Not_found -> id in
GLambda (loc,Name id,bk,subst_glob_vars l t,subst_glob_vars l c)
let ldots_var = id_of_string ".."
let glob_constr_of_aconstr_with_binders loc g f e = function
| AVar id -> GVar (loc,id)
| AApp (a,args) -> GApp (loc,f e a, List.map (f e) args)
| AList (x,y,iter,tail,swap) ->
let t = f e tail in let it = f e iter in
let innerl = (ldots_var,t)::(if swap then [] else [x,GVar(loc,y)]) in
let inner = GApp (loc,GVar (loc,ldots_var),[subst_glob_vars innerl it]) in
let outerl = (ldots_var,inner)::(if swap then [x,GVar(loc,y)] else []) in
subst_glob_vars outerl it
| ABinderList (x,y,iter,tail) ->
let t = f e tail in let it = f e iter in
let innerl = [(ldots_var,t);(x,GVar(loc,y))] in
let inner = GApp (loc,GVar (loc,ldots_var),[subst_glob_vars innerl it]) in
let outerl = [(ldots_var,inner)] in
subst_glob_vars outerl it
| ALambda (na,ty,c) ->
let e',na = g e na in GLambda (loc,na,Explicit,f e ty,f e' c)
| AProd (na,ty,c) ->
let e',na = g e na in GProd (loc,na,Explicit,f e ty,f e' c)
| ALetIn (na,b,c) ->
let e',na = g e na in GLetIn (loc,na,f e b,f e' c)
| ACases (sty,rtntypopt,tml,eqnl) ->
let e',tml' = List.fold_right (fun (tm,(na,t)) (e',tml') ->
let e',t' = match t with
| None -> e',None
| Some (ind,npar,nal) ->
let e',nal' = List.fold_right (fun na (e',nal) ->
let e',na' = g e' na in e',na'::nal) nal (e',[]) in
e',Some (loc,ind,npar,nal') in
let e',na' = g e' na in
(e',(f e tm,(na',t'))::tml')) tml (e,[]) in
let fold (idl,e) na = let (e,na) = g e na in ((name_cons na idl,e),na) in
let eqnl' = List.map (fun (patl,rhs) ->
let ((idl,e),patl) =
list_fold_map (cases_pattern_fold_map loc fold) ([],e) patl in
(loc,idl,patl,f e rhs)) eqnl in
GCases (loc,sty,Option.map (f e') rtntypopt,tml',eqnl')
| ALetTuple (nal,(na,po),b,c) ->
let e',nal = list_fold_map g e nal in
let e'',na = g e na in
GLetTuple (loc,nal,(na,Option.map (f e'') po),f e b,f e' c)
| AIf (c,(na,po),b1,b2) ->
let e',na = g e na in
GIf (loc,f e c,(na,Option.map (f e') po),f e b1,f e b2)
| ARec (fk,idl,dll,tl,bl) ->
let e,dll = array_fold_map (list_fold_map (fun e (na,oc,b) ->
let e,na = g e na in
(e,(na,Explicit,Option.map (f e) oc,f e b)))) e dll in
let e',idl = array_fold_map (to_id g) e idl in
GRec (loc,fk,idl,dll,Array.map (f e) tl,Array.map (f e') bl)
| ACast (c,k) -> GCast (loc,f e c,
match k with
| CastConv (k,t) -> CastConv (k,f e t)
| CastCoerce -> CastCoerce)
| ASort x -> GSort (loc,x)
| AHole x -> GHole (loc,x)
| APatVar n -> GPatVar (loc,(false,n))
| ARef x -> GRef (loc,x)
let rec glob_constr_of_aconstr loc x =
let rec aux () x =
glob_constr_of_aconstr_with_binders loc (fun () id -> ((),id)) aux () x
in aux () x
let add_id r id = r := (id :: pi1 !r, pi2 !r, pi3 !r)
let add_name r = function Anonymous -> () | Name id -> add_id r id
let split_at_recursive_part c =
let sub = ref None in
let rec aux = function
| GApp (loc0,GVar(loc,v),c::l) when v = ldots_var ->
if !sub <> None then
(* Not narrowed enough to find only one recursive part *)
raise Not_found
else
(sub := Some c;
if l = [] then GVar (loc,ldots_var)
else GApp (loc0,GVar (loc,ldots_var),l))
| c -> map_glob_constr aux c in
let outer_iterator = aux c in
match !sub with
| Some c ->
match outer_iterator with
| _ -> outer_iterator, c
let on_true_do b f c = if b then (f c; b) else b
let compare_glob_constr f add t1 t2 = match t1,t2 with
| GRef (_,r1), GRef (_,r2) -> eq_gr r1 r2
| GVar (_,v1), GVar (_,v2) -> on_true_do (v1 = v2) add (Name v1)
| GApp (_,f1,l1), GApp (_,f2,l2) -> f f1 f2 & list_for_all2eq f l1 l2
| GLambda (_,na1,bk1,ty1,c1), GLambda (_,na2,bk2,ty2,c2) when na1 = na2 && bk1 = bk2 -> on_true_do (f ty1 ty2 & f c1 c2) add na1
| GProd (_,na1,bk1,ty1,c1), GProd (_,na2,bk2,ty2,c2) when na1 = na2 && bk1 = bk2 ->
on_true_do (f ty1 ty2 & f c1 c2) add na1
| GHole _, GHole _ -> true
| GSort (_,s1), GSort (_,s2) -> s1 = s2
| GLetIn (_,na1,b1,c1), GLetIn (_,na2,b2,c2) when na1 = na2 ->
on_true_do (f b1 b2 & f c1 c2) add na1
| (GCases _ | GRec _
| GPatVar _ | GEvar _ | GLetTuple _ | GIf _ | GCast _),_
| _,(GCases _ | GRec _
| GPatVar _ | GEvar _ | GLetTuple _ | GIf _ | GCast _)
-> error "Unsupported construction in recursive notations."
| (GRef _ | GVar _ | GApp _ | GLambda _ | GProd _
| GHole _ | GSort _ | GLetIn _), _
-> false
let rec eq_glob_constr t1 t2 = compare_glob_constr eq_glob_constr (fun _ -> ()) t1 t2
let subtract_loc loc1 loc2 = make_loc (fst (unloc loc1),fst (unloc loc2)-1)
let check_is_hole id = function GHole _ -> () | t ->
user_err_loc (loc_of_glob_constr t,"",
strbrk "In recursive notation with binders, " ++ pr_id id ++
strbrk " is expected to come without type.")
let compare_recursive_parts found f (iterator,subc) =
let diff = ref None in
let terminator = ref None in
let rec aux c1 c2 = match c1,c2 with
| GVar(_,v), term when v = ldots_var ->
assert (!terminator = None); terminator := Some term;
true
| GApp (_,GVar(_,v),l1), GApp (_,term,l2) when v = ldots_var ->
assert (!terminator = None); terminator := Some term;
list_for_all2eq aux l1 l2
| GVar (_,x), GVar (_,y) when x<>y ->
let lassoc = (!terminator <> None) in
let x,y = if lassoc then y,x else x,y in
!diff = None && (diff := Some (x,y,Some lassoc); true)
| GLambda (_,Name x,_,t_x,c), GLambda (_,Name y,_,t_y,term)
| GProd (_,Name x,_,t_x,c), GProd (_,Name y,_,t_y,term) ->
check_is_hole x t_x;
check_is_hole y t_y;
!diff = None && (diff := Some (x,y,None); aux c term)
| _ ->
compare_glob_constr aux (add_name found) c1 c2 in
if aux iterator subc then
match !diff with
| None ->
let loc1 = loc_of_glob_constr iterator in
let loc2 = loc_of_glob_constr (Option.get !terminator) in
user_err_loc (subtract_loc loc1 loc2,"",
str "Both ends of the recursive pattern are the same.")
| Some (x,y,Some lassoc) ->
let newfound = (pi1 !found, (x,y) :: pi2 !found, pi3 !found) in
let iterator =
f (if lassoc then subst_glob_vars [y,GVar(dummy_loc,x)] iterator
else iterator) in
found := newfound;
AList (x,y,iterator,f (Option.get !terminator),lassoc)
| Some (x,y,None) ->
let newfound = (pi1 !found, pi2 !found, (x,y) :: pi3 !found) in
let iterator = f iterator in
found := newfound;
ABinderList (x,y,iterator,f (Option.get !terminator))
else
raise Not_found
let aconstr_and_vars_of_glob_constr a =
let found = ref ([],[],[]) in
let rec aux c =
let keepfound = !found in
try compare_recursive_parts found aux' (split_at_recursive_part c)
with Not_found ->
found := keepfound;
match c with
| GApp (_,GVar (loc,f),[c]) when f = ldots_var ->
Fall on the second part of the recursive pattern w/o having
found the first part
found the first part *)
user_err_loc (loc,"",
str "Cannot find where the recursive pattern starts.")
| c ->
aux' c
and aux' = function
| GVar (_,id) -> add_id found id; AVar id
| GApp (_,g,args) -> AApp (aux g, List.map aux args)
| GLambda (_,na,bk,ty,c) -> add_name found na; ALambda (na,aux ty,aux c)
| GProd (_,na,bk,ty,c) -> add_name found na; AProd (na,aux ty,aux c)
| GLetIn (_,na,b,c) -> add_name found na; ALetIn (na,aux b,aux c)
| GCases (_,sty,rtntypopt,tml,eqnl) ->
let f (_,idl,pat,rhs) = List.iter (add_id found) idl; (pat,aux rhs) in
ACases (sty,Option.map aux rtntypopt,
List.map (fun (tm,(na,x)) ->
add_name found na;
Option.iter
(fun (_,_,_,nl) -> List.iter (add_name found) nl) x;
(aux tm,(na,Option.map (fun (_,ind,n,nal) -> (ind,n,nal)) x))) tml,
List.map f eqnl)
| GLetTuple (loc,nal,(na,po),b,c) ->
add_name found na;
List.iter (add_name found) nal;
ALetTuple (nal,(na,Option.map aux po),aux b,aux c)
| GIf (loc,c,(na,po),b1,b2) ->
add_name found na;
AIf (aux c,(na,Option.map aux po),aux b1,aux b2)
| GRec (_,fk,idl,dll,tl,bl) ->
Array.iter (add_id found) idl;
let dll = Array.map (List.map (fun (na,bk,oc,b) ->
if bk <> Explicit then
error "Binders marked as implicit not allowed in notations.";
add_name found na; (na,Option.map aux oc,aux b))) dll in
ARec (fk,idl,dll,Array.map aux tl,Array.map aux bl)
| GCast (_,c,k) -> ACast (aux c,
match k with CastConv (k,t) -> CastConv (k,aux t)
| CastCoerce -> CastCoerce)
| GSort (_,s) -> ASort s
| GHole (_,w) -> AHole w
| GRef (_,r) -> ARef r
| GPatVar (_,(_,n)) -> APatVar n
| GEvar _ ->
error "Existential variables not allowed in notations."
in
let t = aux a in
t, !found
let rec list_rev_mem_assoc x = function
| [] -> false
| (_,x')::l -> x = x' || list_rev_mem_assoc x l
let check_variables vars recvars (found,foundrec,foundrecbinding) =
let useless_vars = List.map snd recvars in
let vars = List.filter (fun (y,_) -> not (List.mem y useless_vars)) vars in
let check_recvar x =
if List.mem x found then
errorlabstrm "" (pr_id x ++
strbrk " should only be used in the recursive part of a pattern.") in
List.iter (fun (x,y) -> check_recvar x; check_recvar y)
(foundrec@foundrecbinding);
let check_bound x =
if not (List.mem x found) then
if List.mem_assoc x foundrec or List.mem_assoc x foundrecbinding
or list_rev_mem_assoc x foundrec or list_rev_mem_assoc x foundrecbinding
then
error ((string_of_id x)^" should not be bound in a recursive pattern of the right-hand side.")
else
error ((string_of_id x)^" is unbound in the right-hand side.") in
let check_pair s x y where =
if not (List.mem (x,y) where) then
errorlabstrm "" (strbrk "in the right-hand side, " ++ pr_id x ++
str " and " ++ pr_id y ++ strbrk " should appear in " ++ str s ++
str " position as part of a recursive pattern.") in
let check_type (x,typ) =
match typ with
| NtnInternTypeConstr ->
begin
try check_pair "term" x (List.assoc x recvars) foundrec
with Not_found -> check_bound x
end
| NtnInternTypeBinder ->
begin
try check_pair "binding" x (List.assoc x recvars) foundrecbinding
with Not_found -> check_bound x
end
| NtnInternTypeIdent -> check_bound x in
List.iter check_type vars
let aconstr_of_glob_constr vars recvars a =
let a,found = aconstr_and_vars_of_glob_constr a in
check_variables vars recvars found;
a
let aconstr_of_constr avoiding t =
aconstr_of_glob_constr [] [] (Detyping.detype false avoiding [] t)
let rec subst_pat subst pat =
match pat with
| PatVar _ -> pat
| PatCstr (loc,((kn,i),j),cpl,n) ->
let kn' = subst_ind subst kn
and cpl' = list_smartmap (subst_pat subst) cpl in
if kn' == kn && cpl' == cpl then pat else
PatCstr (loc,((kn',i),j),cpl',n)
let rec subst_aconstr subst bound raw =
match raw with
| ARef ref ->
let ref',t = subst_global subst ref in
if ref' == ref then raw else
aconstr_of_constr bound t
| AVar _ -> raw
| AApp (r,rl) ->
let r' = subst_aconstr subst bound r
and rl' = list_smartmap (subst_aconstr subst bound) rl in
if r' == r && rl' == rl then raw else
AApp(r',rl')
| AList (id1,id2,r1,r2,b) ->
let r1' = subst_aconstr subst bound r1
and r2' = subst_aconstr subst bound r2 in
if r1' == r1 && r2' == r2 then raw else
AList (id1,id2,r1',r2',b)
| ALambda (n,r1,r2) ->
let r1' = subst_aconstr subst bound r1
and r2' = subst_aconstr subst bound r2 in
if r1' == r1 && r2' == r2 then raw else
ALambda (n,r1',r2')
| AProd (n,r1,r2) ->
let r1' = subst_aconstr subst bound r1
and r2' = subst_aconstr subst bound r2 in
if r1' == r1 && r2' == r2 then raw else
AProd (n,r1',r2')
| ABinderList (id1,id2,r1,r2) ->
let r1' = subst_aconstr subst bound r1
and r2' = subst_aconstr subst bound r2 in
if r1' == r1 && r2' == r2 then raw else
ABinderList (id1,id2,r1',r2')
| ALetIn (n,r1,r2) ->
let r1' = subst_aconstr subst bound r1
and r2' = subst_aconstr subst bound r2 in
if r1' == r1 && r2' == r2 then raw else
ALetIn (n,r1',r2')
| ACases (sty,rtntypopt,rl,branches) ->
let rtntypopt' = Option.smartmap (subst_aconstr subst bound) rtntypopt
and rl' = list_smartmap
(fun (a,(n,signopt) as x) ->
let a' = subst_aconstr subst bound a in
let signopt' = Option.map (fun ((indkn,i),n,nal as z) ->
let indkn' = subst_ind subst indkn in
if indkn == indkn' then z else ((indkn',i),n,nal)) signopt in
if a' == a && signopt' == signopt then x else (a',(n,signopt')))
rl
and branches' = list_smartmap
(fun (cpl,r as branch) ->
let cpl' = list_smartmap (subst_pat subst) cpl
and r' = subst_aconstr subst bound r in
if cpl' == cpl && r' == r then branch else
(cpl',r'))
branches
in
if rtntypopt' == rtntypopt && rtntypopt == rtntypopt' &
rl' == rl && branches' == branches then raw else
ACases (sty,rtntypopt',rl',branches')
| ALetTuple (nal,(na,po),b,c) ->
let po' = Option.smartmap (subst_aconstr subst bound) po
and b' = subst_aconstr subst bound b
and c' = subst_aconstr subst bound c in
if po' == po && b' == b && c' == c then raw else
ALetTuple (nal,(na,po'),b',c')
| AIf (c,(na,po),b1,b2) ->
let po' = Option.smartmap (subst_aconstr subst bound) po
and b1' = subst_aconstr subst bound b1
and b2' = subst_aconstr subst bound b2
and c' = subst_aconstr subst bound c in
if po' == po && b1' == b1 && b2' == b2 && c' == c then raw else
AIf (c',(na,po'),b1',b2')
| ARec (fk,idl,dll,tl,bl) ->
let dll' =
array_smartmap (list_smartmap (fun (na,oc,b as x) ->
let oc' = Option.smartmap (subst_aconstr subst bound) oc in
let b' = subst_aconstr subst bound b in
if oc' == oc && b' == b then x else (na,oc',b'))) dll in
let tl' = array_smartmap (subst_aconstr subst bound) tl in
let bl' = array_smartmap (subst_aconstr subst bound) bl in
if dll' == dll && tl' == tl && bl' == bl then raw else
ARec (fk,idl,dll',tl',bl')
| APatVar _ | ASort _ -> raw
| AHole (Evd.ImplicitArg (ref,i,b)) ->
let ref',t = subst_global subst ref in
if ref' == ref then raw else
AHole (Evd.InternalHole)
| AHole (Evd.BinderType _ | Evd.QuestionMark _ | Evd.CasesType
| Evd.InternalHole | Evd.TomatchTypeParameter _ | Evd.GoalEvar
| Evd.ImpossibleCase | Evd.MatchingVar _) -> raw
| ACast (r1,k) ->
match k with
CastConv (k, r2) ->
let r1' = subst_aconstr subst bound r1
and r2' = subst_aconstr subst bound r2 in
if r1' == r1 && r2' == r2 then raw else
ACast (r1',CastConv (k,r2'))
| CastCoerce ->
let r1' = subst_aconstr subst bound r1 in
if r1' == r1 then raw else
ACast (r1',CastCoerce)
let subst_interpretation subst (metas,pat) =
let bound = List.map fst metas in
(metas,subst_aconstr subst bound pat)
let abstract_return_type_context pi mklam tml rtno =
Option.map (fun rtn ->
let nal =
List.flatten (List.map (fun (_,(na,t)) ->
match t with Some x -> (pi x)@[na] | None -> [na]) tml) in
List.fold_right mklam nal rtn)
rtno
let abstract_return_type_context_glob_constr =
abstract_return_type_context (fun (_,_,_,nal) -> nal)
(fun na c -> GLambda(dummy_loc,na,Explicit,GHole(dummy_loc,Evd.InternalHole),c))
let abstract_return_type_context_aconstr =
abstract_return_type_context pi3
(fun na c -> ALambda(na,AHole Evd.InternalHole,c))
exception No_match
let rec alpha_var id1 id2 = function
| (i1,i2)::_ when i1=id1 -> i2 = id2
| (i1,i2)::_ when i2=id2 -> i1 = id1
| _::idl -> alpha_var id1 id2 idl
| [] -> id1 = id2
let alpha_eq_val (x,y) = x = y
let bind_env alp (sigma,sigmalist,sigmabinders as fullsigma) var v =
try
let vvar = List.assoc var sigma in
if alpha_eq_val (v,vvar) then fullsigma
else raise No_match
with Not_found ->
if List.exists (fun (id,_) ->occur_glob_constr id v) alp then raise No_match;
((var,v)::sigma,sigmalist,sigmabinders)
let bind_binder (sigma,sigmalist,sigmabinders) x bl =
(sigma,sigmalist,(x,List.rev bl)::sigmabinders)
let match_fix_kind fk1 fk2 =
match (fk1,fk2) with
| GCoFix n1, GCoFix n2 -> n1 = n2
| GFix (nl1,n1), GFix (nl2,n2) ->
n1 = n2 &&
array_for_all2 (fun (n1,_) (n2,_) -> n2 = None || n1 = n2) nl1 nl2
| _ -> false
let match_opt f sigma t1 t2 = match (t1,t2) with
| None, None -> sigma
| Some t1, Some t2 -> f sigma t1 t2
| _ -> raise No_match
let match_names metas (alp,sigma) na1 na2 = match (na1,na2) with
| (_,Name id2) when List.mem id2 (fst metas) ->
let rhs = match na1 with
| Name id1 -> GVar (dummy_loc,id1)
| Anonymous -> GHole (dummy_loc,Evd.InternalHole) in
alp, bind_env alp sigma id2 rhs
| (Name id1,Name id2) -> (id1,id2)::alp,sigma
| (Anonymous,Anonymous) -> alp,sigma
| _ -> raise No_match
let rec match_cases_pattern_binders metas acc pat1 pat2 =
match (pat1,pat2) with
| PatVar (_,na1), PatVar (_,na2) -> match_names metas acc na1 na2
| PatCstr (_,c1,patl1,na1), PatCstr (_,c2,patl2,na2)
when c1 = c2 & List.length patl1 = List.length patl2 ->
List.fold_left2 (match_cases_pattern_binders metas)
(match_names metas acc na1 na2) patl1 patl2
| _ -> raise No_match
let glue_letin_with_decls = true
let rec match_iterated_binders islambda decls = function
| GLambda (_,na,bk,t,b) when islambda ->
match_iterated_binders islambda ((na,bk,None,t)::decls) b
| GProd (_,(Name _ as na),bk,t,b) when not islambda ->
match_iterated_binders islambda ((na,bk,None,t)::decls) b
| GLetIn (loc,na,c,b) when glue_letin_with_decls ->
match_iterated_binders islambda
| b -> (decls,b)
let remove_sigma x (sigmavar,sigmalist,sigmabinders) =
(List.remove_assoc x sigmavar,sigmalist,sigmabinders)
let rec match_abinderlist_with_app match_fun metas sigma rest x iter termin =
let rec aux sigma acc rest =
try
let sigma = match_fun (ldots_var::fst metas,snd metas) sigma rest iter in
let rest = List.assoc ldots_var (pi1 sigma) in
let b = match List.assoc x (pi3 sigma) with [b] -> b | _ ->assert false in
let sigma = remove_sigma x (remove_sigma ldots_var sigma) in
aux sigma (b::acc) rest
with No_match when acc <> [] ->
acc, match_fun metas sigma rest termin in
let bl,sigma = aux sigma [] rest in
bind_binder sigma x bl
let match_alist match_fun metas sigma rest x iter termin lassoc =
let rec aux sigma acc rest =
try
let sigma = match_fun (ldots_var::fst metas,snd metas) sigma rest iter in
let rest = List.assoc ldots_var (pi1 sigma) in
let t = List.assoc x (pi1 sigma) in
let sigma = remove_sigma x (remove_sigma ldots_var sigma) in
aux sigma (t::acc) rest
with No_match when acc <> [] ->
acc, match_fun metas sigma rest termin in
let l,sigma = aux sigma [] rest in
(pi1 sigma, (x,if lassoc then l else List.rev l)::pi2 sigma, pi3 sigma)
let does_not_come_from_already_eta_expanded_var =
function GVar _ -> false | _ -> true
let rec match_ inner u alp (tmetas,blmetas as metas) sigma a1 a2 =
match (a1,a2) with
| r1, AVar id2 when List.mem id2 tmetas -> bind_env alp sigma id2 r1
| r1, AList (x,_,iter,termin,lassoc) ->
match_alist (match_hd u alp) metas sigma r1 x iter termin lassoc
| GLambda (_,na1,bk,t1,b1), ABinderList (x,_,ALambda (Name id2,_,b2),termin)->
let (decls,b) = match_iterated_binders true [(na1,bk,None,t1)] b1 in
match_in u alp metas (bind_binder sigma x decls) b termin
| GProd (_,na1,bk,t1,b1), ABinderList (x,_,AProd (Name id2,_,b2),termin)
when na1 <> Anonymous ->
let (decls,b) = match_iterated_binders false [(na1,bk,None,t1)] b1 in
match_in u alp metas (bind_binder sigma x decls) b termin
| r, ABinderList (x,_,iter,termin) ->
match_abinderlist_with_app (match_hd u alp) metas sigma r x iter termin
| GLambda (_,na,bk,t,b1), ALambda (Name id,_,b2) when List.mem id blmetas ->
match_in u alp metas (bind_binder sigma id [(na,bk,None,t)]) b1 b2
| GProd (_,na,bk,t,b1), AProd (Name id,_,b2)
when List.mem id blmetas & na <> Anonymous ->
match_in u alp metas (bind_binder sigma id [(na,bk,None,t)]) b1 b2
| GVar (_,id1), AVar id2 when alpha_var id1 id2 alp -> sigma
| GRef (_,r1), ARef r2 when (eq_gr r1 r2) -> sigma
| GPatVar (_,(_,n1)), APatVar n2 when n1=n2 -> sigma
| GApp (loc,f1,l1), AApp (f2,l2) ->
let n1 = List.length l1 and n2 = List.length l2 in
let f1,l1,f2,l2 =
if n1 < n2 then
let l21,l22 = list_chop (n2-n1) l2 in f1,l1, AApp (f2,l21), l22
else if n1 > n2 then
let l11,l12 = list_chop (n1-n2) l1 in GApp (loc,f1,l11),l12, f2,l2
else f1,l1, f2, l2 in
let may_use_eta = does_not_come_from_already_eta_expanded_var f1 in
List.fold_left2 (match_ may_use_eta u alp metas)
(match_in u alp metas sigma f1 f2) l1 l2
| GLambda (_,na1,_,t1,b1), ALambda (na2,t2,b2) ->
match_binders u alp metas na1 na2 (match_in u alp metas sigma t1 t2) b1 b2
| GProd (_,na1,_,t1,b1), AProd (na2,t2,b2) ->
match_binders u alp metas na1 na2 (match_in u alp metas sigma t1 t2) b1 b2
| GLetIn (_,na1,t1,b1), ALetIn (na2,t2,b2) ->
match_binders u alp metas na1 na2 (match_in u alp metas sigma t1 t2) b1 b2
| GCases (_,sty1,rtno1,tml1,eqnl1), ACases (sty2,rtno2,tml2,eqnl2)
when sty1 = sty2
& List.length tml1 = List.length tml2
& List.length eqnl1 = List.length eqnl2 ->
let rtno1' = abstract_return_type_context_glob_constr tml1 rtno1 in
let rtno2' = abstract_return_type_context_aconstr tml2 rtno2 in
let sigma =
try Option.fold_left2 (match_in u alp metas) sigma rtno1' rtno2'
with Option.Heterogeneous -> raise No_match
in
let sigma = List.fold_left2
(fun s (tm1,_) (tm2,_) ->
match_in u alp metas s tm1 tm2) sigma tml1 tml2 in
List.fold_left2 (match_equations u alp metas) sigma eqnl1 eqnl2
| GLetTuple (_,nal1,(na1,to1),b1,c1), ALetTuple (nal2,(na2,to2),b2,c2)
when List.length nal1 = List.length nal2 ->
let sigma = match_opt (match_binders u alp metas na1 na2) sigma to1 to2 in
let sigma = match_in u alp metas sigma b1 b2 in
let (alp,sigma) =
List.fold_left2 (match_names metas) (alp,sigma) nal1 nal2 in
match_in u alp metas sigma c1 c2
| GIf (_,a1,(na1,to1),b1,c1), AIf (a2,(na2,to2),b2,c2) ->
let sigma = match_opt (match_binders u alp metas na1 na2) sigma to1 to2 in
List.fold_left2 (match_in u alp metas) sigma [a1;b1;c1] [a2;b2;c2]
| GRec (_,fk1,idl1,dll1,tl1,bl1), ARec (fk2,idl2,dll2,tl2,bl2)
when match_fix_kind fk1 fk2 & Array.length idl1 = Array.length idl2 &
array_for_all2 (fun l1 l2 -> List.length l1 = List.length l2) dll1 dll2
->
let alp,sigma = array_fold_left2
(List.fold_left2 (fun (alp,sigma) (na1,_,oc1,b1) (na2,oc2,b2) ->
let sigma =
match_in u alp metas
(match_opt (match_in u alp metas) sigma oc1 oc2) b1 b2
in match_names metas (alp,sigma) na1 na2)) (alp,sigma) dll1 dll2 in
let sigma = array_fold_left2 (match_in u alp metas) sigma tl1 tl2 in
let alp,sigma = array_fold_right2 (fun id1 id2 alsig ->
match_names metas alsig (Name id1) (Name id2)) idl1 idl2 (alp,sigma) in
array_fold_left2 (match_in u alp metas) sigma bl1 bl2
| GCast(_,c1, CastConv(_,t1)), ACast(c2, CastConv (_,t2)) ->
match_in u alp metas (match_in u alp metas sigma c1 c2) t1 t2
| GCast(_,c1, CastCoerce), ACast(c2, CastCoerce) ->
match_in u alp metas sigma c1 c2
| GSort (_,GType _), ASort (GType None) when not u -> sigma
| GSort (_,s1), ASort s2 when s1 = s2 -> sigma
(* Don't hide Metas, they bind in ltac *)
| a, AHole _ -> sigma
On the fly eta - expansion so as to use notations of the form
" exists x , P x " for " ex P " ; expects type not given because do n't know
otherwise how to ensure it corresponds to a well - typed eta - expansion ;
ensure at least one constructor is consumed to avoid looping
"exists x, P x" for "ex P"; expects type not given because don't know
otherwise how to ensure it corresponds to a well-typed eta-expansion;
ensure at least one constructor is consumed to avoid looping *)
| b1, ALambda (Name id,AHole _,b2) when inner ->
let id' = Namegen.next_ident_away id (free_glob_vars b1) in
match_in u alp metas (bind_binder sigma id
[(Name id',Explicit,None,GHole(dummy_loc,Evd.BinderType (Name id')))])
(mkGApp dummy_loc b1 (GVar (dummy_loc,id'))) b2
| (GRec _ | GEvar _), _
| _,_ -> raise No_match
and match_in u = match_ true u
and match_hd u = match_ false u
and match_binders u alp metas na1 na2 sigma b1 b2 =
let (alp,sigma) = match_names metas (alp,sigma) na1 na2 in
match_in u alp metas sigma b1 b2
and match_equations u alp metas sigma (_,_,patl1,rhs1) (patl2,rhs2) =
patl1 and patl2 have the same length because they respectively
correspond to some tml1 and tml2 that have the same length
correspond to some tml1 and tml2 that have the same length *)
let (alp,sigma) =
List.fold_left2 (match_cases_pattern_binders metas)
(alp,sigma) patl1 patl2 in
match_in u alp metas sigma rhs1 rhs2
let match_aconstr u c (metas,pat) =
let vars = list_split_by (fun (_,(_,x)) -> x <> NtnTypeBinderList) metas in
let vars = (List.map fst (fst vars), List.map fst (snd vars)) in
let terms,termlists,binders = match_ false u [] vars ([],[],[]) c pat in
let find x =
try List.assoc x terms
with Not_found ->
GVar (dummy_loc,x) in
List.fold_right (fun (x,(scl,typ)) (terms',termlists',binders') ->
match typ with
| NtnTypeConstr ->
((find x, scl)::terms',termlists',binders')
| NtnTypeConstrList ->
(terms',(List.assoc x termlists,scl)::termlists',binders')
| NtnTypeBinderList ->
(terms',termlists',(List.assoc x binders,scl)::binders'))
metas ([],[],[])
let bind_env_cases_pattern (sigma,sigmalist,x as fullsigma) var v =
try
let vvar = List.assoc var sigma in
if v=vvar then fullsigma else raise No_match
with Not_found ->
(var,v)::sigma,sigmalist,x
let rec match_cases_pattern metas sigma a1 a2 = match (a1,a2) with
| r1, AVar id2 when List.mem id2 metas -> bind_env_cases_pattern sigma id2 r1
| PatVar (_,Anonymous), AHole _ -> sigma
| PatCstr (loc,(ind,_ as r1),[],_), ARef (ConstructRef r2) when r1 = r2 ->
sigma
| PatCstr (loc,(ind,_ as r1),args1,_), AApp (ARef (ConstructRef r2),l2)
when r1 = r2 ->
let nparams = Inductive.inductive_params (Global.lookup_inductive ind) in
if List.length l2 <> nparams + List.length args1
then
TODO : revert partially applied notations of the form
" Notation P : = ( ) . "
"Notation P := (@pair)." *)
raise No_match
else
let (p2,args2) = list_chop nparams l2 in
List.iter (function AHole _ -> () | _ -> raise No_match) p2;
List.fold_left2 (match_cases_pattern metas) sigma args1 args2
| r1, AList (x,_,iter,termin,lassoc) ->
match_alist (fun (metas,_) -> match_cases_pattern metas)
(metas,[]) (pi1 sigma,pi2 sigma,()) r1 x iter termin lassoc
| _ -> raise No_match
let match_aconstr_cases_pattern c (metas,pat) =
let vars = List.map fst metas in
let terms,termlists,() = match_cases_pattern vars ([],[],()) c pat in
List.fold_right (fun (x,(scl,typ)) (terms',termlists') ->
match typ with
| NtnTypeConstr -> ((List.assoc x terms, scl)::terms',termlists')
| NtnTypeConstrList -> (terms',(List.assoc x termlists,scl)::termlists')
| NtnTypeBinderList -> assert false)
metas ([],[])
type notation = string
type explicitation = ExplByPos of int * identifier option | ExplByName of identifier
type binder_kind = Default of binding_kind | Generalized of binding_kind * binding_kind * bool
type abstraction_kind = AbsLambda | AbsPi
type prim_token = Numeral of Bigint.bigint | String of string
type cases_pattern_expr =
| CPatAlias of loc * cases_pattern_expr * identifier
| CPatCstr of loc * reference * cases_pattern_expr list
| CPatCstrExpl of loc * reference * cases_pattern_expr list
| CPatAtom of loc * reference option
| CPatOr of loc * cases_pattern_expr list
| CPatNotation of loc * notation * cases_pattern_notation_substitution
| CPatPrim of loc * prim_token
| CPatRecord of Util.loc * (reference * cases_pattern_expr) list
| CPatDelimiters of loc * string * cases_pattern_expr
and cases_pattern_notation_substitution =
* for
type constr_expr =
| CRef of reference
| CFix of loc * identifier located * fix_expr list
| CCoFix of loc * identifier located * cofix_expr list
| CArrow of loc * constr_expr * constr_expr
| CProdN of loc * (name located list * binder_kind * constr_expr) list * constr_expr
| CLambdaN of loc * (name located list * binder_kind * constr_expr) list * constr_expr
| CLetIn of loc * name located * constr_expr * constr_expr
| CAppExpl of loc * (proj_flag * reference) * constr_expr list
| CApp of loc * (proj_flag * constr_expr) *
(constr_expr * explicitation located option) list
| CRecord of loc * constr_expr option * (reference * constr_expr) list
| CCases of loc * case_style * constr_expr option *
(constr_expr * (name located option * constr_expr option)) list *
(loc * cases_pattern_expr list located list * constr_expr) list
| CLetTuple of loc * name located list * (name located option * constr_expr option) *
constr_expr * constr_expr
| CIf of loc * constr_expr * (name located option * constr_expr option)
* constr_expr * constr_expr
| CHole of loc * Evd.hole_kind option
| CPatVar of loc * (bool * patvar)
| CEvar of loc * existential_key * constr_expr list option
| CSort of loc * glob_sort
| CCast of loc * constr_expr * constr_expr cast_type
| CNotation of loc * notation * constr_notation_substitution
| CGeneralization of loc * binding_kind * abstraction_kind option * constr_expr
| CPrim of loc * prim_token
| CDelimiters of loc * string * constr_expr
and fix_expr =
identifier located * (identifier located option * recursion_order_expr) * local_binder list * constr_expr * constr_expr
and cofix_expr =
identifier located * local_binder list * constr_expr * constr_expr
and recursion_order_expr =
| CStructRec
| CWfRec of constr_expr
and local_binder =
| LocalRawDef of name located * constr_expr
| LocalRawAssum of name located list * binder_kind * constr_expr
and constr_notation_substitution =
for
type typeclass_constraint = name located * binding_kind * constr_expr
and typeclass_context = typeclass_constraint list
type constr_pattern_expr = constr_expr
let default_binder_kind = Default Explicit
let names_of_local_assums bl =
List.flatten (List.map (function LocalRawAssum(l,_,_)->l|_->[]) bl)
let names_of_local_binders bl =
List.flatten (List.map (function LocalRawAssum(l,_,_)->l|LocalRawDef(l,_)->[l]) bl)
let error_invalid_pattern_notation loc =
user_err_loc (loc,"",str "Invalid notation for pattern.")
let constr_loc = function
| CRef (Ident (loc,_)) -> loc
| CRef (Qualid (loc,_)) -> loc
| CFix (loc,_,_) -> loc
| CCoFix (loc,_,_) -> loc
| CArrow (loc,_,_) -> loc
| CProdN (loc,_,_) -> loc
| CLambdaN (loc,_,_) -> loc
| CLetIn (loc,_,_,_) -> loc
| CAppExpl (loc,_,_) -> loc
| CApp (loc,_,_) -> loc
| CRecord (loc,_,_) -> loc
| CCases (loc,_,_,_,_) -> loc
| CLetTuple (loc,_,_,_,_) -> loc
| CIf (loc,_,_,_,_) -> loc
| CHole (loc, _) -> loc
| CPatVar (loc,_) -> loc
| CEvar (loc,_,_) -> loc
| CSort (loc,_) -> loc
| CCast (loc,_,_) -> loc
| CNotation (loc,_,_) -> loc
| CGeneralization (loc,_,_,_) -> loc
| CPrim (loc,_) -> loc
| CDelimiters (loc,_,_) -> loc
let cases_pattern_expr_loc = function
| CPatAlias (loc,_,_) -> loc
| CPatCstr (loc,_,_) -> loc
| CPatCstrExpl (loc,_,_) -> loc
| CPatAtom (loc,_) -> loc
| CPatOr (loc,_) -> loc
| CPatNotation (loc,_,_) -> loc
| CPatRecord (loc, _) -> loc
| CPatPrim (loc,_) -> loc
| CPatDelimiters (loc,_,_) -> loc
let local_binder_loc = function
| LocalRawAssum ((loc,_)::_,_,t)
| LocalRawDef ((loc,_),t) -> join_loc loc (constr_loc t)
| LocalRawAssum ([],_,_) -> assert false
let local_binders_loc bll =
if bll = [] then dummy_loc else
join_loc (local_binder_loc (List.hd bll)) (local_binder_loc (list_last bll))
let ids_of_cases_indtype =
let add_var ids = function CRef (Ident (_,id)) -> id::ids | _ -> ids in
let rec vars_of = function
| CApp (_,_,l) -> List.fold_left add_var [] (List.map fst l)
| CNotation (_,_,(l,[],[]))
| CAppExpl (_,_,l) -> List.fold_left add_var [] l
| CDelimiters(_,_,c) -> vars_of c
| _ -> [] in
vars_of
let ids_of_cases_tomatch tms =
List.fold_right
(fun (_,(ona,indnal)) l ->
Option.fold_right (fun t -> (@) (ids_of_cases_indtype t))
indnal (Option.fold_right (down_located name_cons) ona l))
tms []
let is_constructor id =
try ignore (Nametab.locate_extended (qualid_of_ident id)); true
with Not_found -> true
let rec cases_pattern_fold_names f a = function
| CPatRecord (_, l) ->
List.fold_left (fun acc (r, cp) -> cases_pattern_fold_names f acc cp) a l
| CPatAlias (_,pat,id) -> f id a
| CPatCstr (_,_,patl) | CPatCstrExpl (_,_,patl) | CPatOr (_,patl) ->
List.fold_left (cases_pattern_fold_names f) a patl
| CPatNotation (_,_,(patl,patll)) ->
List.fold_left (cases_pattern_fold_names f) a (patl@List.flatten patll)
| CPatDelimiters (_,_,pat) -> cases_pattern_fold_names f a pat
| CPatAtom (_,Some (Ident (_,id))) when not (is_constructor id) -> f id a
| CPatPrim _ | CPatAtom _ -> a
let ids_of_pattern_list =
List.fold_left
(located_fold_left
(List.fold_left (cases_pattern_fold_names Idset.add)))
Idset.empty
let rec fold_constr_expr_binders g f n acc b = function
| (nal,bk,t)::l ->
let nal = snd (List.split nal) in
let n' = List.fold_right (name_fold g) nal n in
f n (fold_constr_expr_binders g f n' acc b l) t
| [] ->
f n acc b
let rec fold_local_binders g f n acc b = function
| LocalRawAssum (nal,bk,t)::l ->
let nal = snd (List.split nal) in
let n' = List.fold_right (name_fold g) nal n in
f n (fold_local_binders g f n' acc b l) t
| LocalRawDef ((_,na),t)::l ->
f n (fold_local_binders g f (name_fold g na n) acc b l) t
| [] ->
f n acc b
let fold_constr_expr_with_binders g f n acc = function
| CArrow (loc,a,b) -> f n (f n acc a) b
| CAppExpl (loc,(_,_),l) -> List.fold_left (f n) acc l
| CApp (loc,(_,t),l) -> List.fold_left (f n) (f n acc t) (List.map fst l)
| CProdN (_,l,b) | CLambdaN (_,l,b) -> fold_constr_expr_binders g f n acc b l
| CLetIn (_,na,a,b) -> fold_constr_expr_binders g f n acc b [[na],default_binder_kind,a]
| CCast (loc,a,CastConv(_,b)) -> f n (f n acc a) b
| CCast (loc,a,CastCoerce) -> f n acc a
| CNotation (_,_,(l,ll,bll)) ->
let acc = List.fold_left (f n) acc (l@List.flatten ll) in
List.fold_left (fun acc bl -> fold_local_binders g f n acc (CHole (dummy_loc,None)) bl) acc bll
| CGeneralization (_,_,_,c) -> f n acc c
| CDelimiters (loc,_,a) -> f n acc a
| CHole _ | CEvar _ | CPatVar _ | CSort _ | CPrim _ | CRef _ ->
acc
| CRecord (loc,_,l) -> List.fold_left (fun acc (id, c) -> f n acc c) acc l
| CCases (loc,sty,rtnpo,al,bl) ->
let ids = ids_of_cases_tomatch al in
let acc = Option.fold_left (f (List.fold_right g ids n)) acc rtnpo in
let acc = List.fold_left (f n) acc (List.map fst al) in
List.fold_right (fun (loc,patl,rhs) acc ->
let ids = ids_of_pattern_list patl in
f (Idset.fold g ids n) acc rhs) bl acc
| CLetTuple (loc,nal,(ona,po),b,c) ->
let n' = List.fold_right (down_located (name_fold g)) nal n in
f (Option.fold_right (down_located (name_fold g)) ona n') (f n acc b) c
| CIf (_,c,(ona,po),b1,b2) ->
let acc = f n (f n (f n acc b1) b2) c in
Option.fold_left
(f (Option.fold_right (down_located (name_fold g)) ona n)) acc po
| CFix (loc,_,l) ->
let n' = List.fold_right (fun ((_,id),_,_,_,_) -> g id) l n in
List.fold_right (fun (_,(_,o),lb,t,c) acc ->
fold_local_binders g f n'
(fold_local_binders g f n acc t lb) c lb) l acc
| CCoFix (loc,_,_) ->
Pp.warning "Capture check in multiple binders not done"; acc
let free_vars_of_constr_expr c =
let rec aux bdvars l = function
| CRef (Ident (_,id)) -> if List.mem id bdvars then l else Idset.add id l
| c -> fold_constr_expr_with_binders (fun a l -> a::l) aux bdvars l c
in aux [] Idset.empty c
let occur_var_constr_expr id c = Idset.mem id (free_vars_of_constr_expr c)
let mkIdentC id = CRef (Ident (dummy_loc, id))
let mkRefC r = CRef r
let mkCastC (a,k) = CCast (dummy_loc,a,k)
let mkLambdaC (idl,bk,a,b) = CLambdaN (dummy_loc,[idl,bk,a],b)
let mkLetInC (id,a,b) = CLetIn (dummy_loc,id,a,b)
let mkProdC (idl,bk,a,b) = CProdN (dummy_loc,[idl,bk,a],b)
let mkAppC (f,l) =
let l = List.map (fun x -> (x,None)) l in
match f with
| CApp (_,g,l') -> CApp (dummy_loc, g, l' @ l)
| _ -> CApp (dummy_loc, (None, f), l)
let rec mkCProdN loc bll c =
match bll with
| LocalRawAssum ((loc1,_)::_ as idl,bk,t) :: bll ->
CProdN (loc,[idl,bk,t],mkCProdN (join_loc loc1 loc) bll c)
| LocalRawDef ((loc1,_) as id,b) :: bll ->
CLetIn (loc,id,b,mkCProdN (join_loc loc1 loc) bll c)
| [] -> c
| LocalRawAssum ([],_,_) :: bll -> mkCProdN loc bll c
let rec mkCLambdaN loc bll c =
match bll with
| LocalRawAssum ((loc1,_)::_ as idl,bk,t) :: bll ->
CLambdaN (loc,[idl,bk,t],mkCLambdaN (join_loc loc1 loc) bll c)
| LocalRawDef ((loc1,_) as id,b) :: bll ->
CLetIn (loc,id,b,mkCLambdaN (join_loc loc1 loc) bll c)
| [] -> c
| LocalRawAssum ([],_,_) :: bll -> mkCLambdaN loc bll c
let rec abstract_constr_expr c = function
| [] -> c
| LocalRawDef (x,b)::bl -> mkLetInC(x,b,abstract_constr_expr c bl)
| LocalRawAssum (idl,bk,t)::bl ->
List.fold_right (fun x b -> mkLambdaC([x],bk,t,b)) idl
(abstract_constr_expr c bl)
let rec prod_constr_expr c = function
| [] -> c
| LocalRawDef (x,b)::bl -> mkLetInC(x,b,prod_constr_expr c bl)
| LocalRawAssum (idl,bk,t)::bl ->
List.fold_right (fun x b -> mkProdC([x],bk,t,b)) idl
(prod_constr_expr c bl)
let coerce_reference_to_id = function
| Ident (_,id) -> id
| Qualid (loc,_) ->
user_err_loc (loc, "coerce_reference_to_id",
str "This expression should be a simple identifier.")
let coerce_to_id = function
| CRef (Ident (loc,id)) -> (loc,id)
| a -> user_err_loc
(constr_loc a,"coerce_to_id",
str "This expression should be a simple identifier.")
let coerce_to_name = function
| CRef (Ident (loc,id)) -> (loc,Name id)
| CHole (loc,_) -> (loc,Anonymous)
| a -> user_err_loc
(constr_loc a,"coerce_to_name",
str "This expression should be a name.")
let split_at_annot bl na =
let names = List.map snd (names_of_local_assums bl) in
match na with
| None ->
if names = [] then error "A fixpoint needs at least one parameter."
else [], bl
| Some (loc, id) ->
let rec aux acc = function
| LocalRawAssum (bls, k, t) as x :: rest ->
let l, r = list_split_when (fun (loc, na) -> na = Name id) bls in
if r = [] then aux (x :: acc) rest
else
(List.rev (if l = [] then acc else LocalRawAssum (l, k, t) :: acc),
LocalRawAssum (r, k, t) :: rest)
| LocalRawDef _ as x :: rest -> aux (x :: acc) rest
| [] ->
user_err_loc(loc,"",
str "No parameter named " ++ Nameops.pr_id id ++ str".")
in aux [] bl
let map_binder g e nal = List.fold_right (down_located (name_fold g)) nal e
let map_binders f g e bl =
let h (e,bl) (nal,bk,t) = (map_binder g e nal,(nal,bk,f e t)::bl) in
let (e,rbl) = List.fold_left h (e,[]) bl in
(e, List.rev rbl)
let map_local_binders f g e bl =
let h (e,bl) = function
LocalRawAssum(nal,k,ty) ->
(map_binder g e nal, LocalRawAssum(nal,k,f e ty)::bl)
| LocalRawDef((loc,na),ty) ->
(name_fold g na e, LocalRawDef((loc,na),f e ty)::bl) in
let (e,rbl) = List.fold_left h (e,[]) bl in
(e, List.rev rbl)
let map_constr_expr_with_binders g f e = function
| CArrow (loc,a,b) -> CArrow (loc,f e a,f e b)
| CAppExpl (loc,r,l) -> CAppExpl (loc,r,List.map (f e) l)
| CApp (loc,(p,a),l) ->
CApp (loc,(p,f e a),List.map (fun (a,i) -> (f e a,i)) l)
| CProdN (loc,bl,b) ->
let (e,bl) = map_binders f g e bl in CProdN (loc,bl,f e b)
| CLambdaN (loc,bl,b) ->
let (e,bl) = map_binders f g e bl in CLambdaN (loc,bl,f e b)
| CLetIn (loc,na,a,b) -> CLetIn (loc,na,f e a,f (name_fold g (snd na) e) b)
| CCast (loc,a,CastConv (k,b)) -> CCast (loc,f e a,CastConv(k, f e b))
| CCast (loc,a,CastCoerce) -> CCast (loc,f e a,CastCoerce)
| CNotation (loc,n,(l,ll,bll)) ->
CNotation (loc,n,(List.map (f e) l,List.map (List.map (f e)) ll,
List.map (fun bl -> snd (map_local_binders f g e bl)) bll))
| CGeneralization (loc,b,a,c) -> CGeneralization (loc,b,a,f e c)
| CDelimiters (loc,s,a) -> CDelimiters (loc,s,f e a)
| CHole _ | CEvar _ | CPatVar _ | CSort _
| CPrim _ | CRef _ as x -> x
| CRecord (loc,p,l) -> CRecord (loc,p,List.map (fun (id, c) -> (id, f e c)) l)
| CCases (loc,sty,rtnpo,a,bl) ->
let bl = List.map (fun (loc,pat,rhs) -> (loc,pat,f e rhs)) bl in
let ids = ids_of_cases_tomatch a in
let po = Option.map (f (List.fold_right g ids e)) rtnpo in
CCases (loc, sty, po, List.map (fun (tm,x) -> (f e tm,x)) a,bl)
| CLetTuple (loc,nal,(ona,po),b,c) ->
let e' = List.fold_right (down_located (name_fold g)) nal e in
let e'' = Option.fold_right (down_located (name_fold g)) ona e in
CLetTuple (loc,nal,(ona,Option.map (f e'') po),f e b,f e' c)
| CIf (loc,c,(ona,po),b1,b2) ->
let e' = Option.fold_right (down_located (name_fold g)) ona e in
CIf (loc,f e c,(ona,Option.map (f e') po),f e b1,f e b2)
| CFix (loc,id,dl) ->
CFix (loc,id,List.map (fun (id,n,bl,t,d) ->
let (e',bl') = map_local_binders f g e bl in
let t' = f e' t in
let e'' = List.fold_left (fun e ((_,id),_,_,_,_) -> g id e) e' dl in
let d' = f e'' d in
(id,n,bl',t',d')) dl)
| CCoFix (loc,id,dl) ->
CCoFix (loc,id,List.map (fun (id,bl,t,d) ->
let (e',bl') = map_local_binders f g e bl in
let t' = f e' t in
let e'' = List.fold_left (fun e ((_,id),_,_,_) -> g id e) e' dl in
let d' = f e'' d in
(id,bl',t',d')) dl)
(* Used in ... *)
let rec replace_vars_constr_expr l = function
| CRef (Ident (loc,id)) as x ->
(try CRef (Ident (loc,List.assoc id l)) with Not_found -> x)
| c -> map_constr_expr_with_binders List.remove_assoc
replace_vars_constr_expr l c
type with_declaration_ast =
| CWith_Module of identifier list located * qualid located
| CWith_Definition of identifier list located * constr_expr
type module_ast =
| CMident of qualid located
| CMapply of loc * module_ast * module_ast
| CMwith of loc * module_ast * with_declaration_ast
let locs_of_notation loc locs ntn =
let (bl,el) = Util.unloc loc in
let locs = List.map Util.unloc locs in
let rec aux pos = function
| [] -> if pos = el then [] else [(pos,el-1)]
| (ba,ea)::l ->if pos = ba then aux ea l else (pos,ba-1)::aux ea l
in aux bl (Sort.list (fun l1 l2 -> fst l1 < fst l2) locs)
let ntn_loc loc (args,argslist,binderslist) =
locs_of_notation loc
(List.map constr_loc (args@List.flatten argslist)@
List.map local_binders_loc binderslist)
let patntn_loc loc (args,argslist) =
locs_of_notation loc
(List.map cases_pattern_expr_loc (args@List.flatten argslist))
|
fe30dcb623dbe995c1905f16bc7eaba2491229814aaab8cfb57d1a521d4889be | ilya-klyuchnikov/lambdapi | Parser.hs | module LambdaPi.Parser where
import Data.List
import Text.ParserCombinators.Parsec hiding (parse, State)
import qualified Text.ParserCombinators.Parsec as P
import Text.ParserCombinators.Parsec.Token
import Text.ParserCombinators.Parsec.Language
import Common
import LambdaPi.AST
lambdaPi = makeTokenParser (haskellStyle { identStart = letter <|> P.char '_',
reservedNames = ["forall", "let", "assume", "putStrLn", "out"] })
parseStmt_ :: [String] -> CharParser () (Stmt ITerm_ CTerm_)
parseStmt_ e =
do
reserved lambdaPi "let"
x <- identifier lambdaPi
reserved lambdaPi "="
t <- parseITerm_ 0 e
return (Let x t)
<|> do
reserved lambdaPi "assume"
(xs, ts) <- parseBindings_ False []
return (Assume (reverse (zip xs ts)))
<|> do
reserved lambdaPi "putStrLn"
x <- stringLiteral lambdaPi
return (PutStrLn x)
<|> do
reserved lambdaPi "out"
x <- option "" (stringLiteral lambdaPi)
return (Out x)
<|> fmap Eval (parseITerm_ 0 e)
parseBindings_ :: Bool -> [String] -> CharParser () ([String], [CTerm_])
parseBindings_ b e =
(let rec :: [String] -> [CTerm_] -> CharParser () ([String], [CTerm_])
rec e ts =
do
(x,t) <- parens lambdaPi
(do
x <- identifier lambdaPi
reserved lambdaPi "::"
t <- parseCTerm_ 0 (if b then e else [])
return (x,t))
(rec (x : e) (t : ts) <|> return (x : e, t : ts))
in rec e [])
<|>
do x <- identifier lambdaPi
reserved lambdaPi "::"
t <- parseCTerm_ 0 e
return (x : e, [t])
parseITerm_ :: Int -> [String] -> CharParser () ITerm_
parseITerm_ 0 e =
do
reserved lambdaPi "forall"
(fe,t:ts) <- parseBindings_ True e
reserved lambdaPi "."
t' <- parseCTerm_ 0 fe
return (foldl (\ p t -> Pi_ t (Inf_ p)) (Pi_ t t') ts)
<|>
try
(do
t <- parseITerm_ 1 e
rest (Inf_ t) <|> return t)
<|> do
t <- parens lambdaPi (parseLam_ e)
rest t
where
rest t =
do
reserved lambdaPi "->"
t' <- parseCTerm_ 0 ([]:e)
return (Pi_ t t')
parseITerm_ 1 e =
try
(do
t <- parseITerm_ 2 e
rest (Inf_ t) <|> return t)
<|> do
t <- parens lambdaPi (parseLam_ e)
rest t
where
rest t =
do
reserved lambdaPi "::"
t' <- parseCTerm_ 0 e
return (Ann_ t t')
parseITerm_ 2 e =
do
t <- parseITerm_ 3 e
ts <- many (parseCTerm_ 3 e)
return (foldl (:$:) t ts)
parseITerm_ 3 e =
do
reserved lambdaPi "*"
return Star_
<|> do
n <- natural lambdaPi
return (toNat_ n)
<|> do
x <- identifier lambdaPi
case findIndex (== x) e of
Just n -> return (Bound_ n)
Nothing -> return (Free_ (Global x))
<|> parens lambdaPi (parseITerm_ 0 e)
parseCTerm_ :: Int -> [String] -> CharParser () CTerm_
parseCTerm_ 0 e =
parseLam_ e
<|> fmap Inf_ (parseITerm_ 0 e)
parseCTerm_ p e =
try (parens lambdaPi (parseLam_ e))
<|> fmap Inf_ (parseITerm_ p e)
parseLam_ :: [String] -> CharParser () CTerm_
parseLam_ e =
do reservedOp lambdaPi "\\"
xs <- many1 (identifier lambdaPi)
reservedOp lambdaPi "->"
t <- parseCTerm_ 0 (reverse xs ++ e)
-- reserved lambdaPi "."
return (iterate Lam_ t !! length xs)
toNat_ :: Integer -> ITerm_
toNat_ n = Ann_ (toNat_' n) (Inf_ Nat_)
toNat_' :: Integer -> CTerm_
toNat_' 0 = Zero_
toNat_' n = Succ_ (toNat_' (n - 1)) | null | https://raw.githubusercontent.com/ilya-klyuchnikov/lambdapi/79ddf21581e03ea34a94cc00ffd5c8684d845ed9/src/LambdaPi/Parser.hs | haskell | reserved lambdaPi "." | module LambdaPi.Parser where
import Data.List
import Text.ParserCombinators.Parsec hiding (parse, State)
import qualified Text.ParserCombinators.Parsec as P
import Text.ParserCombinators.Parsec.Token
import Text.ParserCombinators.Parsec.Language
import Common
import LambdaPi.AST
lambdaPi = makeTokenParser (haskellStyle { identStart = letter <|> P.char '_',
reservedNames = ["forall", "let", "assume", "putStrLn", "out"] })
parseStmt_ :: [String] -> CharParser () (Stmt ITerm_ CTerm_)
parseStmt_ e =
do
reserved lambdaPi "let"
x <- identifier lambdaPi
reserved lambdaPi "="
t <- parseITerm_ 0 e
return (Let x t)
<|> do
reserved lambdaPi "assume"
(xs, ts) <- parseBindings_ False []
return (Assume (reverse (zip xs ts)))
<|> do
reserved lambdaPi "putStrLn"
x <- stringLiteral lambdaPi
return (PutStrLn x)
<|> do
reserved lambdaPi "out"
x <- option "" (stringLiteral lambdaPi)
return (Out x)
<|> fmap Eval (parseITerm_ 0 e)
parseBindings_ :: Bool -> [String] -> CharParser () ([String], [CTerm_])
parseBindings_ b e =
(let rec :: [String] -> [CTerm_] -> CharParser () ([String], [CTerm_])
rec e ts =
do
(x,t) <- parens lambdaPi
(do
x <- identifier lambdaPi
reserved lambdaPi "::"
t <- parseCTerm_ 0 (if b then e else [])
return (x,t))
(rec (x : e) (t : ts) <|> return (x : e, t : ts))
in rec e [])
<|>
do x <- identifier lambdaPi
reserved lambdaPi "::"
t <- parseCTerm_ 0 e
return (x : e, [t])
parseITerm_ :: Int -> [String] -> CharParser () ITerm_
parseITerm_ 0 e =
do
reserved lambdaPi "forall"
(fe,t:ts) <- parseBindings_ True e
reserved lambdaPi "."
t' <- parseCTerm_ 0 fe
return (foldl (\ p t -> Pi_ t (Inf_ p)) (Pi_ t t') ts)
<|>
try
(do
t <- parseITerm_ 1 e
rest (Inf_ t) <|> return t)
<|> do
t <- parens lambdaPi (parseLam_ e)
rest t
where
rest t =
do
reserved lambdaPi "->"
t' <- parseCTerm_ 0 ([]:e)
return (Pi_ t t')
parseITerm_ 1 e =
try
(do
t <- parseITerm_ 2 e
rest (Inf_ t) <|> return t)
<|> do
t <- parens lambdaPi (parseLam_ e)
rest t
where
rest t =
do
reserved lambdaPi "::"
t' <- parseCTerm_ 0 e
return (Ann_ t t')
parseITerm_ 2 e =
do
t <- parseITerm_ 3 e
ts <- many (parseCTerm_ 3 e)
return (foldl (:$:) t ts)
parseITerm_ 3 e =
do
reserved lambdaPi "*"
return Star_
<|> do
n <- natural lambdaPi
return (toNat_ n)
<|> do
x <- identifier lambdaPi
case findIndex (== x) e of
Just n -> return (Bound_ n)
Nothing -> return (Free_ (Global x))
<|> parens lambdaPi (parseITerm_ 0 e)
parseCTerm_ :: Int -> [String] -> CharParser () CTerm_
parseCTerm_ 0 e =
parseLam_ e
<|> fmap Inf_ (parseITerm_ 0 e)
parseCTerm_ p e =
try (parens lambdaPi (parseLam_ e))
<|> fmap Inf_ (parseITerm_ p e)
parseLam_ :: [String] -> CharParser () CTerm_
parseLam_ e =
do reservedOp lambdaPi "\\"
xs <- many1 (identifier lambdaPi)
reservedOp lambdaPi "->"
t <- parseCTerm_ 0 (reverse xs ++ e)
return (iterate Lam_ t !! length xs)
toNat_ :: Integer -> ITerm_
toNat_ n = Ann_ (toNat_' n) (Inf_ Nat_)
toNat_' :: Integer -> CTerm_
toNat_' 0 = Zero_
toNat_' n = Succ_ (toNat_' (n - 1)) |
8ddc811f0a25492dc70386bcf72cc53e2365ca65fa53543d9318ec6e3837caa7 | GaloisInc/cryptol | TypeCheck.hs | -- |
-- Module      : Cryptol.TypeCheck
-- Copyright   : (c) 2013-2016 Galois, Inc.
-- License : BSD3
-- Maintainer :
-- Stability : provisional
-- Portability : portable
{-# LANGUAGE PatternGuards, OverloadedStrings #-}
module Cryptol.TypeCheck
( tcModule
, tcModuleInst
, tcExpr
, tcDecls
, InferInput(..)
, InferOutput(..)
, SolverConfig(..)
, defaultSolverConfig
, NameSeeds
, nameSeeds
, Error(..)
, Warning(..)
, ppWarning
, ppError
, WithNames(..)
, NameMap
, ppNamedWarning
, ppNamedError
) where
import Data.IORef(IORef,modifyIORef')
import Data.Map(Map)
import Cryptol.ModuleSystem.Name
(liftSupply,mkDeclared,NameSource(..),ModPath(..))
import Cryptol.ModuleSystem.NamingEnv(NamingEnv,namingEnvRename)
import qualified Cryptol.Parser.AST as P
import Cryptol.Parser.Position(Range,emptyRange)
import Cryptol.TypeCheck.AST
import Cryptol.TypeCheck.Error
import Cryptol.TypeCheck.Monad
( runInferM
, InferInput(..)
, InferOutput(..)
, NameSeeds
, nameSeeds
, lookupVar
, newLocalScope, endLocalScope
, newModuleScope, addParamType, addParameterConstraints
, endModuleInstance
, io
)
import Cryptol.TypeCheck.Infer (inferModule, inferBinds, checkTopDecls)
import Cryptol.TypeCheck.InferTypes(VarType(..), SolverConfig(..), defaultSolverConfig)
import Cryptol.TypeCheck.Solve(proveModuleTopLevel)
import Cryptol.TypeCheck.CheckModuleInstance(checkModuleInstance)
import Cryptol . . Monad(withParamType , withParameterConstraints )
import Cryptol.TypeCheck.PP(WithNames(..),NameMap)
import Cryptol.Utils.Ident (exprModName,packIdent,Namespace(..))
import Cryptol.Utils.PP
import Cryptol.Utils.Panic(panic)
tcModule :: P.Module Name -> InferInput -> IO (InferOutput Module)
tcModule m inp = runInferM inp (inferModule m)
-- | Check a module instantiation, assuming that the functor has already
-- been checked.
-- XXX: This will change
tcModuleInst :: IORef NamingEnv {- ^ renaming environment of functor -} ->
Module {- ^ functor -} ->
P.Module Name {- ^ params -} ->
InferInput {- ^ TC settings -} ->
IO (InferOutput Module) {- ^ new version of instance -}
tcModuleInst renThis func m inp = runInferM inp $
do x <- inferModule m
newModuleScope (mName func) [] mempty
mapM_ addParamType (mParamTypes x)
addParameterConstraints (mParamConstraints x)
(ren,y) <- checkModuleInstance func x
io $ modifyIORef' renThis (namingEnvRename ren)
proveModuleTopLevel
endModuleInstance
pure y
tcExpr :: P.Expr Name -> InferInput -> IO (InferOutput (Expr,Schema))
tcExpr e0 inp = runInferM inp
$ do x <- go emptyRange e0
proveModuleTopLevel
return x
where
go loc expr =
case expr of
P.ELocated e loc' ->
do (te, sch) <- go loc' e
pure $! if inpCallStacks inp then (ELocated loc' te, sch) else (te,sch)
P.EVar x ->
do res <- lookupVar x
case res of
ExtVar s -> return (EVar x, s)
CurSCC e' t -> panic "Cryptol.TypeCheck.tcExpr"
[ "CurSCC outside binder checking:"
, show e'
, show t
]
_ -> do fresh <- liftSupply $
mkDeclared NSValue (TopModule exprModName) SystemName
(packIdent "(expression)") Nothing loc
res <- inferBinds True False
[ P.Bind
{ P.bName = P.Located { P.srcRange = loc, P.thing = fresh }
, P.bParams = []
, P.bDef = P.Located (inpRange inp) (P.DExpr expr)
, P.bPragmas = []
, P.bSignature = Nothing
, P.bMono = False
, P.bInfix = False
, P.bFixity = Nothing
, P.bDoc = Nothing
, P.bExport = Public
} ]
case res of
[d] | DExpr e <- dDefinition d -> return (e, dSignature d)
| otherwise ->
panic "Cryptol.TypeCheck.tcExpr"
[ "Expected an expression in definition"
, show d ]
_ -> panic "Cryptol.TypeCheck.tcExpr"
( "Multiple declarations when check expression:"
: map show res
)
tcDecls :: [P.TopDecl Name] -> InferInput -> IO (InferOutput ([DeclGroup],Map Name TySyn))
tcDecls ds inp = runInferM inp $
do newLocalScope
checkTopDecls ds
proveModuleTopLevel
endLocalScope
ppWarning :: (Range,Warning) -> Doc
ppWarning (r,w) = nest 2 (text "[warning] at" <+> pp r <.> colon $$ pp w)
ppError :: (Range,Error) -> Doc
ppError (r,w) = nest 2 (text "[error] at" <+> pp r <.> colon $$ pp w)
ppNamedWarning :: NameMap -> (Range,Warning) -> Doc
ppNamedWarning nm (r,w) =
nest 2 (text "[warning] at" <+> pp r <.> colon $$ pp (WithNames w nm))
ppNamedError :: NameMap -> (Range,Error) -> Doc
ppNamedError nm (r,e) =
nest 2 (text "[error] at" <+> pp r <.> colon $$ pp (WithNames e nm))
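-- A minimal usage sketch (editor's addition, not part of the original module):
-- it relies only on 'ppWarning' and 'ppError' as defined above and collects the
-- rendered diagnostics into a list; the concrete constructors of 'InferOutput'
-- live in Cryptol.TypeCheck.Monad and are deliberately not assumed here.
diagnosticDocs :: [(Range,Warning)] -> [(Range,Error)] -> [Doc]
diagnosticDocs warns errs = map ppWarning warns ++ map ppError errs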
| null | https://raw.githubusercontent.com/GaloisInc/cryptol/8cca24568ad499f06032c2e4eaa7dfd4c542efb6/src/Cryptol/TypeCheck.hs | haskell | |
License : BSD3
Maintainer :
Stability : provisional
Portability : portable
| Check a module instantiation, assuming that the functor has already
been checked.
XXX: This will change
^ renaming environment of functor
^ functor
^ params
^ TC settings
^ new version of instance | Module : Cryptol .
Copyright : ( c ) 2013 - 2016 Galois , Inc.
# LANGUAGE PatternGuards , OverloadedStrings #
module Cryptol.TypeCheck
( tcModule
, tcModuleInst
, tcExpr
, tcDecls
, InferInput(..)
, InferOutput(..)
, SolverConfig(..)
, defaultSolverConfig
, NameSeeds
, nameSeeds
, Error(..)
, Warning(..)
, ppWarning
, ppError
, WithNames(..)
, NameMap
, ppNamedWarning
, ppNamedError
) where
import Data.IORef(IORef,modifyIORef')
import Data.Map(Map)
import Cryptol.ModuleSystem.Name
(liftSupply,mkDeclared,NameSource(..),ModPath(..))
import Cryptol.ModuleSystem.NamingEnv(NamingEnv,namingEnvRename)
import qualified Cryptol.Parser.AST as P
import Cryptol.Parser.Position(Range,emptyRange)
import Cryptol.TypeCheck.AST
import Cryptol.TypeCheck.Error
import Cryptol.TypeCheck.Monad
( runInferM
, InferInput(..)
, InferOutput(..)
, NameSeeds
, nameSeeds
, lookupVar
, newLocalScope, endLocalScope
, newModuleScope, addParamType, addParameterConstraints
, endModuleInstance
, io
)
import Cryptol.TypeCheck.Infer (inferModule, inferBinds, checkTopDecls)
import Cryptol.TypeCheck.InferTypes(VarType(..), SolverConfig(..), defaultSolverConfig)
import Cryptol.TypeCheck.Solve(proveModuleTopLevel)
import Cryptol.TypeCheck.CheckModuleInstance(checkModuleInstance)
import Cryptol . . Monad(withParamType , withParameterConstraints )
import Cryptol.TypeCheck.PP(WithNames(..),NameMap)
import Cryptol.Utils.Ident (exprModName,packIdent,Namespace(..))
import Cryptol.Utils.PP
import Cryptol.Utils.Panic(panic)
tcModule :: P.Module Name -> InferInput -> IO (InferOutput Module)
tcModule m inp = runInferM inp (inferModule m)
tcModuleInst renThis func m inp = runInferM inp $
do x <- inferModule m
newModuleScope (mName func) [] mempty
mapM_ addParamType (mParamTypes x)
addParameterConstraints (mParamConstraints x)
(ren,y) <- checkModuleInstance func x
io $ modifyIORef' renThis (namingEnvRename ren)
proveModuleTopLevel
endModuleInstance
pure y
tcExpr :: P.Expr Name -> InferInput -> IO (InferOutput (Expr,Schema))
tcExpr e0 inp = runInferM inp
$ do x <- go emptyRange e0
proveModuleTopLevel
return x
where
go loc expr =
case expr of
P.ELocated e loc' ->
do (te, sch) <- go loc' e
pure $! if inpCallStacks inp then (ELocated loc' te, sch) else (te,sch)
P.EVar x ->
do res <- lookupVar x
case res of
ExtVar s -> return (EVar x, s)
CurSCC e' t -> panic "Cryptol.TypeCheck.tcExpr"
[ "CurSCC outside binder checking:"
, show e'
, show t
]
_ -> do fresh <- liftSupply $
mkDeclared NSValue (TopModule exprModName) SystemName
(packIdent "(expression)") Nothing loc
res <- inferBinds True False
[ P.Bind
{ P.bName = P.Located { P.srcRange = loc, P.thing = fresh }
, P.bParams = []
, P.bDef = P.Located (inpRange inp) (P.DExpr expr)
, P.bPragmas = []
, P.bSignature = Nothing
, P.bMono = False
, P.bInfix = False
, P.bFixity = Nothing
, P.bDoc = Nothing
, P.bExport = Public
} ]
case res of
[d] | DExpr e <- dDefinition d -> return (e, dSignature d)
| otherwise ->
panic "Cryptol.TypeCheck.tcExpr"
[ "Expected an expression in definition"
, show d ]
_ -> panic "Cryptol.TypeCheck.tcExpr"
( "Multiple declarations when check expression:"
: map show res
)
tcDecls :: [P.TopDecl Name] -> InferInput -> IO (InferOutput ([DeclGroup],Map Name TySyn))
tcDecls ds inp = runInferM inp $
do newLocalScope
checkTopDecls ds
proveModuleTopLevel
endLocalScope
ppWarning :: (Range,Warning) -> Doc
ppWarning (r,w) = nest 2 (text "[warning] at" <+> pp r <.> colon $$ pp w)
ppError :: (Range,Error) -> Doc
ppError (r,w) = nest 2 (text "[error] at" <+> pp r <.> colon $$ pp w)
ppNamedWarning :: NameMap -> (Range,Warning) -> Doc
ppNamedWarning nm (r,w) =
nest 2 (text "[warning] at" <+> pp r <.> colon $$ pp (WithNames w nm))
ppNamedError :: NameMap -> (Range,Error) -> Doc
ppNamedError nm (r,e) =
nest 2 (text "[error] at" <+> pp r <.> colon $$ pp (WithNames e nm))
|
f365f443ac4588bf6dda863d5ab94b1dc03f69497466ad98d25972f25f830d1b | reflectionalist/S9fES | sublist.scm | Scheme 9 from Empty Space , Function Library
By , 2010
; Placed in the Public Domain
;
( sublist list ) = = > list
;
; Return a fresh list formed from the members of LIST beginning with
index INTEGER1 ( inclusive ) and ending with index INTEGER2 ( exclusive ) .
;
Example : ( sublist ' ( a b c d e ) 2 4 ) = = > ( c d )
( sublist ' ( a b c d e ) 2 2 ) = = > ( )
(define (sublist x p0 pn)
(let ((k (length x)))
(cond ((<= 0 p0 pn k)
(do ((i p0 (+ 1 i))
(in (list-tail x p0) (cdr in))
(out '() (cons (car in) out)))
((= i pn)
(reverse! out))))
(else
(error "sublist: bad range" (list p0 pn))))))
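; Additional worked examples (editor's addition, not part of the original
; library; they follow directly from the index arithmetic above):
;
;          (sublist '(a b c d e) 1 4)  ==>  (b c d)
;          (sublist '(a b c d e) 0 0)  ==>  ()
;          (sublist '(a b c d e) 3 6)  ==>  error, since 6 > (length x)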
| null | https://raw.githubusercontent.com/reflectionalist/S9fES/0ade11593cf35f112e197026886fc819042058dd/lib/sublist.scm | scheme | Placed in the Public Domain
Return a fresh list formed from the members of LIST beginning with
| Scheme 9 from Empty Space , Function Library
By , 2010
( sublist list ) = = > list
index INTEGER1 ( inclusive ) and ending with index INTEGER2 ( exclusive ) .
Example : ( sublist ' ( a b c d e ) 2 4 ) = = > ( c d )
( sublist ' ( a b c d e ) 2 2 ) = = > ( )
(define (sublist x p0 pn)
(let ((k (length x)))
(cond ((<= 0 p0 pn k)
(do ((i p0 (+ 1 i))
(in (list-tail x p0) (cdr in))
(out '() (cons (car in) out)))
((= i pn)
(reverse! out))))
(else
(error "sublist: bad range" (list p0 pn))))))
|
51978aca2f6af0ef2659b2be271f1332165a7d7ce6715b5511f6de0858ef813e | mfoemmel/erlang-otp | wxAuiDockArt.erl | %%
%% %CopyrightBegin%
%%
%% Copyright Ericsson AB 2008-2009. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
%% compliance with the License. You should have received a copy of the
%% Erlang Public License along with this software. If not, it can be
%% retrieved online at /.
%%
%% Software distributed under the License is distributed on an "AS IS"
%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
%% the License for the specific language governing rights and limitations
%% under the License.
%%
%% %CopyrightEnd%
%% This file is generated DO NOT EDIT
%% @doc See external documentation: <a href="">wxAuiDockArt</a>.
%% @type wxAuiDockArt(). An object reference. The representation is internal
%% and can be changed without notice. It can't be used for comparison,
%% stored on disc or distributed for use on other nodes.
-module(wxAuiDockArt).
-include("wxe.hrl").
-export([]).
%% inherited exports
-export([parent_class/1]).
%% @hidden
parent_class(_Class) -> erlang:error({badtype, ?MODULE}).
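%% Usage sketch (editor's addition, not part of the generated file; the calls
%% below are assumptions about the surrounding wx binding, not shown here): a
%% dock art object is normally obtained from an AUI manager rather than
%% constructed directly, e.g.
%%   Mgr = wxAuiManager:new(),
%%   Art = wxAuiManager:getArtProvider(Mgr).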
| null | https://raw.githubusercontent.com/mfoemmel/erlang-otp/9c6fdd21e4e6573ca6f567053ff3ac454d742bc2/lib/wx/src/gen/wxAuiDockArt.erl | erlang |
%CopyrightBegin%
compliance with the License. You should have received a copy of the
Erlang Public License along with this software. If not, it can be
retrieved online at /.
basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
the License for the specific language governing rights and limitations
under the License.
%CopyrightEnd%
This file is generated DO NOT EDIT
@doc See external documentation: <a href="">wxAuiDockArt</a>.
@type wxAuiDockArt(). An object reference, The representation is internal
and can be changed without notice. It can't be used for comparison
stored on disc or distributed for use on other nodes.
inherited exports
@hidden | Copyright Ericsson AB 2008 - 2009 . All Rights Reserved .
The contents of this file are subject to the Erlang Public License ,
Version 1.1 , ( the " License " ) ; you may not use this file except in
Software distributed under the License is distributed on an " AS IS "
-module(wxAuiDockArt).
-include("wxe.hrl").
-export([]).
-export([parent_class/1]).
parent_class(_Class) -> erlang:error({badtype, ?MODULE}).
|
5a4f57e8c062821b03c9c703e3e8351cec2543dce3c04bcc3af20f9e94278209 | racket/racket7 | for-body.rkt | #lang racket/base
(require (for-template racket/private/for))
(provide split-for-body)
| null | https://raw.githubusercontent.com/racket/racket7/5dbb62c6bbec198b4a790f1dc08fef0c45c2e32b/racket/collects/syntax/for-body.rkt | racket | #lang racket/base
(require (for-template racket/private/for))
(provide split-for-body)
|
|
eaff29fac1bfe336070f912d99753df4006bd86994dc584ec2f029617b633a13 | scalaris-team/scalaris | l_on_cseq_SUITE.erl | 2012 - 2016 Zuse Institute Berlin
% Licensed under the Apache License, Version 2.0 (the "License");
% you may not use this file except in compliance with the License.
% You may obtain a copy of the License at
%
% -2.0
%
% Unless required by applicable law or agreed to in writing, software
% distributed under the License is distributed on an "AS IS" BASIS,
% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
% See the License for the specific language governing permissions and
% limitations under the License.
%% @author < >
%% @doc Unit tests for l_on_cseq
%% @end
-module(l_on_cseq_SUITE).
-author('').
-compile(export_all).
-include("scalaris.hrl").
-include("unittest.hrl").
-include("client_types.hrl").
-dialyzer({[no_opaque, no_return],
[test_renew_with_concurrent_range_change/1,
test_renew_with_concurrent_aux_change_invalid_split/1,
test_renew_with_concurrent_aux_change_valid_split/1,
test_renew_with_concurrent_aux_change_invalid_merge/1,
test_renew_with_concurrent_aux_change_valid_merge/1,
test_handover_with_concurrent_aux_change/1]}).
groups() ->
[{tester_tests, [sequence], [
tester_type_check_l_on_cseq
]},
{renew_tests, [sequence], [
test_renew_with_concurrent_renew,
%test_renew_with_concurrent_owner_change,
test_renew_with_concurrent_range_change,
test_renew_with_concurrent_aux_change_invalid_split,
test_renew_with_concurrent_aux_change_valid_split,
test_renew_with_concurrent_aux_change_invalid_merge,
test_renew_with_concurrent_aux_change_valid_merge
]},
{split_tests, [sequence], [
test_split,
test_split_with_concurrent_renew,
test_split_but_lease_already_exists,
%test_split_with_owner_change_in_step1,
%test_split_with_owner_change_in_step2,
%test_split_with_owner_change_in_step3,
test_split_with_aux_change_in_step1
]},
{merge_tests, [sequence], [
]}, % @todo
{takeover_tests, [sequence], [
test_takeover
]},
{handover_tests, [sequence], [
test_handover,
test_handover_with_concurrent_renew,
test_handover_with_concurrent_aux_change%,
%test_handover_with_concurrent_owner_change
]}
].
all() ->
[
{group, tester_tests},
{group, renew_tests},
{group, split_tests},
{group, handover_tests},
{group, takeover_tests}
].
suite() -> [ {timetrap, {seconds, 180}} ].
group(tester_tests) ->
[{timetrap, {seconds, 400}}];
group(renew_tests) ->
[{timetrap, {seconds, 60}}];
group(split_tests) ->
[{timetrap, {seconds, 60}}];
group(takeover_tests) ->
[{timetrap, {seconds, 60}}];
group(handover_tests) ->
[{timetrap, {seconds, 60}}];
group(_) ->
suite().
init_per_suite(Config) ->
Config.
end_per_suite(_Config) ->
ok.
init_per_group(Group, Config) -> unittest_helper:init_per_group(Group, Config).
end_per_group(Group, Config) -> unittest_helper:end_per_group(Group, Config).
init_per_testcase(TestCase, Config) ->
{priv_dir, PrivDir} = lists:keyfind(priv_dir, 1, Config),
Config2 = unittest_helper:start_minimal_procs(Config, [], true),
RingSize = config:read(replication_factor),
Config3 = unittest_helper:stop_minimal_procs(Config2),
case TestCase of
test_garbage_collector ->
unittest_helper:make_ring(RingSize, [{config, [{log_path, PrivDir},
{leases, true}]}]),
unittest_helper:check_ring_size_fully_joined(RingSize),
ok;
_ ->
unittest_helper:make_ring(RingSize, [{config, [{log_path, PrivDir},
{leases, true}]}]),
unittest_helper:check_ring_size_fully_joined(RingSize),
ok
end,
[{stop_ring, true} | Config3].
end_per_testcase(_TestCase, _Config) ->
ok.
tester_type_check_l_on_cseq(_Config) ->
Count = 500,
config:write(no_print_ring_data, true),
tester:register_value_creator({typedef, prbr, write_filter, []},
prbr, tester_create_write_filter, 1),
%% [{modulename, [excludelist = {fun, arity}]}]
Modules =
[ {l_on_cseq,
can not create DB refs for State
{lease_renew, 2}, %% sends messages
{lease_renew, 3}, %% sends messages
{lease_handover, 3}, %% sends messages
{lease_takeover, 2}, %% sends messages
{lease_takeover_after, 3}, %% sends messages
{lease_split, 4}, %% sends messages
{lease_merge, 3}, %% sends messages
{lease_send_lease_to_node, 3}, %% sends messages
{lease_split_and_change_owner, 5}, %% sends messages
{id, 1}, %% todo
{split_range, 1}, %% todo
{unittest_lease_update, 4}, %% only for unittests
{unittest_lease_update_unsafe, 3}, %% only for unittests
{unittest_clear_lease_list, 1}, %% only for unittests
{disable_lease, 2}, %% requires dht_node_state
{on, 2}, %% cannot create dht_node_state (reference for bulkowner)
{get_pretty_timeout, 1}, %% cannot create valid timestamps
{read, 2} %% cannot create pids
],
[
can not create reference ( bulkowner uses one in dht_node_state
can not create reference ( bulkowner uses one in dht_node_state
can not create reference ( bulkowner uses one in dht_node_state
{format_utc_timestamp, 1} %% cannot create valid timestamps
]},
{lease_list,
[
{update_lease_in_dht_node_state, 4}, %% cannot create dht_node_state (reference for bulkowner)
{remove_lease_from_dht_node_state, 4}, %% cannot create dht_node_state (reference for bulkowner)
{get_next_round, 2}, %% cannot create dht_node_state (reference for bulkowner)
{update_next_round, 3} %% cannot create dht_node_state (reference for bulkowner)
],
[
{update_lease_in_dht_node_state, 3}, %% cannot create dht_node_state (reference for bulkowner)
{update_active_lease, 2}, %% assert fails for random input
{remove_next_round, 2}, %% cannot create dht_node_state (reference for bulkowner)
{remove_passive_lease_from_dht_node_state, 3}, %% cannot create dht_node_state (reference for bulkowner)
{remove_active_lease_from_dht_node_state, 3} %% cannot create dht_node_state (reference for bulkowner)
]},
{leases,
[
{is_responsible, 2} %% cannot create dht_node_state (reference for bulkowner)
],
[
]
}
],
%% join a dht_node group to be able to call lease trigger functions
pid_groups:join(pid_groups:group_with(dht_node)),
_ = [ tester:type_check_module(Mod, Excl, ExclPriv, Count)
|| {Mod, Excl, ExclPriv} <- Modules ],
tester:unregister_value_creator({typedef, prbr, write_filter, []}),
true.
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%
% renew unit tests
%
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
test_renew_with_concurrent_renew(_Config) ->
ModifyF =
fun(Old) ->
l_on_cseq:set_timeout(
l_on_cseq:set_version(Old, l_on_cseq:get_version(Old)+1))
end,
WaitF = fun wait_for_simple_update/2,
test_renew_helper(_Config, ModifyF, WaitF),
true.
test_renew_with_concurrent_owner_change(_Config) ->
ModifyF =
fun(Old) ->
l_on_cseq:set_owner(
l_on_cseq:set_timeout(
l_on_cseq:set_version(
l_on_cseq:set_epoch(Old, l_on_cseq:get_epoch(Old)+1),
0)),
comm:this())
end,
WaitF = fun wait_for_delete/2,
test_renew_helper(_Config, ModifyF, WaitF),
true.
test_renew_with_concurrent_range_change(_Config) ->
ModifyF =
fun(Old) ->
l_on_cseq:set_range(
l_on_cseq:set_timeout(
l_on_cseq:set_version(
l_on_cseq:set_epoch(Old, l_on_cseq:get_epoch(Old)+1),
0)),
obfuscated_intervals_all())
end,
WaitF = fun wait_for_epoch_update/2,
test_renew_helper(_Config, ModifyF, WaitF),
true.
test_renew_with_concurrent_aux_change_invalid_split(_Config) ->
ModifyF =
fun(Old) ->
Aux = {invalid, split, r1, r2},
l_on_cseq:set_aux(
l_on_cseq:set_timeout(
l_on_cseq:set_version(
l_on_cseq:set_epoch(Old, l_on_cseq:get_epoch(Old)+1),
0)),
Aux)
end,
WaitF = fun wait_for_epoch_update/2,
test_renew_helper(_Config, ModifyF, WaitF),
true.
test_renew_with_concurrent_aux_change_valid_split(_Config) ->
ModifyF =
fun(Old) ->
Aux = {valid, split, r1, r2},
l_on_cseq:set_aux(
l_on_cseq:set_timeout(
l_on_cseq:set_version(
l_on_cseq:set_epoch(Old, l_on_cseq:get_epoch(Old)+1),
0)),
Aux)
end,
WaitF = fun wait_for_epoch_update/2,
test_renew_helper(_Config, ModifyF, WaitF),
true.
test_renew_with_concurrent_aux_change_invalid_merge(_Config) ->
ModifyF =
fun(Old) ->
Aux = {invalid, merge, r1, r2},
l_on_cseq:set_aux(
l_on_cseq:set_timeout(
l_on_cseq:set_version(
l_on_cseq:set_epoch(Old, l_on_cseq:get_epoch(Old)+1),
0)),
Aux)
end,
WaitF = fun wait_for_epoch_update/2,
test_renew_helper(_Config, ModifyF, WaitF),
true.
test_renew_with_concurrent_aux_change_valid_merge(_Config) ->
ModifyF =
fun(Old) ->
Aux = {valid, merge, r1, r2},
l_on_cseq:set_aux(
l_on_cseq:set_timeout(
l_on_cseq:set_version(
l_on_cseq:set_epoch(Old, l_on_cseq:get_epoch(Old)+1),
0)),
Aux)
end,
WaitF = fun wait_for_epoch_update/2,
test_renew_helper(_Config, ModifyF, WaitF),
true.
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%
% split unit tests
%
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
test_split(_Config) ->
NullF = fun (_Id, _Lease, _DHTNode) -> ok end,
WaitRightLeaseF = fun (Id, Lease) ->
OldEpoch = l_on_cseq:get_epoch(Lease),
wait_for_lease_version(Id, OldEpoch+2, 0)
end,
WaitLeftLeaseF = fun (Id) ->
wait_for_lease_version(Id, 2, 0)
end,
FinalWaitF = fun wait_for_split_success_msg/0,
test_split_helper_for_4_steps(_Config,
NullF, NullF,NullF, NullF,
WaitLeftLeaseF, WaitRightLeaseF, FinalWaitF),
true.
test_split_with_concurrent_renew(_Config) ->
NullF = fun (_Id, _Lease, _DHTNode) -> ok end,
RenewLeaseLeftF = fun (_Id, Lease, _DHTNode) ->
log:log("left renew lease with ~w ~w", [_Id, Lease]),
l_on_cseq:lease_renew(Lease, passive),
wait_for_lease_version(l_on_cseq:get_id(Lease),
l_on_cseq:get_epoch(Lease),
l_on_cseq:get_version(Lease)+1)
end,
RenewLeaseRightF = fun (_Id, Lease, _DHTNode) ->
log:log("right renew lease with ~w ~w", [_Id, Lease]),
l_on_cseq:lease_renew(Lease, active),
wait_for_lease_version(l_on_cseq:get_id(Lease),
l_on_cseq:get_epoch(Lease),
l_on_cseq:get_version(Lease)+1)
end,
WaitRightLeaseF = fun (Id, Lease) ->
OldEpoch = l_on_cseq:get_epoch(Lease),
wait_for_lease_version(Id, OldEpoch+2, 0)
end,
WaitLeftLeaseF = fun (Id) ->
wait_for_lease_version(Id, 2, 0)
end,
FinalWaitF = fun wait_for_split_success_msg/0,
test_split_helper_for_4_steps(_Config,
NullF, NullF, RenewLeaseLeftF, RenewLeaseRightF,
WaitLeftLeaseF, WaitRightLeaseF, FinalWaitF),
true.
test_split_but_lease_already_exists(_Config) ->
ContentCheck =
fun (Current, _WriteFilter, _Next) ->
case Current == prbr_bottom of
true ->
{true, null};
false ->
{false, lease_already_exists}
end
end,
CreateLeaseF =
fun(LeftId) ->
New = l_on_cseq:set_version(
l_on_cseq:set_epoch(
l_on_cseq:unittest_create_lease(LeftId),
47),
11),
DB = rbrcseq:get_db_for_id(lease_db, LeftId),
rbrcseq:qwrite(DB, self(), LeftId, l_on_cseq,
ContentCheck,
New),
receive
{qwrite_done, _ReqId, _Round, _, _} -> ok;
X -> ct:pal("wrong message ~p", [X]),
timer:sleep(4000)
end
end,
WaitRightLeaseF = fun (Id, Lease) ->
OldEpoch = l_on_cseq:get_epoch(Lease),
OldVersion = l_on_cseq:get_version(Lease),
wait_for_lease_version(Id, OldEpoch, OldVersion)
end,
WaitLeftLeaseF = fun (Id) ->
wait_for_lease_version(Id, 47, 11)
end,
FinalWaitF = fun wait_for_split_fail_msg/0,
test_split_helper_for_1_step(_Config,
CreateLeaseF,
WaitLeftLeaseF, WaitRightLeaseF, FinalWaitF),
true.
test_split_with_owner_change_in_step1(_Config) ->
ChangeOwnerF =
fun (Id, Lease, DHTNode) ->
ct:pal("changing owner: ~p ~p", [Id, Lease]),
New = l_on_cseq:set_owner(
l_on_cseq:set_timeout(
l_on_cseq:set_version(
l_on_cseq:set_epoch(Lease, l_on_cseq:get_epoch(Lease)+1),
0)),
comm:this()),
l_on_cseq:unittest_lease_update(Lease, New, active, DHTNode)
end,
NullF = fun (_Id, _Lease, _DHTNode) -> ok end,
WaitRightLeaseF = fun wait_for_delete/2,
WaitLeftLeaseF = fun (_Id) -> ok end,
% we cannot read the left lease anymore, because
% consistent routing will prevent the delivery of
% messages
%fun (Id) ->
%    wait_for_lease_version(Id, 1, 0)
%end,
FinalWaitF = fun wait_for_split_fail_msg/0,
test_split_helper_for_2_steps(_Config,
NullF, ChangeOwnerF,
WaitLeftLeaseF, WaitRightLeaseF, FinalWaitF),
true.
test_split_with_owner_change_in_step2(Config) ->
ChangeOwnerF =
fun (Id, Lease, DHTNode) ->
ct:pal("changing owner: ~p ~p", [Id, Lease]),
New = l_on_cseq:set_owner(
l_on_cseq:set_timeout(
l_on_cseq:set_version(
l_on_cseq:set_epoch(Lease, l_on_cseq:get_epoch(Lease)+1),
0)),
comm:this()),
l_on_cseq:unittest_lease_update(Lease, New, passive, DHTNode)
end,
NullF = fun (_Id, _Lease, _DHTNode) -> ok end,
WaitRightLeaseF = fun (_Id, _Lease) -> ok end,
% we cannot read the left lease anymore, because
% consistent routing will prevent the delivery of
% messages
%fun (Id, Lease) ->
% OldEpoch = l_on_cseq:get_epoch(Lease),
%    wait_for_lease_version(Id, OldEpoch+1, 0)
%end,
WaitLeftLeaseF = fun wait_for_delete/1,
FinalWaitF = fun wait_for_split_fail_msg/0,
test_split_helper_for_3_steps(Config,
NullF, NullF, ChangeOwnerF,
WaitLeftLeaseF, WaitRightLeaseF, FinalWaitF),
true.
test_split_with_owner_change_in_step3(Config) ->
ChangeOwnerF =
fun (Id, Lease, DHTNode) ->
ct:pal("changing owner: ~p ~p", [Id, Lease]),
New = l_on_cseq:set_owner(
l_on_cseq:set_timeout(
l_on_cseq:set_version(
l_on_cseq:set_epoch(Lease, l_on_cseq:get_epoch(Lease)+1),
0)),
comm:this()),
l_on_cseq:unittest_lease_update(Lease, New, active, DHTNode)
end,
NullF = fun (_Id, _Lease, _DHTNode) -> ok end,
WaitLeftLeaseF = fun (_Id) -> ok end,
% we cannot read the left lease anymore, because
% consistent routing will prevent the delivery of
% messages
%fun (Id) ->
%    wait_for_lease_version(Id, 2, 0)
%end,
WaitRightLeaseF = fun wait_for_delete/2,
FinalWaitF = fun wait_for_split_fail_msg/0,
test_split_helper_for_4_steps(Config,
NullF, NullF, NullF, ChangeOwnerF,
WaitLeftLeaseF, WaitRightLeaseF, FinalWaitF),
true.
test_split_with_aux_change_in_step1(_Config) ->
ChangeOwnerF =
fun (Id, Lease, DHTNode) ->
ct:pal("changing aux: ~p ~p", [Id, Lease]),
New = l_on_cseq:set_aux(
l_on_cseq:set_timeout(
l_on_cseq:set_version(
l_on_cseq:set_epoch(Lease, l_on_cseq:get_epoch(Lease)+1),
0)),
{invalid, merge, intervals:empty(), intervals:empty()}),
l_on_cseq:unittest_lease_update(Lease, New, passive, DHTNode)
end,
NullF = fun (_Id, _Lease, _DHTNode) -> ok end,
WaitRightLeaseF = fun (Id, Lease) ->
wait_for_lease_version(Id, l_on_cseq:get_epoch(Lease) + 1, 0)
end,
WaitLeftLeaseF = fun (Id) ->
wait_for_lease_version(Id, 1, 0)
end,
FinalWaitF = fun wait_for_split_fail_msg/0,
test_split_helper_for_2_steps(_Config,
NullF, ChangeOwnerF,
WaitLeftLeaseF, WaitRightLeaseF, FinalWaitF),
true.
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%
% handover unit tests
%
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
test_handover(_Config) ->
ModifyF = fun(Old) -> Old end,
WaitF = fun (Id, _Lease) ->
wait_for_lease_owner(Id, comm:this()),
receive
{handover, success, _} -> ok
end
end,
test_handover_helper(_Config, ModifyF, WaitF),
true.
test_handover_with_concurrent_renew(_Config) ->
ModifyF = fun(Old) ->
l_on_cseq:set_version(
l_on_cseq:set_epoch(Old, l_on_cseq:get_epoch(Old)+1),
0)
end,
WaitF = fun (Id, _Lease) ->
wait_for_lease_owner(Id, comm:this()),
receive
{handover, success, _} -> ok
end
end,
test_handover_helper(_Config, ModifyF, WaitF),
true.
test_handover_with_concurrent_aux_change(_Config) ->
ModifyF = fun(Old) ->
l_on_cseq:set_aux(
l_on_cseq:set_version(
l_on_cseq:set_epoch(Old, l_on_cseq:get_epoch(Old)+1),
0),
{valid, merge, foo, bar})
end,
WaitF = fun (_Id, _Lease) ->
receive
{handover, failed, _} -> ok
end
end,
test_handover_helper(_Config, ModifyF, WaitF),
true.
test_handover_with_concurrent_owner_change(_Config) ->
ModifyF = fun(Old) ->
l_on_cseq:set_owner(
l_on_cseq:set_version(
l_on_cseq:set_epoch(Old, l_on_cseq:get_epoch(Old)+1),
0),
comm:this())
end,
WaitF = fun (_Id, _Lease) ->
receive
{handover, failed, _} -> ok
end
end,
test_handover_helper(_Config, ModifyF, WaitF),
true.
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%
% takeover unit tests
%
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
test_takeover(_Config) ->
log:log("start test_takeover"),
ModifyF = fun(Old) -> Old end,
WaitF = fun (Id, _Lease, OriginalOwner) ->
ct:pal("takeover: wait_for_lease_owner ~p", [OriginalOwner]),
wait_for_lease_owner(Id, OriginalOwner),
ct:pal("takeover: wait_for_lease_owner done")
end,
test_takeover_helper(_Config, ModifyF, WaitF),
true.
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%
% takeover helper
%
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
test_takeover_helper(_Config, ModifyF, WaitF) ->
DHTNode = pid_groups:find_a(dht_node),
pid_groups:join(pid_groups:group_of(DHTNode)),
% intercept lease renew
{l_on_cseq, renew, Old, _Mode} = lease_helper:intercept_lease_renew(DHTNode),
%% OriginalOwner = l_on_cseq:get_owner(Old),
ct:pal("takeover: old lease ~p", [Old]),
Id = l_on_cseq:get_id(Old),
% now we change the owner of the lease
l_on_cseq:lease_handover(Old, comm:this(), self()),
ct:pal("new owner ~p", [comm:this()]),
HandoverWaitF = fun (_Id, _Lease) ->
wait_for_lease_owner(_Id, comm:this()),
receive
{handover, success, _} -> ok
end
end,
HandoverWaitF(Id, Old),
ct:pal("takeover: now we update the lease"),
% now we update the lease
{ok, Current} = l_on_cseq:read(Id),
ct:pal("takeover: current lease: ~p", [Current]),
New = ModifyF(Current),
case New =/= Current of
true ->
Res = l_on_cseq:unittest_lease_update(Current, New, active, DHTNode),
ct:pal("takeover: lease_update: ~p (~p -> ~p)", [Res, Current, New]),
wait_for_lease(New);
false ->
ok
end,
ct:pal("takeover: takeover"),
now the error handling of lease_takeover is going to be tested
takeover_loop(Current),
ct:pal("takeover: wait_for_lease2"),
WaitF(Id, Current, comm:make_global(DHTNode)),
ct:pal("takeover: done"),
true.
takeover_loop(L) ->
l_on_cseq:lease_takeover(L, self()),
M = receive
{takeover, _ , _} = _M -> _M;
{takeover, _ , _, _} = _M -> _M
end,
case M of
{takeover, success, L2} ->
ct:pal("takeover succeed ~w", [L2]),
ok;
{takeover, failed, L2, _Result} ->
ct:pal("retrying takeover ~p ~p", [L2, l_on_cseq:get_pretty_timeout(L2)]),
%% we repeat until the lease expired and then hopefully succeed
timer:sleep(500),
takeover_loop(L2)
end.
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%
% handover helper
%
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
test_handover_helper(_Config, ModifyF, WaitF) ->
DHTNode = pid_groups:find_a(dht_node),
pid_groups:join(pid_groups:group_of(DHTNode)),
% intercept lease renew
{l_on_cseq, renew, Old, _Mode} = lease_helper:intercept_lease_renew(DHTNode),
Id = l_on_cseq:get_id(Old),
% now we update the lease
New = ModifyF(Old),
l_on_cseq:unittest_lease_update(Old, New, active, DHTNode),
wait_for_lease(New),
% now the error handling of lease_handover is going to be tested
l_on_cseq:lease_handover(Old, comm:this(), self()),
WaitF(Id, Old),
true.
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%
% split helper
%
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
test_split_prepare(DHTNode) ->
% intercept lease renew
{l_on_cseq, renew, _Old, _Mode} = lease_helper:intercept_lease_renew(DHTNode),
% prepare split
comm:send_local(DHTNode, {get_state, comm:this(), lease_list}),
L = receive
{get_state_response, LeaseList} ->
lease_list:get_active_lease(LeaseList)
end,
{ok, R1, R2} = l_on_cseq:split_range(l_on_cseq:get_range(L)),
log:log("split under test:~n~w~n~w~n~w~n", [l_on_cseq:get_range(L), R1, R2]),
%% [R1, R2] = intervals:split(l_on_cseq:get_range(L), 2),
LeftId = l_on_cseq:id(R1),
RightId = l_on_cseq:id(R2),
intercept_split_request(DHTNode), % install intercepts
intercept_split_reply(DHTNode, split_reply_step1), %
intercept_split_reply(DHTNode, split_reply_step2), %
intercept_split_reply(DHTNode, split_reply_step3), %
intercept_split_reply(DHTNode, split_reply_step4), %
step1
log:log("starting the split under test"),
l_on_cseq:lease_split(L, R1, R2, self()), % trigger step
ct:pal("intercepting msg"),
StartMsg = receive % intercept msg
M = {l_on_cseq, split, _Lease, __R1, __R2,
__ReplyTo, __PostAux} ->
M
end,
ct:pal("intercepted msg"),
{l_on_cseq, split, Lease, _R1, _R2, _ReplyTo, _PostAux} = StartMsg,
{Lease, LeftId, RightId, StartMsg}.
test_split_helper_for_1_step(_Config,
ModifyBeforeStep1,
WaitLeftLease, WaitRightLease, FinalWaitF) ->
DHTNode = pid_groups:find_a(dht_node),
pid_groups:join(pid_groups:group_of(DHTNode)),
{Lease, LeftId, RightId, StartMsg} = test_split_prepare(DHTNode),
ModifyBeforeStep1(LeftId), % modify world
gen_component:bp_del(DHTNode, block_split_request),
gen_component:bp_del(DHTNode, split_reply_step1),
comm:send_local(DHTNode, StartMsg), % release msg
% wait for result
ct:pal("wait left"),
WaitLeftLease(LeftId),
ct:pal("wait right"),
WaitRightLease(RightId, Lease),
FinalWaitF().
test_split_helper_for_2_steps(_Config,
ModifyBeforeStep1,
ModifyBeforeStep2,
WaitLeftLease, WaitRightLease, FinalWaitF) ->
DHTNode = pid_groups:find_a(dht_node),
pid_groups:join(pid_groups:group_of(DHTNode)),
{Lease, LeftId, RightId, StartMsg} = test_split_prepare(DHTNode),
ct:pal("0"),
ModifyBeforeStep1(LeftId, Lease, DHTNode), % modify world
gen_component:bp_del(DHTNode, block_split_request),
comm:send_local(DHTNode, StartMsg), % release msg
step 2
split_helper_do_step(DHTNode, split_reply_step1, ModifyBeforeStep2, RightId),
wait_for_split_message(DHTNode, split_reply_step2),
% wait for result
ct:pal("wait left"),
WaitLeftLease(LeftId),
ct:pal("wait right"),
WaitRightLease(RightId, Lease),
FinalWaitF().
test_split_helper_for_3_steps(_Config,
ModifyBeforeStep1,
ModifyBeforeStep2,
ModifyBeforeStep3,
WaitLeftLease, WaitRightLease, FinalWaitF) ->
DHTNode = pid_groups:find_a(dht_node),
pid_groups:join(pid_groups:group_of(DHTNode)),
{Lease, LeftId, RightId, StartMsg} = test_split_prepare(DHTNode),
ModifyBeforeStep1(RightId, Lease, DHTNode), % modify world
gen_component:bp_del(DHTNode, block_split_request),
comm:send_local(DHTNode, StartMsg), % release msg
step 2
split_helper_do_step(DHTNode, split_reply_step1, ModifyBeforeStep2, LeftId),
step 3
split_helper_do_step(DHTNode, split_reply_step2, ModifyBeforeStep3, LeftId),
wait_for_split_message(DHTNode, split_reply_step3),
% wait for result
log:pal("wait left"),
WaitLeftLease(LeftId),
log:pal("wait right"),
WaitRightLease(RightId, Lease),
FinalWaitF().
test_split_helper_for_4_steps(_Config,
ModifyBeforeStep1,
ModifyBeforeStep2,
ModifyBeforeStep3,
ModifyBeforeStep4,
WaitLeftLease, WaitRightLease, FinalWaitF) ->
DHTNode = pid_groups:find_a(dht_node),
pid_groups:join(pid_groups:group_of(DHTNode)),
{Lease, LeftId, RightId, StartMsg} = test_split_prepare(DHTNode),
log:log("left and right-id:~w~n~w~n", [LeftId, RightId]),
ModifyBeforeStep1(RightId, Lease, DHTNode), % modify world
gen_component:bp_del(DHTNode, block_split_request),
comm:send_local(DHTNode, StartMsg), % release msg
step2
split_helper_do_step(DHTNode, split_reply_step1, ModifyBeforeStep2, LeftId),
log:log("finished step2"),
step3
split_helper_do_step(DHTNode, split_reply_step2, ModifyBeforeStep3, RightId),
log:log("finished step3"),
% step4
split_helper_do_step(DHTNode, split_reply_step3, ModifyBeforeStep4, LeftId),
log:log("finished step4"),
wait_for_split_message(DHTNode, split_reply_step4),
log:log("got split message"),
% wait for result
ct:pal("wait left"),
WaitLeftLease(LeftId),
ct:pal("wait right"),
WaitRightLease(RightId, Lease),
FinalWaitF().
split_helper_do_step(DHTNode, StepTag, ModifyBeforeStep, Id) ->
log:pal("doing ~p", [StepTag]),
ReplyMsg = receive
M = {l_on_cseq, StepTag, Lease, _R1, _R2, _ReplyTo, _PostAux, _Resp} ->
M
end,
ModifyBeforeStep(Id, Lease, DHTNode),
gen_component:bp_del(DHTNode, StepTag),
watch_message(DHTNode, ReplyMsg).
wait_for_split_message(DHTNode, StepTag) ->
log:pal("waiting for ~p", [StepTag]),
receive
M = {l_on_cseq, StepTag, _Lease, _R1, _R2, _ReplyTo, _PostAux, _Resp} ->
%log:pal("got ~p", [M]),
gen_component:bp_del(DHTNode, StepTag),
watch_message(DHTNode, M)
end.
wait_for_split_success_msg() ->
log:pal("wait_for_split_success_msg() ~p", [self()]),
receive
{split, success, _, _} ->
ok
end.
wait_for_split_fail_msg() ->
receive
{split, fail, _} ->
ok
end.
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%
% renew helper
%
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
test_renew_helper(_Config, ModifyF, WaitF) ->
%% pid_groups:join(pid_groups:group_with(dht_node)),
DHTNode = pid_groups:find_a(dht_node),
% intercept lease renew
M = {l_on_cseq, renew, Old, _Mode} = lease_helper:intercept_lease_renew(DHTNode),
Id = l_on_cseq:get_id(Old),
% now we update the lease
New = ModifyF(Old),
l_on_cseq:unittest_lease_update(Old, New, active, DHTNode),
wait_for_lease(New),
now the error handling of is going to be tested
comm:send_local(DHTNode, M),
WaitF(Id, Old),
true.
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%
% wait helper
%
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
wait_for(F) ->
case F() of
true ->
ok;
false ->
wait_for(F)
end.
wait_for_lease(Lease) ->
Id = l_on_cseq:get_id(Lease),
wait_for_lease_helper(Id, fun (L) -> L == Lease end).
wait_for_lease_version(Id, Epoch, Version) ->
ct:pal("wait_for_lease_version ~p", [Id]),
wait_for_lease_helper(Id,
fun (Lease) ->
ct:pal("want ~p:~p; have ~p:~p", [Epoch, Version, l_on_cseq:get_epoch(Lease), l_on_cseq:get_version(Lease)]),
Epoch == l_on_cseq:get_epoch(Lease)
andalso Version == l_on_cseq:get_version(Lease)
end).
wait_for_lease_owner(Id, NewOwner) ->
wait_for_lease_helper(Id,
fun (Lease) ->
NewOwner == l_on_cseq:get_owner(Lease)
end).
wait_for_lease_helper(Id, F) ->
wait_for(fun () ->
%DHTNode = pid_groups:find_a(dht_node),
%comm:send_local(DHTNode, {get_state, comm:this(), lease_list}),
%{A, P} = receive
%             {get_state_response, {ActiveList, PassiveList}} ->
%                 {ActiveList, PassiveList}
% end,
%ct:pal("~p ~p", [A, P]),
case l_on_cseq:read(Id) of
{ok, Lease} ->
F(Lease);
_ ->
false
end
end).
get_dht_node_state(Pid, What) ->
comm:send_local(Pid, {get_state, comm:this(), What}),
receive
{get_state_response, Data} ->
Data
end.
get_all_active_leases() ->
[ get_active_lease(DHTNode) || DHTNode <- pid_groups:find_all(dht_node) ].
get_active_lease(Pid) ->
LeaseList = get_dht_node_state(Pid, lease_list),
lease_list:get_active_lease(LeaseList).
wait_for_simple_update(Id, Old) ->
OldVersion = l_on_cseq:get_version(Old),
OldEpoch = l_on_cseq:get_epoch(Old),
wait_for_lease_version(Id, OldEpoch, OldVersion+1).
wait_for_epoch_update(Id, Old) ->
OldEpoch = l_on_cseq:get_epoch(Old),
wait_for_lease_version(Id, OldEpoch+1, 0).
wait_for_delete(Id, _Old) ->
DHTNode = pid_groups:find_a(dht_node),
ct:pal("wait_for_delete ~p", [Id]),
wait_for(fun () ->
LeaseList = get_dht_node_state(DHTNode, lease_list),
L = lease_list:get_active_lease(LeaseList),
case L of
empty ->
true;
_ ->
l_on_cseq:get_id(L) =/= Id
end
end).
wait_for_delete(Id) ->
ct:pal("wait_for_delete ~p", [Id]),
DHTNode = pid_groups:find_a(dht_node),
wait_for(fun () ->
LeaseList = get_dht_node_state(DHTNode, lease_list),
L = lease_list:get_active_lease(LeaseList),
case L of
empty ->
true;
_ ->
l_on_cseq:get_id(L) =/= Id
end
end).
wait_for_number_of_leases(Nr) ->
wait_for(fun() ->
length(get_all_active_leases()) == Nr
end).
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%
% intercepting and blocking
%
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
watch_message(Pid, Message) ->
gen_component:bp_set_cond(Pid, block_message(self(), Message), watch_message),
comm:send_local(Pid, Message),
receive
{saw_message} ->
_ = gen_component:bp_step(Pid),
gen_component:bp_del(Pid, watch_message),
gen_component:bp_cont(Pid)
end.
intercept_split_request(DHTNode) ->
% we wait for the next periodic trigger
gen_component:bp_set_cond(DHTNode, block_split_request(self()), block_split_request).
intercept_split_reply(DHTNode, StepTag) ->
% we wait for the next periodic trigger
gen_component:bp_set_cond(DHTNode, block_split_reply(self(), StepTag), StepTag).
block_message(Pid, WatchedMessage) ->
fun (Message, _State) ->
case Message of
WatchedMessage ->
comm:send_local(Pid, {saw_message}),
true;
_ ->
false
end
end.
block_split_request(Pid) ->
fun (Message, _State) ->
case Message of
{l_on_cseq, split, _Lease, _R1, _R2, _ReplyTo, _PostAux} ->
comm:send_local(Pid, Message),
drop_single;
_ ->
false
end
end.
block_split_reply(Pid, StepTag) ->
fun (Message, _State) ->
case Message of
{l_on_cseq, StepTag, _Lease, _R1, _R2, _ReplyTo, _PostAux, _Resp} ->
comm:send_local(Pid, Message),
drop_single;
_ ->
false
end
end.
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%
% utility functions
%
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
obfuscated_intervals_all() ->
[{'(',0,5,']'},
{0},
{'(',5,340282366920938463463374607431768211456,')'}
].
| null | https://raw.githubusercontent.com/scalaris-team/scalaris/feb894d54e642bb3530e709e730156b0ecc1635f/test/l_on_cseq_SUITE.erl | erlang | you may not use this file except in compliance with the License.
You may obtain a copy of the License at
-2.0
Unless required by applicable law or agreed to in writing, software
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
@end
test_renew_with_concurrent_owner_change,
test_split_with_owner_change_in_step1,
test_split_with_owner_change_in_step2,
test_split_with_owner_change_in_step3,
@todo
,
test_handover_with_concurrent_owner_change
[{modulename, [excludelist = {fun, arity}]}]
sends messages
sends messages
sends messages
sends messages
sends messages
sends messages
sends messages
sends messages
sends messages
todo
todo
only for unittests
only for unittests
only for unittests
requires dht_node_state
cannot create dht_node_state (reference for bulkowner)
cannot create valid timestamps
cannot create pids
cannot create valid timestamps
cannot create dht_node_state (reference for bulkowner)
cannot create dht_node_state (reference for bulkowner)
cannot create dht_node_state (reference for bulkowner)
cannot create dht_node_state (reference for bulkowner)
cannot create dht_node_state (reference for bulkowner)
assert fails for random input
cannot create dht_node_state (reference for bulkowner)
cannot create dht_node_state (reference for bulkowner)
cannot create dht_node_state (reference for bulkowner)
cannot create dht_node_state (reference for bulkowner)
join a dht_node group to be able to call lease trigger functions
renew unit tests
split unit tests
we cannot read the left lease anymore, because
consistent routing will prevent the delivery of
messages
fun (Id) ->
end,
we cannot read the left lease anymore, because
consistent routing will prevent the delivery of
messages
fun (Id, Lease) ->
OldEpoch = l_on_cseq:get_epoch(Lease),
end,
we cannot read the left lease anymore, because
consistent routing will prevent the delivery of
messages
fun (Id) ->
end,
handover unit tests
takeover unit tests
takeover helper
intercept lease renew
now we change the owner of the lease
now we update the lease
we repeat until the lease expired and then hopefully succeed
handover helper
intercept lease renew
now we update the lease
now the error handling of lease_handover is going to be tested
split helper
intercept lease renew
prepare split
install intercepts
trigger step
intercept msg
modify world
release msg
wait for result
modify world
release msg
wait for result
modify world
release msg
wait for result
modify world
release msg
step4
wait for result
log:pal("got ~p", [M]),
renew helper
pid_groups:join(pid_groups:group_with(dht_node)),
intercept lease renew
now we update the lease
wait helper
DHTNode = pid_groups:find_a(dht_node),
{A, P} = receive
end,
ct:pal("~p ~p", [A, P]),
intercepting and blocking
we wait for the next periodic trigger
we wait for the next periodic trigger
utility functions
| 2012 - 2016 Zuse Institute Berlin
Licensed under the Apache License , Version 2.0 ( the " License " ) ;
distributed under the License is distributed on an " AS IS " BASIS ,
@author < >
@doc Unit tests for l_on_cseq
-module(l_on_cseq_SUITE).
-author('').
-compile(export_all).
-include("scalaris.hrl").
-include("unittest.hrl").
-include("client_types.hrl").
-dialyzer({[no_opaque, no_return],
[test_renew_with_concurrent_range_change/1,
test_renew_with_concurrent_aux_change_invalid_split/1,
test_renew_with_concurrent_aux_change_valid_split/1,
test_renew_with_concurrent_aux_change_invalid_merge/1,
test_renew_with_concurrent_aux_change_valid_merge/1,
test_handover_with_concurrent_aux_change/1]}).
groups() ->
[{tester_tests, [sequence], [
tester_type_check_l_on_cseq
]},
{renew_tests, [sequence], [
test_renew_with_concurrent_renew,
test_renew_with_concurrent_range_change,
test_renew_with_concurrent_aux_change_invalid_split,
test_renew_with_concurrent_aux_change_valid_split,
test_renew_with_concurrent_aux_change_invalid_merge,
test_renew_with_concurrent_aux_change_valid_merge
]},
{split_tests, [sequence], [
test_split,
test_split_with_concurrent_renew,
test_split_but_lease_already_exists,
test_split_with_aux_change_in_step1
]},
{merge_tests, [sequence], [
{takeover_tests, [sequence], [
test_takeover
]},
{handover_tests, [sequence], [
test_handover,
test_handover_with_concurrent_renew,
]}
].
all() ->
[
{group, tester_tests},
{group, renew_tests},
{group, split_tests},
{group, handover_tests},
{group, takeover_tests}
].
suite() -> [ {timetrap, {seconds, 180}} ].
group(tester_tests) ->
[{timetrap, {seconds, 400}}];
group(renew_tests) ->
[{timetrap, {seconds, 60}}];
group(split_tests) ->
[{timetrap, {seconds, 60}}];
group(takeover_tests) ->
[{timetrap, {seconds, 60}}];
group(handover_tests) ->
[{timetrap, {seconds, 60}}];
group(_) ->
suite().
init_per_suite(Config) ->
Config.
end_per_suite(_Config) ->
ok.
init_per_group(Group, Config) -> unittest_helper:init_per_group(Group, Config).
end_per_group(Group, Config) -> unittest_helper:end_per_group(Group, Config).
init_per_testcase(TestCase, Config) ->
{priv_dir, PrivDir} = lists:keyfind(priv_dir, 1, Config),
Config2 = unittest_helper:start_minimal_procs(Config, [], true),
RingSize = config:read(replication_factor),
Config3 = unittest_helper:stop_minimal_procs(Config2),
case TestCase of
test_garbage_collector ->
unittest_helper:make_ring(RingSize, [{config, [{log_path, PrivDir},
{leases, true}]}]),
unittest_helper:check_ring_size_fully_joined(RingSize),
ok;
_ ->
unittest_helper:make_ring(RingSize, [{config, [{log_path, PrivDir},
{leases, true}]}]),
unittest_helper:check_ring_size_fully_joined(RingSize),
ok
end,
[{stop_ring, true} | Config3].
end_per_testcase(_TestCase, _Config) ->
ok.
tester_type_check_l_on_cseq(_Config) ->
Count = 500,
config:write(no_print_ring_data, true),
tester:register_value_creator({typedef, prbr, write_filter, []},
prbr, tester_create_write_filter, 1),
Modules =
[ {l_on_cseq,
can not create DB refs for State
],
[
can not create reference ( bulkowner uses one in dht_node_state
can not create reference ( bulkowner uses one in dht_node_state
can not create reference ( bulkowner uses one in dht_node_state
]},
{lease_list,
[
],
[
]},
{leases,
[
],
[
]
}
],
pid_groups:join(pid_groups:group_with(dht_node)),
_ = [ tester:type_check_module(Mod, Excl, ExclPriv, Count)
|| {Mod, Excl, ExclPriv} <- Modules ],
tester:unregister_value_creator({typedef, prbr, write_filter, []}),
true.
test_renew_with_concurrent_renew(_Config) ->
ModifyF =
fun(Old) ->
l_on_cseq:set_timeout(
l_on_cseq:set_version(Old, l_on_cseq:get_version(Old)+1))
end,
WaitF = fun wait_for_simple_update/2,
test_renew_helper(_Config, ModifyF, WaitF),
true.
test_renew_with_concurrent_owner_change(_Config) ->
ModifyF =
fun(Old) ->
l_on_cseq:set_owner(
l_on_cseq:set_timeout(
l_on_cseq:set_version(
l_on_cseq:set_epoch(Old, l_on_cseq:get_epoch(Old)+1),
0)),
comm:this())
end,
WaitF = fun wait_for_delete/2,
test_renew_helper(_Config, ModifyF, WaitF),
true.
test_renew_with_concurrent_range_change(_Config) ->
ModifyF =
fun(Old) ->
l_on_cseq:set_range(
l_on_cseq:set_timeout(
l_on_cseq:set_version(
l_on_cseq:set_epoch(Old, l_on_cseq:get_epoch(Old)+1),
0)),
obfuscated_intervals_all())
end,
WaitF = fun wait_for_epoch_update/2,
test_renew_helper(_Config, ModifyF, WaitF),
true.
test_renew_with_concurrent_aux_change_invalid_split(_Config) ->
ModifyF =
fun(Old) ->
Aux = {invalid, split, r1, r2},
l_on_cseq:set_aux(
l_on_cseq:set_timeout(
l_on_cseq:set_version(
l_on_cseq:set_epoch(Old, l_on_cseq:get_epoch(Old)+1),
0)),
Aux)
end,
WaitF = fun wait_for_epoch_update/2,
test_renew_helper(_Config, ModifyF, WaitF),
true.
test_renew_with_concurrent_aux_change_valid_split(_Config) ->
ModifyF =
fun(Old) ->
Aux = {valid, split, r1, r2},
l_on_cseq:set_aux(
l_on_cseq:set_timeout(
l_on_cseq:set_version(
l_on_cseq:set_epoch(Old, l_on_cseq:get_epoch(Old)+1),
0)),
Aux)
end,
WaitF = fun wait_for_epoch_update/2,
test_renew_helper(_Config, ModifyF, WaitF),
true.
test_renew_with_concurrent_aux_change_invalid_merge(_Config) ->
ModifyF =
fun(Old) ->
Aux = {invalid, merge, r1, r2},
l_on_cseq:set_aux(
l_on_cseq:set_timeout(
l_on_cseq:set_version(
l_on_cseq:set_epoch(Old, l_on_cseq:get_epoch(Old)+1),
0)),
Aux)
end,
WaitF = fun wait_for_epoch_update/2,
test_renew_helper(_Config, ModifyF, WaitF),
true.
test_renew_with_concurrent_aux_change_valid_merge(_Config) ->
ModifyF =
fun(Old) ->
Aux = {valid, merge, r1, r2},
l_on_cseq:set_aux(
l_on_cseq:set_timeout(
l_on_cseq:set_version(
l_on_cseq:set_epoch(Old, l_on_cseq:get_epoch(Old)+1),
0)),
Aux)
end,
WaitF = fun wait_for_epoch_update/2,
test_renew_helper(_Config, ModifyF, WaitF),
true.
test_split(_Config) ->
NullF = fun (_Id, _Lease, _DHTNode) -> ok end,
WaitRightLeaseF = fun (Id, Lease) ->
OldEpoch = l_on_cseq:get_epoch(Lease),
wait_for_lease_version(Id, OldEpoch+2, 0)
end,
WaitLeftLeaseF = fun (Id) ->
wait_for_lease_version(Id, 2, 0)
end,
FinalWaitF = fun wait_for_split_success_msg/0,
test_split_helper_for_4_steps(_Config,
NullF, NullF,NullF, NullF,
WaitLeftLeaseF, WaitRightLeaseF, FinalWaitF),
true.
test_split_with_concurrent_renew(_Config) ->
NullF = fun (_Id, _Lease, _DHTNode) -> ok end,
RenewLeaseLeftF = fun (_Id, Lease, _DHTNode) ->
log:log("left renew lease with ~w ~w", [_Id, Lease]),
l_on_cseq:lease_renew(Lease, passive),
wait_for_lease_version(l_on_cseq:get_id(Lease),
l_on_cseq:get_epoch(Lease),
l_on_cseq:get_version(Lease)+1)
end,
RenewLeaseRightF = fun (_Id, Lease, _DHTNode) ->
log:log("right renew lease with ~w ~w", [_Id, Lease]),
l_on_cseq:lease_renew(Lease, active),
wait_for_lease_version(l_on_cseq:get_id(Lease),
l_on_cseq:get_epoch(Lease),
l_on_cseq:get_version(Lease)+1)
end,
WaitRightLeaseF = fun (Id, Lease) ->
OldEpoch = l_on_cseq:get_epoch(Lease),
wait_for_lease_version(Id, OldEpoch+2, 0)
end,
WaitLeftLeaseF = fun (Id) ->
wait_for_lease_version(Id, 2, 0)
end,
FinalWaitF = fun wait_for_split_success_msg/0,
test_split_helper_for_4_steps(_Config,
NullF, NullF, RenewLeaseLeftF, RenewLeaseRightF,
WaitLeftLeaseF, WaitRightLeaseF, FinalWaitF),
true.
test_split_but_lease_already_exists(_Config) ->
ContentCheck =
fun (Current, _WriteFilter, _Next) ->
case Current == prbr_bottom of
true ->
{true, null};
false ->
{false, lease_already_exists}
end
end,
CreateLeaseF =
fun(LeftId) ->
New = l_on_cseq:set_version(
l_on_cseq:set_epoch(
l_on_cseq:unittest_create_lease(LeftId),
47),
11),
DB = rbrcseq:get_db_for_id(lease_db, LeftId),
rbrcseq:qwrite(DB, self(), LeftId, l_on_cseq,
ContentCheck,
New),
receive
{qwrite_done, _ReqId, _Round, _, _} -> ok;
X -> ct:pal("wrong message ~p", [X]),
timer:sleep(4000)
end
end,
WaitRightLeaseF = fun (Id, Lease) ->
OldEpoch = l_on_cseq:get_epoch(Lease),
OldVersion = l_on_cseq:get_version(Lease),
wait_for_lease_version(Id, OldEpoch, OldVersion)
end,
WaitLeftLeaseF = fun (Id) ->
wait_for_lease_version(Id, 47, 11)
end,
FinalWaitF = fun wait_for_split_fail_msg/0,
test_split_helper_for_1_step(_Config,
CreateLeaseF,
WaitLeftLeaseF, WaitRightLeaseF, FinalWaitF),
true.
test_split_with_owner_change_in_step1(_Config) ->
ChangeOwnerF =
fun (Id, Lease, DHTNode) ->
ct:pal("changing owner: ~p ~p", [Id, Lease]),
New = l_on_cseq:set_owner(
l_on_cseq:set_timeout(
l_on_cseq:set_version(
l_on_cseq:set_epoch(Lease, l_on_cseq:get_epoch(Lease)+1),
0)),
comm:this()),
l_on_cseq:unittest_lease_update(Lease, New, active, DHTNode)
end,
NullF = fun (_Id, _Lease, _DHTNode) -> ok end,
WaitRightLeaseF = fun wait_for_delete/2,
WaitLeftLeaseF = fun (_Id) -> ok end,
wait_for_lease_version(Id , 1 , 0 )
FinalWaitF = fun wait_for_split_fail_msg/0,
test_split_helper_for_2_steps(_Config,
NullF, ChangeOwnerF,
WaitLeftLeaseF, WaitRightLeaseF, FinalWaitF),
true.
test_split_with_owner_change_in_step2(Config) ->
ChangeOwnerF =
fun (Id, Lease, DHTNode) ->
ct:pal("changing owner: ~p ~p", [Id, Lease]),
New = l_on_cseq:set_owner(
l_on_cseq:set_timeout(
l_on_cseq:set_version(
l_on_cseq:set_epoch(Lease, l_on_cseq:get_epoch(Lease)+1),
0)),
comm:this()),
l_on_cseq:unittest_lease_update(Lease, New, passive, DHTNode)
end,
NullF = fun (_Id, _Lease, _DHTNode) -> ok end,
WaitRightLeaseF = fun (_Id, _Lease) -> ok end,
wait_for_lease_version(Id , OldEpoch + 1 , 0 )
WaitLeftLeaseF = fun wait_for_delete/1,
FinalWaitF = fun wait_for_split_fail_msg/0,
test_split_helper_for_3_steps(Config,
NullF, NullF, ChangeOwnerF,
WaitLeftLeaseF, WaitRightLeaseF, FinalWaitF),
true.
test_split_with_owner_change_in_step3(Config) ->
ChangeOwnerF =
fun (Id, Lease, DHTNode) ->
ct:pal("changing owner: ~p ~p", [Id, Lease]),
New = l_on_cseq:set_owner(
l_on_cseq:set_timeout(
l_on_cseq:set_version(
l_on_cseq:set_epoch(Lease, l_on_cseq:get_epoch(Lease)+1),
0)),
comm:this()),
l_on_cseq:unittest_lease_update(Lease, New, active, DHTNode)
end,
NullF = fun (_Id, _Lease, _DHTNode) -> ok end,
WaitLeftLeaseF = fun (_Id) -> ok end,
wait_for_lease_version(Id , 2 , 0 )
WaitRightLeaseF = fun wait_for_delete/2,
FinalWaitF = fun wait_for_split_fail_msg/0,
test_split_helper_for_4_steps(Config,
NullF, NullF, NullF, ChangeOwnerF,
WaitLeftLeaseF, WaitRightLeaseF, FinalWaitF),
true.
test_split_with_aux_change_in_step1(_Config) ->
ChangeOwnerF =
fun (Id, Lease, DHTNode) ->
ct:pal("changing aux: ~p ~p", [Id, Lease]),
New = l_on_cseq:set_aux(
l_on_cseq:set_timeout(
l_on_cseq:set_version(
l_on_cseq:set_epoch(Lease, l_on_cseq:get_epoch(Lease)+1),
0)),
{invalid, merge, intervals:empty(), intervals:empty()}),
l_on_cseq:unittest_lease_update(Lease, New, passive, DHTNode)
end,
NullF = fun (_Id, _Lease, _DHTNode) -> ok end,
WaitRightLeaseF = fun (Id, Lease) ->
wait_for_lease_version(Id, l_on_cseq:get_epoch(Lease) + 1, 0)
end,
WaitLeftLeaseF = fun (Id) ->
wait_for_lease_version(Id, 1, 0)
end,
FinalWaitF = fun wait_for_split_fail_msg/0,
test_split_helper_for_2_steps(_Config,
NullF, ChangeOwnerF,
WaitLeftLeaseF, WaitRightLeaseF, FinalWaitF),
true.
test_handover(_Config) ->
ModifyF = fun(Old) -> Old end,
WaitF = fun (Id, _Lease) ->
wait_for_lease_owner(Id, comm:this()),
receive
{handover, success, _} -> ok
end
end,
test_handover_helper(_Config, ModifyF, WaitF),
true.
test_handover_with_concurrent_renew(_Config) ->
ModifyF = fun(Old) ->
l_on_cseq:set_version(
l_on_cseq:set_epoch(Old, l_on_cseq:get_epoch(Old)+1),
0)
end,
WaitF = fun (Id, _Lease) ->
wait_for_lease_owner(Id, comm:this()),
receive
{handover, success, _} -> ok
end
end,
test_handover_helper(_Config, ModifyF, WaitF),
true.
test_handover_with_concurrent_aux_change(_Config) ->
ModifyF = fun(Old) ->
l_on_cseq:set_aux(
l_on_cseq:set_version(
l_on_cseq:set_epoch(Old, l_on_cseq:get_epoch(Old)+1),
0),
{valid, merge, foo, bar})
end,
WaitF = fun (_Id, _Lease) ->
receive
{handover, failed, _} -> ok
end
end,
test_handover_helper(_Config, ModifyF, WaitF),
true.
test_handover_with_concurrent_owner_change(_Config) ->
ModifyF = fun(Old) ->
l_on_cseq:set_owner(
l_on_cseq:set_version(
l_on_cseq:set_epoch(Old, l_on_cseq:get_epoch(Old)+1),
0),
comm:this())
end,
WaitF = fun (_Id, _Lease) ->
receive
{handover, failed, _} -> ok
end
end,
test_handover_helper(_Config, ModifyF, WaitF),
true.
test_takeover(_Config) ->
log:log("start test_takeover"),
ModifyF = fun(Old) -> Old end,
WaitF = fun (Id, _Lease, OriginalOwner) ->
ct:pal("takeover: wait_for_lease_owner ~p", [OriginalOwner]),
wait_for_lease_owner(Id, OriginalOwner),
ct:pal("takeover: wait_for_lease_owner done")
end,
test_takeover_helper(_Config, ModifyF, WaitF),
true.
test_takeover_helper(_Config, ModifyF, WaitF) ->
DHTNode = pid_groups:find_a(dht_node),
pid_groups:join(pid_groups:group_of(DHTNode)),
{l_on_cseq, renew, Old, _Mode} = lease_helper:intercept_lease_renew(DHTNode),
OriginalOwner = l_on_cseq : get_owner(Old ) ,
ct:pal("takeover: old lease ~p", [Old]),
Id = l_on_cseq:get_id(Old),
l_on_cseq:lease_handover(Old, comm:this(), self()),
ct:pal("new owner ~p", [comm:this()]),
HandoverWaitF = fun (_Id, _Lease) ->
wait_for_lease_owner(_Id, comm:this()),
receive
{handover, success, _} -> ok
end
end,
HandoverWaitF(Id, Old),
ct:pal("takeover: now we update the lease"),
{ok, Current} = l_on_cseq:read(Id),
ct:pal("takeover: current lease: ~p", [Current]),
New = ModifyF(Current),
case New =/= Current of
true ->
Res = l_on_cseq:unittest_lease_update(Current, New, active, DHTNode),
ct:pal("takeover: lease_update: ~p (~p -> ~p)", [Res, Current, New]),
wait_for_lease(New);
false ->
ok
end,
ct:pal("takeover: takeover"),
now the error handling of lease_takeover is going to be tested
takeover_loop(Current),
ct:pal("takeover: wait_for_lease2"),
WaitF(Id, Current, comm:make_global(DHTNode)),
ct:pal("takeover: done"),
true.
takeover_loop(L) ->
l_on_cseq:lease_takeover(L, self()),
M = receive
{takeover, _ , _} = _M -> _M;
{takeover, _ , _, _} = _M -> _M
end,
case M of
{takeover, success, L2} ->
ct:pal("takeover succeed ~w", [L2]),
ok;
{takeover, failed, L2, _Result} ->
ct:pal("retrying takeover ~p ~p", [L2, l_on_cseq:get_pretty_timeout(L2)]),
timer:sleep(500),
takeover_loop(L2)
end.
test_handover_helper(_Config, ModifyF, WaitF) ->
DHTNode = pid_groups:find_a(dht_node),
pid_groups:join(pid_groups:group_of(DHTNode)),
{l_on_cseq, renew, Old, _Mode} = lease_helper:intercept_lease_renew(DHTNode),
Id = l_on_cseq:get_id(Old),
New = ModifyF(Old),
l_on_cseq:unittest_lease_update(Old, New, active, DHTNode),
wait_for_lease(New),
l_on_cseq:lease_handover(Old, comm:this(), self()),
WaitF(Id, Old),
true.
test_split_prepare(DHTNode) ->
{l_on_cseq, renew, _Old, _Mode} = lease_helper:intercept_lease_renew(DHTNode),
comm:send_local(DHTNode, {get_state, comm:this(), lease_list}),
L = receive
{get_state_response, LeaseList} ->
lease_list:get_active_lease(LeaseList)
end,
{ok, R1, R2} = l_on_cseq:split_range(l_on_cseq:get_range(L)),
log:log("split under test:~n~w~n~w~n~w~n", [l_on_cseq:get_range(L), R1, R2]),
[ R1 , R2 ] = intervals : split(l_on_cseq : get_range(L ) , 2 ) ,
LeftId = l_on_cseq:id(R1),
RightId = l_on_cseq:id(R2),
step1
log:log("starting the split under test"),
ct:pal("intercepting msg"),
M = {l_on_cseq, split, _Lease, __R1, __R2,
__ReplyTo, __PostAux} ->
M
end,
ct:pal("intercepted msg"),
{l_on_cseq, split, Lease, _R1, _R2, _ReplyTo, _PostAux} = StartMsg,
{Lease, LeftId, RightId, StartMsg}.
test_split_helper_for_1_step(_Config,
ModifyBeforeStep1,
WaitLeftLease, WaitRightLease, FinalWaitF) ->
DHTNode = pid_groups:find_a(dht_node),
pid_groups:join(pid_groups:group_of(DHTNode)),
{Lease, LeftId, RightId, StartMsg} = test_split_prepare(DHTNode),
gen_component:bp_del(DHTNode, block_split_request),
gen_component:bp_del(DHTNode, split_reply_step1),
ct:pal("wait left"),
WaitLeftLease(LeftId),
ct:pal("wait right"),
WaitRightLease(RightId, Lease),
FinalWaitF().
test_split_helper_for_2_steps(_Config,
ModifyBeforeStep1,
ModifyBeforeStep2,
WaitLeftLease, WaitRightLease, FinalWaitF) ->
DHTNode = pid_groups:find_a(dht_node),
pid_groups:join(pid_groups:group_of(DHTNode)),
{Lease, LeftId, RightId, StartMsg} = test_split_prepare(DHTNode),
ct:pal("0"),
gen_component:bp_del(DHTNode, block_split_request),
%% step 2
split_helper_do_step(DHTNode, split_reply_step1, ModifyBeforeStep2, RightId),
wait_for_split_message(DHTNode, split_reply_step2),
ct:pal("wait left"),
WaitLeftLease(LeftId),
ct:pal("wait right"),
WaitRightLease(RightId, Lease),
FinalWaitF().
test_split_helper_for_3_steps(_Config,
ModifyBeforeStep1,
ModifyBeforeStep2,
ModifyBeforeStep3,
WaitLeftLease, WaitRightLease, FinalWaitF) ->
DHTNode = pid_groups:find_a(dht_node),
pid_groups:join(pid_groups:group_of(DHTNode)),
{Lease, LeftId, RightId, StartMsg} = test_split_prepare(DHTNode),
gen_component:bp_del(DHTNode, block_split_request),
%% step 2
split_helper_do_step(DHTNode, split_reply_step1, ModifyBeforeStep2, LeftId),
%% step 3
split_helper_do_step(DHTNode, split_reply_step2, ModifyBeforeStep3, LeftId),
wait_for_split_message(DHTNode, split_reply_step3),
log:pal("wait left"),
WaitLeftLease(LeftId),
log:pal("wait right"),
WaitRightLease(RightId, Lease),
FinalWaitF().
test_split_helper_for_4_steps(_Config,
ModifyBeforeStep1,
ModifyBeforeStep2,
ModifyBeforeStep3,
ModifyBeforeStep4,
WaitLeftLease, WaitRightLease, FinalWaitF) ->
DHTNode = pid_groups:find_a(dht_node),
pid_groups:join(pid_groups:group_of(DHTNode)),
{Lease, LeftId, RightId, StartMsg} = test_split_prepare(DHTNode),
log:log("left and right-id:~w~n~w~n", [LeftId, RightId]),
gen_component:bp_del(DHTNode, block_split_request),
%% step2
split_helper_do_step(DHTNode, split_reply_step1, ModifyBeforeStep2, LeftId),
log:log("finished step2"),
%% step3
split_helper_do_step(DHTNode, split_reply_step2, ModifyBeforeStep3, RightId),
log:log("finished step3"),
split_helper_do_step(DHTNode, split_reply_step3, ModifyBeforeStep4, LeftId),
log:log("finished step4"),
wait_for_split_message(DHTNode, split_reply_step4),
log:log("got split message"),
ct:pal("wait left"),
WaitLeftLease(LeftId),
ct:pal("wait right"),
WaitRightLease(RightId, Lease),
FinalWaitF().
split_helper_do_step(DHTNode, StepTag, ModifyBeforeStep, Id) ->
log:pal("doing ~p", [StepTag]),
ReplyMsg = receive
M = {l_on_cseq, StepTag, Lease, _R1, _R2, _ReplyTo, _PostAux, _Resp} ->
M
end,
ModifyBeforeStep(Id, Lease, DHTNode),
gen_component:bp_del(DHTNode, StepTag),
watch_message(DHTNode, ReplyMsg).
wait_for_split_message(DHTNode, StepTag) ->
log:pal("waiting for ~p", [StepTag]),
receive
M = {l_on_cseq, StepTag, _Lease, _R1, _R2, _ReplyTo, _PostAux, _Resp} ->
gen_component:bp_del(DHTNode, StepTag),
watch_message(DHTNode, M)
end.
wait_for_split_success_msg() ->
log:pal("wait_for_split_success_msg() ~p", [self()]),
receive
{split, success, _, _} ->
ok
end.
wait_for_split_fail_msg() ->
receive
{split, fail, _} ->
ok
end.
test_renew_helper(_Config, ModifyF, WaitF) ->
DHTNode = pid_groups:find_a(dht_node),
M = {l_on_cseq, renew, Old, _Mode} = lease_helper:intercept_lease_renew(DHTNode),
Id = l_on_cseq:get_id(Old),
New = ModifyF(Old),
l_on_cseq:unittest_lease_update(Old, New, active, DHTNode),
wait_for_lease(New),
%% now the error handling of lease_renew is going to be tested
comm:send_local(DHTNode, M),
WaitF(Id, Old),
true.
wait_for(F) ->
case F() of
true ->
ok;
false ->
wait_for(F)
end.
wait_for_lease(Lease) ->
Id = l_on_cseq:get_id(Lease),
wait_for_lease_helper(Id, fun (L) -> L == Lease end).
wait_for_lease_version(Id, Epoch, Version) ->
ct:pal("wait_for_lease_version ~p", [Id]),
wait_for_lease_helper(Id,
fun (Lease) ->
ct:pal("want ~p:~p; have ~p:~p", [Epoch, Version, l_on_cseq:get_epoch(Lease), l_on_cseq:get_version(Lease)]),
Epoch == l_on_cseq:get_epoch(Lease)
andalso Version == l_on_cseq:get_version(Lease)
end).
wait_for_lease_owner(Id, NewOwner) ->
wait_for_lease_helper(Id,
fun (Lease) ->
NewOwner == l_on_cseq:get_owner(Lease)
end).
wait_for_lease_helper(Id, F) ->
wait_for(fun () ->
%% comm:send_local(DHTNode, {get_state, comm:this(), lease_list}),
%% {get_state_response, {ActiveList, PassiveList}} ->
%%     {ActiveList, PassiveList}
case l_on_cseq:read(Id) of
{ok, Lease} ->
F(Lease);
_ ->
false
end
end).
get_dht_node_state(Pid, What) ->
comm:send_local(Pid, {get_state, comm:this(), What}),
receive
{get_state_response, Data} ->
Data
end.
get_all_active_leases() ->
[ get_active_lease(DHTNode) || DHTNode <- pid_groups:find_all(dht_node) ].
get_active_lease(Pid) ->
LeaseList = get_dht_node_state(Pid, lease_list),
lease_list:get_active_lease(LeaseList).
wait_for_simple_update(Id, Old) ->
OldVersion = l_on_cseq:get_version(Old),
OldEpoch = l_on_cseq:get_epoch(Old),
wait_for_lease_version(Id, OldEpoch, OldVersion+1).
wait_for_epoch_update(Id, Old) ->
OldEpoch = l_on_cseq:get_epoch(Old),
wait_for_lease_version(Id, OldEpoch+1, 0).
wait_for_delete(Id, _Old) ->
DHTNode = pid_groups:find_a(dht_node),
ct:pal("wait_for_delete ~p", [Id]),
wait_for(fun () ->
LeaseList = get_dht_node_state(DHTNode, lease_list),
L = lease_list:get_active_lease(LeaseList),
case L of
empty ->
true;
_ ->
l_on_cseq:get_id(L) =/= Id
end
end).
wait_for_delete(Id) ->
ct:pal("wait_for_delete ~p", [Id]),
DHTNode = pid_groups:find_a(dht_node),
wait_for(fun () ->
LeaseList = get_dht_node_state(DHTNode, lease_list),
L = lease_list:get_active_lease(LeaseList),
case L of
empty ->
true;
_ ->
l_on_cseq:get_id(L) =/= Id
end
end).
wait_for_number_of_leases(Nr) ->
wait_for(fun() ->
length(get_all_active_leases()) == Nr
end).
watch_message(Pid, Message) ->
gen_component:bp_set_cond(Pid, block_message(self(), Message), watch_message),
comm:send_local(Pid, Message),
receive
{saw_message} ->
_ = gen_component:bp_step(Pid),
gen_component:bp_del(Pid, watch_message),
gen_component:bp_cont(Pid)
end.
intercept_split_request(DHTNode) ->
gen_component:bp_set_cond(DHTNode, block_split_request(self()), block_split_request).
intercept_split_reply(DHTNode, StepTag) ->
gen_component:bp_set_cond(DHTNode, block_split_reply(self(), StepTag), StepTag).
block_message(Pid, WatchedMessage) ->
fun (Message, _State) ->
case Message of
WatchedMessage ->
comm:send_local(Pid, {saw_message}),
true;
_ ->
false
end
end.
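%% Note on the interception helpers below: each returns a gen_component
%% breakpoint condition that forwards a matching message to the test process;
%% block_split_request and block_split_reply additionally answer drop_single,
%% so the dht_node itself does not handle the intercepted message.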
block_split_request(Pid) ->
fun (Message, _State) ->
case Message of
{l_on_cseq, split, _Lease, _R1, _R2, _ReplyTo, _PostAux} ->
comm:send_local(Pid, Message),
drop_single;
_ ->
false
end
end.
block_split_reply(Pid, StepTag) ->
fun (Message, _State) ->
case Message of
{l_on_cseq, StepTag, _Lease, _R1, _R2, _ReplyTo, _PostAux, _Resp} ->
comm:send_local(Pid, Message),
drop_single;
_ ->
false
end
end.
obfuscated_intervals_all() ->
[{'(',0,5,']'},
{0},
{'(',5,340282366920938463463374607431768211456,')'}
].
|
d43dcbca4ccabfa0abc6ba667f63995308c0326beff0036e0744d01e4b51561a | snoyberg/why-you-should-use-stm | solution.hs | #!/usr/bin/env stack
-- stack --resolver lts-13.21 script
import Control.Concurrent.STM
import System.IO.Unsafe -- yeah baby!
counter :: TVar Int
counter = unsafePerformIO $ newTVarIO 0
{-# NOINLINE counter #-}
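-- NOINLINE matters here: without it GHC could inline 'counter' and re-run the
-- unsafePerformIO action, creating more than one TVar instead of one shared
-- global counter.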
main :: IO ()
main = do
atomically $ modifyTVar' counter (+ 1)
atomically $ modifyTVar' counter (+ 1)
atomically $ modifyTVar' counter (+ 1)
count <- atomically $ readTVar counter
putStrLn $ "The count is: " ++ show count
| null | https://raw.githubusercontent.com/snoyberg/why-you-should-use-stm/adf3366aebd6daf1dd702ed4cad1c2303d296afc/exercises/09-global-variables/solution.hs | haskell | stack --resolver lts-13.21 script
yeah baby! | #!/usr/bin/env stack
import Control.Concurrent.STM
import System.IO.Unsafe
counter :: TVar Int
counter = unsafePerformIO $ newTVarIO 0
{-# NOINLINE counter #-}
main :: IO ()
main = do
atomically $ modifyTVar' counter (+ 1)
atomically $ modifyTVar' counter (+ 1)
atomically $ modifyTVar' counter (+ 1)
count <- atomically $ readTVar counter
putStrLn $ "The count is: " ++ show count
|
bbdf3e640bc925e8655dd574e98bfd922dd724339679df439d4581b2942a8bd7 | openvstorage/alba | range_query_args2.ml |
(*
Copyright (C) iNuron -
This file is part of Open vStorage. For license information, see <LICENSE.txt>
*)
open! Prelude
module RangeQueryArgs =
struct
include Range_query_args.RangeQueryArgs
(* these serialization functions eventually depend on
* ctypes, and can't be loaded in the arakoon plugin,
* hence the separate file... *)
let from_buffer' order a_from buf =
let module Llio = Llio2.ReadBuffer in
let first = a_from buf in
let finc = Llio.bool_from buf in
let last =
Llio.option_from
(Llio.pair_from
a_from
Llio.bool_from)
buf
in
let reverse, max =
match order with
| `MaxThenReverse ->
let max = Llio.int_from buf in
let reverse = Llio.bool_from buf in
reverse, max
| `ReverseThenMax ->
let reverse = Llio.bool_from buf in
let max = Llio.int_from buf in
reverse, max
in
{ first; finc; last; reverse; max }
let to_buffer' order a_to buf t =
let module Llio = Llio2.WriteBuffer in
let () = a_to buf t.first in
Llio.bool_to buf t.finc;
Llio.option_to (Llio.pair_to
a_to
Llio.bool_to)
buf
t.last;
match order with
| `MaxThenReverse ->
Llio.int_to buf t.max;
Llio.bool_to buf t.reverse
| `ReverseThenMax ->
Llio.bool_to buf t.reverse;
Llio.int_to buf t.max
let deser' order (a_from, a_to) = from_buffer' order a_from, to_buffer' order a_to
end
| null | https://raw.githubusercontent.com/openvstorage/alba/459bd459335138d6b282d332fcff53a1b4300c29/ocaml/src/range_query_args2.ml | ocaml | these serialization functions eventually depend on
* ctypes, and can't be loaded in the arakoon plugin,
* hence the separate file... |
Copyright ( C ) iNuron -
This file is part of Open vStorage . For license information , see < LICENSE.txt >
Copyright (C) iNuron -
This file is part of Open vStorage. For license information, see <LICENSE.txt>
*)
open! Prelude
module RangeQueryArgs =
struct
include Range_query_args.RangeQueryArgs
let from_buffer' order a_from buf =
let module Llio = Llio2.ReadBuffer in
let first = a_from buf in
let finc = Llio.bool_from buf in
let last =
Llio.option_from
(Llio.pair_from
a_from
Llio.bool_from)
buf
in
let reverse, max =
match order with
| `MaxThenReverse ->
let max = Llio.int_from buf in
let reverse = Llio.bool_from buf in
reverse, max
| `ReverseThenMax ->
let reverse = Llio.bool_from buf in
let max = Llio.int_from buf in
reverse, max
in
{ first; finc; last; reverse; max }
let to_buffer' order a_to buf t =
let module Llio = Llio2.WriteBuffer in
let () = a_to buf t.first in
Llio.bool_to buf t.finc;
Llio.option_to (Llio.pair_to
a_to
Llio.bool_to)
buf
t.last;
match order with
| `MaxThenReverse ->
Llio.int_to buf t.max;
Llio.bool_to buf t.reverse
| `ReverseThenMax ->
Llio.bool_to buf t.reverse;
Llio.int_to buf t.max
let deser' order (a_from, a_to) = from_buffer' order a_from, to_buffer' order a_to
end
|
615301d1da437f7e479d4128a259ea355cdd71974395bcc5c4d43063a3f3740e | spell-music/csound-expression | Utilities.hs | module Csound.Typed.Plugins.Utilities(
delay1k
) where
import Csound.Dynamic
import Csound.Typed.Types.Prim
import Csound.Typed.GlobalState
import qualified Csound.Typed.GlobalState.Elements as E(delay1kPlugin)
-------------------------------------------------------------------------------
-- | Delay a control signal by single sample.
delay1k :: Sig -> Sig
delay1k ain = fromGE $ do
addUdoPlugin E.delay1kPlugin
f <$> toGE ain
where f x = opcs "Delay1k" [(Kr, [Kr])] [x]
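-- A usage sketch (added for illustration; not part of the original module):
-- mixing a control signal with its one-sample-delayed copy, a building block
-- for simple smoothing or feedback patterns.
sumWithPrev :: Sig -> Sig
sumWithPrev k = k + delay1k k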
| null | https://raw.githubusercontent.com/spell-music/csound-expression/29c1611172153347b16d0b6b133e4db61a7218d5/csound-expression-typed/src/Csound/Typed/Plugins/Utilities.hs | haskell | -----------------------------------------------------------------------------
| Delay a control signal by single sample. | module Csound.Typed.Plugins.Utilities(
delay1k
) where
import Csound.Dynamic
import Csound.Typed.Types.Prim
import Csound.Typed.GlobalState
import qualified Csound.Typed.GlobalState.Elements as E(delay1kPlugin)
delay1k :: Sig -> Sig
delay1k ain = fromGE $ do
addUdoPlugin E.delay1kPlugin
f <$> toGE ain
where f x = opcs "Delay1k" [(Kr, [Kr])] [x]
|
d5541ecb4c1c8c92e33c38786276aa90657098ba049e58426ab59439d6422dfc | gonimo/gonimo | Random.hs | module Utils.System.Random where
import System.Random
import Control.Monad.State
-- Warning: an empty list leads to a crash.
-- TODO: make more failsafe randomL/randomLs functions.
randomL :: RandomGen g => [a] -> g -> (a, g)
randomL [] _ = error "randomL: empty list"
randomL lst gen =
let len = length lst
(idx, gen') = randomR (0, len-1) gen
in (lst !! idx, gen')
randomLs :: RandomGen g => [[a]] -> g -> ([a], g)
randomLs =
runState . sequence . map (state . randomL)
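-- Usage sketch (added for illustration; the seed is arbitrary and every inner
-- list must be non-empty, see the warning above):
pickOnePerList :: [[a]] -> [a]
pickOnePerList xss = fst (randomLs xss (mkStdGen 42))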
| null | https://raw.githubusercontent.com/gonimo/gonimo/f4072db9e56f0c853a9f07e048e254eaa671283b/back/src/Utils/System/Random.hs | haskell | Warning: an empty list leads to a crash.
TODO: make more failsafe randomL/randomLs functions. | module Utils.System.Random where
import System.Random
import Control.Monad.State
randomL :: RandomGen g => [a] -> g -> (a, g)
randomL [] _ = error "randomL: empty list"
randomL lst gen =
let len = length lst
(idx, gen') = randomR (0, len-1) gen
in (lst !! idx, gen')
randomLs :: RandomGen g => [[a]] -> g -> ([a], g)
randomLs =
runState . sequence . map (state . randomL)
|
e6c19a6f84fb44b3dab7d48249b6bb3bb81794303f528d2cad868cfaece61364 | haskell-repa/repa | Maybe.hs |
-- | Conversions for "Data.Maybe" wrapped formats.
module Data.Repa.Convert.Format.Maybe
( MaybeChars (..)
, MaybeBytes (..))
where
import Data.Repa.Convert.Internal.Format
import Data.Repa.Convert.Internal.Packable
import Data.Repa.Convert.Format.Bytes
import Data.Word
import GHC.Exts
import Prelude hiding (fail)
import Data.ByteString (ByteString)
import qualified Data.ByteString.Char8 as BS
import qualified Data.ByteString.Internal as BS
import qualified Foreign.Storable as F
import qualified Foreign.ForeignPtr as F
import qualified Foreign.Ptr as F
#include "repa-convert.h"
--------------------------------------------------------------------------------------
-- | Maybe a raw list of characters, or something else.
data MaybeChars f = MaybeChars String f deriving (Eq, Show)
instance Format f => Format (MaybeChars f) where
type Value (MaybeChars f)
= Maybe (Value f)
fieldCount _
= 1
# INLINE fieldCount #
minSize (MaybeChars str f)
= minSize (MaybeBytes (BS.pack str) f)
{-# INLINE minSize #-}
fixedSize (MaybeChars str f)
= fixedSize (MaybeBytes (BS.pack str) f)
# INLINE fixedSize #
packedSize (MaybeChars str f)
= kk
where !bs = BS.pack str
kk mv
= packedSize (MaybeBytes bs f) mv
# INLINE kk #
# INLINE packedSize #
instance Packable f
=> Packable (MaybeChars f) where
Convert the Nothing string to a ByteString which has a better runtime representation .
-- We do this before accepting the actual value, so the conversion happens only
-- once, instead of when we pack every value.
packer (MaybeChars str f)
= kk
where !bs = BS.pack str
kk x start k
= packer (MaybeBytes bs f) x start k
# INLINE kk #
{-# INLINE packer #-}
instance Unpackable f
=> Unpackable (MaybeChars f) where
As above , convert the Nothing string to a ByteString which has a better runtime
-- representation.
unpacker (MaybeChars str f)
= kk
where !bs = BS.pack str
kk start end stop fail eat
= unpacker (MaybeBytes bs f) start end stop fail eat
# INLINE kk #
# INLINE unpacker #
-------------------------------------------------------------------------------------- MaybeBytes
-- | Maybe a raw sequence of bytes, or something else.
data MaybeBytes f = MaybeBytes ByteString f deriving (Eq, Show)
instance Format f => Format (MaybeBytes f) where
type Value (MaybeBytes f)
= Maybe (Value f)
fieldCount _
= 1
# INLINE fieldCount #
minSize (MaybeBytes str f)
= let !(I# ms) = minSize f
in I# (minSize_MaybeBytes str ms)
{-# INLINE minSize #-}
fixedSize (MaybeBytes str f)
= fixedSize_MaybeBytes str (fixedSize f)
# INLINE fixedSize #
packedSize (MaybeBytes str f) mv
= case mv of
Nothing -> Just $ BS.length str
Just v -> packedSize f v
# NOINLINE packedSize #
NOINLINE to hide the case from the simplifier .
Minsize , hiding the case expression from the simplifier .
minSize_MaybeBytes :: ByteString -> Int# -> Int#
minSize_MaybeBytes s i
= case min (BS.length s) (I# i) of
I# i' -> i'
# NOINLINE minSize_MaybeBytes #
Fixedsize , hiding the case expression from the simplifier .
fixedSize_MaybeBytes :: ByteString -> Maybe Int -> Maybe Int
fixedSize_MaybeBytes s r
= case r of
Nothing -> Nothing
Just sf -> if BS.length s == sf
then Just sf
else Nothing
# NOINLINE fixedSize_MaybeBytes #
NOINLINE to hide the case from the simplifier .
instance Packable f
=> Packable (MaybeBytes f) where
packer (MaybeBytes str f) mv start k
= case mv of
Nothing -> packer VarBytes str start k
Just v -> packer f v start k
# NOINLINE packer #
-- We're NOINLINEing this so we don't duplicate the code for the continuation.
-- It would be better to use an Either format and use that to express the branch.
instance Unpackable f
=> Unpackable (MaybeBytes f) where
unpacker (MaybeBytes (BS.PS bsFptr bsStart bsLen) f)
start end stop fail eat
= F.withForeignPtr bsFptr
$ \bsPtr_
-> let
-- Length of the input buffer.
!lenBuf = F.minusPtr (pw8 end) (pw8 start)
-- Pointer to active bytes in Nothing string.
!bsPtr = F.plusPtr bsPtr_ bsStart
-- Check for the Nothing string,
We do an early exit , bailing out on the first byte that does n't match .
-- If this isn't the Nothing string then we need to unpack the inner format.
checkNothing !ix
-- Matched the complete Nothing string.
| ix >= bsLen
= do -- Give the continuation the starting pointer for the next field.
let !(Ptr start') = F.plusPtr (pw8 start) ix
eatIt start' Nothing
-- Hit the end of the buffer and the Nothing string itself is empty,
-- which we count as detecting the Nothing string.
| bsLen == 0
, ix >= lenBuf
= do let !(Ptr start') = F.plusPtr (pw8 start) ix
eatIt start' Nothing
-- Hit the end of the buffer before matching the Nothing string.
| ix >= lenBuf
= unpackInner
-- Check if the next byte is the next byte in the Nothing string.
| otherwise
= do !x <- F.peekByteOff (pw8 start) ix
if stop x
then unpackInner
else do
!x' <- F.peekByteOff bsPtr ix
if x /= x'
then unpackInner
else checkNothing (ix + 1)
unpackInner
= unpacker f start end stop fail
$ \addr x -> eatIt addr (Just x)
# NOINLINE unpackInner #
eatIt addr val
= eat addr val
# NOINLINE eatIt #
NOINLINE so we do n't duplicate the continuation .
in checkNothing 0
# INLINE unpacker #
pw8 :: Addr# -> Ptr Word8
pw8 addr = Ptr addr
# INLINE pw8 #
| null | https://raw.githubusercontent.com/haskell-repa/repa/c867025e99fd008f094a5b18ce4dabd29bed00ba/repa-convert/Data/Repa/Convert/Format/Maybe.hs | haskell | | Conversions for "Data.Maybe" wrapped formats.
------------------------------------------------------------------------------------
| Maybe a raw list of characters, or something else.
# INLINE minSize #
We do this before accepting the actual value, so the conversion happens only
once, instead of when we pack every value.
# INLINE packer #
representation.
------------------------------------------------------------------------------------ MaybeBytes
| Maybe a raw sequence of bytes, or something else.
# INLINE minSize #
We're NOINLINEing this so we don't duplicate the code for the continuation.
It would be better to use an Either format and use that to express the branch.
Length of the input buffer.
Pointer to active bytes in Nothing string.
Check for the Nothing string,
If this isn't the Nothing string then we need to unpack the inner format.
Matched the complete Nothing string.
Give the continuation the starting pointer for the next field.
Hit the end of the buffer and the Nothing string itself is empty,
which we count as detecting the Nothing string.
Hit the end of the buffer before matching the Nothing string.
Check if the next byte is the next byte in the Nothing string. |
module Data.Repa.Convert.Format.Maybe
( MaybeChars (..)
, MaybeBytes (..))
where
import Data.Repa.Convert.Internal.Format
import Data.Repa.Convert.Internal.Packable
import Data.Repa.Convert.Format.Bytes
import Data.Word
import GHC.Exts
import Prelude hiding (fail)
import Data.ByteString (ByteString)
import qualified Data.ByteString.Char8 as BS
import qualified Data.ByteString.Internal as BS
import qualified Foreign.Storable as F
import qualified Foreign.ForeignPtr as F
import qualified Foreign.Ptr as F
#include "repa-convert.h"
data MaybeChars f = MaybeChars String f deriving (Eq, Show)
instance Format f => Format (MaybeChars f) where
type Value (MaybeChars f)
= Maybe (Value f)
fieldCount _
= 1
# INLINE fieldCount #
minSize (MaybeChars str f)
= minSize (MaybeBytes (BS.pack str) f)
fixedSize (MaybeChars str f)
= fixedSize (MaybeBytes (BS.pack str) f)
# INLINE fixedSize #
packedSize (MaybeChars str f)
= kk
where !bs = BS.pack str
kk mv
= packedSize (MaybeBytes bs f) mv
# INLINE kk #
# INLINE packedSize #
instance Packable f
=> Packable (MaybeChars f) where
Convert the Nothing string to a ByteString which has a better runtime representation .
packer (MaybeChars str f)
= kk
where !bs = BS.pack str
kk x start k
= packer (MaybeBytes bs f) x start k
# INLINE kk #
instance Unpackable f
=> Unpackable (MaybeChars f) where
As above , convert the Nothing string to a ByteString which has a better runtime
unpacker (MaybeChars str f)
= kk
where !bs = BS.pack str
kk start end stop fail eat
= unpacker (MaybeBytes bs f) start end stop fail eat
# INLINE kk #
# INLINE unpacker #
data MaybeBytes f = MaybeBytes ByteString f deriving (Eq, Show)
instance Format f => Format (MaybeBytes f) where
type Value (MaybeBytes f)
= Maybe (Value f)
fieldCount _
= 1
# INLINE fieldCount #
minSize (MaybeBytes str f)
= let !(I# ms) = minSize f
in I# (minSize_MaybeBytes str ms)
fixedSize (MaybeBytes str f)
= fixedSize_MaybeBytes str (fixedSize f)
# INLINE fixedSize #
packedSize (MaybeBytes str f) mv
= case mv of
Nothing -> Just $ BS.length str
Just v -> packedSize f v
# NOINLINE packedSize #
NOINLINE to hide the case from the simplifier .
Minsize , hiding the case expression from the simplifier .
minSize_MaybeBytes :: ByteString -> Int# -> Int#
minSize_MaybeBytes s i
= case min (BS.length s) (I# i) of
I# i' -> i'
# NOINLINE minSize_MaybeBytes #
Fixedsize , hiding the case expression from the simplifier .
fixedSize_MaybeBytes :: ByteString -> Maybe Int -> Maybe Int
fixedSize_MaybeBytes s r
= case r of
Nothing -> Nothing
Just sf -> if BS.length s == sf
then Just sf
else Nothing
# NOINLINE fixedSize_MaybeBytes #
NOINLINE to hide the case from the simplifier .
instance Packable f
=> Packable (MaybeBytes f) where
packer (MaybeBytes str f) mv start k
= case mv of
Nothing -> packer VarBytes str start k
Just v -> packer f v start k
# NOINLINE packer #
instance Unpackable f
=> Unpackable (MaybeBytes f) where
unpacker (MaybeBytes (BS.PS bsFptr bsStart bsLen) f)
start end stop fail eat
= F.withForeignPtr bsFptr
$ \bsPtr_
-> let
!lenBuf = F.minusPtr (pw8 end) (pw8 start)
!bsPtr = F.plusPtr bsPtr_ bsStart
We do an early exit , bailing out on the first byte that does n't match .
checkNothing !ix
| ix >= bsLen
let !(Ptr start') = F.plusPtr (pw8 start) ix
eatIt start' Nothing
| bsLen == 0
, ix >= lenBuf
= do let !(Ptr start') = F.plusPtr (pw8 start) ix
eatIt start' Nothing
| ix >= lenBuf
= unpackInner
| otherwise
= do !x <- F.peekByteOff (pw8 start) ix
if stop x
then unpackInner
else do
!x' <- F.peekByteOff bsPtr ix
if x /= x'
then unpackInner
else checkNothing (ix + 1)
unpackInner
= unpacker f start end stop fail
$ \addr x -> eatIt addr (Just x)
# NOINLINE unpackInner #
eatIt addr val
= eat addr val
# NOINLINE eatIt #
NOINLINE so we do n't duplicate the continuation .
in checkNothing 0
# INLINE unpacker #
pw8 :: Addr# -> Ptr Word8
pw8 addr = Ptr addr
# INLINE pw8 #
|
3b0564ed158dc81bc930b892c1484cd37f40d9a7294e6c05f552e124146dc360 | fhunleth/relsync | target_syncer_sup.erl | Copyright 2014
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
%% You may obtain a copy of the License at
%%
%%     http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing, software
%% distributed under the License is distributed on an "AS IS" BASIS,
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
%%% @doc
%%% This module provides a supervisor for the target_syncer
%%% module that does all of the syncing work on the remote.
%%% @end
-module(target_syncer_sup).
-behaviour(supervisor).
%% API
-export([start_link/0, start_child/1]).
%% Supervisor callbacks
-export([init/1]).
-define(SERVER, ?MODULE).
%%%===================================================================
%%% API functions
%%%===================================================================
%%--------------------------------------------------------------------
%% @doc
%% Starts the supervisor
%%
%% start_link() -> {ok, Pid} | ignore | {error, Error}
%% @end
%%--------------------------------------------------------------------
start_link() ->
supervisor:start_link({local, ?SERVER}, ?MODULE, []).
-spec start_child(atom()) ->
{ok, undefined | pid()} | {ok, undefined | pid(), _} |
{error, _}.
start_child(Node) ->
% Make sure that the code is up to date on the remote
{Mod, Bin, File} = code:get_object_code(target_syncer),
{module, Mod} = rpc:call(Node, code, load_binary, [Mod, File, Bin], 5000),
% Start the supervisor
AChild = {{target_syncer, Node},
{target_syncer, start_link, [Node]},
transient, 2000, worker, [target_syncer]},
supervisor:start_child(?SERVER, AChild).
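%% Example (illustrative only; the node name below is made up): once the
%% supervisor is running and the remote node is connected, a per-node syncer
%% can be started with
%%   {ok, _Pid} = target_syncer_sup:start_child('relsync@target').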
%%%===================================================================
%%% Supervisor callbacks
%%%===================================================================
%%--------------------------------------------------------------------
%% @private
%% @doc
%% Whenever a supervisor is started using supervisor:start_link/[2,3],
%% this function is called by the new process to find out about
%% restart strategy, maximum restart frequency and child
%% specifications.
%%
%% init(Args) -> {ok, {SupFlags, [ChildSpec]}} |
%% ignore |
%% {error, Reason}
%% @end
%%--------------------------------------------------------------------
init([]) ->
RestartStrategy = one_for_one,
MaxRestarts = 1000,
MaxSecondsBetweenRestarts = 3600,
SupFlags = {RestartStrategy, MaxRestarts, MaxSecondsBetweenRestarts},
{ok, {SupFlags, []}}.
%%%===================================================================
%%% Internal functions
%%%===================================================================
| null | https://raw.githubusercontent.com/fhunleth/relsync/4f49df183fa4b5cff5f7afb4818d907b65d9ab37/src/target_syncer_sup.erl | erlang |
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
-2.0
Unless required by applicable law or agreed to in writing, software
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
@doc
module that does all of the syncing work on the remote.
@end
API
Supervisor callbacks
===================================================================
API functions
===================================================================
--------------------------------------------------------------------
@doc
Starts the supervisor
@end
--------------------------------------------------------------------
Make sure that the code is up to date on the remote
Start the supervisor
===================================================================
Supervisor callbacks
===================================================================
--------------------------------------------------------------------
@doc
Whenever a supervisor is started using supervisor:start_link/[2,3],
this function is called by the new process to find out about
restart strategy, maximum restart frequency and child
specifications.
ignore |
{error, Reason}
@end
--------------------------------------------------------------------
===================================================================
=================================================================== | Copyright 2014
-module(target_syncer_sup).
-behaviour(supervisor).
-export([start_link/0, start_child/1]).
-export([init/1]).
-define(SERVER, ?MODULE).
start_link() ->
supervisor:start_link({local, ?SERVER}, ?MODULE, []).
-spec start_child(atom()) ->
{ok, undefined | pid()} | {ok, undefined | pid(), _} |
{error, _}.
start_child(Node) ->
{Mod, Bin, File} = code:get_object_code(target_syncer),
{module, Mod} = rpc:call(Node, code, load_binary, [Mod, File, Bin], 5000),
AChild = {{target_syncer, Node},
{target_syncer, start_link, [Node]},
transient, 2000, worker, [target_syncer]},
supervisor:start_child(?SERVER, AChild).
@private
) - > { ok , { SupFlags , [ ChildSpec ] } } |
init([]) ->
RestartStrategy = one_for_one,
MaxRestarts = 1000,
MaxSecondsBetweenRestarts = 3600,
SupFlags = {RestartStrategy, MaxRestarts, MaxSecondsBetweenRestarts},
{ok, {SupFlags, []}}.
Internal functions
|
2d531985587354a34639e510ffb77c45af693a50e632eb95113ab1d44eb705b8 | geophf/1HaskellADay | Exercise.hs | module Y2016.M08.D01.Exercise where
{--
The Numbers Game!
So, you have a number of variables of integer values that sum to a number. These
numbers must all be different.
Do it, to it.
Or, more specifically: Given a number of 'slots' and a value these slots, when
filled, sum to, give the value of these slots:
--}
summer :: Int -> Int -> [[Int]]
summer sum slotCount = undefined
-- Note the return value: there may be multiple solutions.
-- Note also [1,2,3] for the sum of 6 is a different value than [3,2,1]
-- Question: What are the solutions to the pairs (sum,slots):
-- (4,2), (10,4), (17,2), (12,2), (13,4)
-- Answer (for (4,2)) is: [[1,3],[3,1]]
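-- One possible direction (a sketch added for illustration, not part of the
-- original exercise): enumerate ordered picks of distinct positive integers
-- and keep those that reach the target sum. For (4,2) it yields [[1,3],[3,1]].
summerSketch :: Int -> Int -> [[Int]]
summerSketch total slots = go total slots []
  where
    go t 1 used = [[t] | t > 0, t `notElem` used]
    go t n used =
      [ x : rest
      | x <- [1 .. t - 1]
      , x `notElem` used
      , rest <- go (t - x) (n - 1) (x : used)
      ]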
| null | https://raw.githubusercontent.com/geophf/1HaskellADay/514792071226cd1e2ba7640af942667b85601006/exercises/HAD/Y2016/M08/D01/Exercise.hs | haskell | -
The Numbers Game!
So, you have a number of variables of integer values that sum to a number. These
numbers must all be different.
Do it, to it.
Or, more specifically: Given a number of 'slots' and a value these slots, when
filled, sum to, give the value of these slots:
-
Note the return value: there may be multiple solutions.
Question: What are the solutions to the pairs (sum,slots): | module Y2016.M08.D01.Exercise where
summer :: Int -> Int -> [[Int]]
summer sum slotCount = undefined
Note also [ 1,2,3 ] for the sum of 6 is a different value than [ 3,2,1 ]
( 4,2 ) , ( 10,4 ) , ( 17,2 ) , ( 12,2 ) , ( 13,4 )
Answer ( for ( 4,2 ) ) is : [ [ 1,3],[3,1 ] ]
|
2e8257c5effc878a722ca4bc2985f6fd32ca4351dd958007b59d64b0df8fdd37 | TrustInSoft/tis-interpreter | mem_exec2.ml | Modified by TrustInSoft
(**************************************************************************)
(* *)
(*  This file is part of Frama-C.                                         *)
(* *)
(*  Copyright (C) 2007-2015                                               *)
(*    CEA (Commissariat à l'énergie atomique et aux énergies              *)
(* alternatives) *)
(* *)
(* you can redistribute it and/or modify it under the terms of the GNU *)
(*  Lesser General Public License as published by the Free Software       *)
(*  Foundation, version 2.1.                                              *)
(* *)
(* It is distributed in the hope that it will be useful, *)
(* but WITHOUT ANY WARRANTY; without even the implied warranty of *)
(* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the *)
(* GNU Lesser General Public License for more details. *)
(* *)
(*  See the GNU Lesser General Public License version 2.1                 *)
(*  for more details (enclosed in the file licenses/LGPLv2.1).            *)
(* *)
(**************************************************************************)
module type Domain = sig
include Datatype.S_with_collections
type summary
module Summary : Datatype.S with type t = summary
val filter_by_bases: Base.Hptset.t -> t -> t
val reuse: current_input:t -> previous_output:t -> t
end
(* Reference filled in by the callwise-inout callback *)
module ResultFromCallback =
State_builder.Option_ref(Datatype.Pair(Value_types.Callstack)(Inout_type))
(struct
let dependencies = [Db.Value.self]
let name = "Mem_exec2.ResultFromCallback"
end)
let register_callback () =
if Value_parameters.MemExecAll.get () then
Db.Operational_inputs.Record_Inout_Callbacks.extend_once
(fun (_stack, _inout as v) ->
ResultFromCallback.set v)
let () = Cmdline.run_after_configuring_stage register_callback
module SaveCounter =
State_builder.SharedCounter(struct let name = "Mem_exec2.save_counter" end)
let new_counter, current_counter =
let cur = ref (-1) in
(fun () -> cur := SaveCounter.next (); !cur),
(fun () -> !cur)
let cleanup_ref = ref (fun () -> ())
(* TODO: it would be great to clear also the tables within the plugins. Export
self and add dependencies *)
let cleanup_results () =
ResultFromCallback.clear ();
!cleanup_ref ()
exception TooImprecise
(* Extract all the bases from a zone *)
let bases = function
| Locations.Zone.Top (Base.SetLattice.Top, _) -> raise TooImprecise
| Locations.Zone.Top (Base.SetLattice.Set s, _) -> s
| Locations.Zone.Map m -> Base.Hptset.from_shape (Locations.Zone.shape m)
let counter = ref 0
module Make
(Value : Datatype.S)
(Domain : Domain)
= struct
incr counter;
module ReturnedValue =
Datatype.Triple
(Datatype.Option (Value)) (* None is bottom. *)
(Datatype.Bool) (* initialized *)
(Datatype.Bool) (* escaping *)
module CallOutput =
Datatype.List
(Datatype.Triple
(Domain) (Domain.Summary) (Datatype.Option (ReturnedValue)))
module StoredResult =
Datatype.Pair
(CallOutput)
(Datatype.Int) (* Call number, for plugins *)
(* Map from input states to outputs (summary and state). *)
module CallEffect = Domain.Hashtbl.Make (StoredResult)
(* Map from useful input bases to call effects. *)
module InputBasesToCallEffect = Base.Hptset.Hashtbl.Make (CallEffect)
(* List of the arguments of a call. *)
module ActualArgs =
Datatype.List_with_collections (Datatype.Option (Value)) (* None is bottom *)
(struct let module_name = "Mem_exec2.ActualArgs("
^ string_of_int !counter ^ ")"
end)
(* Map from the arguments of a call to stored results. *)
module ArgsToStoredCalls = ActualArgs.Map.Make (InputBasesToCallEffect)
module PreviousCalls =
Kernel_function.Make_Table
(ArgsToStoredCalls)
(struct
let size = 17
let dependencies = [Db.Value.self]
let name = "Mem_exec2.PreviousCalls(" ^ string_of_int !counter ^ ")"
end)
let cleanup = !cleanup_ref
let () = cleanup_ref := fun () -> cleanup (); PreviousCalls.clear ()
let result_to_output result =
let open Eval in
let return = result.returned_value in
let returned_value = match return with
| None -> None
| Some return ->
let value = match return.v with
| `Bottom -> None
| `Value v -> Some v
in
Some (value, return.initialized, return.escaping)
in
result.post_state, result.summary, returned_value
let output_to_result output =
let open Eval in
let post_state, summary, return = output in
let returned_value = match return with
| None -> None
| Some (value, initialized, escaping) ->
Some
{ v = (match value with None -> `Bottom | Some v -> `Value v);
initialized;
escaping;
}
in
{post_state; summary; returned_value}
let map_to_outputs f =
List.map
(fun ((state: Domain.t),
(summary: Domain.Summary.t),
(value : ReturnedValue.t option)) ->
(f state, summary, value))
(** [diff_base_full_zone bases zones] remove from the set of bases [bases]
those of which all bits are present in [zones] *)
let diff_base_full_zone =
let cache = Hptmap_sig.PersistentCache "Mem_exec2.diff_base_full_zone" in
let empty_left _ = Base.Hptset.empty (* nothing left to clear *) in
let empty_right v = v (* return all bases unchanged *) in
(* Check whether [range] covers the validity of [b]. If so, remove [b]
(hence, return an empty set). Otherwise, keep [b]. Variable bases are
always kept, because they may be changed into weak variables later.
This is specific to the way this function is used later in this file. *)
let both b range = begin
match Base.validity b with
| Base.Invalid -> assert false
| Base.Empty -> Base.Hptset.empty
| Base.Variable _ -> Base.Hptset.singleton b
| Base.Known (min, max) | Base.Unknown (min, _, max) ->
match Int_Intervals.project_singleton range with
| Some (min', max') ->
if Integer.equal min min' && Integer.equal max max' then
Base.Hptset.empty
else
Base.Hptset.singleton b
| None -> Base.Hptset.singleton b
end in
let join = Base.Hptset.union in
let empty = Base.Hptset.empty in
let f = Base.Hptset.fold2_join_heterogeneous
~cache ~empty_left ~empty_right ~both ~join ~empty
in
fun bases z ->
match z with
| Locations.Zone.Map m -> f bases (Locations.Zone.shape m)
| Locations.Zone.Top _ -> bases (* Never happens anyway *)
let store_computed_call kf input_state args
(call_result: (Domain.t, Domain.Summary.t, Value.t) Eval.call_result) =
match ResultFromCallback.get_option () with
| None -> ()
| Some (_stack, inout) ->
try
let output_bases = bases inout.Inout_type.over_outputs_if_termination
and input_bases = bases inout.Inout_type.over_inputs in
(* There are two strategies to compute the 'inputs' for a memexec
function: either we take all inputs_bases+outputs_bases
(outputs_bases are important because of weak updates), or we
remove the sure outputs from the outputs, as sure outputs by
definition strong updated. The latter will enable memexec to fire
more often, but requires more computations. *)
let remove_sure_outputs = true in
let input_bases =
if remove_sure_outputs then
let uncertain_output_bases =
(* Remove outputs whose base is completely overwritten *)
diff_base_full_zone
output_bases inout.Inout_type.under_outputs_if_termination
in
Base.Hptset.union input_bases uncertain_output_bases
else
Base.Hptset.union input_bases output_bases
in
let state_input = Domain.filter_by_bases input_bases input_state in
(* Outputs bases, that is bases that are copy-pasted, also include
input bases. Indeed, those may get reduced during the call. *)
let all_output_bases =
if remove_sure_outputs
then Base.Hptset.union input_bases output_bases
else input_bases
in
let clear state = Domain.filter_by_bases all_output_bases state in
let call_result = match call_result with
| `Bottom -> []
| `Value list -> list
in
let outputs = List.map result_to_output call_result in
let outputs = map_to_outputs clear outputs in
let call_number = current_counter () in
let map_a =
try PreviousCalls.find kf
with Not_found -> ActualArgs.Map.empty
in
let hkf =
let args =
List.map (function `Bottom -> None | `Value v -> Some v) args in
try ActualArgs.Map.find args map_a
with Not_found ->
let h = Base.Hptset.Hashtbl.create 11 in
let map_a = ActualArgs.Map.add args h map_a in
PreviousCalls.replace kf map_a;
h
in
let hkb =
try Base.Hptset.Hashtbl.find hkf input_bases
with Not_found ->
let h = Domain.Hashtbl.create 11 in
Base.Hptset.Hashtbl.add hkf input_bases h;
h
in
Domain.Hashtbl.add hkb state_input
(outputs, call_number);
ResultFromCallback.clear ()
with
| TooImprecise
| Kernel_function.No_Statement
| Not_found -> ResultFromCallback.clear ()
exception Result_found of CallOutput.t * int
(** Find a previous execution in [map_inputs] that matches [st].
raise [Result_found] when this execution exists, or do nothing. *)
let find_match_in_previous (map_inputs: InputBasesToCallEffect.t) state =
let aux_previous_call binputs hstates =
(* restrict [state] to the inputs of this call *)
let st_filtered = Domain.filter_by_bases binputs state in
try
let outputs, i = Domain.Hashtbl.find hstates st_filtered in
(* We have found a previous execution, in which the outputs are
[outputs]. Copy them in [state] and return this result. *)
let process output =
Domain.reuse ~current_input:state ~previous_output:output in
let outputs = map_to_outputs process outputs in
raise (Result_found (outputs, i))
with Not_found -> ()
in
Base.Hptset.Hashtbl.iter aux_previous_call map_inputs
let reuse_previous_call kf state args =
try
let previous_kf = PreviousCalls.find kf in
let args = List.map (function `Bottom -> None | `Value v -> Some v) args in
let previous = ActualArgs.Map.find args previous_kf in
find_match_in_previous previous state;
None
with
| Not_found -> None
| Result_found (outputs, i) ->
let call_result = List.map output_to_result outputs in
Some (Bottom.bot_of_list call_result, i)
end
(*
Local Variables:
compile-command: "make -C ../../../.."
End:
*)
| null | https://raw.githubusercontent.com/TrustInSoft/tis-interpreter/33132ce4a825494ea48bf2dd6fd03a56b62cc5c3/src/plugins/value/engine/mem_exec2.ml | ocaml | ************************************************************************
alternatives)
you can redistribute it and/or modify it under the terms of the GNU
It is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Lesser General Public License for more details.
************************************************************************
Reference filled in by the callwise-inout callback
TODO: it would be great to clear also the tables within the plugins. Export
self and add dependencies
Extract all the bases from a zone
None is bottom.
initialized
escaping
Call number, for plugins
Map from input states to outputs (summary and state).
Map from useful input bases to call effects.
List of the arguments of a call.
None is bottom
Map from the arguments of a call to stored results.
* [diff_base_full_zone bases zones] remove from the set of bases [bases]
those of which all bits are present in [zones]
nothing left to clear
return all bases unchanged
Check whether [range] covers the validity of [b]. If so, remove [b]
(hence, return an empty set). Otherwise, keep [b]. Variable bases are
always kept, because they may be changed into weak variables later.
This is specific to the way this function is used later in this file.
Never happens anyway
Remove outputs whose base is completely overwritten
Outputs bases, that is bases that are copy-pasted, also include
input bases. Indeed, those may get reduced during the call.
* Find a previous execution in [map_inputs] that matches [st].
raise [Result_found] when this execution exists, or do nothing.
restrict [state] to the inputs of this call
We have found a previous execution, in which the outputs are
[outputs]. Copy them in [state] and return this result.
Local Variables:
compile-command: "make -C ../../../.."
End:
| Modified by TrustInSoft
This file is part of Frama - C.
Copyright ( C ) 2007 - 2015
CEA ( Commissariat à l'énergie atomique et aux énergies
Lesser General Public License as published by the Free Software
Foundation , version 2.1 .
See the GNU Lesser General Public License version 2.1
for more details ( enclosed in the file licenses / LGPLv2.1 ) .
module type Domain = sig
include Datatype.S_with_collections
type summary
module Summary : Datatype.S with type t = summary
val filter_by_bases: Base.Hptset.t -> t -> t
val reuse: current_input:t -> previous_output:t -> t
end
module ResultFromCallback =
State_builder.Option_ref(Datatype.Pair(Value_types.Callstack)(Inout_type))
(struct
let dependencies = [Db.Value.self]
let name = "Mem_exec2.ResultFromCallback"
end)
let register_callback () =
if Value_parameters.MemExecAll.get () then
Db.Operational_inputs.Record_Inout_Callbacks.extend_once
(fun (_stack, _inout as v) ->
ResultFromCallback.set v)
let () = Cmdline.run_after_configuring_stage register_callback
module SaveCounter =
State_builder.SharedCounter(struct let name = "Mem_exec2.save_counter" end)
let new_counter, current_counter =
let cur = ref (-1) in
(fun () -> cur := SaveCounter.next (); !cur),
(fun () -> !cur)
let cleanup_ref = ref (fun () -> ())
let cleanup_results () =
ResultFromCallback.clear ();
!cleanup_ref ()
exception TooImprecise
let bases = function
| Locations.Zone.Top (Base.SetLattice.Top, _) -> raise TooImprecise
| Locations.Zone.Top (Base.SetLattice.Set s, _) -> s
| Locations.Zone.Map m -> Base.Hptset.from_shape (Locations.Zone.shape m)
let counter = ref 0
module Make
(Value : Datatype.S)
(Domain : Domain)
= struct
incr counter;
module ReturnedValue =
Datatype.Triple
module CallOutput =
Datatype.List
(Datatype.Triple
(Domain) (Domain.Summary) (Datatype.Option (ReturnedValue)))
module StoredResult =
Datatype.Pair
(CallOutput)
module CallEffect = Domain.Hashtbl.Make (StoredResult)
module InputBasesToCallEffect = Base.Hptset.Hashtbl.Make (CallEffect)
module ActualArgs =
(struct let module_name = "Mem_exec2.ActualArgs("
^ string_of_int !counter ^ ")"
end)
module ArgsToStoredCalls = ActualArgs.Map.Make (InputBasesToCallEffect)
module PreviousCalls =
Kernel_function.Make_Table
(ArgsToStoredCalls)
(struct
let size = 17
let dependencies = [Db.Value.self]
let name = "Mem_exec2.PreviousCalls(" ^ string_of_int !counter ^ ")"
end)
let cleanup = !cleanup_ref
let () = cleanup_ref := fun () -> cleanup (); PreviousCalls.clear ()
let result_to_output result =
let open Eval in
let return = result.returned_value in
let returned_value = match return with
| None -> None
| Some return ->
let value = match return.v with
| `Bottom -> None
| `Value v -> Some v
in
Some (value, return.initialized, return.escaping)
in
result.post_state, result.summary, returned_value
let output_to_result output =
let open Eval in
let post_state, summary, return = output in
let returned_value = match return with
| None -> None
| Some (value, initialized, escaping) ->
Some
{ v = (match value with None -> `Bottom | Some v -> `Value v);
initialized;
escaping;
}
in
{post_state; summary; returned_value}
let map_to_outputs f =
List.map
(fun ((state: Domain.t),
(summary: Domain.Summary.t),
(value : ReturnedValue.t option)) ->
(f state, summary, value))
let diff_base_full_zone =
let cache = Hptmap_sig.PersistentCache "Mem_exec2.diff_base_full_zone" in
let both b range = begin
match Base.validity b with
| Base.Invalid -> assert false
| Base.Empty -> Base.Hptset.empty
| Base.Variable _ -> Base.Hptset.singleton b
| Base.Known (min, max) | Base.Unknown (min, _, max) ->
match Int_Intervals.project_singleton range with
| Some (min', max') ->
if Integer.equal min min' && Integer.equal max max' then
Base.Hptset.empty
else
Base.Hptset.singleton b
| None -> Base.Hptset.singleton b
end in
let join = Base.Hptset.union in
let empty = Base.Hptset.empty in
let f = Base.Hptset.fold2_join_heterogeneous
~cache ~empty_left ~empty_right ~both ~join ~empty
in
fun bases z ->
match z with
| Locations.Zone.Map m -> f bases (Locations.Zone.shape m)
let store_computed_call kf input_state args
(call_result: (Domain.t, Domain.Summary.t, Value.t) Eval.call_result) =
match ResultFromCallback.get_option () with
| None -> ()
| Some (_stack, inout) ->
try
let output_bases = bases inout.Inout_type.over_outputs_if_termination
and input_bases = bases inout.Inout_type.over_inputs in
(* There are two strategies to compute the 'inputs' for a memexec
function: either we take all inputs_bases+outputs_bases
(outputs_bases are important because of weak updates), or we
remove the sure outputs from the outputs, as sure outputs by
definition strong updated. The latter will enable memexec to fire
more often, but requires more computations. *)
let remove_sure_outputs = true in
let input_bases =
if remove_sure_outputs then
let uncertain_output_bases =
diff_base_full_zone
output_bases inout.Inout_type.under_outputs_if_termination
in
Base.Hptset.union input_bases uncertain_output_bases
else
Base.Hptset.union input_bases output_bases
in
let state_input = Domain.filter_by_bases input_bases input_state in
let all_output_bases =
if remove_sure_outputs
then Base.Hptset.union input_bases output_bases
else input_bases
in
let clear state = Domain.filter_by_bases all_output_bases state in
let call_result = match call_result with
| `Bottom -> []
| `Value list -> list
in
let outputs = List.map result_to_output call_result in
let outputs = map_to_outputs clear outputs in
let call_number = current_counter () in
let map_a =
try PreviousCalls.find kf
with Not_found -> ActualArgs.Map.empty
in
let hkf =
let args =
List.map (function `Bottom -> None | `Value v -> Some v) args in
try ActualArgs.Map.find args map_a
with Not_found ->
let h = Base.Hptset.Hashtbl.create 11 in
let map_a = ActualArgs.Map.add args h map_a in
PreviousCalls.replace kf map_a;
h
in
let hkb =
try Base.Hptset.Hashtbl.find hkf input_bases
with Not_found ->
let h = Domain.Hashtbl.create 11 in
Base.Hptset.Hashtbl.add hkf input_bases h;
h
in
Domain.Hashtbl.add hkb state_input
(outputs, call_number);
ResultFromCallback.clear ()
with
| TooImprecise
| Kernel_function.No_Statement
| Not_found -> ResultFromCallback.clear ()
exception Result_found of CallOutput.t * int
let find_match_in_previous (map_inputs: InputBasesToCallEffect.t) state =
let aux_previous_call binputs hstates =
let st_filtered = Domain.filter_by_bases binputs state in
try
let outputs, i = Domain.Hashtbl.find hstates st_filtered in
let process output =
Domain.reuse ~current_input:state ~previous_output:output in
let outputs = map_to_outputs process outputs in
raise (Result_found (outputs, i))
with Not_found -> ()
in
Base.Hptset.Hashtbl.iter aux_previous_call map_inputs
let reuse_previous_call kf state args =
try
let previous_kf = PreviousCalls.find kf in
let args = List.map (function `Bottom -> None | `Value v -> Some v) args in
let previous = ActualArgs.Map.find args previous_kf in
find_match_in_previous previous state;
None
with
| Not_found -> None
| Result_found (outputs, i) ->
let call_result = List.map output_to_result outputs in
Some (Bottom.bot_of_list call_result, i)
end
|
7500db36eaa180638a4896a13e61af15323efcdc86cfdf063b4daab6b942d448 | hatsugai/SyncStitch | det.ml | open Printf
open Error
open Col
open Event
open EventCol
open Lts
module IntSetHashtbl =
Hashtbl.Make (
struct
type t = IntSet.t
let equal = IntSet.equal
let hash = IntSet.hash
end)
let tau_closure lts ss =
let que = Queue.create () in
let rec loop ss =
if Queue.is_empty que then
ss
else
let s = Queue.take que in
let sprop = lts.v.(s) in
let ss =
IntSet.fold
(fun t ss ->
if IntSet.mem t ss then
ss
else
(Queue.add t que; IntSet.add t ss))
sprop.tau_targets ss
in loop ss
in
IntSet.iter (fun s -> Queue.add s que) ss;
loop ss
let update_evht ht u tt =
if Hashtbl.mem ht u then
Hashtbl.replace ht u (IntSet.union tt (Hashtbl.find ht u))
else
Hashtbl.add ht u tt
let make_next_ss lts =
(fun ss _ _ ->
let ht = Hashtbl.create 0 in
IntSet.iter
(fun s ->
EventMap.iter
(fun u tt ->
match u with
Tau | HiddenEvent _ -> ()
| Tick | Event _ -> update_evht ht u tt)
lts.v.(s).trans_map)
ss;
Hashtbl.fold
(fun u tt trans ->
let tc = tau_closure lts tt in
(u, tc)::trans)
ht [])
let determinize lts =
(if !Option.debug then
printf "determinize %s\n" (Id.show lts.process_name));
calc_tau_targets lts;
let s0 = tau_closure lts (IntSet.singleton 0) in
let next = make_next_ss lts in
let ht =
Bfs.bfs
(fun () -> IntSetHashtbl.create 0)
IntSetHashtbl.replace
IntSetHashtbl.mem
IntSetHashtbl.length
s0 next
in
let v =
Unfold.conv
IntSetHashtbl.length
IntSetHashtbl.iter
IntSetHashtbl.find
s0 ht
in
{
process_name = lts.process_name;
v = v;
minacc_vec = [||];
print_state = (fun sep s -> sprintf "%d" s);
b_tau_targets = false;
b_initials = false;
b_minacc = false;
}
| null | https://raw.githubusercontent.com/hatsugai/SyncStitch/cbf0d28aa77a6f4579233ff64227fd7150e300e0/src/det.ml | ocaml | open Printf
open Error
open Col
open Event
open EventCol
open Lts
module IntSetHashtbl =
Hashtbl.Make (
struct
type t = IntSet.t
let equal = IntSet.equal
let hash = IntSet.hash
end)
let tau_closure lts ss =
let que = Queue.create () in
let rec loop ss =
if Queue.is_empty que then
ss
else
let s = Queue.take que in
let sprop = lts.v.(s) in
let ss =
IntSet.fold
(fun t ss ->
if IntSet.mem t ss then
ss
else
(Queue.add t que; IntSet.add t ss))
sprop.tau_targets ss
in loop ss
in
IntSet.iter (fun s -> Queue.add s que) ss;
loop ss
let update_evht ht u tt =
if Hashtbl.mem ht u then
Hashtbl.replace ht u (IntSet.union tt (Hashtbl.find ht u))
else
Hashtbl.add ht u tt
let make_next_ss lts =
(fun ss _ _ ->
let ht = Hashtbl.create 0 in
IntSet.iter
(fun s ->
EventMap.iter
(fun u tt ->
match u with
Tau | HiddenEvent _ -> ()
| Tick | Event _ -> update_evht ht u tt)
lts.v.(s).trans_map)
ss;
Hashtbl.fold
(fun u tt trans ->
let tc = tau_closure lts tt in
(u, tc)::trans)
ht [])
let determinize lts =
(if !Option.debug then
printf "determinize %s\n" (Id.show lts.process_name));
calc_tau_targets lts;
let s0 = tau_closure lts (IntSet.singleton 0) in
let next = make_next_ss lts in
let ht =
Bfs.bfs
(fun () -> IntSetHashtbl.create 0)
IntSetHashtbl.replace
IntSetHashtbl.mem
IntSetHashtbl.length
s0 next
in
let v =
Unfold.conv
IntSetHashtbl.length
IntSetHashtbl.iter
IntSetHashtbl.find
s0 ht
in
{
process_name = lts.process_name;
v = v;
minacc_vec = [||];
print_state = (fun sep s -> sprintf "%d" s);
b_tau_targets = false;
b_initials = false;
b_minacc = false;
}
|
|
a0f46ac784dfeb7990e2a979d6b6b470ea079b84f1788873e86c3810b02a33b3 | ghcjs/jsaddle-dom | SVGPathSegCurvetoQuadraticSmoothAbs.hs | # LANGUAGE PatternSynonyms #
-- For HasCallStack compatibility
{-# LANGUAGE ImplicitParams, ConstraintKinds, KindSignatures #-}
# OPTIONS_GHC -fno - warn - unused - imports #
module JSDOM.Generated.SVGPathSegCurvetoQuadraticSmoothAbs
(setX, getX, setY, getY, SVGPathSegCurvetoQuadraticSmoothAbs(..),
gTypeSVGPathSegCurvetoQuadraticSmoothAbs)
where
import Prelude ((.), (==), (>>=), return, IO, Int, Float, Double, Bool(..), Maybe, maybe, fromIntegral, round, realToFrac, fmap, Show, Read, Eq, Ord, Maybe(..))
import qualified Prelude (error)
import Data.Typeable (Typeable)
import Data.Traversable (mapM)
import Language.Javascript.JSaddle (JSM(..), JSVal(..), JSString, strictEqual, toJSVal, valToStr, valToNumber, valToBool, js, jss, jsf, jsg, function, asyncFunction, new, array, jsUndefined, (!), (!!))
import Data.Int (Int64)
import Data.Word (Word, Word64)
import JSDOM.Types
import Control.Applicative ((<$>))
import Control.Monad (void)
import Control.Lens.Operators ((^.))
import JSDOM.EventTargetClosures (EventName, unsafeEventName, unsafeEventNameAsync)
import JSDOM.Enums
-- | <-US/docs/Web/API/SVGPathSegCurvetoQuadraticSmoothAbs.x Mozilla SVGPathSegCurvetoQuadraticSmoothAbs.x documentation>
setX ::
(MonadDOM m) =>
SVGPathSegCurvetoQuadraticSmoothAbs -> Float -> m ()
setX self val = liftDOM (self ^. jss "x" (toJSVal val))
-- | <-US/docs/Web/API/SVGPathSegCurvetoQuadraticSmoothAbs.x Mozilla SVGPathSegCurvetoQuadraticSmoothAbs.x documentation>
getX ::
(MonadDOM m) => SVGPathSegCurvetoQuadraticSmoothAbs -> m Float
getX self
= liftDOM (realToFrac <$> ((self ^. js "x") >>= valToNumber))
-- | <-US/docs/Web/API/SVGPathSegCurvetoQuadraticSmoothAbs.y Mozilla SVGPathSegCurvetoQuadraticSmoothAbs.y documentation>
setY ::
(MonadDOM m) =>
SVGPathSegCurvetoQuadraticSmoothAbs -> Float -> m ()
setY self val = liftDOM (self ^. jss "y" (toJSVal val))
-- | <-US/docs/Web/API/SVGPathSegCurvetoQuadraticSmoothAbs.y Mozilla SVGPathSegCurvetoQuadraticSmoothAbs.y documentation>
getY ::
(MonadDOM m) => SVGPathSegCurvetoQuadraticSmoothAbs -> m Float
getY self
= liftDOM (realToFrac <$> ((self ^. js "y") >>= valToNumber))
| null | https://raw.githubusercontent.com/ghcjs/jsaddle-dom/5f5094277d4b11f3dc3e2df6bb437b75712d268f/src/JSDOM/Generated/SVGPathSegCurvetoQuadraticSmoothAbs.hs | haskell | For HasCallStack compatibility
# LANGUAGE ImplicitParams, ConstraintKinds, KindSignatures #
| <-US/docs/Web/API/SVGPathSegCurvetoQuadraticSmoothAbs.y Mozilla SVGPathSegCurvetoQuadraticSmoothAbs.y documentation>
| <-US/docs/Web/API/SVGPathSegCurvetoQuadraticSmoothAbs.y Mozilla SVGPathSegCurvetoQuadraticSmoothAbs.y documentation> | # LANGUAGE PatternSynonyms #
# OPTIONS_GHC -fno - warn - unused - imports #
module JSDOM.Generated.SVGPathSegCurvetoQuadraticSmoothAbs
(setX, getX, setY, getY, SVGPathSegCurvetoQuadraticSmoothAbs(..),
gTypeSVGPathSegCurvetoQuadraticSmoothAbs)
where
import Prelude ((.), (==), (>>=), return, IO, Int, Float, Double, Bool(..), Maybe, maybe, fromIntegral, round, realToFrac, fmap, Show, Read, Eq, Ord, Maybe(..))
import qualified Prelude (error)
import Data.Typeable (Typeable)
import Data.Traversable (mapM)
import Language.Javascript.JSaddle (JSM(..), JSVal(..), JSString, strictEqual, toJSVal, valToStr, valToNumber, valToBool, js, jss, jsf, jsg, function, asyncFunction, new, array, jsUndefined, (!), (!!))
import Data.Int (Int64)
import Data.Word (Word, Word64)
import JSDOM.Types
import Control.Applicative ((<$>))
import Control.Monad (void)
import Control.Lens.Operators ((^.))
import JSDOM.EventTargetClosures (EventName, unsafeEventName, unsafeEventNameAsync)
import JSDOM.Enums
| < -US/docs/Web/API/SVGPathSegCurvetoQuadraticSmoothAbs.x Mozilla documentation >
setX ::
(MonadDOM m) =>
SVGPathSegCurvetoQuadraticSmoothAbs -> Float -> m ()
setX self val = liftDOM (self ^. jss "x" (toJSVal val))
| < -US/docs/Web/API/SVGPathSegCurvetoQuadraticSmoothAbs.x Mozilla documentation >
getX ::
(MonadDOM m) => SVGPathSegCurvetoQuadraticSmoothAbs -> m Float
getX self
= liftDOM (realToFrac <$> ((self ^. js "x") >>= valToNumber))
setY ::
(MonadDOM m) =>
SVGPathSegCurvetoQuadraticSmoothAbs -> Float -> m ()
setY self val = liftDOM (self ^. jss "y" (toJSVal val))
getY ::
(MonadDOM m) => SVGPathSegCurvetoQuadraticSmoothAbs -> m Float
getY self
= liftDOM (realToFrac <$> ((self ^. js "y") >>= valToNumber))
|
4d7979e0e344e63e2e556eadb38a9da44df9a8a58ad5fb80469bd93870709f04 | mitchellwrosen/tasty-hspec | Hspec.hs | | @hspec@ and @tasty@ serve similar purposes ; consider using one or the
-- other.
--
-- However, in a pinch, this module allows you to run an @hspec@ 'H.Spec' as a
-- @tasty@ 'T.TestTree'.
module Test.Tasty.Hspec
( -- * Tests
testSpec,
testSpecs,
-- * Options
TreatPendingAs (..),
-- * Examples
-- $examples
)
where
import Control.Monad (guard)
import Data.Maybe (catMaybes, fromMaybe, mapMaybe)
import Data.Proxy
import Data.Typeable (Typeable)
import qualified Test.Hspec as Hspec
import qualified Test.Hspec.Core.Formatters as Hspec.Core.Formatters
import qualified Test.Hspec.Core.Spec as Hspec.Core.Spec
import qualified Test.Tasty as Tasty
import qualified Test.Tasty.Options as Tasty.Options
import qualified Test.Tasty.Providers as Tasty.Providers
import qualified Test.Tasty.QuickCheck as Tasty.QuickCheck
import qualified Test.Tasty.Runners as Tasty.Runners
import qualified Test.Tasty.SmallCheck as Tasty.SmallCheck
-- $examples
--
-- The simplest usage of this library involves first creating a 'T.TestTree' in IO, then running it with
-- 'T.defaultMain'.
--
-- @
-- main = do
-- spec <- 'testSpec' "spec" mySpec
-- 'T.defaultMain'
-- ('T.testGroup' "tests"
-- [ spec
-- , ...
-- ])
-- @
--
-- You can treat an 'H.pending'/'H.pendingWith' test as a success instead of a
-- failure (the default):
--
-- @
-- tests :: TestTree
-- tests =
-- localOption TreatPendingAsSuccess $ testGroup "My Hspec TestTree"
--     [ unsafePerformIO (testSpec "My first Hspec test" spec_firstHspecTest)
-- , ...
-- ]
-- @
--
-- If you don't do any IO during 'Spec' creation, or the IO need
-- not be performed at any particular time relative to other actions, it's
-- perfectly fine to use 'System.IO.unsafePerformIO'.
--
-- @
-- main = do
-- 'T.defaultMain'
-- ('T.testGroup' "tests"
-- [ 'System.IO.unsafePerformIO' ('testSpec' "spec" mySpec)
-- , ...
-- ])
-- @
-- | Create a < tasty> 'T.TestTree' from an
-- < hspec> 'H.Spec'.
testSpec :: Tasty.TestName -> Hspec.Spec -> IO Tasty.TestTree
testSpec name spec = do
trees <- testSpecs spec
pure (Tasty.testGroup name trees)
-- | Create a list of < tasty> 'T.TestTree' from an
-- < hspec> 'H.Spec'. This returns the same tests as 'testSpec'
-- but doesn't create a < tasty> test group from them.
testSpecs :: Hspec.Spec -> IO [Tasty.TestTree]
testSpecs spec = do
-- Here we do as hspec does, which is pre-process a spec by focusing the whole thing, which is a no-op if
-- anything inside is already focused, but otherwise focuses every item. Then, when creating a tasty test tree,
-- we just toss the unfocused items.
(_configBuilder, trees) <- Hspec.Core.Spec.runSpecM (Hspec.focus spec)
pure (mapMaybe specTreeToTestTree trees)
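-- Usage sketch (not part of the module; @mySpec@ and @otherTests@ are
-- assumed names): 'testSpecs' lets a caller splice hspec items into an
-- existing tasty group without introducing an extra nesting level:
--
-- > main = do
-- >   hspecTrees <- testSpecs mySpec
-- >   Tasty.defaultMain (Tasty.testGroup "all tests" (otherTests ++ hspecTrees))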
specTreeToTestTree :: Hspec.Core.Spec.SpecTree () -> Maybe Tasty.TestTree
specTreeToTestTree = \case
Hspec.Core.Spec.Node name trees -> pure (Tasty.testGroup name (mapMaybe specTreeToTestTree trees))
Hspec.Core.Spec.NodeWithCleanup _loc cleanup trees -> do
tree <- specTreeToTestTree (Hspec.Core.Spec.Node "(unnamed)" trees)
pure (Tasty.Runners.WithResource (Tasty.Runners.ResourceSpec (pure ()) (const cleanup)) (const tree))
Hspec.Core.Spec.Leaf item -> do
guard (Hspec.Core.Spec.itemIsFocused item)
pure (Tasty.Providers.singleTest (Hspec.Core.Spec.itemRequirement item) (Item item))
newtype Item
= Item (Hspec.Core.Spec.Item ())
deriving (Typeable)
instance Tasty.Providers.IsTest Item where
run opts (Item item) progress = do
(_, qcArgs) <- Tasty.QuickCheck.optionSetToArgs opts
-- optionSetToQuickCheckArgs :: Tasty.OptionSet -> IO QuickCheck.Args
-- optionSetToQuickCheckArgs opts =
-- snd <$> Tasty.QuickCheck.optionSetToArgs opts
let params =
Hspec.Core.Spec.Params
{ Hspec.Core.Spec.paramsQuickCheckArgs = qcArgs,
Hspec.Core.Spec.paramsSmallCheckDepth =
case Tasty.Options.lookupOption opts of
Tasty.SmallCheck.SmallCheckDepth depth -> Just depth
}
Hspec.Core.Spec.Result _ result <- Hspec.Core.Spec.itemExample item params ($ ()) progress'
pure
( case result of
Hspec.Core.Spec.Success -> Tasty.Providers.testPassed ""
Hspec.Core.Spec.Pending _ reason ->
case Tasty.Options.lookupOption opts of
TreatPendingAsFailure -> Tasty.Providers.testFailed reason'
TreatPendingAsSuccess -> Tasty.Providers.testPassed reason'
where
reason' = "# PENDING: " ++ fromMaybe "No reason given" reason
Hspec.Core.Spec.Failure _ reason ->
case reason of
Hspec.Core.Spec.NoReason -> Tasty.Providers.testFailed ""
Hspec.Core.Spec.Reason x -> Tasty.Providers.testFailed x
Hspec.Core.Spec.ExpectedButGot preface expected actual ->
Tasty.Providers.testFailed . unlines . catMaybes $
[ preface,
Just ("expected: " ++ expected),
Just (" but got: " ++ actual)
]
Hspec.Core.Spec.Error _ exception ->
Tasty.Providers.testFailed ("uncaught exception: " ++ Hspec.Core.Formatters.formatException exception)
)
where
progress' (x, y) =
progress
Tasty.Runners.Progress
{ Tasty.Runners.progressText = "",
Tasty.Runners.progressPercent = fromIntegral x / fromIntegral y
}
testOptions =
pure
[ Tasty.Options.Option (Proxy :: Proxy TreatPendingAs),
Tasty.Options.Option (Proxy :: Proxy Tasty.QuickCheck.QuickCheckTests),
Tasty.Options.Option (Proxy :: Proxy Tasty.QuickCheck.QuickCheckReplay),
Tasty.Options.Option (Proxy :: Proxy Tasty.QuickCheck.QuickCheckMaxSize),
Tasty.Options.Option (Proxy :: Proxy Tasty.QuickCheck.QuickCheckMaxRatio),
Tasty.Options.Option (Proxy :: Proxy Tasty.SmallCheck.SmallCheckDepth)
]
-- | How to treat @hspec@ pending tests.
--
-- @tasty@ does not have the concept of pending tests, so we must map them to
-- either successes or failures. By default, they are treated as failures.
--
-- Set via the command line flag @--treat-pending-as (success|failure)@.
data TreatPendingAs
= -- | Default.
TreatPendingAsFailure
| TreatPendingAsSuccess
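-- For example (a sketch): running the built test executable with
-- @--treat-pending-as success@ makes pending specs count as passing for
-- that run, while the default behaviour reports them as failures.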
instance Tasty.Options.IsOption TreatPendingAs where
defaultValue =
TreatPendingAsFailure
parseValue = \case
"failure" -> Just TreatPendingAsFailure
"success" -> Just TreatPendingAsSuccess
_ -> Nothing
optionName =
pure "treat-pending-as"
optionHelp =
pure "How to treat pending hspec tests ('failure' or 'success')"
showDefaultValue _ =
Just "failure"
| null | https://raw.githubusercontent.com/mitchellwrosen/tasty-hspec/5b5e1eb4c6d0952f0227895ef72f68921ac3e1cb/src/Test/Tasty/Hspec.hs | haskell | other.
However, in a pinch, this module allows you to run an @hspec@ 'H.Spec' as a
* Tests
* Options
* Examples
$examples
$examples
'T.defaultMain'.
@
main = do
spec <- 'testSpec' "spec" mySpec
'T.defaultMain'
('T.testGroup' "tests"
[ spec
, ...
])
@
You can treat an 'H.pending'/'H.pendingWith' test as a success instead of a
failure (the default):
@
tests :: TestTree
tests =
localOption TreatPendingAsSuccess $ testGroup "My Hspec TestTree"
, ...
]
@
perfectly fine to use 'System.IO.unsafePerformIO'.
@
main = do
'T.defaultMain'
('T.testGroup' "tests"
[ 'System.IO.unsafePerformIO' ('testSpec' "spec" mySpec)
, ...
])
@
| Create a < tasty> 'T.TestTree' from an
< hspec> 'H.Spec'.
| Create a list of < tasty> 'T.TestTree' from an
< hspec> 'H.Spec'. This returns the same tests as 'testSpec'
but doesn't create a < tasty> test group from them.
anything inside is already focused, but otherwise focuses every item. Then, when creating a tasty test tree,
we just toss the unfocused items.
optionSetToQuickCheckArgs opts =
snd <$> Tasty.QuickCheck.optionSetToArgs opts
| How to treat @hspec@ pending tests.
either successes or failures. By default, they are treated as failures.
Set via the command line flag @--treat-pending-as (success|failure)@.
| Default. | | @hspec@ and @tasty@ serve similar purposes ; consider using one or the
@tasty@ ' T.TestTree ' .
module Test.Tasty.Hspec
testSpec,
testSpecs,
TreatPendingAs (..),
)
where
import Control.Monad (guard)
import Data.Maybe (catMaybes, fromMaybe, mapMaybe)
import Data.Proxy
import Data.Typeable (Typeable)
import qualified Test.Hspec as Hspec
import qualified Test.Hspec.Core.Formatters as Hspec.Core.Formatters
import qualified Test.Hspec.Core.Spec as Hspec.Core.Spec
import qualified Test.Tasty as Tasty
import qualified Test.Tasty.Options as Tasty.Options
import qualified Test.Tasty.Providers as Tasty.Providers
import qualified Test.Tasty.QuickCheck as Tasty.QuickCheck
import qualified Test.Tasty.Runners as Tasty.Runners
import qualified Test.Tasty.SmallCheck as Tasty.SmallCheck
The simplest usage of this library involves first creating a ' T.TestTree ' in , then running it with
[ ( testSpec " My first Hspec test " spec_firstHspecTest )
If you do n't do any during ' Spec ' creation , or the need
not be performed at any particular time relative to other actions , it 's
testSpec :: Tasty.TestName -> Hspec.Spec -> IO Tasty.TestTree
testSpec name spec = do
trees <- testSpecs spec
pure (Tasty.testGroup name trees)
testSpecs :: Hspec.Spec -> IO [Tasty.TestTree]
testSpecs spec = do
Here we do as hspec does , which is pre - process a spec by focusing the whole thing , which is a no - op if
(_configBuilder, trees) <- Hspec.Core.Spec.runSpecM (Hspec.focus spec)
pure (mapMaybe specTreeToTestTree trees)
specTreeToTestTree :: Hspec.Core.Spec.SpecTree () -> Maybe Tasty.TestTree
specTreeToTestTree = \case
Hspec.Core.Spec.Node name trees -> pure (Tasty.testGroup name (mapMaybe specTreeToTestTree trees))
Hspec.Core.Spec.NodeWithCleanup _loc cleanup trees -> do
tree <- specTreeToTestTree (Hspec.Core.Spec.Node "(unnamed)" trees)
pure (Tasty.Runners.WithResource (Tasty.Runners.ResourceSpec (pure ()) (const cleanup)) (const tree))
Hspec.Core.Spec.Leaf item -> do
guard (Hspec.Core.Spec.itemIsFocused item)
pure (Tasty.Providers.singleTest (Hspec.Core.Spec.itemRequirement item) (Item item))
newtype Item
= Item (Hspec.Core.Spec.Item ())
deriving (Typeable)
instance Tasty.Providers.IsTest Item where
run opts (Item item) progress = do
(_, qcArgs) <- Tasty.QuickCheck.optionSetToArgs opts
optionSetToQuickCheckArgs : : Tasty . OptionSet - > IO QuickCheck .
let params =
Hspec.Core.Spec.Params
{ Hspec.Core.Spec.paramsQuickCheckArgs = qcArgs,
Hspec.Core.Spec.paramsSmallCheckDepth =
case Tasty.Options.lookupOption opts of
Tasty.SmallCheck.SmallCheckDepth depth -> Just depth
}
Hspec.Core.Spec.Result _ result <- Hspec.Core.Spec.itemExample item params ($ ()) progress'
pure
( case result of
Hspec.Core.Spec.Success -> Tasty.Providers.testPassed ""
Hspec.Core.Spec.Pending _ reason ->
case Tasty.Options.lookupOption opts of
TreatPendingAsFailure -> Tasty.Providers.testFailed reason'
TreatPendingAsSuccess -> Tasty.Providers.testPassed reason'
where
reason' = "# PENDING: " ++ fromMaybe "No reason given" reason
Hspec.Core.Spec.Failure _ reason ->
case reason of
Hspec.Core.Spec.NoReason -> Tasty.Providers.testFailed ""
Hspec.Core.Spec.Reason x -> Tasty.Providers.testFailed x
Hspec.Core.Spec.ExpectedButGot preface expected actual ->
Tasty.Providers.testFailed . unlines . catMaybes $
[ preface,
Just ("expected: " ++ expected),
Just (" but got: " ++ actual)
]
Hspec.Core.Spec.Error _ exception ->
Tasty.Providers.testFailed ("uncaught exception: " ++ Hspec.Core.Formatters.formatException exception)
)
where
progress' (x, y) =
progress
Tasty.Runners.Progress
{ Tasty.Runners.progressText = "",
Tasty.Runners.progressPercent = fromIntegral x / fromIntegral y
}
testOptions =
pure
[ Tasty.Options.Option (Proxy :: Proxy TreatPendingAs),
Tasty.Options.Option (Proxy :: Proxy Tasty.QuickCheck.QuickCheckTests),
Tasty.Options.Option (Proxy :: Proxy Tasty.QuickCheck.QuickCheckReplay),
Tasty.Options.Option (Proxy :: Proxy Tasty.QuickCheck.QuickCheckMaxSize),
Tasty.Options.Option (Proxy :: Proxy Tasty.QuickCheck.QuickCheckMaxRatio),
Tasty.Options.Option (Proxy :: Proxy Tasty.SmallCheck.SmallCheckDepth)
]
@tasty@ does not have the concept of pending tests , so we must map them to
data TreatPendingAs
TreatPendingAsFailure
| TreatPendingAsSuccess
instance Tasty.Options.IsOption TreatPendingAs where
defaultValue =
TreatPendingAsFailure
parseValue = \case
"failure" -> Just TreatPendingAsFailure
"success" -> Just TreatPendingAsSuccess
_ -> Nothing
optionName =
pure "treat-pending-as"
optionHelp =
pure "How to treat pending hspec tests ('failure' or 'success')"
showDefaultValue _ =
Just "failure"
|
6990858b2dff38acffd4d16de43f2eb8e1c8e9c81a9ca1910d9a2216bd361bdd | TerrorJack/ghc-alter | C.hs | {-# LANGUAGE Safe #-}
{-# LANGUAGE NoImplicitPrelude #-}
-----------------------------------------------------------------------------
-- |
-- Module : Foreign.C
-- Copyright   :  (c) The FFI task force 2001
-- License : BSD-style (see the file libraries/base/LICENSE)
--
Maintainer :
-- Stability : provisional
-- Portability : portable
--
-- Bundles the C specific FFI library functionality
--
-----------------------------------------------------------------------------
module Foreign.C
( module Foreign.C.Types
, module Foreign.C.String
, module Foreign.C.Error
) where
import Foreign.C.Types
import Foreign.C.String
import Foreign.C.Error
| null | https://raw.githubusercontent.com/TerrorJack/ghc-alter/db736f34095eef416b7e077f9b26fc03aa78c311/ghc-alter/boot-lib/base/Foreign/C.hs | haskell | # LANGUAGE Safe #
---------------------------------------------------------------------------
|
Module : Foreign.C
License : BSD-style (see the file libraries/base/LICENSE)
Stability : provisional
Portability : portable
--------------------------------------------------------------------------- | # LANGUAGE NoImplicitPrelude #
Copyright : ( c ) The FFI task force 2001
Maintainer :
Bundles the C specific FFI library functionality
module Foreign.C
( module Foreign.C.Types
, module Foreign.C.String
, module Foreign.C.Error
) where
import Foreign.C.Types
import Foreign.C.String
import Foreign.C.Error
|
c6f17971a954a212b071e30c2027f247df9c0372690b9cf5227dd44b77c53e7a | ktakashi/sagittarius-scheme | parser-parameters.scm | -*- mode : scheme ; coding : utf-8 ; -*-
;;;
;;; text/json/parser-parameters.scm - JSON parser parameters
;;;
;;;   Copyright (c) 2020  < >
;;;
;;; Redistribution and use in source and binary forms, with or without
;;; modification, are permitted provided that the following conditions
;;; are met:
;;;
;;; 1. Redistributions of source code must retain the above copyright
;;; notice, this list of conditions and the following disclaimer.
;;;
;;; 2. Redistributions in binary form must reproduce the above copyright
;;; notice, this list of conditions and the following disclaimer in the
;;; documentation and/or other materials provided with the distribution.
;;;
;;; THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
" AS IS " AND ANY EXPRESS OR IMPLIED WARRANTIES , INCLUDING , BUT NOT
;;; LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
;;; A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
;;; OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
;;; SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED
;;; TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
;;; PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
;;; LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
;;; NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
;;; SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
;;;
;; For flexibile object construction
(library (text json parser-parameters)
(export *json:array-handler*
*json:object-handler*
*json:null-handler*
*json:boolean-handler*
*json:number-handler*
*json:string-handler*)
(import (rnrs)
(srfi :39 parameters))
(define *json:array-handler* (make-parameter (lambda (v) v)))
(define *json:object-handler* (make-parameter (lambda (v) (list->vector v))))
(define *json:null-handler* (make-parameter (lambda () 'null)))
(define *json:boolean-handler* (make-parameter (lambda (v) v)))
(define *json:number-handler* (make-parameter (lambda (v) v)))
(define *json:string-handler* (make-parameter (lambda (v) v)))
;; maybe null and boolean handler?
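  ;; Usage sketch (not part of this library): a parser built on these
  ;; parameters can be re-targeted per call site, e.g. keeping objects as
  ;; association lists instead of vectors:
  ;;   (parameterize ((*json:object-handler* (lambda (v) v)))
  ;;     (read-json port))   ; `read-json` and `port` are assumed names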
)
| null | https://raw.githubusercontent.com/ktakashi/sagittarius-scheme/3971b131d463696297c320dbe595dffd08867dac/sitelib/text/json/parser-parameters.scm | scheme | coding : utf-8 ; -*-
text/json/parser-parameters.scm - JSON parser parameters
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions
are met:
1. Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
For flexibile object construction
maybe null and boolean handler? | Copyright ( c ) 2020 < >
" AS IS " AND ANY EXPRESS OR IMPLIED WARRANTIES , INCLUDING , BUT NOT
SPECIAL , EXEMPLARY , OR CONSEQUENTIAL DAMAGES ( INCLUDING , BUT NOT LIMITED
LIABILITY , WHETHER IN CONTRACT , STRICT LIABILITY , OR TORT ( INCLUDING
(library (text json parser-parameters)
(export *json:array-handler*
*json:object-handler*
*json:null-handler*
*json:boolean-handler*
*json:number-handler*
*json:string-handler*)
(import (rnrs)
(srfi :39 parameters))
(define *json:array-handler* (make-parameter (lambda (v) v)))
(define *json:object-handler* (make-parameter (lambda (v) (list->vector v))))
(define *json:null-handler* (make-parameter (lambda () 'null)))
(define *json:boolean-handler* (make-parameter (lambda (v) v)))
(define *json:number-handler* (make-parameter (lambda (v) v)))
(define *json:string-handler* (make-parameter (lambda (v) v)))
)
|
1ddb3fa8ab71601330fe25442f44d6865323a5fbc2e4101f390a19bb7765b727 | vincenthz/hs-crypto-pubkey | DH.hs | module Crypto.PubKey.ECC.DH (
Curve
, PublicPoint
, PrivateNumber
, SharedKey(..)
, generatePrivate
, calculatePublic
, getShared
) where
import Crypto.Number.Generate (generateMax)
import Crypto.PubKey.ECC.Prim (pointMul)
import Crypto.Random (CPRG)
import Crypto.Types.PubKey.DH (SharedKey(..))
import Crypto.Types.PubKey.ECC (PublicPoint, PrivateNumber, Curve, Point(..))
import Crypto.Types.PubKey.ECC (ecc_n, ecc_g, common_curve)
-- | Generating a private number d.
generatePrivate :: CPRG g => g -> Curve -> (PrivateNumber, g)
generatePrivate rng curve = generateMax rng n
where
n = ecc_n $ common_curve curve
-- | Generating a public point Q.
calculatePublic :: Curve -> PrivateNumber -> PublicPoint
calculatePublic curve d = q
where
g = ecc_g $ common_curve curve
q = pointMul curve d g
-- | Generating a shared key using our private number and
-- the other party public point.
getShared :: Curve -> PrivateNumber -> PublicPoint -> SharedKey
getShared curve db qa = SharedKey x
where
Point x _ = pointMul curve db qa
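-- End-to-end sketch (not part of the module): given a 'CPRG' value @rng@
-- and a chosen @curve@, both parties derive the same shared secret:
--
-- > let (da, rng') = generatePrivate rng  curve
-- >     (db, _)    = generatePrivate rng' curve
-- >     qa         = calculatePublic curve da
-- >     qb         = calculatePublic curve db
-- > in getShared curve da qb == getShared curve db qa   -- True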
| null | https://raw.githubusercontent.com/vincenthz/hs-crypto-pubkey/c4192a87a03c329eadceb11c3406b4db14b56f11/Crypto/PubKey/ECC/DH.hs | haskell | | Generating a public point Q.
| Generating a shared key using our private number and
the other party public point. | module Crypto.PubKey.ECC.DH (
Curve
, PublicPoint
, PrivateNumber
, SharedKey(..)
, generatePrivate
, calculatePublic
, getShared
) where
import Crypto.Number.Generate (generateMax)
import Crypto.PubKey.ECC.Prim (pointMul)
import Crypto.Random (CPRG)
import Crypto.Types.PubKey.DH (SharedKey(..))
import Crypto.Types.PubKey.ECC (PublicPoint, PrivateNumber, Curve, Point(..))
import Crypto.Types.PubKey.ECC (ecc_n, ecc_g, common_curve)
| Generating a private number d.
generatePrivate :: CPRG g => g -> Curve -> (PrivateNumber, g)
generatePrivate rng curve = generateMax rng n
where
n = ecc_n $ common_curve curve
calculatePublic :: Curve -> PrivateNumber -> PublicPoint
calculatePublic curve d = q
where
g = ecc_g $ common_curve curve
q = pointMul curve d g
getShared :: Curve -> PrivateNumber -> PublicPoint -> SharedKey
getShared curve db qa = SharedKey x
where
Point x _ = pointMul curve db qa
|
6f30762694b30ed5095107553bdcbcf15d281e539100671cefd226f41de7709e | romstad/clj-chess | project.clj | (defproject chessboard "0.1.0-SNAPSHOT"
:dependencies [[org.clojure/clojure "1.8.0"]
[org.clojure/clojurescript "1.9.671"]
[clj-chess "0.5.0"]
[reagent "0.7.0"]
[re-frame "0.9.4"]]
:plugins [[lein-cljsbuild "1.1.4"]]
:min-lein-version "2.5.3"
:source-paths ["src/clj"]
:clean-targets ^{:protect false} ["resources/public/js/compiled" "target"]
:figwheel {:css-dirs ["resources/public/css"]}
:repl-options {:nrepl-middleware [cemerick.piggieback/wrap-cljs-repl]}
:profiles
{:dev
{:dependencies [[binaryage/devtools "0.9.4"]
[figwheel-sidecar "0.5.11"]
[com.cemerick/piggieback "0.2.2"]]
:plugins [[lein-figwheel "0.5.11"]]}}
:cljsbuild
{:builds
[{:id "dev"
:source-paths ["src/cljs"]
:figwheel {:on-jsload "chessboard.core/mount-root"}
:compiler {:main chessboard.core
:output-to "resources/public/js/compiled/app.js"
:output-dir "resources/public/js/compiled/out"
:asset-path "js/compiled/out"
:source-map-timestamp true
:preloads [devtools.preload]
:external-config {:devtools/config {:features-to-install :all}}
}}
{:id "min"
:source-paths ["src/cljs"]
:compiler {:main chessboard.core
:output-to "resources/public/js/compiled/app.js"
:optimizations :advanced
:closure-defines {goog.DEBUG false}
:pretty-print false}}]})
| null | https://raw.githubusercontent.com/romstad/clj-chess/1f7d4d0217c7299d49386a1e5dca404a37441728/examples/chessboard/project.clj | clojure | (defproject chessboard "0.1.0-SNAPSHOT"
:dependencies [[org.clojure/clojure "1.8.0"]
[org.clojure/clojurescript "1.9.671"]
[clj-chess "0.5.0"]
[reagent "0.7.0"]
[re-frame "0.9.4"]]
:plugins [[lein-cljsbuild "1.1.4"]]
:min-lein-version "2.5.3"
:source-paths ["src/clj"]
:clean-targets ^{:protect false} ["resources/public/js/compiled" "target"]
:figwheel {:css-dirs ["resources/public/css"]}
:repl-options {:nrepl-middleware [cemerick.piggieback/wrap-cljs-repl]}
:profiles
{:dev
{:dependencies [[binaryage/devtools "0.9.4"]
[figwheel-sidecar "0.5.11"]
[com.cemerick/piggieback "0.2.2"]]
:plugins [[lein-figwheel "0.5.11"]]}}
:cljsbuild
{:builds
[{:id "dev"
:source-paths ["src/cljs"]
:figwheel {:on-jsload "chessboard.core/mount-root"}
:compiler {:main chessboard.core
:output-to "resources/public/js/compiled/app.js"
:output-dir "resources/public/js/compiled/out"
:asset-path "js/compiled/out"
:source-map-timestamp true
:preloads [devtools.preload]
:external-config {:devtools/config {:features-to-install :all}}
}}
{:id "min"
:source-paths ["src/cljs"]
:compiler {:main chessboard.core
:output-to "resources/public/js/compiled/app.js"
:optimizations :advanced
:closure-defines {goog.DEBUG false}
:pretty-print false}}]})
|
|
2691bf7aa6339d6ce152c758cfd77f2563a051075db415016de9b2d2b77452ee | vdloo/kodictl | stop.rkt | #!/usr/bin/env racket
#lang racket
(require json)
(require "../action.rkt")
(require "active-players.rkt")
(provide kodictl-stop)
; stop all active players
(define kodi-json-rpc-stop
(λ ()
(kodi-json-rpc-map-active-players
(λ (playerid)
(kodi-json-rpc-action "Player.Stop" "playerid"
(number->string playerid))))))
; stop active players and blackhole output
(define kodictl-stop
(λ ()
(for-each
(λ (item) empty)
(kodi-json-rpc-stop))))
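; Usage sketch (not part of this module): a caller that has required this
; file (path assumed) can stop every active Kodi player with a bare call:
;   (kodictl-stop)
; the JSON-RPC replies are deliberately discarded by the for-each above.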
| null | https://raw.githubusercontent.com/vdloo/kodictl/31c775a0889c06fcf65a0d91d15937144eb6a30a/kodictl/commands/stop.rkt | racket | stop all active players
stop active players and blackhole output | #!/usr/bin/env racket
#lang racket
(require json)
(require "../action.rkt")
(require "active-players.rkt")
(provide kodictl-stop)
(define kodi-json-rpc-stop
(λ ()
(kodi-json-rpc-map-active-players
(λ (playerid)
(kodi-json-rpc-action "Player.Stop" "playerid"
(number->string playerid))))))
(define kodictl-stop
(λ ()
(for-each
(λ (item) empty)
(kodi-json-rpc-stop))))
|
205a0a868b7396df6f87d81b5e54428d7dd3ecdd2365502ba2b97f607113bf1c | ku-fpg/haskino-examples | DigitalCmds.hs | -------------------------------------------------------------------------------
-- |
-- Module      :
-- Copyright   :  (c) University of Kansas
-- License : BSD3
-- Stability : experimental
--
-- TBD.
-------------------------------------------------------------------------------
module DigitalCmds where
import System.Hardware.Haskino
import Data.Word
import Comms
import FirmwareCmds
processDigitalCommand :: [Word8] -> Arduino ()
processDigitalCommand m =
case head m of
c | c == firmwareCmdVal DIG_CMD_READ_PIN -> processReadPin $ tail m
| c == firmwareCmdVal DIG_CMD_WRITE_PIN -> processWritePin $ tail m
| c == firmwareCmdVal DIG_CMD_READ_PORT -> processReadPort $ tail m
| c == firmwareCmdVal DIG_CMD_WRITE_PORT -> processWritePort $ tail m
_ -> return ()
processReadPin :: [Word8] -> Arduino ()
processReadPin m = do
if (m !! 1== exprTypeVal EXPR_WORD8) && (m !! 2 == 0)
then do
b <- digitalRead $ m !! 3
sendReply (firmwareReplyVal DIG_RESP_READ_PIN) $ ( exprTypeVal EXPR_BOOL ) :
( exprOpVal EXPR_LIT ) :
( if b then 1 else 0 ) : []
else return ()
processWritePin :: [Word8] -> Arduino ()
processWritePin m = do
if (head m == exprTypeVal EXPR_WORD8) && (m !! 1 == 0) &&
(m !! 3 == exprTypeVal EXPR_BOOL ) && (m !! 4 == 0)
then digitalWrite (m !! 2) $ if m !! 5 == 0 then False else True
else return ()
processReadPort :: [Word8] -> Arduino ()
processReadPort m = do
if (m !! 1 == exprTypeVal EXPR_WORD8) && (m !! 2 == 0) &&
(m !! 4 == exprTypeVal EXPR_WORD8) && (m !! 5 == 0)
then do
p <- digitalPortRead (m !! 3) (m !! 6)
sendReply (firmwareReplyVal DIG_RESP_READ_PORT) $ ( exprTypeVal EXPR_WORD8 ) :
( exprOpVal EXPR_LIT ) :
p : []
else return ()
processWritePort :: [Word8] -> Arduino ()
processWritePort m = do
if (head m == exprTypeVal EXPR_WORD8) && (m !! 1 == 0) &&
(m !! 3 == exprTypeVal EXPR_WORD8) && (m !! 4 == 0) &&
(m !! 6 == exprTypeVal EXPR_WORD8) && (m !! 7 == 0)
then digitalPortWrite (m !! 2) (m !! 5) (m !! 8)
else return ()
| null | https://raw.githubusercontent.com/ku-fpg/haskino-examples/cdf10e43e5c8b5a38bc0bcbfb0e0faccfa9924e2/firmware/DigitalCmds.hs | haskell | -----------------------------------------------------------------------------
|
License : BSD3
Stability : experimental
TBD.
----------------------------------------------------------------------------- | Module :
Copyright : ( c ) University of Kansas
module DigitalCmds where
import System.Hardware.Haskino
import Data.Word
import Comms
import FirmwareCmds
processDigitalCommand :: [Word8] -> Arduino ()
processDigitalCommand m =
case head m of
c | c == firmwareCmdVal DIG_CMD_READ_PIN -> processReadPin $ tail m
| c == firmwareCmdVal DIG_CMD_WRITE_PIN -> processWritePin $ tail m
| c == firmwareCmdVal DIG_CMD_READ_PORT -> processReadPort $ tail m
| c == firmwareCmdVal DIG_CMD_WRITE_PORT -> processWritePort $ tail m
_ -> return ()
processReadPin :: [Word8] -> Arduino ()
processReadPin m = do
if (m !! 1== exprTypeVal EXPR_WORD8) && (m !! 2 == 0)
then do
b <- digitalRead $ m !! 3
sendReply (firmwareReplyVal DIG_RESP_READ_PIN) $ ( exprTypeVal EXPR_BOOL ) :
( exprOpVal EXPR_LIT ) :
( if b then 1 else 0 ) : []
else return ()
processWritePin :: [Word8] -> Arduino ()
processWritePin m = do
if (head m == exprTypeVal EXPR_WORD8) && (m !! 1 == 0) &&
(m !! 3 == exprTypeVal EXPR_BOOL ) && (m !! 4 == 0)
then digitalWrite (m !! 2) $ if m !! 5 == 0 then False else True
else return ()
processReadPort :: [Word8] -> Arduino ()
processReadPort m = do
if (m !! 1 == exprTypeVal EXPR_WORD8) && (m !! 2 == 0) &&
(m !! 4 == exprTypeVal EXPR_WORD8) && (m !! 5 == 0)
then do
p <- digitalPortRead (m !! 3) (m !! 6)
sendReply (firmwareReplyVal DIG_RESP_READ_PORT) $ ( exprTypeVal EXPR_WORD8 ) :
( exprOpVal EXPR_LIT ) :
p : []
else return ()
processWritePort :: [Word8] -> Arduino ()
processWritePort m = do
if (head m == exprTypeVal EXPR_WORD8) && (m !! 1 == 0) &&
(m !! 3 == exprTypeVal EXPR_WORD8) && (m !! 4 == 0) &&
(m !! 6 == exprTypeVal EXPR_WORD8) && (m !! 7 == 0)
then digitalPortWrite (m !! 2) (m !! 5) (m !! 8)
else return ()
|
8951bb5edf76ccbc87935641ab4d6ad845c58a8af63a9931dfd3d584bc988c3b | REMath/mit_16.399 | red123.mli | (* red123.mli *)
open Avalues1
open Avalues2
open Avalues3
val reduce : (Avalues1.t * Avalues2.t * Avalues3.t) -> (Avalues1.t * Avalues2.t * Avalues3.t)
| null | https://raw.githubusercontent.com/REMath/mit_16.399/3f395d6a9dfa1ed232d307c3c542df3dbd5b614a/project/Generic-FW-Abstract-Interpreter/red123.mli | ocaml | red123.mli | open Avalues1
open Avalues2
open Avalues3
val reduce : (Avalues1.t * Avalues2.t * Avalues3.t) -> (Avalues1.t * Avalues2.t * Avalues3.t)
|
ac8f96f84fe3c9a831df81bbaa6f72fb8bb316568411f65e10dbad865d291f50 | haskell-game/sdl2 | Filesystem.hs | module SDL.Raw.Filesystem (
-- * Filesystem Paths
getBasePath,
getPrefPath,
-- * File I/O Abstraction
allocRW,
freeRW,
rwFromConstMem,
rwFromFP,
rwFromFile,
rwFromMem,
rwClose,
rwRead,
rwSeek,
rwTell,
rwWrite,
readBE16,
readBE32,
readBE64,
readLE16,
readLE32,
readLE64,
writeBE16,
writeBE32,
writeBE64,
writeLE16,
writeLE32,
writeLE64
) where
import Control.Monad.IO.Class
import Data.Int
import Data.Word
import Foreign.C.String
import Foreign.C.Types
import Foreign.Ptr
import SDL.Raw.Types
foreign import ccall "SDL.h SDL_GetBasePath" getBasePathFFI :: IO CString
foreign import ccall "SDL.h SDL_GetPrefPath" getPrefPathFFI :: CString -> CString -> IO CString
foreign import ccall "SDL.h SDL_AllocRW" allocRWFFI :: IO (Ptr RWops)
foreign import ccall "SDL.h SDL_FreeRW" freeRWFFI :: Ptr RWops -> IO ()
foreign import ccall "SDL.h SDL_RWFromConstMem" rwFromConstMemFFI :: Ptr () -> CInt -> IO (Ptr RWops)
foreign import ccall "SDL.h SDL_RWFromFP" rwFromFPFFI :: Ptr () -> Bool -> IO (Ptr RWops)
foreign import ccall "SDL.h SDL_RWFromFile" rwFromFileFFI :: CString -> CString -> IO (Ptr RWops)
foreign import ccall "SDL.h SDL_RWFromMem" rwFromMemFFI :: Ptr () -> CInt -> IO (Ptr RWops)
foreign import ccall "sdlhelper.h SDLHelper_RWclose" rwCloseFFI :: Ptr RWops -> IO CInt
foreign import ccall "sdlhelper.h SDLHelper_RWread" rwReadFFI :: Ptr RWops -> Ptr () -> CSize -> CSize -> IO CSize
foreign import ccall "sdlhelper.h SDLHelper_RWseek" rwSeekFFI :: Ptr RWops -> Int64 -> CInt -> IO Int64
foreign import ccall "sdlhelper.h SDLHelper_RWtell" rwTellFFI :: Ptr RWops -> IO Int64
foreign import ccall "sdlhelper.h SDLHelper_RWwrite" rwWriteFFI :: Ptr RWops -> Ptr () -> CSize -> CSize -> IO CSize
foreign import ccall "SDL.h SDL_ReadBE16" readBE16FFI :: Ptr RWops -> IO Word16
foreign import ccall "SDL.h SDL_ReadBE32" readBE32FFI :: Ptr RWops -> IO Word32
foreign import ccall "SDL.h SDL_ReadBE64" readBE64FFI :: Ptr RWops -> IO Word64
foreign import ccall "SDL.h SDL_ReadLE16" readLE16FFI :: Ptr RWops -> IO Word16
foreign import ccall "SDL.h SDL_ReadLE32" readLE32FFI :: Ptr RWops -> IO Word32
foreign import ccall "SDL.h SDL_ReadLE64" readLE64FFI :: Ptr RWops -> IO Word64
foreign import ccall "SDL.h SDL_WriteBE16" writeBE16FFI :: Ptr RWops -> Word16 -> IO CSize
foreign import ccall "SDL.h SDL_WriteBE32" writeBE32FFI :: Ptr RWops -> Word32 -> IO CSize
foreign import ccall "SDL.h SDL_WriteBE64" writeBE64FFI :: Ptr RWops -> Word64 -> IO CSize
foreign import ccall "SDL.h SDL_WriteLE16" writeLE16FFI :: Ptr RWops -> Word16 -> IO CSize
foreign import ccall "SDL.h SDL_WriteLE32" writeLE32FFI :: Ptr RWops -> Word32 -> IO CSize
foreign import ccall "SDL.h SDL_WriteLE64" writeLE64FFI :: Ptr RWops -> Word64 -> IO CSize
getBasePath :: MonadIO m => m CString
getBasePath = liftIO getBasePathFFI
# INLINE getBasePath #
getPrefPath :: MonadIO m => CString -> CString -> m CString
getPrefPath v1 v2 = liftIO $ getPrefPathFFI v1 v2
# INLINE getPrefPath #
allocRW :: MonadIO m => m (Ptr RWops)
allocRW = liftIO allocRWFFI
# INLINE allocRW #
freeRW :: MonadIO m => Ptr RWops -> m ()
freeRW v1 = liftIO $ freeRWFFI v1
{-# INLINE freeRW #-}
rwFromConstMem :: MonadIO m => Ptr () -> CInt -> m (Ptr RWops)
rwFromConstMem v1 v2 = liftIO $ rwFromConstMemFFI v1 v2
# INLINE rwFromConstMem #
rwFromFP :: MonadIO m => Ptr () -> Bool -> m (Ptr RWops)
rwFromFP v1 v2 = liftIO $ rwFromFPFFI v1 v2
# INLINE rwFromFP #
rwFromFile :: MonadIO m => CString -> CString -> m (Ptr RWops)
rwFromFile v1 v2 = liftIO $ rwFromFileFFI v1 v2
# INLINE rwFromFile #
rwFromMem :: MonadIO m => Ptr () -> CInt -> m (Ptr RWops)
rwFromMem v1 v2 = liftIO $ rwFromMemFFI v1 v2
# INLINE rwFromMem #
rwClose :: MonadIO m => Ptr RWops -> m CInt
rwClose v1 = liftIO $ rwCloseFFI v1
# INLINE rwClose #
rwRead :: MonadIO m => Ptr RWops -> Ptr () -> CSize -> CSize -> m CSize
rwRead v1 v2 v3 v4 = liftIO $ rwReadFFI v1 v2 v3 v4
# INLINE rwRead #
rwSeek :: MonadIO m => Ptr RWops -> Int64 -> CInt -> m Int64
rwSeek v1 v2 v3 = liftIO $ rwSeekFFI v1 v2 v3
{-# INLINE rwSeek #-}
rwTell :: MonadIO m => Ptr RWops -> m Int64
rwTell v1 = liftIO $ rwTellFFI v1
# INLINE rwTell #
rwWrite :: MonadIO m => Ptr RWops -> Ptr () -> CSize -> CSize -> m CSize
rwWrite v1 v2 v3 v4 = liftIO $ rwWriteFFI v1 v2 v3 v4
# INLINE rwWrite #
readBE16 :: MonadIO m => Ptr RWops -> m Word16
readBE16 v1 = liftIO $ readBE16FFI v1
# INLINE readBE16 #
readBE32 :: MonadIO m => Ptr RWops -> m Word32
readBE32 v1 = liftIO $ readBE32FFI v1
# INLINE readBE32 #
readBE64 :: MonadIO m => Ptr RWops -> m Word64
readBE64 v1 = liftIO $ readBE64FFI v1
# INLINE readBE64 #
readLE16 :: MonadIO m => Ptr RWops -> m Word16
readLE16 v1 = liftIO $ readLE16FFI v1
# INLINE readLE16 #
readLE32 :: MonadIO m => Ptr RWops -> m Word32
readLE32 v1 = liftIO $ readLE32FFI v1
# INLINE readLE32 #
readLE64 :: MonadIO m => Ptr RWops -> m Word64
readLE64 v1 = liftIO $ readLE64FFI v1
# INLINE readLE64 #
writeBE16 :: MonadIO m => Ptr RWops -> Word16 -> m CSize
writeBE16 v1 v2 = liftIO $ writeBE16FFI v1 v2
# INLINE writeBE16 #
writeBE32 :: MonadIO m => Ptr RWops -> Word32 -> m CSize
writeBE32 v1 v2 = liftIO $ writeBE32FFI v1 v2
# INLINE writeBE32 #
writeBE64 :: MonadIO m => Ptr RWops -> Word64 -> m CSize
writeBE64 v1 v2 = liftIO $ writeBE64FFI v1 v2
# INLINE writeBE64 #
writeLE16 :: MonadIO m => Ptr RWops -> Word16 -> m CSize
writeLE16 v1 v2 = liftIO $ writeLE16FFI v1 v2
# INLINE writeLE16 #
writeLE32 :: MonadIO m => Ptr RWops -> Word32 -> m CSize
writeLE32 v1 v2 = liftIO $ writeLE32FFI v1 v2
# INLINE writeLE32 #
writeLE64 :: MonadIO m => Ptr RWops -> Word64 -> m CSize
writeLE64 v1 v2 = liftIO $ writeLE64FFI v1 v2
# INLINE writeLE64 #
| null | https://raw.githubusercontent.com/haskell-game/sdl2/a2646ede53d98aebf8f7c49b01f99236701b7e44/src/SDL/Raw/Filesystem.hs | haskell | * Filesystem Paths
* File I/O Abstraction
# INLINE freeRW #
# INLINE rwSeek # | module SDL.Raw.Filesystem (
getBasePath,
getPrefPath,
allocRW,
freeRW,
rwFromConstMem,
rwFromFP,
rwFromFile,
rwFromMem,
rwClose,
rwRead,
rwSeek,
rwTell,
rwWrite,
readBE16,
readBE32,
readBE64,
readLE16,
readLE32,
readLE64,
writeBE16,
writeBE32,
writeBE64,
writeLE16,
writeLE32,
writeLE64
) where
import Control.Monad.IO.Class
import Data.Int
import Data.Word
import Foreign.C.String
import Foreign.C.Types
import Foreign.Ptr
import SDL.Raw.Types
foreign import ccall "SDL.h SDL_GetBasePath" getBasePathFFI :: IO CString
foreign import ccall "SDL.h SDL_GetPrefPath" getPrefPathFFI :: CString -> CString -> IO CString
foreign import ccall "SDL.h SDL_AllocRW" allocRWFFI :: IO (Ptr RWops)
foreign import ccall "SDL.h SDL_FreeRW" freeRWFFI :: Ptr RWops -> IO ()
foreign import ccall "SDL.h SDL_RWFromConstMem" rwFromConstMemFFI :: Ptr () -> CInt -> IO (Ptr RWops)
foreign import ccall "SDL.h SDL_RWFromFP" rwFromFPFFI :: Ptr () -> Bool -> IO (Ptr RWops)
foreign import ccall "SDL.h SDL_RWFromFile" rwFromFileFFI :: CString -> CString -> IO (Ptr RWops)
foreign import ccall "SDL.h SDL_RWFromMem" rwFromMemFFI :: Ptr () -> CInt -> IO (Ptr RWops)
foreign import ccall "sdlhelper.h SDLHelper_RWclose" rwCloseFFI :: Ptr RWops -> IO CInt
foreign import ccall "sdlhelper.h SDLHelper_RWread" rwReadFFI :: Ptr RWops -> Ptr () -> CSize -> CSize -> IO CSize
foreign import ccall "sdlhelper.h SDLHelper_RWseek" rwSeekFFI :: Ptr RWops -> Int64 -> CInt -> IO Int64
foreign import ccall "sdlhelper.h SDLHelper_RWtell" rwTellFFI :: Ptr RWops -> IO Int64
foreign import ccall "sdlhelper.h SDLHelper_RWwrite" rwWriteFFI :: Ptr RWops -> Ptr () -> CSize -> CSize -> IO CSize
foreign import ccall "SDL.h SDL_ReadBE16" readBE16FFI :: Ptr RWops -> IO Word16
foreign import ccall "SDL.h SDL_ReadBE32" readBE32FFI :: Ptr RWops -> IO Word32
foreign import ccall "SDL.h SDL_ReadBE64" readBE64FFI :: Ptr RWops -> IO Word64
foreign import ccall "SDL.h SDL_ReadLE16" readLE16FFI :: Ptr RWops -> IO Word16
foreign import ccall "SDL.h SDL_ReadLE32" readLE32FFI :: Ptr RWops -> IO Word32
foreign import ccall "SDL.h SDL_ReadLE64" readLE64FFI :: Ptr RWops -> IO Word64
foreign import ccall "SDL.h SDL_WriteBE16" writeBE16FFI :: Ptr RWops -> Word16 -> IO CSize
foreign import ccall "SDL.h SDL_WriteBE32" writeBE32FFI :: Ptr RWops -> Word32 -> IO CSize
foreign import ccall "SDL.h SDL_WriteBE64" writeBE64FFI :: Ptr RWops -> Word64 -> IO CSize
foreign import ccall "SDL.h SDL_WriteLE16" writeLE16FFI :: Ptr RWops -> Word16 -> IO CSize
foreign import ccall "SDL.h SDL_WriteLE32" writeLE32FFI :: Ptr RWops -> Word32 -> IO CSize
foreign import ccall "SDL.h SDL_WriteLE64" writeLE64FFI :: Ptr RWops -> Word64 -> IO CSize
getBasePath :: MonadIO m => m CString
getBasePath = liftIO getBasePathFFI
# INLINE getBasePath #
getPrefPath :: MonadIO m => CString -> CString -> m CString
getPrefPath v1 v2 = liftIO $ getPrefPathFFI v1 v2
# INLINE getPrefPath #
allocRW :: MonadIO m => m (Ptr RWops)
allocRW = liftIO allocRWFFI
# INLINE allocRW #
freeRW :: MonadIO m => Ptr RWops -> m ()
freeRW v1 = liftIO $ freeRWFFI v1
rwFromConstMem :: MonadIO m => Ptr () -> CInt -> m (Ptr RWops)
rwFromConstMem v1 v2 = liftIO $ rwFromConstMemFFI v1 v2
# INLINE rwFromConstMem #
rwFromFP :: MonadIO m => Ptr () -> Bool -> m (Ptr RWops)
rwFromFP v1 v2 = liftIO $ rwFromFPFFI v1 v2
# INLINE rwFromFP #
rwFromFile :: MonadIO m => CString -> CString -> m (Ptr RWops)
rwFromFile v1 v2 = liftIO $ rwFromFileFFI v1 v2
# INLINE rwFromFile #
rwFromMem :: MonadIO m => Ptr () -> CInt -> m (Ptr RWops)
rwFromMem v1 v2 = liftIO $ rwFromMemFFI v1 v2
# INLINE rwFromMem #
rwClose :: MonadIO m => Ptr RWops -> m CInt
rwClose v1 = liftIO $ rwCloseFFI v1
# INLINE rwClose #
rwRead :: MonadIO m => Ptr RWops -> Ptr () -> CSize -> CSize -> m CSize
rwRead v1 v2 v3 v4 = liftIO $ rwReadFFI v1 v2 v3 v4
# INLINE rwRead #
rwSeek :: MonadIO m => Ptr RWops -> Int64 -> CInt -> m Int64
rwSeek v1 v2 v3 = liftIO $ rwSeekFFI v1 v2 v3
rwTell :: MonadIO m => Ptr RWops -> m Int64
rwTell v1 = liftIO $ rwTellFFI v1
# INLINE rwTell #
rwWrite :: MonadIO m => Ptr RWops -> Ptr () -> CSize -> CSize -> m CSize
rwWrite v1 v2 v3 v4 = liftIO $ rwWriteFFI v1 v2 v3 v4
# INLINE rwWrite #
readBE16 :: MonadIO m => Ptr RWops -> m Word16
readBE16 v1 = liftIO $ readBE16FFI v1
# INLINE readBE16 #
readBE32 :: MonadIO m => Ptr RWops -> m Word32
readBE32 v1 = liftIO $ readBE32FFI v1
# INLINE readBE32 #
readBE64 :: MonadIO m => Ptr RWops -> m Word64
readBE64 v1 = liftIO $ readBE64FFI v1
# INLINE readBE64 #
readLE16 :: MonadIO m => Ptr RWops -> m Word16
readLE16 v1 = liftIO $ readLE16FFI v1
# INLINE readLE16 #
readLE32 :: MonadIO m => Ptr RWops -> m Word32
readLE32 v1 = liftIO $ readLE32FFI v1
# INLINE readLE32 #
readLE64 :: MonadIO m => Ptr RWops -> m Word64
readLE64 v1 = liftIO $ readLE64FFI v1
# INLINE readLE64 #
writeBE16 :: MonadIO m => Ptr RWops -> Word16 -> m CSize
writeBE16 v1 v2 = liftIO $ writeBE16FFI v1 v2
# INLINE writeBE16 #
writeBE32 :: MonadIO m => Ptr RWops -> Word32 -> m CSize
writeBE32 v1 v2 = liftIO $ writeBE32FFI v1 v2
# INLINE writeBE32 #
writeBE64 :: MonadIO m => Ptr RWops -> Word64 -> m CSize
writeBE64 v1 v2 = liftIO $ writeBE64FFI v1 v2
# INLINE writeBE64 #
writeLE16 :: MonadIO m => Ptr RWops -> Word16 -> m CSize
writeLE16 v1 v2 = liftIO $ writeLE16FFI v1 v2
# INLINE writeLE16 #
writeLE32 :: MonadIO m => Ptr RWops -> Word32 -> m CSize
writeLE32 v1 v2 = liftIO $ writeLE32FFI v1 v2
# INLINE writeLE32 #
writeLE64 :: MonadIO m => Ptr RWops -> Word64 -> m CSize
writeLE64 v1 v2 = liftIO $ writeLE64FFI v1 v2
# INLINE writeLE64 #
|
88bd3cf5b30eb5d824ec762fff6e2c3672f1f30e3ee1de5b59b77e78586bf295 | wilkerlucio/mazes | project.clj | (defproject mazes "0.1.0-SNAPSHOT"
:description "FIXME: write this!"
:url ""
:license {:name "Eclipse Public License"
:url "-v10.html"}
:dependencies [[org.clojure/clojure "1.7.0-RC1"]
[org.clojure/clojurescript "0.0-3269"]
[org.clojure/core.async "0.1.346.0-17112a-alpha"]
[org.omcljs/om "0.8.8"]
[racehub/om-bootstrap "0.4.2"]]
:profiles {:dev {:dependencies [[figwheel "0.3.3"]]
:plugins [[lein-cljsbuild "1.0.5"]
[lein-figwheel "0.3.3"]]}}
:source-paths ["src/cljs" "src/dev"]
:clean-targets ^{:protect false} ["resources/public/js/compiled"]
:cljsbuild {
:builds [{:id "dev"
:source-paths ["src/cljs" "src/dev" "test"]
:compiler {:output-to "resources/public/js/compiled/mazes.js"
:output-dir "resources/public/js/compiled/out"
:optimizations :none
:main mazes.dev
:asset-path "js/compiled/out"
:source-map true
;; :source-map-timestamp true
:cache-analysis true }}
{:id "test"
:source-paths ["src/cljs" "test"]
:compiler {:output-to "resources/public/js/test/mazes-test.js"
:output-dir "resources/public/js/test/out"
:optimizations :none
:main mazes.test-runner
:asset-path "js/test/out"
:source-map true
;;: source-map-timestamp true
:cache-analysis true }}
{:id "whitespace"
:source-paths ["src/cljs"]
:compiler {:output-to "resources/public/js/white/mazes.js"
:main mazes.core
:optimizations :whitespace
:pretty-print false}}
{:id "min"
:source-paths ["src/cljs"]
:compiler {:output-to "resources/public/js/min/mazes.js"
:main mazes.core
:optimizations :advanced
:pretty-print false}}]}
:figwheel {
:http-server-root "public" ;; default and assumes "resources"
:server-port 3449 ;; default
:css-dirs ["resources/public/css"] ;; watch and update CSS
Start an nREPL server into the running figwheel process
:nrepl-port 7888
Server Ring Handler ( optional )
;; if you want to embed a ring handler into the figwheel http-kit
;; server, this is simple ring servers, if this
;; doesn't work for you just run your own server :)
;; :ring-handler hello_world.server/handler
;; To be able to open files in your editor from the heads up display
;; you will need to put a script on your path.
;; that script will have to take a file path and a line number
;; ie. in ~/bin/myfile-opener
;; #! /bin/sh
emacsclient -n + $ 2 $ 1
;;
;; :open-file-command "myfile-opener"
;; if you want to disable the REPL
;; :repl false
;; to configure a different figwheel logfile path
;; :server-logfile "tmp/logs/figwheel-logfile.log"
})
| null | https://raw.githubusercontent.com/wilkerlucio/mazes/36bed0791cf1ab778db6d0b91a63bdb9fcbcf8bb/project.clj | clojure | :source-map-timestamp true
: source-map-timestamp true
default and assumes "resources"
default
watch and update CSS
if you want to embed a ring handler into the figwheel http-kit
server, this is simple ring servers, if this
doesn't work for you just run your own server :)
:ring-handler hello_world.server/handler
To be able to open files in your editor from the heads up display
you will need to put a script on your path.
that script will have to take a file path and a line number
ie. in ~/bin/myfile-opener
#! /bin/sh
:open-file-command "myfile-opener"
if you want to disable the REPL
:repl false
to configure a different figwheel logfile path
:server-logfile "tmp/logs/figwheel-logfile.log" | (defproject mazes "0.1.0-SNAPSHOT"
:description "FIXME: write this!"
:url ""
:license {:name "Eclipse Public License"
:url "-v10.html"}
:dependencies [[org.clojure/clojure "1.7.0-RC1"]
[org.clojure/clojurescript "0.0-3269"]
[org.clojure/core.async "0.1.346.0-17112a-alpha"]
[org.omcljs/om "0.8.8"]
[racehub/om-bootstrap "0.4.2"]]
:profiles {:dev {:dependencies [[figwheel "0.3.3"]]
:plugins [[lein-cljsbuild "1.0.5"]
[lein-figwheel "0.3.3"]]}}
:source-paths ["src/cljs" "src/dev"]
:clean-targets ^{:protect false} ["resources/public/js/compiled"]
:cljsbuild {
:builds [{:id "dev"
:source-paths ["src/cljs" "src/dev" "test"]
:compiler {:output-to "resources/public/js/compiled/mazes.js"
:output-dir "resources/public/js/compiled/out"
:optimizations :none
:main mazes.dev
:asset-path "js/compiled/out"
:source-map true
:cache-analysis true }}
{:id "test"
:source-paths ["src/cljs" "test"]
:compiler {:output-to "resources/public/js/test/mazes-test.js"
:output-dir "resources/public/js/test/out"
:optimizations :none
:main mazes.test-runner
:asset-path "js/test/out"
:source-map true
:cache-analysis true }}
{:id "whitespace"
:source-paths ["src/cljs"]
:compiler {:output-to "resources/public/js/white/mazes.js"
:main mazes.core
:optimizations :whitespace
:pretty-print false}}
{:id "min"
:source-paths ["src/cljs"]
:compiler {:output-to "resources/public/js/min/mazes.js"
:main mazes.core
:optimizations :advanced
:pretty-print false}}]}
:figwheel {
Start an nREPL server into the running figwheel process
:nrepl-port 7888
Server Ring Handler ( optional )
emacsclient -n + $ 2 $ 1
})
|
ffb147b4e0bb9705d1841ef8f95fc0edbcdbc15588ffe592260b6127657b3b22 | input-output-hk/plutus | Time.hs | -- editorconfig-checker-disable-file
{-# LANGUAGE DeriveAnyClass #-}
# LANGUAGE NoImplicitPrelude #
{-# LANGUAGE OverloadedStrings #-}
# LANGUAGE TemplateHaskell #
-- Otherwise we get a complaint about the 'fromIntegral' call in the generated instance of 'Integral' for 'Ada'
# OPTIONS_GHC -Wno - identities #
# OPTIONS_GHC -fno - ignore - interface - pragmas #
# OPTIONS_GHC -fno - omit - interface - pragmas #
-- | UTCTime and UTCTime ranges.
module PlutusLedgerApi.V1.Time
( POSIXTime(..)
, POSIXTimeRange
, DiffMilliSeconds(..)
, fromMilliSeconds
) where
import Control.DeepSeq (NFData)
import GHC.Generics (Generic)
import PlutusLedgerApi.V1.Interval
import PlutusTx qualified
import PlutusTx.Lift (makeLift)
import PlutusTx.Prelude
import Prelude qualified as Haskell
import Prettyprinter (Pretty (pretty), (<+>))
-- | This is a length of time, as measured by a number of milliseconds.
newtype DiffMilliSeconds = DiffMilliSeconds Integer
deriving stock (Haskell.Eq, Haskell.Ord, Haskell.Show, Generic)
deriving anyclass (NFData)
deriving newtype (Haskell.Num, AdditiveSemigroup, AdditiveMonoid, AdditiveGroup, Haskell.Enum, Eq, Ord, Haskell.Real, Haskell.Integral, PlutusTx.ToData, PlutusTx.FromData, PlutusTx.UnsafeFromData)
makeLift ''DiffMilliSeconds
| POSIX time is measured as the number of /milliseconds/ since 1970 - 01 - 01T00:00:00Z.
This is not the same as 's ` Data . Time . Clock . POSIX.POSIXTime `
newtype POSIXTime = POSIXTime { getPOSIXTime :: Integer }
deriving stock (Haskell.Eq, Haskell.Ord, Haskell.Show, Generic)
deriving anyclass (NFData)
deriving newtype (AdditiveSemigroup, AdditiveMonoid, AdditiveGroup, Eq, Ord, Enum, PlutusTx.ToData, PlutusTx.FromData, PlutusTx.UnsafeFromData)
deriving newtype (Haskell.Num, Haskell.Enum, Haskell.Real, Haskell.Integral)
makeLift ''POSIXTime
instance Pretty POSIXTime where
pretty (POSIXTime i) = "POSIXTime" <+> pretty i
-- | An 'Interval' of 'POSIXTime's.
type POSIXTimeRange = Interval POSIXTime
-- | Simple conversion from 'DiffMilliSeconds' to 'POSIXTime'.
# INLINABLE fromMilliSeconds #
fromMilliSeconds :: DiffMilliSeconds -> POSIXTime
fromMilliSeconds (DiffMilliSeconds s) = POSIXTime s
| null | https://raw.githubusercontent.com/input-output-hk/plutus/00531085f482546267b964d08ae4ed89328de929/plutus-ledger-api/src/PlutusLedgerApi/V1/Time.hs | haskell | editorconfig-checker-disable-file
# LANGUAGE DeriveAnyClass #
# LANGUAGE OverloadedStrings #
Otherwise we get a complaint about the 'fromIntegral' call in the generated instance of 'Integral' for 'Ada'
| UTCTime and UTCTime ranges.
| This is a length of time, as measured by a number of milliseconds.
| An 'Interval' of 'POSIXTime's.
| Simple conversion from 'DiffMilliSeconds' to 'POSIXTime'. | # LANGUAGE NoImplicitPrelude #
# LANGUAGE TemplateHaskell #
# OPTIONS_GHC -Wno - identities #
# OPTIONS_GHC -fno - ignore - interface - pragmas #
# OPTIONS_GHC -fno - omit - interface - pragmas #
module PlutusLedgerApi.V1.Time
( POSIXTime(..)
, POSIXTimeRange
, DiffMilliSeconds(..)
, fromMilliSeconds
) where
import Control.DeepSeq (NFData)
import GHC.Generics (Generic)
import PlutusLedgerApi.V1.Interval
import PlutusTx qualified
import PlutusTx.Lift (makeLift)
import PlutusTx.Prelude
import Prelude qualified as Haskell
import Prettyprinter (Pretty (pretty), (<+>))
newtype DiffMilliSeconds = DiffMilliSeconds Integer
deriving stock (Haskell.Eq, Haskell.Ord, Haskell.Show, Generic)
deriving anyclass (NFData)
deriving newtype (Haskell.Num, AdditiveSemigroup, AdditiveMonoid, AdditiveGroup, Haskell.Enum, Eq, Ord, Haskell.Real, Haskell.Integral, PlutusTx.ToData, PlutusTx.FromData, PlutusTx.UnsafeFromData)
makeLift ''DiffMilliSeconds
| POSIX time is measured as the number of /milliseconds/ since 1970 - 01 - 01T00:00:00Z.
This is not the same as 's ` Data . Time . Clock . POSIX.POSIXTime `
newtype POSIXTime = POSIXTime { getPOSIXTime :: Integer }
deriving stock (Haskell.Eq, Haskell.Ord, Haskell.Show, Generic)
deriving anyclass (NFData)
deriving newtype (AdditiveSemigroup, AdditiveMonoid, AdditiveGroup, Eq, Ord, Enum, PlutusTx.ToData, PlutusTx.FromData, PlutusTx.UnsafeFromData)
deriving newtype (Haskell.Num, Haskell.Enum, Haskell.Real, Haskell.Integral)
makeLift ''POSIXTime
instance Pretty POSIXTime where
pretty (POSIXTime i) = "POSIXTime" <+> pretty i
type POSIXTimeRange = Interval POSIXTime
# INLINABLE fromMilliSeconds #
fromMilliSeconds :: DiffMilliSeconds -> POSIXTime
fromMilliSeconds (DiffMilliSeconds s) = POSIXTime s
|
e7999853dbf991add26cc78a197c026fa56e9575909c90fd8b7fd86bf2e132fa | unison-code/unison | CleanPragmas.hs | |
Copyright : Copyright ( c ) 2016 , RISE SICS AB
License : BSD3 ( see the LICENSE file )
Maintainer :
Copyright : Copyright (c) 2016, RISE SICS AB
License : BSD3 (see the LICENSE file)
Maintainer :
-}
Main authors :
< >
This file is part of Unison , see -code.github.io
Main authors:
Roberto Castaneda Lozano <>
This file is part of Unison, see -code.github.io
-}
module Unison.Transformations.CleanPragmas (cleanPragmas)
where
import Data.Maybe
import Unison.Base
import Unison.Util
cleanPragmas ts f @ Function {fComments = comments} _ =
let comments' = foldl removePragma comments ts
in f {fComments = comments'}
removePragma comments t = mapMaybe (removePragmaInComment t) comments
removePragmaInComment tool comment =
case splitPragmaComment comment of
hdr : _ -> if hdr == (pragmaHeader tool) then Nothing else Just comment
_ -> Just comment
| null | https://raw.githubusercontent.com/unison-code/unison/9f8caf78230f956a57b50a327f8d1dca5839bf64/src/unison/src/Unison/Transformations/CleanPragmas.hs | haskell | |
Copyright : Copyright ( c ) 2016 , RISE SICS AB
License : BSD3 ( see the LICENSE file )
Maintainer :
Copyright : Copyright (c) 2016, RISE SICS AB
License : BSD3 (see the LICENSE file)
Maintainer :
-}
Main authors :
< >
This file is part of Unison , see -code.github.io
Main authors:
Roberto Castaneda Lozano <>
This file is part of Unison, see -code.github.io
-}
module Unison.Transformations.CleanPragmas (cleanPragmas)
where
import Data.Maybe
import Unison.Base
import Unison.Util
cleanPragmas ts f @ Function {fComments = comments} _ =
let comments' = foldl removePragma comments ts
in f {fComments = comments'}
removePragma comments t = mapMaybe (removePragmaInComment t) comments
removePragmaInComment tool comment =
case splitPragmaComment comment of
hdr : _ -> if hdr == (pragmaHeader tool) then Nothing else Just comment
_ -> Just comment
|
|
571e33e75716c943baedb59d5b1072c8a5ae06f4efd968f50a8629947c00a2fc | namin/inc | tests-6.1-req.scm | (add-tests-with-string-output "define function"
[(let ()
(define (x) 3)
(define (y) 4)
(fx+ (x) (y)))
=> "7\n"]
[(let ()
(define (f x y) (fx+ x y))
(f 3 4))
=> "7\n"]
[(let ()
(define (f x) (fx+ x x))
(f 3))
=> "6\n"])
| null | https://raw.githubusercontent.com/namin/inc/3f683935e290848485f8d4d165a4f727f6658d1d/src/tests-6.1-req.scm | scheme | (add-tests-with-string-output "define function"
[(let ()
(define (x) 3)
(define (y) 4)
(fx+ (x) (y)))
=> "7\n"]
[(let ()
(define (f x y) (fx+ x y))
(f 3 4))
=> "7\n"]
[(let ()
(define (f x) (fx+ x x))
(f 3))
=> "6\n"])
|
|
89fc49e68412fe88fff28eb834f2a30efbeb125d535ca60418e5f7880779eb24 | mzp/coq-ide-for-ios | index.ml | (* -*- compile-command: "make -C ../.. bin/coqdoc" -*- *)
(************************************************************************)
v * The Coq Proof Assistant / The Coq Development Team
< O _ _ _ , , * INRIA - CNRS - LIX - LRI - PPS - Copyright 1999 - 2010
\VV/ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
(* // * This file is distributed under the terms of the *)
(* * GNU Lesser General Public License Version 2.1 *)
(************************************************************************)
i $ I d : index.ml 13676 2010 - 12 - 04 10:34:21Z herbelin $ i
open Filename
open Lexing
open Printf
open Cdglobals
type loc = int
type entry_type =
| Library
| Module
| Definition
| Inductive
| Constructor
| Lemma
| Record
| Projection
| Instance
| Class
| Method
| Variable
| Axiom
| TacticDefinition
| Abbreviation
| Notation
| Section
type index_entry =
| Def of string * entry_type
| Ref of coq_module * string * entry_type
| Mod of coq_module * string
let current_type : entry_type ref = ref Library
let current_library = ref ""
(** refers to the file being parsed *)
(** [deftable] stores only definitions and is used to interpolate idents
inside comments, which are not globalized otherwise. *)
let deftable = Hashtbl.create 97
(** [reftable] stores references and definitions *)
let reftable = Hashtbl.create 97
let full_ident sp id =
if sp <> "<>" then
if id <> "<>" then
sp ^ "." ^ id
else sp
else if id <> "<>"
then id
else ""
let add_def loc ty sp id =
Hashtbl.add reftable (!current_library, loc) (Def (full_ident sp id, ty));
Hashtbl.add deftable id (Ref (!current_library, full_ident sp id, ty))
let add_ref m loc m' sp id ty =
if Hashtbl.mem reftable (m, loc) then ()
else Hashtbl.add reftable (m, loc) (Ref (m', full_ident sp id, ty));
let idx = if id = "<>" then m' else id in
if Hashtbl.mem deftable idx then ()
else Hashtbl.add deftable idx (Ref (m', full_ident sp id, ty))
let add_mod m loc m' id =
Hashtbl.add reftable (m, loc) (Mod (m', id));
Hashtbl.add deftable m (Mod (m', id))
let find m l = Hashtbl.find reftable (m, l)
let find_string m s = Hashtbl.find deftable s
(*s Manipulating path prefixes *)
type stack = string list
let rec string_of_stack st =
match st with
| [] -> ""
| x::[] -> x
| x::tl -> (string_of_stack tl) ^ "." ^ x
let empty_stack = []
let module_stack = ref empty_stack
let section_stack = ref empty_stack
let init_stack () =
module_stack := empty_stack; section_stack := empty_stack
let push st p = st := p::!st
let pop st =
match !st with
| [] -> ()
| _::tl -> st := tl
let head st =
match st with
| [] -> ""
| x::_ -> x
let begin_module m = push module_stack m
let begin_section s = push section_stack s
let end_block id =
(** determines if it ends a module or a section and pops the stack *)
if ((String.compare (head !module_stack) id ) == 0) then
pop module_stack
else if ((String.compare (head !section_stack) id) == 0) then
pop section_stack
else
()
let make_fullid id =
(** prepends the current module path to an id *)
let path = string_of_stack !module_stack in
if String.length path > 0 then
path ^ "." ^ id
else
id
Coq modules
let split_sp s =
try
let i = String.rindex s '.' in
String.sub s 0 i, String.sub s (i + 1) (String.length s - i - 1)
with
Not_found -> "", s
let modules = Hashtbl.create 97
let local_modules = Hashtbl.create 97
let add_module m =
let _,id = split_sp m in
Hashtbl.add modules id m;
Hashtbl.add local_modules m ()
type module_kind = Local | External of string | Unknown
let external_libraries = ref []
let add_external_library logicalpath url =
external_libraries := (logicalpath,url) :: !external_libraries
let find_external_library logicalpath =
let rec aux = function
| [] -> raise Not_found
| (l,u)::rest ->
if String.length logicalpath > String.length l &
String.sub logicalpath 0 (String.length l + 1) = l ^"."
then u
else aux rest
in aux !external_libraries
let init_coqlib_library () = add_external_library "Coq" !coqlib
let find_module m =
if Hashtbl.mem local_modules m then
Local
else
try External (Filename.concat (find_external_library m) m)
with Not_found -> Unknown
(* Building indexes *)
type 'a index = {
idx_name : string;
idx_entries : (char * (string * 'a) list) list;
idx_size : int }
let map f i =
{ i with idx_entries =
List.map
(fun (c,l) -> (c, List.map (fun (s,x) -> (s,f s x)) l))
i.idx_entries }
let compare_entries (s1,_) (s2,_) = Alpha.compare_string s1 s2
let sort_entries el =
let t = Hashtbl.create 97 in
List.iter
(fun c -> Hashtbl.add t c [])
['A'; 'B'; 'C'; 'D'; 'E'; 'F'; 'G'; 'H'; 'I'; 'J'; 'K'; 'L'; 'M'; 'N';
'O'; 'P'; 'Q'; 'R'; 'S'; 'T'; 'U'; 'V'; 'W'; 'X'; 'Y'; 'Z'; '_'; '*'];
List.iter
(fun ((s,_) as e) ->
let c = Alpha.norm_char s.[0] in
let c,l =
try c,Hashtbl.find t c with Not_found -> '*',Hashtbl.find t '*' in
Hashtbl.replace t c (e :: l))
el;
let res = ref [] in
Hashtbl.iter (fun c l -> res := (c, List.sort compare_entries l) :: !res) t;
List.sort (fun (c1,_) (c2,_) -> Alpha.compare_char c1 c2) !res
let display_letter c = if c = '*' then "other" else String.make 1 c
let index_size = List.fold_left (fun s (_,l) -> s + List.length l) 0
let hashtbl_elements h = Hashtbl.fold (fun x y l -> (x,y)::l) h []
let type_name = function
| Library ->
let ln = !lib_name in
if ln <> "" then String.lowercase ln else "library"
| Module -> "module"
| Definition -> "definition"
| Inductive -> "inductive"
| Constructor -> "constructor"
| Lemma -> "lemma"
| Record -> "record"
| Projection -> "projection"
| Instance -> "instance"
| Class -> "class"
| Method -> "method"
| Variable -> "variable"
| Axiom -> "axiom"
| TacticDefinition -> "tactic"
| Abbreviation -> "abbreviation"
| Notation -> "notation"
| Section -> "section"
let prepare_entry s = function
| Notation ->
(* We decode the encoding done in Dumpglob.cook_notation of coqtop *)
(* Encoded notations have the form section:sc:x_'++'_x where: *)
(* - the section, if any, ends with a "." *)
(* - the scope can be empty *)
(* - tokens are separated with "_" *)
(* - non-terminal symbols are conventionally represented by "x" *)
(* - terminals are enclosed within simple quotes *)
(* - existing simple quotes (that necessarily are parts of terminals) *)
(* are doubled *)
(* (as a consequence, when a terminal contains "_" or "x", these *)
(* necessarily appear enclosed within non-doubled simple quotes) *)
(* Example: "x ' %x _% y %'x %'_' z" is encoded as *)
(* "x_''''_'%x'_'_%'_x_'%''x'_'%''_'''_x" *)
let err () = eprintf "Invalid notation in globalization file\n"; exit 1 in
let h = try String.index_from s 0 ':' with _ -> err () in
let i = try String.index_from s (h+1) ':' with _ -> err () in
let sc = String.sub s (h+1) (i-h-1) in
let ntn = String.make (String.length s - i) ' ' in
let k = ref 0 in
let j = ref (i+1) in
let quoted = ref false in
let l = String.length s - 1 in
while !j <= l do
if not !quoted then begin
(match s.[!j] with
| '_' -> ntn.[!k] <- ' '; incr k
| 'x' -> ntn.[!k] <- '_'; incr k
| '\'' -> quoted := true
| _ -> assert false)
end
else
if s.[!j] = '\'' then begin
if (!j = l || s.[!j+1] <> '\'') then quoted := false
else (ntn.[!k] <- s.[!j]; incr k; incr j)
end else begin
ntn.[!k] <- s.[!j];
incr k
end;
incr j
done;
let ntn = String.sub ntn 0 !k in
if sc = "" then ntn else ntn ^ " (" ^ sc ^ ")"
| _ ->
s
let all_entries () =
let gl = ref [] in
let add_g s m t = gl := (s,(m,t)) :: !gl in
let bt = Hashtbl.create 11 in
let add_bt t s m =
let l = try Hashtbl.find bt t with Not_found -> [] in
Hashtbl.replace bt t ((s,m) :: l)
in
let classify (m,_) e = match e with
| Def (s,t) -> add_g s m t; add_bt t s m
| Ref _ | Mod _ -> ()
in
Hashtbl.iter classify reftable;
Hashtbl.iter (fun id m -> add_g id m Library; add_bt Library id m) modules;
{ idx_name = "global";
idx_entries = sort_entries !gl;
idx_size = List.length !gl },
Hashtbl.fold (fun t e l -> (t, { idx_name = type_name t;
idx_entries = sort_entries e;
idx_size = List.length e }) :: l) bt []
let type_of_string = function
| "def" | "coe" | "subclass" | "canonstruc" | "fix" | "cofix"
| "ex" | "scheme" -> Definition
| "prf" | "thm" -> Lemma
| "ind" | "coind" -> Inductive
| "constr" -> Constructor
| "rec" | "corec" -> Record
| "proj" -> Projection
| "class" -> Class
| "meth" -> Method
| "inst" -> Instance
| "var" -> Variable
| "defax" | "prfax" | "ax" -> Axiom
| "syndef" -> Abbreviation
| "not" -> Notation
| "lib" -> Library
| "mod" | "modtype" -> Module
| "tac" -> TacticDefinition
| "sec" -> Section
| s -> raise (Invalid_argument ("type_of_string:" ^ s))
let read_glob f =
let c = open_in f in
let cur_mod = ref "" in
try
while true do
let s = input_line c in
let n = String.length s in
if n > 0 then begin
match s.[0] with
| 'F' ->
cur_mod := String.sub s 1 (n - 1);
current_library := !cur_mod
| 'R' ->
(try
Scanf.sscanf s "R%d:%d %s %s %s %s"
(fun loc1 loc2 lib_dp sp id ty ->
for loc=loc1 to loc2 do
add_ref !cur_mod loc lib_dp sp id (type_of_string ty)
done)
with _ ->
try
Scanf.sscanf s "R%d %s %s %s %s"
(fun loc lib_dp sp id ty ->
add_ref !cur_mod loc lib_dp sp id (type_of_string ty))
with _ -> ())
| _ ->
try Scanf.sscanf s "%s %d %s %s"
(fun ty loc sp id -> add_def loc (type_of_string ty) sp id)
with Scanf.Scan_failure _ -> ()
end
done; assert false
with End_of_file ->
close_in c
| null | https://raw.githubusercontent.com/mzp/coq-ide-for-ios/4cdb389bbecd7cdd114666a8450ecf5b5f0391d3/coqlib/tools/coqdoc/index.ml | ocaml | -*- compile-command: "make -C ../.. bin/coqdoc" -*-
**********************************************************************
// * This file is distributed under the terms of the
* GNU Lesser General Public License Version 2.1
**********************************************************************
* refers to the file being parsed
* [deftable] stores only definitions and is used to interpolate idents
inside comments, which are not globalized otherwise.
* [reftable] stores references and definitions
s Manipulating path prefixes
* determines if it ends a module or a section and pops the stack
* prepends the current module path to an id
Building indexes
We decode the encoding done in Dumpglob.cook_notation of coqtop
Encoded notations have the form section:sc:x_'++'_x where:
- the section, if any, ends with a "."
- the scope can be empty
- tokens are separated with "_"
- non-terminal symbols are conventionally represented by "x"
- terminals are enclosed within simple quotes
- existing simple quotes (that necessarily are parts of terminals)
are doubled
(as a consequence, when a terminal contains "_" or "x", these
necessarily appear enclosed within non-doubled simple quotes)
Example: "x ' %x _% y %'x %'_' z" is encoded as
"x_''''_'%x'_'_%'_x_'%''x'_'%''_'''_x" | v * The Coq Proof Assistant / The Coq Development Team
< O _ _ _ , , * INRIA - CNRS - LIX - LRI - PPS - Copyright 1999 - 2010
\VV/ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
i $ I d : index.ml 13676 2010 - 12 - 04 10:34:21Z herbelin $ i
open Filename
open Lexing
open Printf
open Cdglobals
type loc = int
type entry_type =
| Library
| Module
| Definition
| Inductive
| Constructor
| Lemma
| Record
| Projection
| Instance
| Class
| Method
| Variable
| Axiom
| TacticDefinition
| Abbreviation
| Notation
| Section
type index_entry =
| Def of string * entry_type
| Ref of coq_module * string * entry_type
| Mod of coq_module * string
let current_type : entry_type ref = ref Library
let current_library = ref ""
let deftable = Hashtbl.create 97
let reftable = Hashtbl.create 97
let full_ident sp id =
if sp <> "<>" then
if id <> "<>" then
sp ^ "." ^ id
else sp
else if id <> "<>"
then id
else ""
let add_def loc ty sp id =
Hashtbl.add reftable (!current_library, loc) (Def (full_ident sp id, ty));
Hashtbl.add deftable id (Ref (!current_library, full_ident sp id, ty))
let add_ref m loc m' sp id ty =
if Hashtbl.mem reftable (m, loc) then ()
else Hashtbl.add reftable (m, loc) (Ref (m', full_ident sp id, ty));
let idx = if id = "<>" then m' else id in
if Hashtbl.mem deftable idx then ()
else Hashtbl.add deftable idx (Ref (m', full_ident sp id, ty))
let add_mod m loc m' id =
Hashtbl.add reftable (m, loc) (Mod (m', id));
Hashtbl.add deftable m (Mod (m', id))
let find m l = Hashtbl.find reftable (m, l)
let find_string m s = Hashtbl.find deftable s
type stack = string list
let rec string_of_stack st =
match st with
| [] -> ""
| x::[] -> x
| x::tl -> (string_of_stack tl) ^ "." ^ x
let empty_stack = []
let module_stack = ref empty_stack
let section_stack = ref empty_stack
let init_stack () =
module_stack := empty_stack; section_stack := empty_stack
let push st p = st := p::!st
let pop st =
match !st with
| [] -> ()
| _::tl -> st := tl
let head st =
match st with
| [] -> ""
| x::_ -> x
let begin_module m = push module_stack m
let begin_section s = push section_stack s
let end_block id =
if ((String.compare (head !module_stack) id ) == 0) then
pop module_stack
else if ((String.compare (head !section_stack) id) == 0) then
pop section_stack
else
()
let make_fullid id =
let path = string_of_stack !module_stack in
if String.length path > 0 then
path ^ "." ^ id
else
id
Coq modules
let split_sp s =
try
let i = String.rindex s '.' in
String.sub s 0 i, String.sub s (i + 1) (String.length s - i - 1)
with
Not_found -> "", s
let modules = Hashtbl.create 97
let local_modules = Hashtbl.create 97
let add_module m =
let _,id = split_sp m in
Hashtbl.add modules id m;
Hashtbl.add local_modules m ()
type module_kind = Local | External of string | Unknown
let external_libraries = ref []
let add_external_library logicalpath url =
external_libraries := (logicalpath,url) :: !external_libraries
let find_external_library logicalpath =
let rec aux = function
| [] -> raise Not_found
| (l,u)::rest ->
if String.length logicalpath > String.length l &
String.sub logicalpath 0 (String.length l + 1) = l ^"."
then u
else aux rest
in aux !external_libraries
let init_coqlib_library () = add_external_library "Coq" !coqlib
let find_module m =
if Hashtbl.mem local_modules m then
Local
else
try External (Filename.concat (find_external_library m) m)
with Not_found -> Unknown
type 'a index = {
idx_name : string;
idx_entries : (char * (string * 'a) list) list;
idx_size : int }
let map f i =
{ i with idx_entries =
List.map
(fun (c,l) -> (c, List.map (fun (s,x) -> (s,f s x)) l))
i.idx_entries }
let compare_entries (s1,_) (s2,_) = Alpha.compare_string s1 s2
let sort_entries el =
let t = Hashtbl.create 97 in
List.iter
(fun c -> Hashtbl.add t c [])
['A'; 'B'; 'C'; 'D'; 'E'; 'F'; 'G'; 'H'; 'I'; 'J'; 'K'; 'L'; 'M'; 'N';
'O'; 'P'; 'Q'; 'R'; 'S'; 'T'; 'U'; 'V'; 'W'; 'X'; 'Y'; 'Z'; '_'; '*'];
List.iter
(fun ((s,_) as e) ->
let c = Alpha.norm_char s.[0] in
let c,l =
try c,Hashtbl.find t c with Not_found -> '*',Hashtbl.find t '*' in
Hashtbl.replace t c (e :: l))
el;
let res = ref [] in
Hashtbl.iter (fun c l -> res := (c, List.sort compare_entries l) :: !res) t;
List.sort (fun (c1,_) (c2,_) -> Alpha.compare_char c1 c2) !res
let display_letter c = if c = '*' then "other" else String.make 1 c
let index_size = List.fold_left (fun s (_,l) -> s + List.length l) 0
let hashtbl_elements h = Hashtbl.fold (fun x y l -> (x,y)::l) h []
let type_name = function
| Library ->
let ln = !lib_name in
if ln <> "" then String.lowercase ln else "library"
| Module -> "module"
| Definition -> "definition"
| Inductive -> "inductive"
| Constructor -> "constructor"
| Lemma -> "lemma"
| Record -> "record"
| Projection -> "projection"
| Instance -> "instance"
| Class -> "class"
| Method -> "method"
| Variable -> "variable"
| Axiom -> "axiom"
| TacticDefinition -> "tactic"
| Abbreviation -> "abbreviation"
| Notation -> "notation"
| Section -> "section"
let prepare_entry s = function
| Notation ->
let err () = eprintf "Invalid notation in globalization file\n"; exit 1 in
let h = try String.index_from s 0 ':' with _ -> err () in
let i = try String.index_from s (h+1) ':' with _ -> err () in
let sc = String.sub s (h+1) (i-h-1) in
let ntn = String.make (String.length s - i) ' ' in
let k = ref 0 in
let j = ref (i+1) in
let quoted = ref false in
let l = String.length s - 1 in
while !j <= l do
if not !quoted then begin
(match s.[!j] with
| '_' -> ntn.[!k] <- ' '; incr k
| 'x' -> ntn.[!k] <- '_'; incr k
| '\'' -> quoted := true
| _ -> assert false)
end
else
if s.[!j] = '\'' then begin
if (!j = l || s.[!j+1] <> '\'') then quoted := false
else (ntn.[!k] <- s.[!j]; incr k; incr j)
end else begin
ntn.[!k] <- s.[!j];
incr k
end;
incr j
done;
let ntn = String.sub ntn 0 !k in
if sc = "" then ntn else ntn ^ " (" ^ sc ^ ")"
| _ ->
s
let all_entries () =
let gl = ref [] in
let add_g s m t = gl := (s,(m,t)) :: !gl in
let bt = Hashtbl.create 11 in
let add_bt t s m =
let l = try Hashtbl.find bt t with Not_found -> [] in
Hashtbl.replace bt t ((s,m) :: l)
in
let classify (m,_) e = match e with
| Def (s,t) -> add_g s m t; add_bt t s m
| Ref _ | Mod _ -> ()
in
Hashtbl.iter classify reftable;
Hashtbl.iter (fun id m -> add_g id m Library; add_bt Library id m) modules;
{ idx_name = "global";
idx_entries = sort_entries !gl;
idx_size = List.length !gl },
Hashtbl.fold (fun t e l -> (t, { idx_name = type_name t;
idx_entries = sort_entries e;
idx_size = List.length e }) :: l) bt []
let type_of_string = function
| "def" | "coe" | "subclass" | "canonstruc" | "fix" | "cofix"
| "ex" | "scheme" -> Definition
| "prf" | "thm" -> Lemma
| "ind" | "coind" -> Inductive
| "constr" -> Constructor
| "rec" | "corec" -> Record
| "proj" -> Projection
| "class" -> Class
| "meth" -> Method
| "inst" -> Instance
| "var" -> Variable
| "defax" | "prfax" | "ax" -> Axiom
| "syndef" -> Abbreviation
| "not" -> Notation
| "lib" -> Library
| "mod" | "modtype" -> Module
| "tac" -> TacticDefinition
| "sec" -> Section
| s -> raise (Invalid_argument ("type_of_string:" ^ s))
let read_glob f =
let c = open_in f in
let cur_mod = ref "" in
try
while true do
let s = input_line c in
let n = String.length s in
if n > 0 then begin
match s.[0] with
| 'F' ->
cur_mod := String.sub s 1 (n - 1);
current_library := !cur_mod
| 'R' ->
(try
Scanf.sscanf s "R%d:%d %s %s %s %s"
(fun loc1 loc2 lib_dp sp id ty ->
for loc=loc1 to loc2 do
add_ref !cur_mod loc lib_dp sp id (type_of_string ty)
done)
with _ ->
try
Scanf.sscanf s "R%d %s %s %s %s"
(fun loc lib_dp sp id ty ->
add_ref !cur_mod loc lib_dp sp id (type_of_string ty))
with _ -> ())
| _ ->
try Scanf.sscanf s "%s %d %s %s"
(fun ty loc sp id -> add_def loc (type_of_string ty) sp id)
with Scanf.Scan_failure _ -> ()
end
done; assert false
with End_of_file ->
close_in c
|
c50438035d15cfde2a29b60a5f280c8f324887bbdfa6c79b7592a34746c32ab0 | tschady/advent-of-code | d13_test.clj | (ns aoc.2022.d13-test
(:require
[aoc.2022.d13 :as sut]
[clojure.test :refer :all]))
(def ex "[1,1,3,1,1]
[1,1,5,1,1]
[[1],[2,3,4]]
[[1],4]
[9]
[[8,7,6]]
[[4,4],4,4]
[[4,4],4,4,4]
[7,7,7,7]
[7,7,7]
[]
[3]
[[[]]]
[[]]
[1,[2,[3,[4,[5,6,7]]]],8,9]
[1,[2,[3,[4,[5,6,0]]]],8,9]")
(deftest challenges
(is (= 6420 (sut/part-1 sut/input)))
(is (= 22000 (sut/part-2 sut/input))))
| null | https://raw.githubusercontent.com/tschady/advent-of-code/53efcafb3099d2cddf953a07606179c756abf547/test/aoc/2022/d13_test.clj | clojure | (ns aoc.2022.d13-test
(:require
[aoc.2022.d13 :as sut]
[clojure.test :refer :all]))
(def ex "[1,1,3,1,1]
[1,1,5,1,1]
[[1],[2,3,4]]
[[1],4]
[9]
[[8,7,6]]
[[4,4],4,4]
[[4,4],4,4,4]
[7,7,7,7]
[7,7,7]
[]
[3]
[[[]]]
[[]]
[1,[2,[3,[4,[5,6,7]]]],8,9]
[1,[2,[3,[4,[5,6,0]]]],8,9]")
(deftest challenges
(is (= 6420 (sut/part-1 sut/input)))
(is (= 22000 (sut/part-2 sut/input))))
|
|
e3002d1f5cac82cf17a03cfd085cf5ffeb63c1c9c6474d4ce29a328e97ccb1c8 | jameshaydon/lawvere | Expr.hs | module Lawvere.Expr where
import Control.Lens
import Control.Monad.Combinators.Expr
import Data.Generics.Labels ()
import Data.List (foldr1)
import Lawvere.Core
import Lawvere.Disp
import Lawvere.Parse
import Lawvere.Scalar
import Prettyprinter
import Protolude hiding (many, try)
import Text.Megaparsec
import qualified Text.Megaparsec.Char as Char
import qualified Text.Megaparsec.Char.Lexer as L
data PrimFn = PrimIdentity | PrimApp | PrimIncr | PrimAbs | PrimShow | PrimConcat
deriving stock (Eq, Ord, Show, Bounded, Enum)
instance Disp PrimFn where
disp = pretty . map toLower . drop 4 . show
data Prim = Pfn PrimFn | PrimOp BinOp
deriving stock (Eq, Ord, Show)
instance Fin Prim where
enumerate = (Pfn <$> enumerate) ++ (PrimOp <$> enumerate)
instance Disp Prim where
disp = \case
Pfn p -> disp p
PrimOp o -> case o of
NumOp no -> case no of
OpPlus -> "plus"
OpMinus -> "minus"
OpTimes -> "mult"
CompOp co -> case co of
OpEq -> "equal"
OpLt -> "less_than"
OpLte -> "less_than_equal"
OpGt -> "greater_than"
OpGte -> "greater_than_equal"
instance Parsed Prim where
parsed = choice [p <$ try (chunk (render p) >> notFollowedBy (satisfy nonFirstIdentChar)) | p <- enumerate]
data ComponentDecorator = Eff | Pure
deriving stock (Show, Eq)
data ConeComponent = ConeComponent ComponentDecorator Label
deriving stock (Show, Eq)
purPos :: Int -> ConeComponent
purPos = ConeComponent Pure . LPos
purNam :: LcIdent -> ConeComponent
purNam = ConeComponent Pure . LNam
instance Parsed ConeComponent where
parsed = do
eff_ <- optional (single '!')
ConeComponent (if isJust eff_ then Eff else Pure) <$> parsed
instance Disp ConeComponent where
disp (ConeComponent Pure lab) = disp lab
disp (ConeComponent Eff lab) = "!" <> disp lab
componentLabel :: ConeComponent -> Label
componentLabel (ConeComponent _ lab) = lab
data ISPart = ISRaw Text | ISExpr Expr
deriving stock (Show, Eq, Generic)
data NumOp = OpPlus | OpMinus | OpTimes
deriving stock (Eq, Ord, Show, Bounded, Enum)
instance Disp NumOp where
disp = \case
OpPlus -> "+"
OpMinus -> "-"
OpTimes -> "*"
evNumOp :: (Num a) => NumOp -> a -> a -> a
evNumOp OpPlus = (+)
evNumOp OpMinus = (-)
evNumOp OpTimes = (*)
data CompOp = OpEq | OpLt | OpLte | OpGt | OpGte
deriving stock (Eq, Ord, Show, Bounded, Enum)
instance Disp CompOp where
disp = \case
OpEq -> "=="
OpLt -> "<"
OpLte -> "<="
OpGt -> ">"
OpGte -> ">="
data BinOp = NumOp NumOp | CompOp CompOp
deriving stock (Eq, Ord, Show)
instance Fin BinOp where
enumerate = (NumOp <$> enumerate) ++ (CompOp <$> enumerate)
instance Disp BinOp where
disp (NumOp o) = disp o
disp (CompOp o) = disp o
binOp :: (Sca -> p) -> (Bool -> p) -> BinOp -> Sca -> Sca -> p
binOp sca _ (NumOp o) (Int x) (Int y) = sca (Int (evNumOp o x y))
binOp sca _ (NumOp o) (Float x) (Float y) = sca (Float (evNumOp o x y))
binOp _ tf (CompOp o) (Int x) (Int y) = tf $ compa o x y
binOp _ tf (CompOp o) (Float x) (Float y) = tf $ compa o x y
binOp _ _ _ _ _ = panic "bad binop"
compa :: (Ord a) => CompOp -> a -> a -> Bool
compa OpEq = (==)
compa OpLt = (<)
compa OpLte = (<=)
compa OpGt = (>)
compa OpGte = (>=)
data Expr
= EId
| BinComp Expr Expr
| Cone [(ConeComponent, Expr)]
| ELim [(Label, Expr)]
| Tuple [Expr]
| CoCone [(Label, Expr)]
| ECoLim [(Label, Expr)]
| InterpolatedString [ISPart]
| Lit Sca
| Proj Label
| Inj Label
| Comp [Expr]
| Top LcIdent
| Distr Label
| EConst Expr
| EPrim Prim
| EFunApp LcIdent Expr
| -- | Curry LcIdent Expr
Object UcIdent
| CanonicalInj Expr
| Side LcIdent Expr
| SidePrep Label
| SideUnprep Label
| BinOp BinOp Expr Expr
| SumInjLabelVar LcIdent
| SumUniCoconeVar LcIdent
| ESketchInterp SketchInterp
| InitInterp UcIdent Expr
| FromInit LcIdent UcIdent
| Curry Label Expr
| UnCurry Label Expr
| Fix Label Expr
| EApp Expr Expr
deriving stock (Show, Eq)
data SketchInterp = SketchInterp
{ sketchName :: UcIdent,
obs :: [(UcIdent, Expr)],
ars :: [(LcIdent, Expr)]
}
deriving stock (Eq, Show, Generic)
instance Disp SketchInterp where
disp SketchInterp {..} =
braces . vsep . punctuate comma $
(("ob" <+>) . dispMapping <$> obs)
++ (("ar" <+>) . dispMapping <$> ars)
where
dispMapping (x, e) = disp x <+> "|->" <+> disp e
instance Parsed SketchInterp where
parsed = do
kwSketchInterp
sketchName <- lexeme parsed
mappings <- pCommaSep '{' '}' pMapping
let (obs, ars) = partitionEithers mappings
pure SketchInterp {..}
where
pMapping = (Left <$> (kwOb *> pMapsto)) <|> (Right <$> (kwAr *> pMapsto))
pMapsto :: (Parsed a, Parsed b) => Parser (a, b)
pMapsto = (,) <$> lexeme parsed <*> (symbol "|->" *> parsed)
instance Plated Expr where
plate _ EId = pure EId
plate f (BinComp a b) = BinComp <$> f a <*> f b
plate f (Cone cone) = Cone <$> (each . _2) f cone
plate f (ELim diag) = ELim <$> (each . _2) f diag
plate f (Tuple as) = Tuple <$> each f as
plate f (CoCone cocone) = CoCone <$> (each . _2) f cocone
plate f (ECoLim diag) = ECoLim <$> (each . _2) f diag
plate f (InterpolatedString fs) = InterpolatedString <$> (each . #_ISExpr) f fs
plate _ l@(Lit _) = pure l
plate _ p@(Proj _) = pure p
plate _ i@(Inj _) = pure i
plate f (Comp fs) = Comp <$> each f fs
plate _ t@(Top _) = pure t
plate _ d@(Distr _) = pure d
plate f (EConst e) = f e
plate _ p@(EPrim _) = pure p
plate f (EFunApp name e) = EFunApp name <$> f e
plate _ o@(Object _) = pure o
plate f (CanonicalInj e) = CanonicalInj <$> f e
plate f (Side lab e) = Side lab <$> f e
plate f (BinOp o x y) = BinOp o <$> f x <*> f y
plate _ lv@(SumInjLabelVar _) = pure lv
plate _ cv@(SumUniCoconeVar _) = pure cv
plate _ sp@(SidePrep _) = pure sp
plate _ su@(SideUnprep _) = pure su
plate f (ESketchInterp (SketchInterp name obs ars)) = ESketchInterp <$> (SketchInterp name <$> (each . _2) f obs <*> (each . _2) f ars)
plate f (InitInterp sk e) = InitInterp sk <$> f e
plate _ fi@FromInit {} = pure fi
plate f (Curry lbl e) = Curry lbl <$> f e
plate f (UnCurry lbl e) = UnCurry lbl <$> f e
plate f (Fix lbl e) = Fix lbl <$> f e
plate f (EApp g e) = EApp <$> f g <*> f e
-- Tuples are just shorthand for records.
tupleToCone :: [Expr] -> Expr
tupleToCone fs = Cone [(ConeComponent Pure (LPos i), f) | (i, f) <- zip [1 :: Int ..] fs]
kwCall :: Parsed a => Parser () -> Parser a
kwCall kw = kw *> wrapped '(' ')' parsed
pApp :: Parser Expr
pApp = do
f <- parsed
e <- wrapped '(' ')' parsed
pure (EFunApp f e)
pCurry : :
-- pCurry = do
-- kwCurry
-- lab <- lexeme parsed
lab < $ > parsed
pSide :: Parser Expr
pSide = do
lab <- single '!' *> lexeme parsed
e <- wrapped '(' ')' parsed
pure (Side lab e)
pInterpolated :: Parser Expr
pInterpolated = Char.char '"' *> (InterpolatedString <$> manyTill (pE <|> try pRaw) (Char.char '"'))
where
pRaw = ISRaw . toS <$> escapedString
pE = ISExpr <$> (Char.char '{' *> parsed <* Char.char '}')
escapedString = catMaybes <$> someTill ch (lookAhead (Char.char '"' <|> Char.char '{'))
ch =
choice
[ Just <$> L.charLiteral,
Nothing <$ Char.string "\\&",
Just '{' <$ Char.string "\\{",
Just '}' <$ Char.string "\\}"
]
pTupledOrParensed :: Parser Expr
pTupledOrParensed = do
xs <- pTuple parsed
pure $ case xs of
[x] -> x
_ -> Tuple xs
pList :: Parser Expr
pList = do
es <- between (symbol "#(") (single ')') (sepBy (lexeme parsed) (lexChar ','))
pure $
foldr
( \hd tl ->
Comp
[ Cone
[ (ConeComponent Pure (LNam "head"), hd),
(ConeComponent Pure (LNam "tail"), tl)
],
Inj (LNam "cons")
]
)
(Inj (LNam "empty"))
es
pIfThenElse :: Parser Expr
pIfThenElse = do
kwIf
cond <- parsed
kwThen
tt <- parsed
kwElse
ff <- parsed
pure $
Comp
[ Cone
[ (ConeComponent Pure (LNam "v"), Comp []),
(ConeComponent Pure (LNam "case"), cond)
],
Distr (LNam "case"),
CoCone
[ (LNam "true", Comp [Proj (LNam "v"), tt]),
(LNam "false", Comp [Proj (LNam "v"), ff])
]
]
pCanInj :: Parser Expr
pCanInj = CanonicalInj <$> (single '~' *> pAtom)
pProdOp :: Parser () -> (Label -> Expr -> Expr) -> Parser Expr
pProdOp kw combo = do
kw
_ <- single '.'
lbl <- lexeme parsed
e <- wrapped '{' '}' parsed
pure (combo lbl e)
pAtom :: Parser Expr
pAtom =
choice
[ pSide,
pProdOp kwCurry Curry,
pProdOp kwUncurry UnCurry,
pProdOp kwFix Fix,
pIfThenElse,
InitInterp <$> kwCall kwInitInterp <*> wrapped '(' ')' parsed,
FromInit <$> kwCall kwFromInit <*> wrapped '(' ')' parsed,
try pApp,
pInterpolated,
EPrim <$> parsed,
Lit <$> parsed,
Proj <$> ("." *> parsed),
try (Inj <$> (parsed <* ".")), -- we need to look ahead for the dot
Top <$> parsed,
pCanInj,
SumUniCoconeVar <$> kwCall kwSumUni,
pList,
pTupledOrParensed,
TODO : try to get rid of the ' try ' by committing on the first
-- label/seperator pair encountered.
Cone <$> try (pBracedFields '=' conePunner),
ELim <$> pBracedFields ':' Nothing,
TODO : try to get rid of the ' try ' by committing on the first
-- label/seperator pair encountered.
CoCone <$> try (pBracketedFields '=' coconePunner),
ECoLim <$> pBracketedFields ':' Nothing,
Distr <$> (single '@' *> parsed),
EConst <$> kwCall kwConst,
Object <$> parsed,
ESketchInterp <$> parsed
]
where
conePunner :: Maybe (ConeComponent -> Expr)
conePunner = Just $ \case
ConeComponent Pure lab -> Proj lab
ConeComponent Eff lab -> CanonicalInj (Proj lab)
coconePunner = Just Inj
operatorTable :: [[Operator Parser Expr]]
operatorTable =
[ [numOp OpTimes "*"],
[numOp OpMinus "-", numOp OpPlus "+"],
[compOp OpEq "==", compOp OpLte "<=", compOp OpLt "<", compOp OpGte ">=", compOp OpGt ">"],
[InfixR (EApp <$ lexChar '$')]
]
where
infixR o t = InfixR (BinOp o <$ symbol t)
numOp = infixR . NumOp
compOp = infixR . CompOp
pComposition :: Parser Expr
pComposition = do
xs <- many (lexeme pAtom)
pure $ case xs of
[] -> EId
[x] -> x
[x, y] -> BinComp x y
_ -> Comp xs
instance Parsed Expr where
parsed = makeExprParser pComposition operatorTable
instance Disp Expr where
disp = \case
EId -> ""
BinComp f g -> disp f <+> disp g
Object o -> disp o
CanonicalInj e -> "i" <> parens (disp e)
EFunApp f e -> disp f <> parens (disp e)
EPrim p -> disp p
EConst e -> "const" <> parens (disp e)
Lit s -> disp s
Proj p -> "." <> disp p
Inj i -> disp i <> "."
Distr l -> "@" <> disp l
Top t -> disp t
Comp fs -> align $ sep (disp <$> fs)
Cone ps -> commaBrace '=' ps
ELim ps -> commaBrace ':' ps
CoCone ps -> commaBracket '=' ps
ECoLim ps -> commaBracket ':' ps
Tuple ps -> dispTup ps
Side lab f -> "!" <> disp lab <> braces (disp f)
InterpolatedString ps -> dquotes (foldMap go ps)
where
go (ISRaw t) = pretty t
go (ISExpr e) = braces (disp e)
BinOp o x y -> parens (disp x <+> disp o <+> disp y)
Curry lbl e -> "curry." <> disp lbl <> parens (disp e)
UnCurry lbl e -> "uncurry." <> disp lbl <> parens (disp e)
Fix lbl e -> "fix." <> disp lbl <> parens (disp e)
_ -> "TODO"
desugar :: Expr -> Expr
desugar = \case
Comp [] -> EId
Comp [x] -> x
Comp [x, y] -> BinComp x y
Comp xs -> foldr1 BinComp xs
Tuple fs -> tupleToCone fs
BinOp o f g -> binPrim (PrimOp o) f g
InterpolatedString ps -> foldr go (Lit (Str "")) ps
where
go :: ISPart -> Expr -> Expr
go part e =
binPrim
(Pfn PrimConcat)
( case part of
ISRaw t -> Lit (Str t)
ISExpr f -> f
)
e
EApp e e' -> binPrim (Pfn PrimApp) e e'
e -> e
binPrim :: Prim -> Expr -> Expr -> Expr
binPrim = binApp . EPrim
binApp :: Expr -> Expr -> Expr -> Expr
binApp f x y = Comp [Cone [(purPos 1, x), (purPos 2, y)], f]
| null | https://raw.githubusercontent.com/jameshaydon/lawvere/2d7d12347658b502b317fece26d97984d280226c/src/Lawvere/Expr.hs | haskell | | Curry LcIdent Expr
Tuples are just shorthand for records.
pCurry = do
kwCurry
lab <- lexeme parsed
we need to look ahead for the dot
label/seperator pair encountered.
label/seperator pair encountered. | module Lawvere.Expr where
import Control.Lens
import Control.Monad.Combinators.Expr
import Data.Generics.Labels ()
import Data.List (foldr1)
import Lawvere.Core
import Lawvere.Disp
import Lawvere.Parse
import Lawvere.Scalar
import Prettyprinter
import Protolude hiding (many, try)
import Text.Megaparsec
import qualified Text.Megaparsec.Char as Char
import qualified Text.Megaparsec.Char.Lexer as L
data PrimFn = PrimIdentity | PrimApp | PrimIncr | PrimAbs | PrimShow | PrimConcat
deriving stock (Eq, Ord, Show, Bounded, Enum)
instance Disp PrimFn where
disp = pretty . map toLower . drop 4 . show
data Prim = Pfn PrimFn | PrimOp BinOp
deriving stock (Eq, Ord, Show)
instance Fin Prim where
enumerate = (Pfn <$> enumerate) ++ (PrimOp <$> enumerate)
instance Disp Prim where
disp = \case
Pfn p -> disp p
PrimOp o -> case o of
NumOp no -> case no of
OpPlus -> "plus"
OpMinus -> "minus"
OpTimes -> "mult"
CompOp co -> case co of
OpEq -> "equal"
OpLt -> "less_than"
OpLte -> "less_than_equal"
OpGt -> "greater_than"
OpGte -> "greater_than_equal"
instance Parsed Prim where
parsed = choice [p <$ try (chunk (render p) >> notFollowedBy (satisfy nonFirstIdentChar)) | p <- enumerate]
data ComponentDecorator = Eff | Pure
deriving stock (Show, Eq)
data ConeComponent = ConeComponent ComponentDecorator Label
deriving stock (Show, Eq)
purPos :: Int -> ConeComponent
purPos = ConeComponent Pure . LPos
purNam :: LcIdent -> ConeComponent
purNam = ConeComponent Pure . LNam
instance Parsed ConeComponent where
parsed = do
eff_ <- optional (single '!')
ConeComponent (if isJust eff_ then Eff else Pure) <$> parsed
instance Disp ConeComponent where
disp (ConeComponent Pure lab) = disp lab
disp (ConeComponent Eff lab) = "!" <> disp lab
componentLabel :: ConeComponent -> Label
componentLabel (ConeComponent _ lab) = lab
data ISPart = ISRaw Text | ISExpr Expr
deriving stock (Show, Eq, Generic)
data NumOp = OpPlus | OpMinus | OpTimes
deriving stock (Eq, Ord, Show, Bounded, Enum)
instance Disp NumOp where
disp = \case
OpPlus -> "+"
OpMinus -> "-"
OpTimes -> "*"
evNumOp :: (Num a) => NumOp -> a -> a -> a
evNumOp OpPlus = (+)
evNumOp OpMinus = (-)
evNumOp OpTimes = (*)
data CompOp = OpEq | OpLt | OpLte | OpGt | OpGte
deriving stock (Eq, Ord, Show, Bounded, Enum)
instance Disp CompOp where
disp = \case
OpEq -> "=="
OpLt -> "<"
OpLte -> "<="
OpGt -> ">"
OpGte -> ">="
data BinOp = NumOp NumOp | CompOp CompOp
deriving stock (Eq, Ord, Show)
instance Fin BinOp where
enumerate = (NumOp <$> enumerate) ++ (CompOp <$> enumerate)
instance Disp BinOp where
disp (NumOp o) = disp o
disp (CompOp o) = disp o
binOp :: (Sca -> p) -> (Bool -> p) -> BinOp -> Sca -> Sca -> p
binOp sca _ (NumOp o) (Int x) (Int y) = sca (Int (evNumOp o x y))
binOp sca _ (NumOp o) (Float x) (Float y) = sca (Float (evNumOp o x y))
binOp _ tf (CompOp o) (Int x) (Int y) = tf $ compa o x y
binOp _ tf (CompOp o) (Float x) (Float y) = tf $ compa o x y
binOp _ _ _ _ _ = panic "bad binop"
compa :: (Ord a) => CompOp -> a -> a -> Bool
compa OpEq = (==)
compa OpLt = (<)
compa OpLte = (<=)
compa OpGt = (>)
compa OpGte = (>=)
data Expr
= EId
| BinComp Expr Expr
| Cone [(ConeComponent, Expr)]
| ELim [(Label, Expr)]
| Tuple [Expr]
| CoCone [(Label, Expr)]
| ECoLim [(Label, Expr)]
| InterpolatedString [ISPart]
| Lit Sca
| Proj Label
| Inj Label
| Comp [Expr]
| Top LcIdent
| Distr Label
| EConst Expr
| EPrim Prim
| EFunApp LcIdent Expr
Object UcIdent
| CanonicalInj Expr
| Side LcIdent Expr
| SidePrep Label
| SideUnprep Label
| BinOp BinOp Expr Expr
| SumInjLabelVar LcIdent
| SumUniCoconeVar LcIdent
| ESketchInterp SketchInterp
| InitInterp UcIdent Expr
| FromInit LcIdent UcIdent
| Curry Label Expr
| UnCurry Label Expr
| Fix Label Expr
| EApp Expr Expr
deriving stock (Show, Eq)
data SketchInterp = SketchInterp
{ sketchName :: UcIdent,
obs :: [(UcIdent, Expr)],
ars :: [(LcIdent, Expr)]
}
deriving stock (Eq, Show, Generic)
instance Disp SketchInterp where
disp SketchInterp {..} =
braces . vsep . punctuate comma $
(("ob" <+>) . dispMapping <$> obs)
++ (("ar" <+>) . dispMapping <$> ars)
where
dispMapping (x, e) = disp x <+> "|->" <+> disp e
instance Parsed SketchInterp where
parsed = do
kwSketchInterp
sketchName <- lexeme parsed
mappings <- pCommaSep '{' '}' pMapping
let (obs, ars) = partitionEithers mappings
pure SketchInterp {..}
where
pMapping = (Left <$> (kwOb *> pMapsto)) <|> (Right <$> (kwAr *> pMapsto))
pMapsto :: (Parsed a, Parsed b) => Parser (a, b)
pMapsto = (,) <$> lexeme parsed <*> (symbol "|->" *> parsed)
instance Plated Expr where
plate _ EId = pure EId
plate f (BinComp a b) = BinComp <$> f a <*> f b
plate f (Cone cone) = Cone <$> (each . _2) f cone
plate f (ELim diag) = ELim <$> (each . _2) f diag
plate f (Tuple as) = Tuple <$> each f as
plate f (CoCone cocone) = CoCone <$> (each . _2) f cocone
plate f (ECoLim diag) = ECoLim <$> (each . _2) f diag
plate f (InterpolatedString fs) = InterpolatedString <$> (each . #_ISExpr) f fs
plate _ l@(Lit _) = pure l
plate _ p@(Proj _) = pure p
plate _ i@(Inj _) = pure i
plate f (Comp fs) = Comp <$> each f fs
plate _ t@(Top _) = pure t
plate _ d@(Distr _) = pure d
plate f (EConst e) = f e
plate _ p@(EPrim _) = pure p
plate f (EFunApp name e) = EFunApp name <$> f e
plate _ o@(Object _) = pure o
plate f (CanonicalInj e) = CanonicalInj <$> f e
plate f (Side lab e) = Side lab <$> f e
plate f (BinOp o x y) = BinOp o <$> f x <*> f y
plate _ lv@(SumInjLabelVar _) = pure lv
plate _ cv@(SumUniCoconeVar _) = pure cv
plate _ sp@(SidePrep _) = pure sp
plate _ su@(SideUnprep _) = pure su
plate f (ESketchInterp (SketchInterp name obs ars)) = ESketchInterp <$> (SketchInterp name <$> (each . _2) f obs <*> (each . _2) f ars)
plate f (InitInterp sk e) = InitInterp sk <$> f e
plate _ fi@FromInit {} = pure fi
plate f (Curry lbl e) = Curry lbl <$> f e
plate f (UnCurry lbl e) = UnCurry lbl <$> f e
plate f (Fix lbl e) = Fix lbl <$> f e
plate f (EApp g e) = EApp <$> f g <*> f e
tupleToCone :: [Expr] -> Expr
tupleToCone fs = Cone [(ConeComponent Pure (LPos i), f) | (i, f) <- zip [1 :: Int ..] fs]
kwCall :: Parsed a => Parser () -> Parser a
kwCall kw = kw *> wrapped '(' ')' parsed
pApp :: Parser Expr
pApp = do
f <- parsed
e <- wrapped '(' ')' parsed
pure (EFunApp f e)
pCurry : :
lab < $ > parsed
pSide :: Parser Expr
pSide = do
lab <- single '!' *> lexeme parsed
e <- wrapped '(' ')' parsed
pure (Side lab e)
pInterpolated :: Parser Expr
pInterpolated = Char.char '"' *> (InterpolatedString <$> manyTill (pE <|> try pRaw) (Char.char '"'))
where
pRaw = ISRaw . toS <$> escapedString
pE = ISExpr <$> (Char.char '{' *> parsed <* Char.char '}')
escapedString = catMaybes <$> someTill ch (lookAhead (Char.char '"' <|> Char.char '{'))
ch =
choice
[ Just <$> L.charLiteral,
Nothing <$ Char.string "\\&",
Just '{' <$ Char.string "\\{",
Just '}' <$ Char.string "\\}"
]
pTupledOrParensed :: Parser Expr
pTupledOrParensed = do
xs <- pTuple parsed
pure $ case xs of
[x] -> x
_ -> Tuple xs
pList :: Parser Expr
pList = do
es <- between (symbol "#(") (single ')') (sepBy (lexeme parsed) (lexChar ','))
pure $
foldr
( \hd tl ->
Comp
[ Cone
[ (ConeComponent Pure (LNam "head"), hd),
(ConeComponent Pure (LNam "tail"), tl)
],
Inj (LNam "cons")
]
)
(Inj (LNam "empty"))
es
pIfThenElse :: Parser Expr
pIfThenElse = do
kwIf
cond <- parsed
kwThen
tt <- parsed
kwElse
ff <- parsed
pure $
Comp
[ Cone
[ (ConeComponent Pure (LNam "v"), Comp []),
(ConeComponent Pure (LNam "case"), cond)
],
Distr (LNam "case"),
CoCone
[ (LNam "true", Comp [Proj (LNam "v"), tt]),
(LNam "false", Comp [Proj (LNam "v"), ff])
]
]
pCanInj :: Parser Expr
pCanInj = CanonicalInj <$> (single '~' *> pAtom)
pProdOp :: Parser () -> (Label -> Expr -> Expr) -> Parser Expr
pProdOp kw combo = do
kw
_ <- single '.'
lbl <- lexeme parsed
e <- wrapped '{' '}' parsed
pure (combo lbl e)
pAtom :: Parser Expr
pAtom =
choice
[ pSide,
pProdOp kwCurry Curry,
pProdOp kwUncurry UnCurry,
pProdOp kwFix Fix,
pIfThenElse,
InitInterp <$> kwCall kwInitInterp <*> wrapped '(' ')' parsed,
FromInit <$> kwCall kwFromInit <*> wrapped '(' ')' parsed,
try pApp,
pInterpolated,
EPrim <$> parsed,
Lit <$> parsed,
Proj <$> ("." *> parsed),
Top <$> parsed,
pCanInj,
SumUniCoconeVar <$> kwCall kwSumUni,
pList,
pTupledOrParensed,
TODO : try to get rid of the ' try ' by committing on the first
Cone <$> try (pBracedFields '=' conePunner),
ELim <$> pBracedFields ':' Nothing,
TODO : try to get rid of the ' try ' by committing on the first
CoCone <$> try (pBracketedFields '=' coconePunner),
ECoLim <$> pBracketedFields ':' Nothing,
Distr <$> (single '@' *> parsed),
EConst <$> kwCall kwConst,
Object <$> parsed,
ESketchInterp <$> parsed
]
where
conePunner :: Maybe (ConeComponent -> Expr)
conePunner = Just $ \case
ConeComponent Pure lab -> Proj lab
ConeComponent Eff lab -> CanonicalInj (Proj lab)
coconePunner = Just Inj
operatorTable :: [[Operator Parser Expr]]
operatorTable =
[ [numOp OpTimes "*"],
[numOp OpMinus "-", numOp OpPlus "+"],
[compOp OpEq "==", compOp OpLte "<=", compOp OpLt "<", compOp OpGte ">=", compOp OpGt ">"],
[InfixR (EApp <$ lexChar '$')]
]
where
infixR o t = InfixR (BinOp o <$ symbol t)
numOp = infixR . NumOp
compOp = infixR . CompOp
pComposition :: Parser Expr
pComposition = do
xs <- many (lexeme pAtom)
pure $ case xs of
[] -> EId
[x] -> x
[x, y] -> BinComp x y
_ -> Comp xs
instance Parsed Expr where
parsed = makeExprParser pComposition operatorTable
instance Disp Expr where
disp = \case
EId -> ""
BinComp f g -> disp f <+> disp g
Object o -> disp o
CanonicalInj e -> "i" <> parens (disp e)
EFunApp f e -> disp f <> parens (disp e)
EPrim p -> disp p
EConst e -> "const" <> parens (disp e)
Lit s -> disp s
Proj p -> "." <> disp p
Inj i -> disp i <> "."
Distr l -> "@" <> disp l
Top t -> disp t
Comp fs -> align $ sep (disp <$> fs)
Cone ps -> commaBrace '=' ps
ELim ps -> commaBrace ':' ps
CoCone ps -> commaBracket '=' ps
ECoLim ps -> commaBracket ':' ps
Tuple ps -> dispTup ps
Side lab f -> "!" <> disp lab <> braces (disp f)
InterpolatedString ps -> dquotes (foldMap go ps)
where
go (ISRaw t) = pretty t
go (ISExpr e) = braces (disp e)
BinOp o x y -> parens (disp x <+> disp o <+> disp y)
Curry lbl e -> "curry." <> disp lbl <> parens (disp e)
UnCurry lbl e -> "uncurry." <> disp lbl <> parens (disp e)
Fix lbl e -> "fix." <> disp lbl <> parens (disp e)
_ -> "TODO"
desugar :: Expr -> Expr
desugar = \case
Comp [] -> EId
Comp [x] -> x
Comp [x, y] -> BinComp x y
Comp xs -> foldr1 BinComp xs
Tuple fs -> tupleToCone fs
BinOp o f g -> binPrim (PrimOp o) f g
InterpolatedString ps -> foldr go (Lit (Str "")) ps
where
go :: ISPart -> Expr -> Expr
go part e =
binPrim
(Pfn PrimConcat)
( case part of
ISRaw t -> Lit (Str t)
ISExpr f -> f
)
e
EApp e e' -> binPrim (Pfn PrimApp) e e'
e -> e
binPrim :: Prim -> Expr -> Expr -> Expr
binPrim = binApp . EPrim
binApp :: Expr -> Expr -> Expr -> Expr
binApp f x y = Comp [Cone [(purPos 1, x), (purPos 2, y)], f]
|
207a7f3204b7301cfd1f35da1ef8e4c04c34bbd7d8f4f6c0a9672fd1c8878ac0 | janestreet/hardcaml | side.mli | (** Used to specify when an operation should be performed - before or after an event like
a clock edge. *)
type t =
| Before
| After
[@@deriving compare, sexp_of]
| null | https://raw.githubusercontent.com/janestreet/hardcaml/4126f65f39048fef5853ba9b8d766143f678a9e4/src/side.mli | ocaml | * Used to specify when an operation should be performed - before or after an event like
a clock edge. |
type t =
| Before
| After
[@@deriving compare, sexp_of]
|
fa1f615396bb7f773ef3c1fd7ac07e48387863da79c42b63e040e631ce092d38 | larskuhtz/wai-cors | Server.hs | # LANGUAGE LambdaCase #
{-# LANGUAGE OverloadedStrings #-}
# LANGUAGE ScopedTypeVariables #
# LANGUAGE UnicodeSyntax #
-- |
-- Module: Server
Description : Test HTTP server for
Copyright : © 2015 - 2018 < > .
License : MIT
Maintainer : < >
-- Stability: experimental
--
module Server
( main
) where
import Control.Concurrent
import Control.Exception
import Control.Monad
import Network.Socket (withSocketsDo)
import Network.Wai.Middleware.Cors
import qualified Network.HTTP.Types as HTTP
import qualified Network.Wai as WAI
import qualified Network.Wai.Handler.Warp as WARP
import qualified Network.Wai.Handler.WebSockets as WS
import qualified Network.WebSockets as WS
import qualified Data.Text as T
main ∷ IO ()
main = withSocketsDo . WARP.run 8080 $ server
-- -------------------------------------------------------------------------- --
-- Server application
server ∷ WAI.Application
server = cors corsPolicy $ \request →
case WAI.pathInfo request of
"cors":_ → corsapp request
_ → testapp
where
testapp respond = respond $ WAI.responseFile HTTP.status200 [] "index.html" Nothing
corsapp = WS.websocketsOr WS.defaultConnectionOptions wsserver $ \_ respond →
respond $ WAI.responseLBS HTTP.status200 [] "Success"
-- -------------------------------------------------------------------------- --
-- CORS Policy
corsPolicy ∷ WAI.Request → Maybe CorsResourcePolicy
corsPolicy request = case WAI.pathInfo request of
"cors" : "non-simple":_ → Just nonSimplePolicy
"cors" : "simple":_ → Just simpleCorsResourcePolicy
_ → Nothing
-- -------------------------------------------------------------------------- --
Websockets Server
wsserver ∷ WS.ServerApp
wsserver pc = do
c ← WS.acceptRequest pc
forever (go c) `catch` \case
WS.CloseRequest _code _msg → WS.sendClose c ("closed" ∷ T.Text)
e → throwIO e
where
go c = do
msg ← WS.receiveDataMessage c
forkIO $ WS.sendDataMessage c msg
-- -------------------------------------------------------------------------- --
-- Non Simple Policy
-- | Perform the following tests the following with this policy:
--
-- * @Variy: Origin@ header is set on responses
-- * @X-cors-test@ header is accepted
-- * @X-cors-test@ header is exposed on response
-- * @Access-Control-Allow-Origin@ header is set on responses to the request host
-- * @DELETE@ requests are not allowed
-- * @PUT@ requests are allowed
* Requests that do n't include an @Origin@ header result in 400 responses
-- (it's not clear how to test this with a browser client)
--
Note that Chrome sends @Origin : null@ when loaded from a " file:// ... " URL ,
-- PhantomJS sends "file://".
--
nonSimplePolicy ∷ CorsResourcePolicy
nonSimplePolicy = CorsResourcePolicy
{ corsOrigins = Just ([":8080", "null", "file://"], False)
, corsMethods = ["PUT"]
, corsRequestHeaders = ["X-cors-test"]
, corsExposedHeaders = Just ["X-cors-test", "Vary"]
, corsMaxAge = Nothing
, corsVaryOrigin = True
, corsRequireOrigin = True
, corsIgnoreFailures = False
}
| null | https://raw.githubusercontent.com/larskuhtz/wai-cors/7af2f8acff5ddd3557f86d1759c4f6a8ea0ad17b/test/Server.hs | haskell | # LANGUAGE OverloadedStrings #
|
Module: Server
Stability: experimental
-------------------------------------------------------------------------- --
Server application
-------------------------------------------------------------------------- --
CORS Policy
-------------------------------------------------------------------------- --
-------------------------------------------------------------------------- --
Non Simple Policy
| Perform the following tests the following with this policy:
* @Variy: Origin@ header is set on responses
* @X-cors-test@ header is accepted
* @X-cors-test@ header is exposed on response
* @Access-Control-Allow-Origin@ header is set on responses to the request host
* @DELETE@ requests are not allowed
* @PUT@ requests are allowed
(it's not clear how to test this with a browser client)
PhantomJS sends "file://".
| # LANGUAGE LambdaCase #
# LANGUAGE ScopedTypeVariables #
# LANGUAGE UnicodeSyntax #
Description : Test HTTP server for
Copyright : © 2015 - 2018 < > .
License : MIT
Maintainer : < >
module Server
( main
) where
import Control.Concurrent
import Control.Exception
import Control.Monad
import Network.Socket (withSocketsDo)
import Network.Wai.Middleware.Cors
import qualified Network.HTTP.Types as HTTP
import qualified Network.Wai as WAI
import qualified Network.Wai.Handler.Warp as WARP
import qualified Network.Wai.Handler.WebSockets as WS
import qualified Network.WebSockets as WS
import qualified Data.Text as T
main ∷ IO ()
main = withSocketsDo . WARP.run 8080 $ server
server ∷ WAI.Application
server = cors corsPolicy $ \request →
case WAI.pathInfo request of
"cors":_ → corsapp request
_ → testapp
where
testapp respond = respond $ WAI.responseFile HTTP.status200 [] "index.html" Nothing
corsapp = WS.websocketsOr WS.defaultConnectionOptions wsserver $ \_ respond →
respond $ WAI.responseLBS HTTP.status200 [] "Success"
corsPolicy ∷ WAI.Request → Maybe CorsResourcePolicy
corsPolicy request = case WAI.pathInfo request of
"cors" : "non-simple":_ → Just nonSimplePolicy
"cors" : "simple":_ → Just simpleCorsResourcePolicy
_ → Nothing
Websockets Server
wsserver ∷ WS.ServerApp
wsserver pc = do
c ← WS.acceptRequest pc
forever (go c) `catch` \case
WS.CloseRequest _code _msg → WS.sendClose c ("closed" ∷ T.Text)
e → throwIO e
where
go c = do
msg ← WS.receiveDataMessage c
forkIO $ WS.sendDataMessage c msg
* Requests that do n't include an @Origin@ header result in 400 responses
Note that Chrome sends @Origin : null@ when loaded from a " file:// ... " URL ,
nonSimplePolicy ∷ CorsResourcePolicy
nonSimplePolicy = CorsResourcePolicy
{ corsOrigins = Just ([":8080", "null", "file://"], False)
, corsMethods = ["PUT"]
, corsRequestHeaders = ["X-cors-test"]
, corsExposedHeaders = Just ["X-cors-test", "Vary"]
, corsMaxAge = Nothing
, corsVaryOrigin = True
, corsRequireOrigin = True
, corsIgnoreFailures = False
}
|
0534c15743114c83d7a384b253d79283fa68b835b6910d9acc489404bc450614 | B-Lang-org/bsc | IfcBetterInfo.hs | # LANGUAGE CPP #
module IfcBetterInfo(
BetterInfo(..),
extractMethodInfo,
matchMethodName,
noMethodInfo
) where
#if defined(__GLASGOW_HASKELL__) && (__GLASGOW_HASKELL__ >= 804)
import Prelude hiding ((<>))
#endif
import Flags(Flags)
import SymTab
import Id
import Pragma
import PPrint
import IdPrint
import VModInfo
import FStringCompat(mkFString)
import ISyntax
import IConv(iConvT)
-- import Util(traces)
-- module for extracting "better" method argument names and types from the symbol table
-- This information is used by IExpand for populating the VModInfo
-- and for recording the types of external method ports
data BetterInfo = BetterMethodInfo
{ mi_id :: Id, -- method Id
mi_result :: VPort, -- possible rename for method result
mi_ready :: VPort, -- for ready signal
mi_enable :: VPort, -- for enable signal
mi_prefix :: Id, -- default prefix for arguments (which are not found in classic)
mi_args :: [Id], -- for arguments
mi_orig_type :: Maybe IType -- original (unwrapped) field type
}
-- XXX Note that the following are unused
-- XXX (this package needs re-thinking)
| BetterClockInfo
{ ci_id :: Id
}
| BetterResetInfo
{ ri_id :: Id
}
| BetterInoutInfo
{ io_id :: Id
}
-- utilitity comparison function for use in lookup/find
matchMethodName :: Id -> BetterInfo -> Bool
matchMethodName id mn = qualEq id (mi_id mn)
-- creates a basic method remaing
noMethodInfo :: Id -> BetterInfo
noMethodInfo fieldId = BetterMethodInfo {mi_id = fieldId,
mi_result = id_to_vPort fieldId,
mi_ready = id_to_vPort $ mkRdyId fieldId,
mi_enable = id_to_vPort $ mkEnableId fieldId,
mi_prefix = fieldId,
mi_args = [],
mi_orig_type = Nothing
}
instance PPrint BetterInfo where
pPrint d i info = (text "methodNames") <> ppId d (mi_id info) <> equals <> braces
( printMaybe d i "Result:" (mi_result info) <>
printMaybe d i "Ready:" (mi_ready info) <>
printMaybe d i "Enable:" (mi_enable info) <>
text "Prefix:" <> pPrint d i (mi_prefix info) <>
text "Args:" <> pPrint d i (mi_args info) <>
printMaybe d i "Original type:" (mi_orig_type info)
)
printMaybe :: PPrint a => PDetail -> Int -> String -> a -> Doc
printMaybe d i str x = text str <> pPrint d i x
-- this function pulls the method info from an interface
extractMethodInfo :: Flags -> SymTab -> Id -> [BetterInfo]
extractMethodInfo = genBetterInfoFromIfc
genBetterInfoFromIfc :: Flags -> SymTab -> Id -> [BetterInfo]
genBetterInfoFromIfc flags symbolTable ifcId =
-- traces("GBN ifcId: " ++ ppReadable ifcId) $
-- traces("GBN methFields: " ++ ppReadable methFields) $
-- traces("GBN result: " ++ ppReadable props) $
props
where
-- Get method names and associated field infos
methIds = getIfcFieldNames symbolTable ifcId
methFields :: [ (Id,Maybe FieldInfo) ]
methFields = zip methIds $ map (findFieldInfo symbolTable ifcId) methIds
--
-- covert the information to to IfcBetterName
props = map (fieldInfoToBetterInfo flags symbolTable) methFields
fieldInfoToBetterInfo :: Flags -> SymTab -> (Id,Maybe FieldInfo) -> BetterInfo
fieldInfoToBetterInfo flags symTab (fieldId, Nothing) = noMethodInfo fieldId
fieldInfoToBetterInfo flags symTab (fieldId, Just fi) =
BetterMethodInfo {mi_id = fieldId,
mi_result = maybe (id_to_vPort fieldId) (str_to_vPort) mres,
mi_ready = maybe (id_to_vPort $ mkRdyId fieldId) str_to_vPort mrdy,
mi_enable = maybe (id_to_vPort $ mkEnableId fieldId) str_to_vPort men,
mi_prefix = maybe fieldId (setIdBaseString fieldId) mprefix,
mi_args = args,
mi_orig_type = fmap (iConvT flags symTab) (fi_orig_type fi)
}
where prags = fi_pragmas fi
(mprefix,mres,mrdy,men,rawargs,_,_) = getMethodPragmaInfo prags
args = genArgNames mprefix fieldId rawargs
-- Create a list of Ids for method argument names
Used by IExpand thru IfcbetterNames maybe move it here
-- Note that this only uses IPrefixStr and iArgNames, which must be
kept on the FieldInfo in the SymTab
genArgNames :: Maybe String -> Id -> [Id] -> [Id]
genArgNames mprefix fieldId ids = map (addPrefix mprefix fieldId) ids
where addPrefix :: Maybe String -> Id -> Id -> Id
addPrefix Nothing fid aid = mkUSId fid aid
addPrefix (Just "") _ aid = aid
addPrefix (Just pstr) _ aid = mkIdPre (mkFString $ pstr ++ "_" ) aid
| null | https://raw.githubusercontent.com/B-Lang-org/bsc/bd141b505394edc5a4bdd3db442a9b0a8c101f0f/src/comp/IfcBetterInfo.hs | haskell | import Util(traces)
module for extracting "better" method argument names and types from the symbol table
This information is used by IExpand for populating the VModInfo
and for recording the types of external method ports
method Id
possible rename for method result
for ready signal
for enable signal
default prefix for arguments (which are not found in classic)
for arguments
original (unwrapped) field type
XXX Note that the following are unused
XXX (this package needs re-thinking)
utilitity comparison function for use in lookup/find
creates a basic method remaing
this function pulls the method info from an interface
traces("GBN ifcId: " ++ ppReadable ifcId) $
traces("GBN methFields: " ++ ppReadable methFields) $
traces("GBN result: " ++ ppReadable props) $
Get method names and associated field infos
covert the information to to IfcBetterName
Create a list of Ids for method argument names
Note that this only uses IPrefixStr and iArgNames, which must be | # LANGUAGE CPP #
module IfcBetterInfo(
BetterInfo(..),
extractMethodInfo,
matchMethodName,
noMethodInfo
) where
#if defined(__GLASGOW_HASKELL__) && (__GLASGOW_HASKELL__ >= 804)
import Prelude hiding ((<>))
#endif
import Flags(Flags)
import SymTab
import Id
import Pragma
import PPrint
import IdPrint
import VModInfo
import FStringCompat(mkFString)
import ISyntax
import IConv(iConvT)
data BetterInfo = BetterMethodInfo
}
| BetterClockInfo
{ ci_id :: Id
}
| BetterResetInfo
{ ri_id :: Id
}
| BetterInoutInfo
{ io_id :: Id
}
matchMethodName :: Id -> BetterInfo -> Bool
matchMethodName id mn = qualEq id (mi_id mn)
noMethodInfo :: Id -> BetterInfo
noMethodInfo fieldId = BetterMethodInfo {mi_id = fieldId,
mi_result = id_to_vPort fieldId,
mi_ready = id_to_vPort $ mkRdyId fieldId,
mi_enable = id_to_vPort $ mkEnableId fieldId,
mi_prefix = fieldId,
mi_args = [],
mi_orig_type = Nothing
}
instance PPrint BetterInfo where
pPrint d i info = (text "methodNames") <> ppId d (mi_id info) <> equals <> braces
( printMaybe d i "Result:" (mi_result info) <>
printMaybe d i "Ready:" (mi_ready info) <>
printMaybe d i "Enable:" (mi_enable info) <>
text "Prefix:" <> pPrint d i (mi_prefix info) <>
text "Args:" <> pPrint d i (mi_args info) <>
printMaybe d i "Original type:" (mi_orig_type info)
)
printMaybe :: PPrint a => PDetail -> Int -> String -> a -> Doc
printMaybe d i str x = text str <> pPrint d i x
extractMethodInfo :: Flags -> SymTab -> Id -> [BetterInfo]
extractMethodInfo = genBetterInfoFromIfc
genBetterInfoFromIfc :: Flags -> SymTab -> Id -> [BetterInfo]
genBetterInfoFromIfc flags symbolTable ifcId =
props
where
methIds = getIfcFieldNames symbolTable ifcId
methFields :: [ (Id,Maybe FieldInfo) ]
methFields = zip methIds $ map (findFieldInfo symbolTable ifcId) methIds
props = map (fieldInfoToBetterInfo flags symbolTable) methFields
fieldInfoToBetterInfo :: Flags -> SymTab -> (Id,Maybe FieldInfo) -> BetterInfo
fieldInfoToBetterInfo flags symTab (fieldId, Nothing) = noMethodInfo fieldId
fieldInfoToBetterInfo flags symTab (fieldId, Just fi) =
BetterMethodInfo {mi_id = fieldId,
mi_result = maybe (id_to_vPort fieldId) (str_to_vPort) mres,
mi_ready = maybe (id_to_vPort $ mkRdyId fieldId) str_to_vPort mrdy,
mi_enable = maybe (id_to_vPort $ mkEnableId fieldId) str_to_vPort men,
mi_prefix = maybe fieldId (setIdBaseString fieldId) mprefix,
mi_args = args,
mi_orig_type = fmap (iConvT flags symTab) (fi_orig_type fi)
}
where prags = fi_pragmas fi
(mprefix,mres,mrdy,men,rawargs,_,_) = getMethodPragmaInfo prags
args = genArgNames mprefix fieldId rawargs
Used by IExpand thru IfcbetterNames maybe move it here
kept on the FieldInfo in the SymTab
genArgNames :: Maybe String -> Id -> [Id] -> [Id]
genArgNames mprefix fieldId ids = map (addPrefix mprefix fieldId) ids
where addPrefix :: Maybe String -> Id -> Id -> Id
addPrefix Nothing fid aid = mkUSId fid aid
addPrefix (Just "") _ aid = aid
addPrefix (Just pstr) _ aid = mkIdPre (mkFString $ pstr ++ "_" ) aid
|
2fa46b117282dd4253d0f63ebe0d006005cc81b972ce6fd4be5eb514f06f22d6 | termite-analyser/termite | main.ml | open Debug
open Llvm
open Smt
module SMTg = Smtgraph.Make (Smt.ZZ)
module Llvm2Smt = Llvm2smt.Init (Smt.ZZ) (SMTg)
type algos =
| Algo1
| Monodimensional
| Multidimensional
| MonodimMultiPc
let algo_to_string = function
| Algo1 -> "Algo1"
| Monodimensional -> "Monodimensional"
| Multidimensional -> "Multidimensional"
| MonodimMultiPc -> "Monodimensional, Multiple control points"
type file =
| C_file of string
| BC_file of string
let print s =
(if !Config.debug then Printf.fprintf else Printf.ifprintf) stdout s
exception External_error of string * string * int
* Read the bitcode , output a list of functions .
let read_bitcode pagai pagai_o clang clang_o file =
let file = match file with
| BC_file s -> s
| C_file c_file ->
print "Compiling C file %s to llvm bytecode.\n%!" c_file ;
let make_file base suffix = let open Filename in
concat (get_temp_dir_name ()) ((basename @@ chop_extension base) ^ suffix)
in
let clang_file = make_file c_file ".bc" in
let clang_log = make_file c_file ".clang.log" in
let pagai_file = make_file c_file ".pagai.bc" in
let pagai_log = make_file c_file ".pagai.log" in
let clang_command =
Printf.sprintf "%s %s -c -emit-llvm -o %s %s &> %s"
clang clang_o clang_file c_file clang_log
in
let clang_ret = Sys.command clang_command in
if clang_ret <> 0 then raise @@ External_error(clang_command, clang_log, clang_ret) ;
let pagai_command =
Printf.sprintf "%s %s -b %s -i %s &> %s"
pagai pagai_o pagai_file clang_file pagai_log
in
let pagai_ret = Sys.command pagai_command in
if pagai_ret <> 0 then raise @@ External_error(pagai_command, pagai_log, pagai_ret) ;
pagai_file
in
print "Reading %s\n%!" file ;
let ctx = Llvm.create_context () in
(* Parse the bitcode. *)
let mem = Llvm.MemoryBuffer.of_file file in
let m = Llvm_bitreader.parse_bitcode ctx mem in
Llvm.MemoryBuffer.dispose mem ;
(* Apply the mem2reg pass, just in case. *)
let pass = PassManager.create () in
Llvm_scalar_opts.add_memory_to_register_promotion pass ;
ignore @@ PassManager.run_module m pass ;
Llvm.fold_left_functions (fun l x -> x :: l) [] m
let get_invariants_of_cpoint invariants cpoint =
try List.find (fun b -> b.Invariants.control_point = cpoint) invariants
with Not_found ->
failwith
(Printf.sprintf "Couldn't find invariants for the block %s."
(value_name @@ value_of_block cpoint))
(** Transform a bitcode straight to an smt formula. *)
let llvm2smt llfun =
print "Transforming %s into an smt formula.\n%!" (Llvm.value_name llfun) ;
let open Llvm2Smt in
let module L = Llvmcfg in
(* Get the graph *)
let llb2node, llg = L.of_llfunction llfun in
(* Get pagai's control points *)
let cpoints = Invariants.(get_pagai_control_points @@ get_invariant_metadatas llfun) in
print "%i control points:" @@ List.length cpoints ;
List.iter (fun b -> print " %s" @@ string_of_llvalue @@ value_of_block b) cpoints ;
print "\n%!" ;
(* Get pagai's invariants. *)
let invariants = Invariants.from_llfun llfun in
(* Filter out the invariants to get only the control_points. *)
let cp_invariants = List.map (get_invariants_of_cpoint invariants) cpoints in
(* Break down the graph. *)
let llg' = L.break_by_list llg @@ L.basicblocks_to_vertices llg cpoints in
if !Config.debug then begin
let file = open_out (value_name llfun ^ ".term.dot") in
L.Dot.output_graph file llg' ;
close_out file ;
end ;
(* Transform the CFG to SMT. *)
let smtg = llvm2smt llfun cpoints llg' in
if !Config.debug then begin
let file = open_out (value_name llfun ^ ".term.smt.dot") in
SMTg.Dot.output_graph file smtg ;
close_out file ;
end ;
let smt_cfg = SMTg.to_smt smtg in
print "CFG Formula:\n%s\n%!" ZZ.T.(to_string smt_cfg) ;
let encode_block inv =
ZZ.T.imply
(get_block false inv.Invariants.control_point)
(Invariants.to_smt (get_var ~primed:false) inv)
in
let smt_inv = ZZ.T.and_ @@ List.map encode_block invariants in
print "Invariants Formula:\n%s\n%!" ZZ.T.(to_string smt_inv) ;
(* The end. *)
let smt = ZZ.T.and_ [smt_cfg ; smt_inv] in
smt, cp_invariants
let get_unique_invariant ~llf = function
| [] -> failwith @@ Printf.sprintf "No invariants for function %s." @@ value_name llf
| [ cp ] -> cp
| _ -> failwith @@
Printf.sprintf "Algo 1, 2 and 3 only accept function with one control points. %s has multiple control points" @@ value_name llf
(** Do I really need to tell you what it's doing ? :) *)
let compute_ranking_function ~llf algo block_dict dictionnary invariants tau =
print "Computing ranking functions with the algorithm: %s \n%!" (algo_to_string algo) ;
match algo with
| Algo1 ->
let inv = get_unique_invariant ~llf invariants in
let res =
Algo1.algo1 ~verbose:!Config.debug block_dict dictionnary inv tau
in
let l, c = res.result in
let vars = Array.map (dictionnary false) inv.variables in
{res with result = [ (l, c, vars, true) ]}
| Monodimensional ->
let inv = get_unique_invariant ~llf invariants in
let res =
Monodimensional.monodimensional
~verbose:!Config.debug block_dict dictionnary inv tau
in
let l, c, b = res.result in
let vars = Array.map (dictionnary false) inv.variables in
{res with result = [ (l, c, vars, b) ]}
| Multidimensional ->
let inv = get_unique_invariant ~llf invariants in
let res =
Multidimensional.multidimensional
~verbose:!Config.debug block_dict dictionnary inv tau in
let vars = Array.map (dictionnary false) inv.variables in
let result = List.map (fun (l,c,b) -> (l,c, vars, b)) res.result
in
{res with result }
| MonodimMultiPc ->
let res = MonodimMultiPc.monodimensional ~verbose:!Config.debug block_dict dictionnary invariants tau in
let l, c, b = res.result in
let vars, _, _ = Invariants.group_to_matrix invariants in
{res with result = [ (l, c, Array.map (dictionnary false) vars, b) ]}
let print_result fmt res =
let pp_strict fmt b =
Format.pp_print_string fmt (if b then "strict" else "not strict")
in
let pp fmt (l,c, vars, strict) =
Format.fprintf fmt "l = %a@.The ranking function is %a"
pp_ranking_fun (c, l, vars)
pp_strict strict
in
pp_result (Format.pp_print_list pp) fmt res
let do_analysis pagai pagai_o clang clang_o algo file =
let time = Unix.gettimeofday () in
let nb_fun = ref 0 in
let results =
file
|> read_bitcode pagai pagai_o clang clang_o
|> BatList.filter_map (fun llfun ->
if Array.length (Llvm.basic_blocks llfun) = 0
then None (* If the function is empty, we just skip it. *)
else begin
let tau, invariants = llvm2smt llfun in
if List.length invariants = 0 then None
else begin
incr nb_fun ;
let res =
compute_ranking_function
llfun algo
(fun primed -> Llvm2Smt.get_block ~primed)
(fun primed -> Llvm2Smt.get_var ~primed)
invariants tau
in
Some (llfun, res)
end
end)
in
let all_strict =
List.fold_left
(fun b (_,{result}) -> List.fold_left (fun x (_,_,_,b) -> x && b) b result)
true results
in
let new_time = Unix.gettimeofday () in
if not !Config.quiet then begin
Format.pp_print_list
(fun fmt (llfun, res) ->
Format.fprintf fmt "@.--- %s ----@." (Llvm.value_name llfun) ;
print_result fmt res
) Format.std_formatter
results ;
Format.print_newline () ;
Format.printf "%i functions were analyzed.@." !nb_fun ;
Format.printf "This analysis took %f seconds.@." (new_time -. time) ;
end
else begin
if all_strict then print_string "YES"
else print_string "NO" ;
Format.printf " %f@." (new_time -. time) ;
end ;
all_strict
open Cmdliner
let debug_t =
let doc = "Print extra debugging information." in
Arg.(value & flag & info ["d";"debug"] ~doc)
let quiet_t =
let doc = "Print a compressed answer." in
Arg.(value & flag & info ["q";"quiet"] ~doc)
let algo_t =
let algos = [ "1", Algo1 ; "2", Monodimensional ;
"3", Multidimensional ; "4", MonodimMultiPc ] in
let doc =
"Which algorithm. $(docv) must be one of \
1 (Algo1), 2 (mono), 3 (multi) or 4 (multipc)." in
Arg.(value & opt (enum algos) MonodimMultiPc & info ["algo"] ~docv:"ALGO" ~doc)
let file_t =
let doc =
"File processed by termite. \
If it's a .c file, clang and pagai will be called on it to produce a llvm bitcode. \
Otherwise, if it's a .bc file, it is assumed to have been already preprocessed by pagai."
in
let c_or_bc_file =
let pa, pp = Arg.non_dir_file in
let pa s = match pa s with
| `Ok s when Filename.check_suffix s ".c" -> `Ok (C_file s)
| `Ok s when Filename.check_suffix s ".bc" -> `Ok (BC_file s)
| `Ok s -> `Error (Arg.doc_quote s ^" is neither a .c file nor a .bc file")
| `Error x -> `Error x
in
let pp fmt (C_file s | BC_file s) = pp fmt s in
(pa, pp)
in
Arg.(required & pos 0 (some c_or_bc_file) None & info [] ~doc ~docv:"FILE")
let pagai_t =
let doc = "Path to the pagai executable." in
Arg.(value & opt string "pagai" & info ["pagai"] ~doc)
let pagai_opt_t =
let doc = "Pagai options." in
Arg.(value & opt string "" & info ["pagai-opt"] ~doc)
let clang_t =
let doc = "Path to the clang executable." in
Arg.(value & opt string "clang" & info ["clang"] ~doc)
let clang_opt_t =
let doc = "Clang options." in
Arg.(value & opt string "" & info ["clang-opt"] ~doc)
let ret_error f = Printf.ksprintf (fun s -> `Error (false, s)) f
let termite_t debug quiet pagai pagai_o clang clang_o algo file =
Config.debug := debug ;
Config.quiet := quiet ;
try `Ok (do_analysis pagai pagai_o clang clang_o algo file)
with
| Sys_error s ->
ret_error "System error: %s\n%!" s
| Llvm2smt.Not_implemented llv ->
ret_error "%s\n%!" @@ Llvm2smt.sprint_exn llv
| External_error (cmd, log, code) ->
ret_error "\"%s\" failed with error %i. See %s for details." cmd code log
| Llvm2smt.Variable_not_found x as exn ->
Printf.eprintf "%s\n%!" @@ Llvm2smt.sprint_exn_var x ; raise exn
| Llvm2smt.Block_not_found x as exn ->
Printf.eprintf "%s\n%!" @@ Llvm2smt.sprint_exn_block x ; raise exn
let termite_info =
let doc = "A termination analyser." in
Term.info ~doc ~version:Config.version "termite"
let () =
let open Term in
let t = pure termite_t $ debug_t $ quiet_t
$ pagai_t $ pagai_opt_t $ clang_t $ clang_opt_t
$ algo_t $ file_t
in
exit @@ eval (ret t,termite_info)
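(* Illustrative invocation (paths and values are hypothetical, not taken from
   the repository):
     termite --algo 4 --pagai ./pagai --clang clang -d foo.c
   A .c input is first compiled by clang and preprocessed by pagai; a .bc
   input is assumed to be pagai output already (see file_t above). *)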
| null | https://raw.githubusercontent.com/termite-analyser/termite/285d92215a28fcce55614f6d04b44886f253a894/src/main.ml | ocaml | Parse the bitcode.
Apply the mem2reg pass, just in case.
* Transform a bitcode straight to an smt formula.
Get the graph
Get pagai's control points
Get pagai's invariants.
Filter out the invariants to get only the control_points.
Break down the graph.
The end.
* Do I really need to tell you what it's doing ? :)
If the function is empty, we just skip it. | open Debug
open Llvm
open Smt
module SMTg = Smtgraph.Make (Smt.ZZ)
module Llvm2Smt = Llvm2smt.Init (Smt.ZZ) (SMTg)
type algos =
| Algo1
| Monodimensional
| Multidimensional
| MonodimMultiPc
let algo_to_string = function
| Algo1 -> "Algo1"
| Monodimensional -> "Monodimensional"
| Multidimensional -> "Multidimensional"
| MonodimMultiPc -> "Monodimensional, Multiple control points"
type file =
| C_file of string
| BC_file of string
let print s =
(if !Config.debug then Printf.fprintf else Printf.ifprintf) stdout s
exception External_error of string * string * int
(** Read the bitcode, output a list of functions. *)
let read_bitcode pagai pagai_o clang clang_o file =
let file = match file with
| BC_file s -> s
| C_file c_file ->
print "Compiling C file %s to llvm bytecode.\n%!" c_file ;
let make_file base suffix = let open Filename in
concat (get_temp_dir_name ()) ((basename @@ chop_extension base) ^ suffix)
in
let clang_file = make_file c_file ".bc" in
let clang_log = make_file c_file ".clang.log" in
let pagai_file = make_file c_file ".pagai.bc" in
let pagai_log = make_file c_file ".pagai.log" in
let clang_command =
Printf.sprintf "%s %s -c -emit-llvm -o %s %s &> %s"
clang clang_o clang_file c_file clang_log
in
let clang_ret = Sys.command clang_command in
if clang_ret <> 0 then raise @@ External_error(clang_command, clang_log, clang_ret) ;
let pagai_command =
Printf.sprintf "%s %s -b %s -i %s &> %s"
pagai pagai_o pagai_file clang_file pagai_log
in
let pagai_ret = Sys.command pagai_command in
if pagai_ret <> 0 then raise @@ External_error(pagai_command, pagai_log, pagai_ret) ;
pagai_file
in
print "Reading %s\n%!" file ;
let ctx = Llvm.create_context () in
let mem = Llvm.MemoryBuffer.of_file file in
let m = Llvm_bitreader.parse_bitcode ctx mem in
Llvm.MemoryBuffer.dispose mem ;
let pass = PassManager.create () in
Llvm_scalar_opts.add_memory_to_register_promotion pass ;
ignore @@ PassManager.run_module m pass ;
Llvm.fold_left_functions (fun l x -> x :: l) [] m
let get_invariants_of_cpoint invariants cpoint =
try List.find (fun b -> b.Invariants.control_point = cpoint) invariants
with Not_found ->
failwith
(Printf.sprintf "Couldn't find invariants for the block %s."
(value_name @@ value_of_block cpoint))
let llvm2smt llfun =
print "Transforming %s into an smt formula.\n%!" (Llvm.value_name llfun) ;
let open Llvm2Smt in
let module L = Llvmcfg in
let llb2node, llg = L.of_llfunction llfun in
let cpoints = Invariants.(get_pagai_control_points @@ get_invariant_metadatas llfun) in
print "%i control points:" @@ List.length cpoints ;
List.iter (fun b -> print " %s" @@ string_of_llvalue @@ value_of_block b) cpoints ;
print "\n%!" ;
let invariants = Invariants.from_llfun llfun in
let cp_invariants = List.map (get_invariants_of_cpoint invariants) cpoints in
let llg' = L.break_by_list llg @@ L.basicblocks_to_vertices llg cpoints in
if !Config.debug then begin
let file = open_out (value_name llfun ^ ".term.dot") in
L.Dot.output_graph file llg' ;
close_out file ;
end ;
(* Transform the CFG to SMT. *)
let smtg = llvm2smt llfun cpoints llg' in
if !Config.debug then begin
let file = open_out (value_name llfun ^ ".term.smt.dot") in
SMTg.Dot.output_graph file smtg ;
close_out file ;
end ;
let smt_cfg = SMTg.to_smt smtg in
print "CFG Formula:\n%s\n%!" ZZ.T.(to_string smt_cfg) ;
let encode_block inv =
ZZ.T.imply
(get_block false inv.Invariants.control_point)
(Invariants.to_smt (get_var ~primed:false) inv)
in
let smt_inv = ZZ.T.and_ @@ List.map encode_block invariants in
print "Invariants Formula:\n%s\n%!" ZZ.T.(to_string smt_inv) ;
let smt = ZZ.T.and_ [smt_cfg ; smt_inv] in
smt, cp_invariants
let get_unique_invariant ~llf = function
| [] -> failwith @@ Printf.sprintf "No invariants for function %s." @@ value_name llf
| [ cp ] -> cp
| _ -> failwith @@
Printf.sprintf "Algo 1, 2 and 3 only accept function with one control points. %s has multiple control points" @@ value_name llf
let compute_ranking_function ~llf algo block_dict dictionnary invariants tau =
print "Computing ranking functions with the algorithm: %s \n%!" (algo_to_string algo) ;
match algo with
| Algo1 ->
let inv = get_unique_invariant ~llf invariants in
let res =
Algo1.algo1 ~verbose:!Config.debug block_dict dictionnary inv tau
in
let l, c = res.result in
let vars = Array.map (dictionnary false) inv.variables in
{res with result = [ (l, c, vars, true) ]}
| Monodimensional ->
let inv = get_unique_invariant ~llf invariants in
let res =
Monodimensional.monodimensional
~verbose:!Config.debug block_dict dictionnary inv tau
in
let l, c, b = res.result in
let vars = Array.map (dictionnary false) inv.variables in
{res with result = [ (l, c, vars, b) ]}
| Multidimensional ->
let inv = get_unique_invariant ~llf invariants in
let res =
Multidimensional.multidimensional
~verbose:!Config.debug block_dict dictionnary inv tau in
let vars = Array.map (dictionnary false) inv.variables in
let result = List.map (fun (l,c,b) -> (l,c, vars, b)) res.result
in
{res with result }
| MonodimMultiPc ->
let res = MonodimMultiPc.monodimensional ~verbose:!Config.debug block_dict dictionnary invariants tau in
let l, c, b = res.result in
let vars, _, _ = Invariants.group_to_matrix invariants in
{res with result = [ (l, c, Array.map (dictionnary false) vars, b) ]}
let print_result fmt res =
let pp_strict fmt b =
Format.pp_print_string fmt (if b then "strict" else "not strict")
in
let pp fmt (l,c, vars, strict) =
Format.fprintf fmt "l = %a@.The ranking function is %a"
pp_ranking_fun (c, l, vars)
pp_strict strict
in
pp_result (Format.pp_print_list pp) fmt res
let do_analysis pagai pagai_o clang clang_o algo file =
let time = Unix.gettimeofday () in
let nb_fun = ref 0 in
let results =
file
|> read_bitcode pagai pagai_o clang clang_o
|> BatList.filter_map (fun llfun ->
if Array.length (Llvm.basic_blocks llfun) = 0
then None
else begin
let tau, invariants = llvm2smt llfun in
if List.length invariants = 0 then None
else begin
incr nb_fun ;
let res =
compute_ranking_function
llfun algo
(fun primed -> Llvm2Smt.get_block ~primed)
(fun primed -> Llvm2Smt.get_var ~primed)
invariants tau
in
Some (llfun, res)
end
end)
in
let all_strict =
List.fold_left
(fun b (_,{result}) -> List.fold_left (fun x (_,_,_,b) -> x && b) b result)
true results
in
let new_time = Unix.gettimeofday () in
if not !Config.quiet then begin
Format.pp_print_list
(fun fmt (llfun, res) ->
Format.fprintf fmt "@.--- %s ----@." (Llvm.value_name llfun) ;
print_result fmt res
) Format.std_formatter
results ;
Format.print_newline () ;
Format.printf "%i functions were analyzed.@." !nb_fun ;
Format.printf "This analysis took %f seconds.@." (new_time -. time) ;
end
else begin
if all_strict then print_string "YES"
else print_string "NO" ;
Format.printf " %f@." (new_time -. time) ;
end ;
all_strict
open Cmdliner
let debug_t =
let doc = "Print extra debugging information." in
Arg.(value & flag & info ["d";"debug"] ~doc)
let quiet_t =
let doc = "Print a compressed answer." in
Arg.(value & flag & info ["q";"quiet"] ~doc)
let algo_t =
let algos = [ "1", Algo1 ; "2", Monodimensional ;
"3", Multidimensional ; "4", MonodimMultiPc ] in
let doc =
"Which algorithm. $(docv) must be one of \
1 (Algo1), 2 (mono), 3 (multi) or 4 (multipc)." in
Arg.(value & opt (enum algos) MonodimMultiPc & info ["algo"] ~docv:"ALGO" ~doc)
let file_t =
let doc =
"File processed by termite. \
If it's a .c file, clang and pagai will be called on it to produce a llvm bitcode. \
Otherwise, if it's a .bc file, it is assumed to have been already preprocessed by pagai."
in
let c_or_bc_file =
let pa, pp = Arg.non_dir_file in
let pa s = match pa s with
| `Ok s when Filename.check_suffix s ".c" -> `Ok (C_file s)
| `Ok s when Filename.check_suffix s ".bc" -> `Ok (BC_file s)
| `Ok s -> `Error (Arg.doc_quote s ^" is neither a .c file nor a .bc file")
| `Error x -> `Error x
in
let pp fmt (C_file s | BC_file s) = pp fmt s in
(pa, pp)
in
Arg.(required & pos 0 (some c_or_bc_file) None & info [] ~doc ~docv:"FILE")
let pagai_t =
let doc = "Path to the pagai executable." in
Arg.(value & opt string "pagai" & info ["pagai"] ~doc)
let pagai_opt_t =
let doc = "Pagai options." in
Arg.(value & opt string "" & info ["pagai-opt"] ~doc)
let clang_t =
let doc = "Path to the clang executable." in
Arg.(value & opt string "clang" & info ["clang"] ~doc)
let clang_opt_t =
let doc = "Clang options." in
Arg.(value & opt string "" & info ["clang-opt"] ~doc)
let ret_error f = Printf.ksprintf (fun s -> `Error (false, s)) f
let termite_t debug quiet pagai pagai_o clang clang_o algo file =
Config.debug := debug ;
Config.quiet := quiet ;
try `Ok (do_analysis pagai pagai_o clang clang_o algo file)
with
| Sys_error s ->
ret_error "System error: %s\n%!" s
| Llvm2smt.Not_implemented llv ->
ret_error "%s\n%!" @@ Llvm2smt.sprint_exn llv
| External_error (cmd, log, code) ->
ret_error "\"%s\" failed with error %i. See %s for details." cmd code log
| Llvm2smt.Variable_not_found x as exn ->
Printf.eprintf "%s\n%!" @@ Llvm2smt.sprint_exn_var x ; raise exn
| Llvm2smt.Block_not_found x as exn ->
Printf.eprintf "%s\n%!" @@ Llvm2smt.sprint_exn_block x ; raise exn
let termite_info =
let doc = "A termination analyser." in
Term.info ~doc ~version:Config.version "termite"
let () =
let open Term in
let t = pure termite_t $ debug_t $ quiet_t
$ pagai_t $ pagai_opt_t $ clang_t $ clang_opt_t
$ algo_t $ file_t
in
exit @@ eval (ret t,termite_info)
|
f0d2a7d6923dbc03b0e36a814e9c12c18c3d60712ffd83075e24e366ab40420b | namenu/advent-of-code | day10.clj | --- Day 10 : Monitoring Station ---
(ns aoc.year2019.day10
(:require [aoc.util :refer [input cart->polar]]
[aoc.grid :refer [parse-grid]]
[aoc.year2019.intcode :refer :all]))
;; pt.1
(defn already-hit? [[dx dy] hits]
(some (fn [[hx hy]]
(and (= (neg? dx) (neg? hx))
(= (neg? dy) (neg? hy))
(= (* dx hy) (* dy hx)))) hits))
(defn hits [asteroids [cx cy]]
(loop [targets asteroids
hits []]
(if-let [[tx ty] (first targets)]
(let [[dx dy] [(- tx cx) (- ty cy)]]
(if (already-hit? [dx dy] hits)
(recur (next targets) hits)
(recur (next targets) (conj hits [dx dy]))))
hits)))
(defn best-monitoring [asteroids]
(->> asteroids
(map #(vector % (count (hits asteroids %))))
(apply max-key second)))
(defn ->ray [[cx cy] [x y]]
(let [[dx dy] [(- x cx) (- y cy)]
[r theta] (cart->polar [dx (- dy)])
theta (- (/ Math/PI 2) theta)]
{:xy [x y]
:angle (if (neg? theta)
(+ theta Math/PI Math/PI) theta)
:length r}))
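;; Worked example (assumes aoc.util/cart->polar returns [r theta] with theta
;; measured from the positive x axis): (->ray [8 3] [8 1]) gives
;; {:xy [8 1] :angle 0.0 :length 2.0} - straight "up" on the grid, which is
;; where the laser sweep starts in part 2.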
(let [in ".#..#\n.....\n#####\n....#\n...##"
in "......#.#.\n#..#.#....\n..#######.\n.#.#.###..\n.#..#.....\n..#....#.#\n#..#....#.\n.##.#..###\n##...#..#.\n.#....####"
in "#.#...#.#.\n.###....#.\n.#....#...\n##.#.#.#.#\n....#.#.#.\n.##..###.#\n..#...##..\n..##....##\n......#...\n.####.###.\n"
in ".#..#..###\n####.###.#\n....###.#.\n..###.##.#\n##.##.#.#.\n....###..#\n..#.#..#.#\n#..#.#.###\n.##...##.#\n.....#.#.."
in ".#..##.###...#######\n##.############..##.\n.#.######.########.#\n.###.#######.####.#.\n#####.##.#.##.###.##\n..#####..#.#########\n####################\n#.####....###.#.#.##\n##.#################\n#####.##.###..####..\n..######..##.#######\n####.##.####...##..#\n.#####..#.######.###\n##...#.##########...\n#.##########.#######\n.####.#.###.###.#.##\n....##.##.###..#####\n.#.#.###########.###\n#.#.#.#####.####.###\n###.##.####.##.#..##"
in (input 2019 10)
grid (parse-grid in)
asteroids (->> (parse-grid in)
(filter #(= (second %) \#))
(map first)
(into #{}))
[station nseen] (best-monitoring asteroids)]
;; pt.1
(prn nseen)
;; pt.2
(let [sorted (->> (disj asteroids station)
(map (partial ->ray station))
(group-by :angle)
(sort)
(map (comp #(sort-by :length %) second)))
rounds (->> sorted
(iterate #(remove nil? (map next %)))
(take-while not-empty))
removals (->> (mapcat #(map first %) rounds)
(map :xy))]
(nth removals 199)))
| null | https://raw.githubusercontent.com/namenu/advent-of-code/83f8cf05931f814dab76696bf46fec1bb1276fac/2019/clojure/src/aoc/year2019/day10.clj | clojure | --- Day 10 : Monitoring Station ---
(ns aoc.year2019.day10
(:require [aoc.util :refer [input cart->polar]]
[aoc.grid :refer [parse-grid]]
[aoc.year2019.intcode :refer :all]))
;; pt.1
(defn already-hit? [[dx dy] hits]
(some (fn [[hx hy]]
(and (= (neg? dx) (neg? hx))
(= (neg? dy) (neg? hy))
(= (* dx hy) (* dy hx)))) hits))
(defn hits [asteroids [cx cy]]
(loop [targets asteroids
hits []]
(if-let [[tx ty] (first targets)]
(let [[dx dy] [(- tx cx) (- ty cy)]]
(if (already-hit? [dx dy] hits)
(recur (next targets) hits)
(recur (next targets) (conj hits [dx dy]))))
hits)))
(defn best-monitoring [asteroids]
(->> asteroids
(map #(vector % (count (hits asteroids %))))
(apply max-key second)))
(defn ->ray [[cx cy] [x y]]
(let [[dx dy] [(- x cx) (- y cy)]
[r theta] (cart->polar [dx (- dy)])
theta (- (/ Math/PI 2) theta)]
{:xy [x y]
:angle (if (neg? theta)
(+ theta Math/PI Math/PI) theta)
:length r}))
(let [in ".#..#\n.....\n#####\n....#\n...##"
in "......#.#.\n#..#.#....\n..#######.\n.#.#.###..\n.#..#.....\n..#....#.#\n#..#....#.\n.##.#..###\n##...#..#.\n.#....####"
in "#.#...#.#.\n.###....#.\n.#....#...\n##.#.#.#.#\n....#.#.#.\n.##..###.#\n..#...##..\n..##....##\n......#...\n.####.###.\n"
in ".#..#..###\n####.###.#\n....###.#.\n..###.##.#\n##.##.#.#.\n....###..#\n..#.#..#.#\n#..#.#.###\n.##...##.#\n.....#.#.."
in ".#..##.###...#######\n##.############..##.\n.#.######.########.#\n.###.#######.####.#.\n#####.##.#.##.###.##\n..#####..#.#########\n####################\n#.####....###.#.#.##\n##.#################\n#####.##.###..####..\n..######..##.#######\n####.##.####...##..#\n.#####..#.######.###\n##...#.##########...\n#.##########.#######\n.####.#.###.###.#.##\n....##.##.###..#####\n.#.#.###########.###\n#.#.#.#####.####.###\n###.##.####.##.#..##"
in (input 2019 10)
grid (parse-grid in)
asteroids (->> (parse-grid in)
(filter #(= (second %) \#))
(map first)
(into #{}))
[station nseen] (best-monitoring asteroids)]
;; pt.1
(prn nseen)
;; pt.2
(let [sorted (->> (disj asteroids station)
(map (partial ->ray station))
(group-by :angle)
(sort)
(map (comp #(sort-by :length %) second)))
rounds (->> sorted
(iterate #(remove nil? (map next %)))
(take-while not-empty))
removals (->> (mapcat #(map first %) rounds)
(map :xy))]
(nth removals 199)))
|
|
efe815b1ab38be31966d90684bae7a83d585b1ddec41a5f70eb15d053fd2bd18 | goldfirere/singletons | T89.hs | {-# LANGUAGE OverloadedStrings #-}
module T89 where
import Data.Singletons.Base.TH
$(singletons [d|data Foo = Foo deriving (Enum)|])
| null | https://raw.githubusercontent.com/goldfirere/singletons/e89070a8916d067342c027ef35fb4b2a5039b448/singletons-base/tests/compile-and-dump/Singletons/T89.hs | haskell | # LANGUAGE OverloadedStrings # | module T89 where
import Data.Singletons.Base.TH
$(singletons [d|data Foo = Foo deriving (Enum)|])
|
df74110d660a602fab4babe8c2d38e317ef2cbbc22e264f50b117c9250220f1e | f-me/carma-public | Tech.hs | module Carma.Model.Service.Tech where
import Data.Text
import Data.Typeable
import Data.Scientific
import Data.Aeson((.=), object)
import Data.Model
import Data.Model.View
import Carma.Model.LegacyTypes (Checkbox)
import Carma.Model.Service (Service)
import Carma.Model.TechType (TechType)
import qualified Carma.Model.TechType as TechType
data Tech = Tech
{ ident :: PK Int Tech ""
, techType :: F (Maybe (IdentI TechType)) "techType" "Услуга"
, orderNumber :: F (Maybe Text) "orderNumber" "Номер заказ-наряда"
, isCountryRide :: F Bool "isCountryRide" "За городом"
-- Naming scheme convention: complNpM, where N is the id of a
-- TechType dictionary entry. Actual visibility rules are set via
-- metas on these fields using TechType idents (see below).
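-- For example (one reading of the convention above): compl27p1 is checkbox
-- part 1 that is shown when techType is TechType.customTech_27, as wired up
-- by the "visibleIf" metas in modelView below.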
, compl27p1 :: F (Maybe Checkbox) "compl27p1" "compl27p1"
, compl27p2 :: F (Maybe Checkbox) "compl27p2" "compl27p2"
, compl27p3 :: F (Maybe Checkbox) "compl27p3" "compl27p3"
, compl27p4 :: F (Maybe Checkbox) "compl27p4" "compl27p4"
, compl27p5 :: F (Maybe Checkbox) "compl27p5" "compl27p5"
, compl29p1 :: F (Maybe Checkbox) "compl29p1" "compl29p1"
, compl29p2 :: F (Maybe Checkbox) "compl29p2" "compl29p2"
, compl29p3 :: F (Maybe Checkbox) "compl29p3" "compl29p3"
, compl29p4 :: F (Maybe Checkbox) "compl29p4" "compl29p4"
, compl29p5 :: F (Maybe Checkbox) "compl29p5" "compl29p5"
, compl28p1 :: F (Maybe Checkbox) "compl28p1" "compl28p1"
, compl28p2 :: F (Maybe Checkbox) "compl28p2" "compl28p2"
, compl28p3 :: F (Maybe Checkbox) "compl28p3" "compl28p3"
, compl28p4 :: F (Maybe Checkbox) "compl28p4" "compl28p4"
, compl28p5 :: F (Maybe Checkbox) "compl28p5" "compl28p5"
, compl32p1 :: F (Maybe Checkbox) "compl32p1" "compl32p1"
, compl32p2 :: F (Maybe Checkbox) "compl32p2" "compl32p2"
, compl32p3 :: F (Maybe Checkbox) "compl32p3" "compl32p3"
, compl32p4 :: F (Maybe Checkbox) "compl32p4" "compl32p4"
, compl32p5 :: F (Maybe Checkbox) "compl32p5" "compl32p5"
, compl33p1 :: F (Maybe Checkbox) "compl33p1" "compl33p1"
, compl33p2 :: F (Maybe Checkbox) "compl33p2" "compl33p2"
, compl33p3 :: F (Maybe Checkbox) "compl33p3" "compl33p3"
, compl33p4 :: F (Maybe Checkbox) "compl33p4" "compl33p4"
, compl33p5 :: F (Maybe Checkbox) "compl33p5" "compl33p5"
, compl31p1 :: F (Maybe Checkbox) "compl31p1" "compl31p1"
, compl31p2 :: F (Maybe Checkbox) "compl31p2" "compl31p2"
, compl31p3 :: F (Maybe Checkbox) "compl31p3" "compl31p3"
, compl31p4 :: F (Maybe Checkbox) "compl31p4" "compl31p4"
, compl31p5 :: F (Maybe Checkbox) "compl31p5" "compl31p5"
, compl35p1 :: F (Maybe Checkbox) "compl35p1" "compl35p1"
, compl35p2 :: F (Maybe Checkbox) "compl35p2" "compl35p2"
, compl35p3 :: F (Maybe Checkbox) "compl35p3" "compl35p3"
, compl35p4 :: F (Maybe Checkbox) "compl35p4" "compl35p4"
, compl35p5 :: F (Maybe Checkbox) "compl35p5" "compl35p5"
, compl34p1 :: F (Maybe Checkbox) "compl34p1" "compl34p1"
, compl34p2 :: F (Maybe Checkbox) "compl34p2" "compl34p2"
, compl34p3 :: F (Maybe Checkbox) "compl34p3" "compl34p3"
, compl34p4 :: F (Maybe Checkbox) "compl34p4" "compl34p4"
, compl34p5 :: F (Maybe Checkbox) "compl34p5" "compl34p5"
, compl37p1 :: F (Maybe Checkbox) "compl37p1" "compl37p1"
, compl37p2 :: F (Maybe Checkbox) "compl37p2" "compl37p2"
, compl37p3 :: F (Maybe Checkbox) "compl37p3" "compl37p3"
, compl37p4 :: F (Maybe Checkbox) "compl37p4" "compl37p4"
, compl37p5 :: F (Maybe Checkbox) "compl37p5" "compl37p5"
, compl36p1 :: F (Maybe Checkbox) "compl36p1" "compl36p1"
, compl36p2 :: F (Maybe Checkbox) "compl36p2" "compl36p2"
, compl36p3 :: F (Maybe Checkbox) "compl36p3" "compl36p3"
, compl36p4 :: F (Maybe Checkbox) "compl36p4" "compl36p4"
, compl36p5 :: F (Maybe Checkbox) "compl36p5" "compl36p5"
, compl41p1 :: F (Maybe Checkbox) "compl41p1" "compl41p1"
, compl41p2 :: F (Maybe Checkbox) "compl41p2" "compl41p2"
, compl41p3 :: F (Maybe Checkbox) "compl41p3" "compl41p3"
, compl41p4 :: F (Maybe Checkbox) "compl41p4" "compl41p4"
, compl41p5 :: F (Maybe Checkbox) "compl41p5" "compl41p5"
, suburbanMilage :: F (Maybe Scientific) "suburbanMilage" "Пробег за городом"
, totalMilage :: F (Maybe Scientific) "totalMilage" "Километраж по тахометру"
, partnerWarnedInTime :: F (Maybe Bool) "partnerWarnedInTime" "Партнёр предупредил вовремя"
}
deriving Typeable
instance Model Tech where
type TableName Tech = "techtbl"
type Parent Tech = Service
parentInfo = ExParent modelInfo
modelInfo = mkModelInfo Tech ident
modelView v = case parentView v :: Maybe (ModelView Tech) of
Nothing -> Nothing
Just mv ->
Just $ modifyView (mv {mv_title = "Техпомощь"})
[ setMeta "filterBy" "isActive" techType
, setMeta "visibleIf" (object ["isCountryRide" .= [True]]) suburbanMilage
, setMeta "visibleIf" (object ["isCountryRide" .= [True]]) totalMilage
, setMeta "visibleIf" (object ["isCountryRide" .= [True]]) partnerWarnedInTime
, setMeta "visibleIf" (object ["techType" .= [TechType.customTech_27]]) compl27p1
, setMeta "visibleIf" (object ["techType" .= [TechType.customTech_27]]) compl27p2
, setMeta "visibleIf" (object ["techType" .= [TechType.customTech_27]]) compl27p3
, setMeta "visibleIf" (object ["techType" .= [TechType.customTech_27]]) compl27p4
, setMeta "visibleIf" (object ["techType" .= [TechType.customTech_27]]) compl27p5
, setMeta "visibleIf" (object ["techType" .= [TechType.customTech_29]]) compl29p1
, setMeta "visibleIf" (object ["techType" .= [TechType.customTech_29]]) compl29p2
, setMeta "visibleIf" (object ["techType" .= [TechType.customTech_29]]) compl29p3
, setMeta "visibleIf" (object ["techType" .= [TechType.customTech_29]]) compl29p4
, setMeta "visibleIf" (object ["techType" .= [TechType.customTech_29]]) compl29p5
, setMeta "visibleIf" (object ["techType" .= [TechType.customTech_28]]) compl28p1
, setMeta "visibleIf" (object ["techType" .= [TechType.customTech_28]]) compl28p2
, setMeta "visibleIf" (object ["techType" .= [TechType.customTech_28]]) compl28p3
, setMeta "visibleIf" (object ["techType" .= [TechType.customTech_28]]) compl28p4
, setMeta "visibleIf" (object ["techType" .= [TechType.customTech_28]]) compl28p5
, setMeta "visibleIf" (object ["techType" .= [TechType.customTech_32]]) compl32p1
, setMeta "visibleIf" (object ["techType" .= [TechType.customTech_32]]) compl32p2
, setMeta "visibleIf" (object ["techType" .= [TechType.customTech_32]]) compl32p3
, setMeta "visibleIf" (object ["techType" .= [TechType.customTech_32]]) compl32p4
, setMeta "visibleIf" (object ["techType" .= [TechType.customTech_32]]) compl32p5
, setMeta "visibleIf" (object ["techType" .= [TechType.customTech_33]]) compl33p1
, setMeta "visibleIf" (object ["techType" .= [TechType.customTech_33]]) compl33p2
, setMeta "visibleIf" (object ["techType" .= [TechType.customTech_33]]) compl33p3
, setMeta "visibleIf" (object ["techType" .= [TechType.customTech_33]]) compl33p4
, setMeta "visibleIf" (object ["techType" .= [TechType.customTech_33]]) compl33p5
, setMeta "visibleIf" (object ["techType" .= [TechType.customTech_31]]) compl31p1
, setMeta "visibleIf" (object ["techType" .= [TechType.customTech_31]]) compl31p2
, setMeta "visibleIf" (object ["techType" .= [TechType.customTech_31]]) compl31p3
, setMeta "visibleIf" (object ["techType" .= [TechType.customTech_31]]) compl31p4
, setMeta "visibleIf" (object ["techType" .= [TechType.customTech_31]]) compl31p5
, setMeta "visibleIf" (object ["techType" .= [TechType.customTech_35]]) compl35p1
, setMeta "visibleIf" (object ["techType" .= [TechType.customTech_35]]) compl35p2
, setMeta "visibleIf" (object ["techType" .= [TechType.customTech_35]]) compl35p3
, setMeta "visibleIf" (object ["techType" .= [TechType.customTech_35]]) compl35p4
, setMeta "visibleIf" (object ["techType" .= [TechType.customTech_35]]) compl35p5
, setMeta "visibleIf" (object ["techType" .= [TechType.customTech_34]]) compl34p1
, setMeta "visibleIf" (object ["techType" .= [TechType.customTech_34]]) compl34p2
, setMeta "visibleIf" (object ["techType" .= [TechType.customTech_34]]) compl34p3
, setMeta "visibleIf" (object ["techType" .= [TechType.customTech_34]]) compl34p4
, setMeta "visibleIf" (object ["techType" .= [TechType.customTech_34]]) compl34p5
, setMeta "visibleIf" (object ["techType" .= [TechType.customTech_37]]) compl37p1
, setMeta "visibleIf" (object ["techType" .= [TechType.customTech_37]]) compl37p2
, setMeta "visibleIf" (object ["techType" .= [TechType.customTech_37]]) compl37p3
, setMeta "visibleIf" (object ["techType" .= [TechType.customTech_37]]) compl37p4
, setMeta "visibleIf" (object ["techType" .= [TechType.customTech_37]]) compl37p5
, setMeta "visibleIf" (object ["techType" .= [TechType.customTech_36]]) compl36p1
, setMeta "visibleIf" (object ["techType" .= [TechType.customTech_36]]) compl36p2
, setMeta "visibleIf" (object ["techType" .= [TechType.customTech_36]]) compl36p3
, setMeta "visibleIf" (object ["techType" .= [TechType.customTech_36]]) compl36p4
, setMeta "visibleIf" (object ["techType" .= [TechType.customTech_36]]) compl36p5
, setMeta "visibleIf" (object ["techType" .= [TechType.customTech_41]]) compl41p1
, setMeta "visibleIf" (object ["techType" .= [TechType.customTech_41]]) compl41p2
, setMeta "visibleIf" (object ["techType" .= [TechType.customTech_41]]) compl41p3
, setMeta "visibleIf" (object ["techType" .= [TechType.customTech_41]]) compl41p4
, setMeta "visibleIf" (object ["techType" .= [TechType.customTech_41]]) compl41p5
, widget "partnerWarnedInTime-btn" partnerWarnedInTime
]
| null | https://raw.githubusercontent.com/f-me/carma-public/82a9f44f7d919e54daa4114aa08dfec58b01009b/carma-models/src/Carma/Model/Service/Tech.hs | haskell | Naming scheme convention: complNpM, where N is the id of a
TechType dictionary entry. Actual visibility rules are set via | module Carma.Model.Service.Tech where
import Data.Text
import Data.Typeable
import Data.Scientific
import Data.Aeson((.=), object)
import Data.Model
import Data.Model.View
import Carma.Model.LegacyTypes (Checkbox)
import Carma.Model.Service (Service)
import Carma.Model.TechType (TechType)
import qualified Carma.Model.TechType as TechType
data Tech = Tech
{ ident :: PK Int Tech ""
, techType :: F (Maybe (IdentI TechType)) "techType" "Услуга"
, orderNumber :: F (Maybe Text) "orderNumber" "Номер заказ-наряда"
, isCountryRide :: F Bool "isCountryRide" "За городом"
-- metas on these fields using TechType idents (see below).
, compl27p1 :: F (Maybe Checkbox) "compl27p1" "compl27p1"
, compl27p2 :: F (Maybe Checkbox) "compl27p2" "compl27p2"
, compl27p3 :: F (Maybe Checkbox) "compl27p3" "compl27p3"
, compl27p4 :: F (Maybe Checkbox) "compl27p4" "compl27p4"
, compl27p5 :: F (Maybe Checkbox) "compl27p5" "compl27p5"
, compl29p1 :: F (Maybe Checkbox) "compl29p1" "compl29p1"
, compl29p2 :: F (Maybe Checkbox) "compl29p2" "compl29p2"
, compl29p3 :: F (Maybe Checkbox) "compl29p3" "compl29p3"
, compl29p4 :: F (Maybe Checkbox) "compl29p4" "compl29p4"
, compl29p5 :: F (Maybe Checkbox) "compl29p5" "compl29p5"
, compl28p1 :: F (Maybe Checkbox) "compl28p1" "compl28p1"
, compl28p2 :: F (Maybe Checkbox) "compl28p2" "compl28p2"
, compl28p3 :: F (Maybe Checkbox) "compl28p3" "compl28p3"
, compl28p4 :: F (Maybe Checkbox) "compl28p4" "compl28p4"
, compl28p5 :: F (Maybe Checkbox) "compl28p5" "compl28p5"
, compl32p1 :: F (Maybe Checkbox) "compl32p1" "compl32p1"
, compl32p2 :: F (Maybe Checkbox) "compl32p2" "compl32p2"
, compl32p3 :: F (Maybe Checkbox) "compl32p3" "compl32p3"
, compl32p4 :: F (Maybe Checkbox) "compl32p4" "compl32p4"
, compl32p5 :: F (Maybe Checkbox) "compl32p5" "compl32p5"
, compl33p1 :: F (Maybe Checkbox) "compl33p1" "compl33p1"
, compl33p2 :: F (Maybe Checkbox) "compl33p2" "compl33p2"
, compl33p3 :: F (Maybe Checkbox) "compl33p3" "compl33p3"
, compl33p4 :: F (Maybe Checkbox) "compl33p4" "compl33p4"
, compl33p5 :: F (Maybe Checkbox) "compl33p5" "compl33p5"
, compl31p1 :: F (Maybe Checkbox) "compl31p1" "compl31p1"
, compl31p2 :: F (Maybe Checkbox) "compl31p2" "compl31p2"
, compl31p3 :: F (Maybe Checkbox) "compl31p3" "compl31p3"
, compl31p4 :: F (Maybe Checkbox) "compl31p4" "compl31p4"
, compl31p5 :: F (Maybe Checkbox) "compl31p5" "compl31p5"
, compl35p1 :: F (Maybe Checkbox) "compl35p1" "compl35p1"
, compl35p2 :: F (Maybe Checkbox) "compl35p2" "compl35p2"
, compl35p3 :: F (Maybe Checkbox) "compl35p3" "compl35p3"
, compl35p4 :: F (Maybe Checkbox) "compl35p4" "compl35p4"
, compl35p5 :: F (Maybe Checkbox) "compl35p5" "compl35p5"
, compl34p1 :: F (Maybe Checkbox) "compl34p1" "compl34p1"
, compl34p2 :: F (Maybe Checkbox) "compl34p2" "compl34p2"
, compl34p3 :: F (Maybe Checkbox) "compl34p3" "compl34p3"
, compl34p4 :: F (Maybe Checkbox) "compl34p4" "compl34p4"
, compl34p5 :: F (Maybe Checkbox) "compl34p5" "compl34p5"
, compl37p1 :: F (Maybe Checkbox) "compl37p1" "compl37p1"
, compl37p2 :: F (Maybe Checkbox) "compl37p2" "compl37p2"
, compl37p3 :: F (Maybe Checkbox) "compl37p3" "compl37p3"
, compl37p4 :: F (Maybe Checkbox) "compl37p4" "compl37p4"
, compl37p5 :: F (Maybe Checkbox) "compl37p5" "compl37p5"
, compl36p1 :: F (Maybe Checkbox) "compl36p1" "compl36p1"
, compl36p2 :: F (Maybe Checkbox) "compl36p2" "compl36p2"
, compl36p3 :: F (Maybe Checkbox) "compl36p3" "compl36p3"
, compl36p4 :: F (Maybe Checkbox) "compl36p4" "compl36p4"
, compl36p5 :: F (Maybe Checkbox) "compl36p5" "compl36p5"
, compl41p1 :: F (Maybe Checkbox) "compl41p1" "compl41p1"
, compl41p2 :: F (Maybe Checkbox) "compl41p2" "compl41p2"
, compl41p3 :: F (Maybe Checkbox) "compl41p3" "compl41p3"
, compl41p4 :: F (Maybe Checkbox) "compl41p4" "compl41p4"
, compl41p5 :: F (Maybe Checkbox) "compl41p5" "compl41p5"
, suburbanMilage :: F (Maybe Scientific) "suburbanMilage" "Пробег за городом"
, totalMilage :: F (Maybe Scientific) "totalMilage" "Километраж по тахометру"
, partnerWarnedInTime :: F (Maybe Bool) "partnerWarnedInTime" "Партнёр предупредил вовремя"
}
deriving Typeable
instance Model Tech where
type TableName Tech = "techtbl"
type Parent Tech = Service
parentInfo = ExParent modelInfo
modelInfo = mkModelInfo Tech ident
modelView v = case parentView v :: Maybe (ModelView Tech) of
Nothing -> Nothing
Just mv ->
Just $ modifyView (mv {mv_title = "Техпомощь"})
[ setMeta "filterBy" "isActive" techType
, setMeta "visibleIf" (object ["isCountryRide" .= [True]]) suburbanMilage
, setMeta "visibleIf" (object ["isCountryRide" .= [True]]) totalMilage
, setMeta "visibleIf" (object ["isCountryRide" .= [True]]) partnerWarnedInTime
, setMeta "visibleIf" (object ["techType" .= [TechType.customTech_27]]) compl27p1
, setMeta "visibleIf" (object ["techType" .= [TechType.customTech_27]]) compl27p2
, setMeta "visibleIf" (object ["techType" .= [TechType.customTech_27]]) compl27p3
, setMeta "visibleIf" (object ["techType" .= [TechType.customTech_27]]) compl27p4
, setMeta "visibleIf" (object ["techType" .= [TechType.customTech_27]]) compl27p5
, setMeta "visibleIf" (object ["techType" .= [TechType.customTech_29]]) compl29p1
, setMeta "visibleIf" (object ["techType" .= [TechType.customTech_29]]) compl29p2
, setMeta "visibleIf" (object ["techType" .= [TechType.customTech_29]]) compl29p3
, setMeta "visibleIf" (object ["techType" .= [TechType.customTech_29]]) compl29p4
, setMeta "visibleIf" (object ["techType" .= [TechType.customTech_29]]) compl29p5
, setMeta "visibleIf" (object ["techType" .= [TechType.customTech_28]]) compl28p1
, setMeta "visibleIf" (object ["techType" .= [TechType.customTech_28]]) compl28p2
, setMeta "visibleIf" (object ["techType" .= [TechType.customTech_28]]) compl28p3
, setMeta "visibleIf" (object ["techType" .= [TechType.customTech_28]]) compl28p4
, setMeta "visibleIf" (object ["techType" .= [TechType.customTech_28]]) compl28p5
, setMeta "visibleIf" (object ["techType" .= [TechType.customTech_32]]) compl32p1
, setMeta "visibleIf" (object ["techType" .= [TechType.customTech_32]]) compl32p2
, setMeta "visibleIf" (object ["techType" .= [TechType.customTech_32]]) compl32p3
, setMeta "visibleIf" (object ["techType" .= [TechType.customTech_32]]) compl32p4
, setMeta "visibleIf" (object ["techType" .= [TechType.customTech_32]]) compl32p5
, setMeta "visibleIf" (object ["techType" .= [TechType.customTech_33]]) compl33p1
, setMeta "visibleIf" (object ["techType" .= [TechType.customTech_33]]) compl33p2
, setMeta "visibleIf" (object ["techType" .= [TechType.customTech_33]]) compl33p3
, setMeta "visibleIf" (object ["techType" .= [TechType.customTech_33]]) compl33p4
, setMeta "visibleIf" (object ["techType" .= [TechType.customTech_33]]) compl33p5
, setMeta "visibleIf" (object ["techType" .= [TechType.customTech_31]]) compl31p1
, setMeta "visibleIf" (object ["techType" .= [TechType.customTech_31]]) compl31p2
, setMeta "visibleIf" (object ["techType" .= [TechType.customTech_31]]) compl31p3
, setMeta "visibleIf" (object ["techType" .= [TechType.customTech_31]]) compl31p4
, setMeta "visibleIf" (object ["techType" .= [TechType.customTech_31]]) compl31p5
, setMeta "visibleIf" (object ["techType" .= [TechType.customTech_35]]) compl35p1
, setMeta "visibleIf" (object ["techType" .= [TechType.customTech_35]]) compl35p2
, setMeta "visibleIf" (object ["techType" .= [TechType.customTech_35]]) compl35p3
, setMeta "visibleIf" (object ["techType" .= [TechType.customTech_35]]) compl35p4
, setMeta "visibleIf" (object ["techType" .= [TechType.customTech_35]]) compl35p5
, setMeta "visibleIf" (object ["techType" .= [TechType.customTech_34]]) compl34p1
, setMeta "visibleIf" (object ["techType" .= [TechType.customTech_34]]) compl34p2
, setMeta "visibleIf" (object ["techType" .= [TechType.customTech_34]]) compl34p3
, setMeta "visibleIf" (object ["techType" .= [TechType.customTech_34]]) compl34p4
, setMeta "visibleIf" (object ["techType" .= [TechType.customTech_34]]) compl34p5
, setMeta "visibleIf" (object ["techType" .= [TechType.customTech_37]]) compl37p1
, setMeta "visibleIf" (object ["techType" .= [TechType.customTech_37]]) compl37p2
, setMeta "visibleIf" (object ["techType" .= [TechType.customTech_37]]) compl37p3
, setMeta "visibleIf" (object ["techType" .= [TechType.customTech_37]]) compl37p4
, setMeta "visibleIf" (object ["techType" .= [TechType.customTech_37]]) compl37p5
, setMeta "visibleIf" (object ["techType" .= [TechType.customTech_36]]) compl36p1
, setMeta "visibleIf" (object ["techType" .= [TechType.customTech_36]]) compl36p2
, setMeta "visibleIf" (object ["techType" .= [TechType.customTech_36]]) compl36p3
, setMeta "visibleIf" (object ["techType" .= [TechType.customTech_36]]) compl36p4
, setMeta "visibleIf" (object ["techType" .= [TechType.customTech_36]]) compl36p5
, setMeta "visibleIf" (object ["techType" .= [TechType.customTech_41]]) compl41p1
, setMeta "visibleIf" (object ["techType" .= [TechType.customTech_41]]) compl41p2
, setMeta "visibleIf" (object ["techType" .= [TechType.customTech_41]]) compl41p3
, setMeta "visibleIf" (object ["techType" .= [TechType.customTech_41]]) compl41p4
, setMeta "visibleIf" (object ["techType" .= [TechType.customTech_41]]) compl41p5
, widget "partnerWarnedInTime-btn" partnerWarnedInTime
]
|
2958a99f0edca108758ab4d251c83a6336d0372c01e1614890ea0f6c50fbddac | jgrodziski/keycloak-clojure | keycloak.clj | (ns myapp.backend.keycloak
(:require [mount.core :refer [defstate]]
[clojure.java.io :as io])
(:import [org.keycloak.adapters KeycloakDeployment KeycloakDeploymentBuilder]
[org.keycloak.representations AccessToken]
[org.keycloak RSATokenVerifier]))
(defn load-keycloak-deployment
"take the keycloak configuration json file location on the classpath and return a KeycloakDeployment object"
([]
(load-keycloak-deployment "keycloak.json"))
([keycloak-json-file]
(with-open [keycloak-json-is (io/input-stream (io/resource keycloak-json-file))]
(KeycloakDeploymentBuilder/build keycloak-json-is))))
(defstate keycloak-deployment
:start (load-keycloak-deployment))
(defn verify
([token]
(verify keycloak-deployment token))
([deployment token]
(let [kid nil ;; kid value is a placeholder here - TODO put that in config file
      public-key (.getPublicKey (.getPublicKeyLocator deployment) kid deployment)]
(RSATokenVerifier/verifyToken token public-key (.getRealmInfoUrl deployment)))))
(defn extract
"return a map with :user and :roles keys with values extracted from the Keycloak access token"
[access-token]
{:user (.getPreferredUsername access-token)
:roles (set (map keyword (.getRoles (.getRealmAccess access-token))))})
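;; Usage sketch (token string and claims are hypothetical):
;; (-> (verify "eyJhbGciOiJSUzI1NiJ9....") extract)
;; ;=> {:user "jdoe" :roles #{:admin :user}}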
| null | https://raw.githubusercontent.com/jgrodziski/keycloak-clojure/b811fe4f3e5b0d5cea7b1ce0aba7825b447b7696/sample/yada-backend/src/myapp/backend/keycloak.clj | clojure | (ns myapp.backend.keycloak
(:require [mount.core :refer [defstate]]
[clojure.java.io :as io])
(:import [org.keycloak.adapters KeycloakDeployment KeycloakDeploymentBuilder]
[org.keycloak.representations AccessToken]
[org.keycloak RSATokenVerifier]))
(defn load-keycloak-deployment
"take the keycloak configuration json file location on the classpath and return a KeycloakDeployment object"
([]
(load-keycloak-deployment "keycloak.json"))
([keycloak-json-file]
(with-open [keycloak-json-is (io/input-stream (io/resource keycloak-json-file))]
(KeycloakDeploymentBuilder/build keycloak-json-is))))
(defstate keycloak-deployment
:start (load-keycloak-deployment))
(defn verify
([token]
(verify keycloak-deployment token))
([deployment token]
(let [kid nil ;; kid value is a placeholder here - TODO put that in config file
      public-key (.getPublicKey (.getPublicKeyLocator deployment) kid deployment)]
(RSATokenVerifier/verifyToken token public-key (.getRealmInfoUrl deployment)))))
(defn extract
"return a map with :user and :roles keys with values extracted from the Keycloak access token"
[access-token]
{:user (.getPreferredUsername access-token)
:roles (set (map keyword (.getRoles (.getRealmAccess access-token))))})
|
|
3a38c134338cb64f9c103314ef0e97e89f891c6a22a80197ba2633a4e3c4b01d | mankyKitty/fantastic-waddle | Bootstrap.hs | {-# LANGUAGE OverloadedStrings #-}
module Styling.Bootstrap
( BSStyle (..)
, styleClass
, contained
, bsButton
, bsButton_
, rangeInpConf
, bsRangeInput
, bsNumberInput
) where
import Control.Lens (mapped, over, _1, (%~))
import Data.Function ((&))
import Data.Char (toLower)
import Data.Semigroup ((<>))
import Data.Text (Text, pack)
import Data.Map (Map)
import Reflex (Reflex)
import qualified Reflex as R
import Reflex.Dom.Core (Event, RangeInput, MonadWidget, (=:))
import qualified Reflex.Dom.Core as RD
import Internal (tshow)
data BSStyle
= Primary
| Secondary
| Success
| Danger
| Warning
| Info
| Light
| Dark
| Link
deriving (Eq,Show)
styleClass :: BSStyle -> Text
styleClass = pack . (\(h:t) -> toLower h : t ) . show
bsButton :: MonadWidget t m => BSStyle -> m a -> m (Event t (), a)
bsButton sty child = over (mapped . _1) (RD.domEvent RD.Click)
$ RD.elAttr' "button" ("class" =: ("m-2 btn btn-" <> styleClass sty) <> "type" =: "button") child
bsButton_ :: MonadWidget t m => Text -> BSStyle -> m (Event t ())
bsButton_ l sty = fst <$> bsButton sty (RD.text l)
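-- Usage sketch (label and style chosen arbitrarily): 'bsButton_ "Save" Primary'
-- renders <button class="m-2 btn btn-primary" type="button">Save</button> and
-- returns the click Event for wiring into the rest of the widget.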
contained :: MonadWidget t m => m a -> m a
contained = RD.divClass "row" . RD.divClass "container"
rangeInpConf
:: Reflex t
=> Float
-> Text
-> RD.RangeInputConfig t
rangeInpConf i id' = RD.RangeInputConfig i R.never . pure
$ "class" =: "custom-range"
<> "id" =: id'
<> "type" =: "range"
bsRangeInput
:: MonadWidget t m
=> Text
-> Text
-> Float
-> (Map Text Text -> Map Text Text)
-> m (RangeInput t)
bsRangeInput lbl id' i f =
RD.divClass "form-group" $ do
RD.elAttr "label" ("for" =: id') $ RD.text lbl
RD.rangeInput $ rangeInpConf i id' & RD.rangeInputConfig_attributes . mapped %~ f
bsNumberInput
:: ( Show n
, Num n
, MonadWidget t m
)
=> Text
-> Text
-> n
-> m (RD.TextInput t)
bsNumberInput lbl id' i = RD.divClass "form-group" $ do
RD.elAttr "label" ("for" =: id') $ RD.text lbl
RD.textInput (RD.TextInputConfig "number" (tshow i) R.never (pure $ "id" =: id'))
| null | https://raw.githubusercontent.com/mankyKitty/fantastic-waddle/680ca473bf7141c63528195ae23cb799b2fb0eac/frontend/src/Styling/Bootstrap.hs | haskell | # LANGUAGE OverloadedStrings # | module Styling.Bootstrap
( BSStyle (..)
, styleClass
, contained
, bsButton
, bsButton_
, rangeInpConf
, bsRangeInput
, bsNumberInput
) where
import Control.Lens (mapped, over, _1, (%~))
import Data.Function ((&))
import Data.Char (toLower)
import Data.Semigroup ((<>))
import Data.Text (Text, pack)
import Data.Map (Map)
import Reflex (Reflex)
import qualified Reflex as R
import Reflex.Dom.Core (Event, RangeInput, MonadWidget, (=:))
import qualified Reflex.Dom.Core as RD
import Internal (tshow)
data BSStyle
= Primary
| Secondary
| Success
| Danger
| Warning
| Info
| Light
| Dark
| Link
deriving (Eq,Show)
styleClass :: BSStyle -> Text
styleClass = pack . (\(h:t) -> toLower h : t ) . show
bsButton :: MonadWidget t m => BSStyle -> m a -> m (Event t (), a)
bsButton sty child = over (mapped . _1) (RD.domEvent RD.Click)
$ RD.elAttr' "button" ("class" =: ("m-2 btn btn-" <> styleClass sty) <> "type" =: "button") child
bsButton_ :: MonadWidget t m => Text -> BSStyle -> m (Event t ())
bsButton_ l sty = fst <$> bsButton sty (RD.text l)
contained :: MonadWidget t m => m a -> m a
contained = RD.divClass "row" . RD.divClass "container"
rangeInpConf
:: Reflex t
=> Float
-> Text
-> RD.RangeInputConfig t
rangeInpConf i id' = RD.RangeInputConfig i R.never . pure
$ "class" =: "custom-range"
<> "id" =: id'
<> "type" =: "range"
bsRangeInput
:: MonadWidget t m
=> Text
-> Text
-> Float
-> (Map Text Text -> Map Text Text)
-> m (RangeInput t)
bsRangeInput lbl id' i f =
RD.divClass "form-group" $ do
RD.elAttr "label" ("for" =: id') $ RD.text lbl
RD.rangeInput $ rangeInpConf i id' & RD.rangeInputConfig_attributes . mapped %~ f
bsNumberInput
:: ( Show n
, Num n
, MonadWidget t m
)
=> Text
-> Text
-> n
-> m (RD.TextInput t)
bsNumberInput lbl id' i = RD.divClass "form-group" $ do
RD.elAttr "label" ("for" =: id') $ RD.text lbl
RD.textInput (RD.TextInputConfig "number" (tshow i) R.never (pure $ "id" =: id'))
|
8ef00e287b34fbbc18251a877a5933d917c866a0860cf8be3e449bfe8dc0e285 | vincenthz/hs-pem | PEM.hs | -- |
-- Module : Data.PEM
-- License : BSD-style
-- Maintainer : < >
-- Stability : experimental
-- Portability : portable
--
-- Read and write PEM files
--
module Data.PEM
( module Data.PEM.Types
, module Data.PEM.Writer
, module Data.PEM.Parser
) where
import Data.PEM.Types
import Data.PEM.Writer
import Data.PEM.Parser
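-- Typical round trip with the re-exported modules (function names as provided
-- by Data.PEM.Parser/Writer): 'pemParseBS' parses a ByteString into a list of
-- PEM objects, and 'pemWriteBS' renders a PEM back out.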
| null | https://raw.githubusercontent.com/vincenthz/hs-pem/f83c850b29850d9c6e4307984cd314f7fa8bd2d7/Data/PEM.hs | haskell | |
Module : Data.PEM
License : BSD-style
Stability : experimental
Portability : portable
Read and write PEM files
| Maintainer : < >
module Data.PEM
( module Data.PEM.Types
, module Data.PEM.Writer
, module Data.PEM.Parser
) where
import Data.PEM.Types
import Data.PEM.Writer
import Data.PEM.Parser
|
5dc46eeee9997e495e82ff5eab29d869863353300bd5de8c83d5cbd6e95dd2c4 | racehub/om-bootstrap | bar_basic.cljs | #_
(:require [om-bootstrap.button :as b]
[om-bootstrap.nav :as n]
[om-tools.dom :as d :include-macros true])
(n/navbar
{:brand (d/a {:href "#"}
"Navbar")}
(n/nav
{:collapsible? true}
(n/nav-item {:key 1 :href "#"} "Link")
(n/nav-item {:key 2 :href "#"} "Link")
(b/dropdown {:key 3, :title "Dropdown"}
(b/menu-item {:key 1} "Action")
(b/menu-item {:key 2} "Another action")
(b/menu-item {:key 3} "Something else here")
(b/menu-item {:divider? true})
(b/menu-item {:key 4} "Separated link"))
:right
(n/nav-item {:key 1 :href "#"} "Right")))
| null | https://raw.githubusercontent.com/racehub/om-bootstrap/18fb7f67c306d208bcb012a1b765ac1641d7a00b/dev/snippets/nav/bar_basic.cljs | clojure | #_
(:require [om-bootstrap.button :as b]
[om-bootstrap.nav :as n]
[om-tools.dom :as d :include-macros true])
(n/navbar
{:brand (d/a {:href "#"}
"Navbar")}
(n/nav
{:collapsible? true}
(n/nav-item {:key 1 :href "#"} "Link")
(n/nav-item {:key 2 :href "#"} "Link")
(b/dropdown {:key 3, :title "Dropdown"}
(b/menu-item {:key 1} "Action")
(b/menu-item {:key 2} "Another action")
(b/menu-item {:key 3} "Something else here")
(b/menu-item {:divider? true})
(b/menu-item {:key 4} "Separated link"))
:right
(n/nav-item {:key 1 :href "#"} "Right")))
|
|
0e681f31866ff1074e1e86d7ae16ba18b4a9e6390274044f972744380898867c | SahilKang/cl-rdkafka | consumer.lisp | Copyright ( C ) 2018 - 2020 < >
;;; Copyright 2022 Google LLC
;;;
;;; This file is part of cl-rdkafka.
;;;
;;; cl-rdkafka is free software: you can redistribute it and/or modify
;;; it under the terms of the GNU General Public License as published by
;;; the Free Software Foundation, either version 3 of the License, or
;;; (at your option) any later version.
;;;
;;; cl-rdkafka is distributed in the hope that it will be useful,
;;; but WITHOUT ANY WARRANTY; without even the implied warranty of
;;; MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
;;; GNU General Public License for more details.
;;;
;;; You should have received a copy of the GNU General Public License
;;; along with cl-rdkafka. If not, see </>.
(in-package #:cl-rdkafka)
(defclass consumer ()
((rd-kafka-consumer
:documentation "Pointer to rd_kafka_t struct.")
(rd-kafka-queue
:documentation "Pointer to rd_kafka_queue_t struct.")
(key-serde
:type deserializer
:documentation "DESERIALIZER to map byte vector to object.")
(value-serde
:type deserializer
:documentation "DESERIALIZER to map byte vector to object."))
(:documentation
"A client that consumes messages from kafka topics.
MAKE-INSTANCE accepts the following keyword args:
* CONF: A required plist, alist, or hash-table mapping config keys
to their respective values; both keys and values should be
strings. The provided key-value pairs are passed as-is to
librdkafka, so consult the librdkafka config docs for more
info.
* SERDE: An optional unary function accepting a byte vector and
returning a deserialized value; defaults to #'identity.
* KEY-SERDE: An optional unary function used to deserialize message
keys; defaults to SERDE.
* VALUE-SERDE: An optional unary function used to deserialize message
values; defaults to SERDE.
Example:
(let ((consumer (make-instance
'kf:consumer
:conf '(\"bootstrap.servers\" \"127.0.0.1:9092\"
\"group.id\" \"consumer-group-id\"
\"enable.auto.commit\" \"false\"
\"auto.offset.reset\" \"earliest\")
:serde #'babel:octets-to-string)))
(kf:subscribe consumer '(\"topic-name\"))
(loop
for message = (kf:poll consumer 2000)
while message
for key = (kf:key message)
for value = (kf:value message)
collect (list key value)
do (kf:commit consumer)))"))
(defgeneric subscribe (consumer topics))
(defgeneric unsubscribe (consumer))
(defgeneric subscription (consumer))
(defgeneric poll (consumer timeout-ms))
(defgeneric seek (consumer topic partition offset timeout-ms))
(defgeneric seek-to-beginning (consumer topic partition timeout-ms))
(defgeneric seek-to-end (consumer topic partition timeout-ms))
(defgeneric commit (consumer &key offsets asyncp))
(defgeneric committed (consumer partitions timeout-ms))
(defgeneric assignment (consumer &key offsetsp))
(defgeneric assign (consumer partitions))
(defgeneric member-id (consumer))
(defgeneric pause (consumer partitions))
(defgeneric resume (consumer partitions))
(defgeneric watermarks (consumer topic partition timeout-ms))
(defgeneric offsets-for-times (consumer timestamps timeout-ms))
(defgeneric positions (consumer partitions))
(defgeneric close (consumer))
(defun get-good-commits-and-assert-no-bad-commits (rd-kafka-event)
(let (goodies baddies)
(foreach-toppar
(cl-rdkafka/ll:rd-kafka-event-topic-partition-list rd-kafka-event)
(topic partition offset metadata metadata-size err)
(let ((toppar (cons topic partition)))
(if (eq err 'cl-rdkafka/ll:rd-kafka-resp-err-no-error)
(let* ((meta (pointer->bytes metadata metadata-size))
(offset+meta (cons offset meta)))
(push (cons toppar offset+meta) goodies))
(let ((rdkafka-error (make-rdkafka-error err)))
(push (cons toppar rdkafka-error) baddies)))))
(when baddies
(error 'partial-error
:description "Commit failed"
:baddies (nreverse baddies)
:goodies (nreverse goodies)))
(nreverse goodies)))
(defun process-commit-event (rd-kafka-event queue)
(assert-expected-event rd-kafka-event cl-rdkafka/ll:rd-kafka-event-offset-commit)
(let ((err (cl-rdkafka/ll:rd-kafka-event-error rd-kafka-event))
(promise (lparallel.queue:pop-queue queue)))
(handler-case
(cond
((eq err 'cl-rdkafka/ll:rd-kafka-resp-err--no-offset)
(lparallel:fulfill promise))
((eq err 'cl-rdkafka/ll:rd-kafka-resp-err-no-error)
(lparallel:fulfill promise
(get-good-commits-and-assert-no-bad-commits rd-kafka-event)))
(t (error (make-rdkafka-error err))))
(condition (c)
(lparallel:fulfill promise c)))))
(defun make-consumer-finalizer (rd-kafka-consumer rd-kafka-queue)
(lambda ()
(deregister-rd-kafka-queue rd-kafka-queue)
(cl-rdkafka/ll:rd-kafka-queue-destroy rd-kafka-queue)
(cl-rdkafka/ll:rd-kafka-destroy rd-kafka-consumer)))
(defmethod initialize-instance :after
((consumer consumer)
&key conf (serde #'identity) (key-serde serde) (value-serde serde))
(with-slots (rd-kafka-consumer
rd-kafka-queue
(ks key-serde)
(vs value-serde))
consumer
(with-conf rd-kafka-conf conf
(cl-rdkafka/ll:rd-kafka-conf-set-events
rd-kafka-conf
cl-rdkafka/ll:rd-kafka-event-offset-commit)
(cffi:with-foreign-object (errstr :char +errstr-len+)
(setf rd-kafka-consumer (cl-rdkafka/ll:rd-kafka-new
cl-rdkafka/ll:rd-kafka-consumer
rd-kafka-conf
errstr
+errstr-len+))
(when (cffi:null-pointer-p rd-kafka-consumer)
(error 'allocation-error
:name "consumer"
:description (cffi:foreign-string-to-lisp
errstr :max-chars +errstr-len+)))))
(setf rd-kafka-queue (cl-rdkafka/ll:rd-kafka-queue-new rd-kafka-consumer))
(when (cffi:null-pointer-p rd-kafka-queue)
(cl-rdkafka/ll:rd-kafka-destroy rd-kafka-consumer)
(error 'allocation-error :name "queue"))
(handler-case
(register-rd-kafka-queue rd-kafka-queue #'process-commit-event)
(condition (c)
(cl-rdkafka/ll:rd-kafka-queue-destroy rd-kafka-queue)
(cl-rdkafka/ll:rd-kafka-destroy rd-kafka-consumer)
(error c)))
(setf ks (make-instance 'deserializer
:name "key-serde"
:function key-serde)
vs (make-instance 'deserializer
:name "value-serde"
:function value-serde))
(tg:finalize consumer (make-consumer-finalizer rd-kafka-consumer rd-kafka-queue))))
(defmethod subscribe ((consumer consumer) (topics sequence))
"Subscribe CONSUMER to TOPICS.
Any topic prefixed with '^' will be regex-matched with the cluster's
topics."
(with-slots (rd-kafka-consumer) consumer
(with-toppar-list toppar-list (alloc-toppar-list topics)
(let ((err (cl-rdkafka/ll:rd-kafka-subscribe rd-kafka-consumer
toppar-list)))
(unless (eq err 'cl-rdkafka/ll:rd-kafka-resp-err-no-error)
(error (make-rdkafka-error err)))))))
(defmethod subscribe ((consumer consumer) (topic string))
"Subscribe CONSUMER to TOPIC.
If TOPIC starts with '^', then it will be regex-matched with the
cluster's topics."
(subscribe consumer (list topic)))
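;; Usage sketch (topic names are hypothetical):
;; (subscribe consumer '("orders" "^metrics\\..*"))
;; subscribes to "orders" and to every cluster topic matching the regex,
;; following the '^' convention described in the docstrings above.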
(defmethod unsubscribe ((consumer consumer))
"Unsubscribe CONSUMER from its current topic subscription."
(with-slots (rd-kafka-consumer) consumer
(let ((err (cl-rdkafka/ll:rd-kafka-unsubscribe rd-kafka-consumer)))
(unless (eq err 'cl-rdkafka/ll:rd-kafka-resp-err-no-error)
(error (make-rdkafka-error err))))))
(defun %subscription (rd-kafka-consumer)
(cffi:with-foreign-object (rd-list :pointer)
(let ((err (cl-rdkafka/ll:rd-kafka-subscription
rd-kafka-consumer
rd-list)))
(unless (eq err 'cl-rdkafka/ll:rd-kafka-resp-err-no-error)
(error (make-rdkafka-error err)))
(cffi:mem-ref rd-list :pointer))))
(defmethod subscription ((consumer consumer))
"Return a list of topic names that CONSUMER is subscribed to."
(with-slots (rd-kafka-consumer) consumer
(with-toppar-list toppar-list (%subscription rd-kafka-consumer)
(let (topics)
(foreach-toppar toppar-list (topic)
(push topic topics))
(nreverse topics)))))
(defmethod poll ((consumer consumer) (timeout-ms integer))
"Block for up to TIMEOUT-MS milliseconds and return a MESSAGE or nil.
May signal a PARTITION-ERROR or a condition from CONSUMER's serde. A
STORE-FUNCTION restart will be provided if it's a serde condition."
(with-slots (rd-kafka-consumer key-serde value-serde) consumer
(let ((rd-kafka-message (cl-rdkafka/ll:rd-kafka-consumer-poll
rd-kafka-consumer
timeout-ms)))
(unwind-protect
(unless (cffi:null-pointer-p rd-kafka-message)
(rd-kafka-message->message rd-kafka-message
(lambda (bytes)
(apply-serde key-serde bytes))
(lambda (bytes)
(apply-serde value-serde bytes))))
(unless (cffi:null-pointer-p rd-kafka-message)
(cl-rdkafka/ll:rd-kafka-message-destroy rd-kafka-message))))))
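;;; Editorial sketch of a poll loop, assuming the KEY and VALUE readers on
;;; the returned MESSAGE object; the 5000 ms timeout and *CONSUMER* are
;;; placeholders.
;; (loop
;;   for message = (poll *consumer* 5000)
;;   while message
;;   collect (cons (key message) (value message)))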
(defun %seek (consumer topic partition offset timeout-ms)
(with-slots (rd-kafka-consumer) consumer
(let ((rkt (cl-rdkafka/ll:rd-kafka-topic-new
rd-kafka-consumer
topic
(cffi:null-pointer))))
(when (cffi:null-pointer-p rkt)
(error (make-rdkafka-error (cl-rdkafka/ll:rd-kafka-last-error))))
(unwind-protect
(let ((err (cl-rdkafka/ll:rd-kafka-seek
rkt
partition
offset
timeout-ms)))
(unless (eq err 'cl-rdkafka/ll:rd-kafka-resp-err-no-error)
(error (make-rdkafka-error err))))
(cl-rdkafka/ll:rd-kafka-topic-destroy rkt)))))
(defmethod seek
((consumer consumer)
(topic string)
(partition integer)
(offset integer)
(timeout-ms integer))
"Block for up to TIMEOUT-MS milliseconds and seek CONSUMER to OFFSET."
(%seek consumer topic partition offset timeout-ms))
(defmethod seek-to-beginning
((consumer consumer)
(topic string)
(partition integer)
(timeout-ms integer))
"Block for up to TIMEOUT-MS milliseconds and seek CONSUMER to beginning of PARTITION."
(%seek consumer topic partition cl-rdkafka/ll:rd-kafka-offset-beginning timeout-ms))
(defmethod seek-to-end
((consumer consumer)
(topic string)
(partition integer)
(timeout-ms integer))
"Block for up to TIMEOUT-MS milliseconds and seek CONSUMER to end of PARTITION."
(%seek consumer topic partition cl-rdkafka/ll:rd-kafka-offset-end timeout-ms))
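;;; Illustrative seek calls; the topic, partition, offset, and timeout
;;; values are placeholders.
;; (seek *consumer* "example-topic-a" 0 42 5000)
;; (seek-to-beginning *consumer* "example-topic-a" 0 5000)
;; (seek-to-end *consumer* "example-topic-a" 0 5000)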
(defun %commit (rd-kafka-consumer toppar-list rd-kafka-queue)
(bt:with-lock-held (+address->queue-lock+)
(let ((err (cl-rdkafka/ll:rd-kafka-commit-queue
rd-kafka-consumer
toppar-list
rd-kafka-queue
(cffi:null-pointer)
(cffi:null-pointer))))
(unless (eq err 'cl-rdkafka/ll:rd-kafka-resp-err-no-error)
(error (make-rdkafka-error err)))
(let ((promise (lparallel:promise)))
(enqueue-payload rd-kafka-queue promise)
promise))))
(defmethod commit ((consumer consumer) &key offsets asyncp)
"Commit OFFSETS to broker.
If OFFSETS is nil, then the current assignment is committed;
otherwise, OFFSETS should be an alist mapping (topic . partition) cons
cells to either (offset . metadata) cons cells or lone offset values.
On success, an alist of committed offsets is returned, mapping
(topic . partition) to (offset . metadata).
On failure, either an RDKAFKA-ERROR or PARTIAL-ERROR is signalled.
The PARTIAL-ERROR will have the slots:
* GOODIES: Same format as successful return value
* BADDIES: An alist mapping (topic . partition) to RDKAFKA-ERROR
If ASYNCP is true, then a FUTURE will be returned instead."
(with-slots (rd-kafka-consumer rd-kafka-queue) consumer
(with-toppar-list
toppar-list
(if (null offsets)
(cffi:null-pointer)
(alloc-toppar-list-from-alist offsets))
(let* ((promise (%commit rd-kafka-consumer toppar-list rd-kafka-queue))
(future (make-instance 'future :promise promise :client consumer)))
(if asyncp
future
(value future))))))
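;;; Editorial sketch: committing the current assignment synchronously, then
;;; committing an explicit offset asynchronously. The alist shape follows
;;; the docstring above; topic, partition, and offset are placeholders.
;; (commit *consumer*)
;; (commit *consumer*
;;         :offsets '((("example-topic-a" . 0) . 100))
;;         :asyncp t)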
(defun %assignment (rd-kafka-consumer)
(cffi:with-foreign-object (rd-list :pointer)
(let ((err (cl-rdkafka/ll:rd-kafka-assignment rd-kafka-consumer rd-list)))
(unless (eq err 'cl-rdkafka/ll:rd-kafka-resp-err-no-error)
(error (make-rdkafka-error err)))
(cffi:mem-ref rd-list :pointer))))
(defmethod assignment ((consumer consumer) &key offsetsp)
"Return a list of partitions assigned to CONSUMER.
The elements of the returned list will be either:
* (topic . partition) cons cells if OFFSETSP is nil
* ((topic . partition) . offset) cons cells otherwise"
(with-slots (rd-kafka-consumer) consumer
(with-toppar-list toppar-list (%assignment rd-kafka-consumer)
(let (partitions)
(if offsetsp
(foreach-toppar toppar-list (topic partition offset)
(push (cons (cons topic partition) offset) partitions))
(foreach-toppar toppar-list (topic partition)
(push (cons topic partition) partitions)))
(nreverse partitions)))))
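;;; Illustrative only, using the hypothetical *CONSUMER* from earlier:
;; (assignment *consumer*)             ; list of (topic . partition) cells
;; (assignment *consumer* :offsetsp t) ; ((topic . partition) . offset) cells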
(defmethod committed
((consumer consumer) (partitions sequence) (timeout-ms integer))
"Block for up to TIMEOUT-MS milliseconds and return committed offsets for PARTITIONS.
PARTITIONS should be a sequence of (topic . partition) cons cells.
On success, an alist of committed offsets is returned, mapping
(topic . partition) to (offset . metadata).
On failure, either an RDKAFKA-ERROR or PARTIAL-ERROR is signalled.
The PARTIAL-ERROR will have the slots:
* GOODIES: Same format as successful return value
* BADDIES: An alist mapping (topic . partition) to RDKAFKA-ERROR"
(with-slots (rd-kafka-consumer) consumer
(with-toppar-list
toppar-list
(alloc-toppar-list partitions :topic #'car :partition #'cdr)
(let ((err (cl-rdkafka/ll:rd-kafka-committed
rd-kafka-consumer
toppar-list
timeout-ms))
goodies
baddies)
(unless (eq err 'cl-rdkafka/ll:rd-kafka-resp-err-no-error)
(error (make-rdkafka-error err)))
(foreach-toppar
toppar-list
(topic partition offset metadata metadata-size err)
(let ((toppar (cons topic partition)))
(if (eq err 'cl-rdkafka/ll:rd-kafka-resp-err-no-error)
(let* ((meta (pointer->bytes metadata metadata-size))
(offset+meta (cons offset meta)))
(push (cons toppar offset+meta) goodies))
(let ((rdkafka-error (make-rdkafka-error err)))
(push (cons toppar rdkafka-error) baddies)))))
(when baddies
(error 'partial-error
:description "Committed failed"
:baddies (nreverse baddies)
:goodies (nreverse goodies)))
(nreverse goodies)))))
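;;; Editorial sketch: fetching committed offsets for two placeholder
;;; partitions with a 5000 ms timeout; on success the result maps each
;;; (topic . partition) to (offset . metadata) as the docstring describes.
;; (committed *consumer* '(("example-topic-a" . 0) ("example-topic-a" . 1)) 5000)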
(defmethod assign ((consumer consumer) (partitions sequence))
"Assign PARTITIONS to CONSUMER.
PARTITIONS should be a sequence of either:
* (topic . partition) cons cells
* ((topic . partition) . offset) cons cells"
(with-slots (rd-kafka-consumer) consumer
(with-toppar-list
toppar-list
(alloc-toppar-list
partitions
:topic (lambda (cons)
(let ((car (car cons)))
(if (consp car)
(car car)
car)))
:partition (lambda (cons)
(let ((car (car cons)))
(if (consp car)
(cdr car)
(cdr cons))))
:offset (lambda (cons)
(let ((car (car cons)))
(if (consp car)
(cdr cons)
cl-rdkafka/ll:rd-kafka-offset-invalid))))
(let ((err (cl-rdkafka/ll:rd-kafka-assign rd-kafka-consumer toppar-list)))
(unless (eq err 'cl-rdkafka/ll:rd-kafka-resp-err-no-error)
(error (make-rdkafka-error err)))))))
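;;; Illustrative assign calls covering both accepted element shapes; the
;;; explicit start offset of 123 is a placeholder.
;; (assign *consumer* '(("example-topic-a" . 0) ("example-topic-a" . 1)))
;; (assign *consumer* '((("example-topic-a" . 0) . 123)))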
(defmethod member-id ((consumer consumer))
"Return CONSUMER's broker-assigned group member-id."
(with-slots (rd-kafka-consumer) consumer
(cl-rdkafka/ll:rd-kafka-memberid rd-kafka-consumer)))
(defmethod pause ((consumer consumer) (partitions sequence))
"Pause consumption from PARTITIONS.
PARTITIONS should be a sequence of (topic . partition) cons cells.
PARTITIONS is returned on success.
On failure, either an RDKAFKA-ERROR or PARTIAL-ERROR is signalled.
The PARTIAL-ERROR will have the slots:
* GOODIES: A list of (topic . partition) cons cells
* BADDIES: An alist mapping (topic . partition) to RDKAFKA-ERROR"
(with-slots (rd-kafka-consumer) consumer
(with-toppar-list
toppar-list
(alloc-toppar-list partitions :topic #'car :partition #'cdr)
(let ((err (cl-rdkafka/ll:rd-kafka-pause-partitions
rd-kafka-consumer
toppar-list))
goodies
baddies)
(unless (eq err 'cl-rdkafka/ll:rd-kafka-resp-err-no-error)
(error (make-rdkafka-error err)))
(foreach-toppar toppar-list (err topic partition)
(let ((toppar (cons topic partition)))
(if (eq err 'cl-rdkafka/ll:rd-kafka-resp-err-no-error)
(push toppar goodies)
(let ((rdkafka-error (make-rdkafka-error err)))
(push (cons toppar rdkafka-error) baddies)))))
(when baddies
(error 'partial-error
:description "Pause failed"
:baddies (nreverse baddies)
:goodies (nreverse goodies)))
partitions))))
(defmethod resume ((consumer consumer) (partitions sequence))
"Resume consumption from PARTITIONS.
PARTITIONS should be a sequence of (topic . partition) cons cells.
PARTITIONS is returned on success.
On failure, either an RDKAFKA-ERROR or PARTIAL-ERROR is signalled.
The PARTIAL-ERROR will have the slots:
* GOODIES: A list of (topic . partition) cons cells
* BADDIES: An alist mapping (topic . partition) to RDKAFKA-ERROR"
(with-slots (rd-kafka-consumer) consumer
(with-toppar-list
toppar-list
(alloc-toppar-list partitions :topic #'car :partition #'cdr)
(let ((err (cl-rdkafka/ll:rd-kafka-resume-partitions
rd-kafka-consumer
toppar-list))
goodies
baddies)
(unless (eq err 'cl-rdkafka/ll:rd-kafka-resp-err-no-error)
(error (make-rdkafka-error err)))
(foreach-toppar toppar-list (err topic partition)
(let ((toppar (cons topic partition)))
(if (eq err 'cl-rdkafka/ll:rd-kafka-resp-err-no-error)
(push toppar goodies)
(let ((rdkafka-error (make-rdkafka-error err)))
(push (cons toppar rdkafka-error) baddies)))))
(when baddies
(error 'partial-error
:description "Resume failed"
:baddies (nreverse baddies)
:goodies (nreverse goodies)))
partitions))))
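;;; Illustrative pause/resume round trip on a placeholder partition:
;; (pause *consumer* '(("example-topic-a" . 0)))
;; (resume *consumer* '(("example-topic-a" . 0)))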
(defmethod watermarks
((consumer consumer)
(topic string)
(partition integer)
(timeout-ms integer))
"Query broker for low (oldest/beginning) and high (newest/end) offsets.
A (low . high) cons cell is returned."
(cffi:with-foreign-objects ((low :int64) (high :int64))
(with-slots (rd-kafka-consumer) consumer
(let ((err (cl-rdkafka/ll:rd-kafka-query-watermark-offsets
rd-kafka-consumer
topic
partition
low
high
timeout-ms)))
(unless (eq err 'cl-rdkafka/ll:rd-kafka-resp-err-no-error)
(error (make-partition-error err topic partition)))
(cons (cffi:mem-ref low :int64)
(cffi:mem-ref high :int64))))))
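;;; Editorial sketch: querying watermark offsets for a placeholder
;;; partition; the return value is a (low . high) cons cell.
;; (watermarks *consumer* "example-topic-a" 0 5000)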
(defmethod offsets-for-times
((consumer consumer)
(timestamps list)
(timeout-ms integer))
"Look up the offsets for the given partitions by timestamp.
The returned offset for each partition is the earliest offset whose
timestamp is greater than or equal to the given timestamp in
TIMESTAMPS.
TIMESTAMPS should be an alist mapping (topic . partition) cons cells
to timestamp values.
On success, an alist of offsets is returned, mapping
(topic . partition) cons cells to offset values.
On failure, either an RDKAFKA-ERROR or PARTIAL-ERROR is signalled.
The PARTIAL-ERROR will have the slots:
* GOODIES: Same format as successful return value
* BADDIES: An alist mapping (topic . partition) to RDKAFKA-ERROR"
(with-slots (rd-kafka-consumer) consumer
(with-toppar-list
toppar-list
(alloc-toppar-list timestamps :topic #'caar :partition #'cdar :offset #'cdr)
(let ((err (cl-rdkafka/ll:rd-kafka-offsets-for-times
rd-kafka-consumer
toppar-list
timeout-ms))
goodies
baddies)
(unless (eq err 'cl-rdkafka/ll:rd-kafka-resp-err-no-error)
(error (make-rdkafka-error err)))
(foreach-toppar toppar-list (topic partition offset err)
(let ((toppar (cons topic partition)))
(if (eq err 'cl-rdkafka/ll:rd-kafka-resp-err-no-error)
(push (cons toppar offset) goodies)
(let ((rdkafka-error (make-rdkafka-error err)))
(push (cons toppar rdkafka-error) baddies)))))
(when baddies
(error 'partial-error
:description "Offsets for times error"
:baddies (nreverse baddies)
:goodies (nreverse goodies)))
(nreverse goodies)))))
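;;; Illustrative lookup of the earliest offsets at or after a timestamp.
;;; The timestamp value below is a placeholder; librdkafka interprets it
;;; as milliseconds since the Unix epoch.
;; (offsets-for-times *consumer* '((("example-topic-a" . 0) . 1585000000000)) 5000)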
(defmethod positions ((consumer consumer) (partitions sequence))
"Retrieve current positions (offsets) for PARTITIONS.
PARTITIONS should be a sequence of (topic . partition) cons cells.
On success, an alist of positions is returned, mapping
(topic . partition) to one of either:
* 1 plus the last consumed message offset
* nil if there was no previous message.
On failure, either an RDKAFKA-ERROR or PARTIAL-ERROR is signalled.
The PARTIAL-ERROR will have the slots:
* GOODIES: Same format as successful return value
* BADDIES: An alist mapping (topic . partition) to RDKAFKA-ERROR"
(with-slots (rd-kafka-consumer) consumer
(with-toppar-list
toppar-list
(alloc-toppar-list partitions :topic #'car :partition #'cdr)
(let ((err (cl-rdkafka/ll:rd-kafka-position
rd-kafka-consumer
toppar-list))
goodies
baddies)
(unless (eq err 'cl-rdkafka/ll:rd-kafka-resp-err-no-error)
(error (make-rdkafka-error err)))
(foreach-toppar toppar-list (topic partition offset err)
(let ((toppar (cons topic partition)))
(if (eq err 'cl-rdkafka/ll:rd-kafka-resp-err-no-error)
(let ((position (unless (= offset cl-rdkafka/ll:rd-kafka-offset-invalid)
offset)))
(push (cons toppar position) goodies))
(let ((rdkafka-error (make-rdkafka-error err)))
(push (cons toppar rdkafka-error) baddies)))))
(when baddies
(error 'partial-error
:description "Positions error"
:baddies (nreverse baddies)
:goodies (nreverse goodies)))
(nreverse goodies)))))
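;;; Illustrative only: if offset 100 of the placeholder partition was the
;;; last message consumed, the returned alist would contain offset 101.
;; (positions *consumer* '(("example-topic-a" . 0)))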
(defmethod close ((consumer consumer))
"Close CONSUMER after revoking assignment, committing offsets, and leaving group.
CONSUMER will be closed during garbage collection if it's still open;
this method is provided for cases where closing needs to occur at a well-defined
time."
(with-slots (rd-kafka-consumer) consumer
(let ((err (cl-rdkafka/ll:rd-kafka-consumer-close rd-kafka-consumer)))
(unless (eq err 'cl-rdkafka/ll:rd-kafka-resp-err-no-error)
(error (make-rdkafka-error err))))))
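;;; Editorial sketch of deterministic shutdown; PROCESS-MESSAGES stands in
;;; for hypothetical application code.
;; (unwind-protect
;;      (process-messages *consumer*)
;;   (close *consumer*))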
| null | https://raw.githubusercontent.com/SahilKang/cl-rdkafka/267e18399351ec5b6a282c9dbd9b194145ff454b/src/high-level/consumer.lisp | lisp |
This file is part of cl-rdkafka.
cl-rdkafka is free software: you can redistribute it and/or modify
(at your option) any later version.
cl-rdkafka is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
along with cl-rdkafka. If not, see </>.
both keys and values should be
defaults to #'identity.
| Copyright ( C ) 2018 - 2020 < >
Copyright 2022 Google LLC
it under the terms of the GNU General Public License as published by
the Free Software Foundation , either version 3 of the License , or
You should have received a copy of the GNU General Public License
(in-package #:cl-rdkafka)
(defclass consumer ()
((rd-kafka-consumer
:documentation "Pointer to rd_kafka_t struct.")
(rd-kafka-queue
:documentation "Pointer to rd_kafka_queue_t struct.")
(key-serde
:type deserializer
:documentation "DESERIALIZER to map byte vector to object.")
(value-serde
:type deserializer
:documentation "DESERIALIZER to map byte vector to object."))
(:documentation
"A client that consumes messages from kafka topics.
MAKE-INSTANCE accepts the following keyword args:
* CONF: A required plist, alist, or hash-table mapping config keys
strings. The provided key-value pairs are passed as-is to
librdkafka, so consult the librdkafka config docs for more
info.
* SERDE: An optional unary function accepting a byte vector and
* KEY-SERDE: An optional unary function used to deserialize message
defaults to SERDE .
* VALUE-SERDE: An optional unary function used to deserialize
defaults to SERDE .
Example:
(let ((consumer (make-instance
'kf:consumer
:conf '(\"bootstrap.servers\" \"127.0.0.1:9092\"
\"group.id\" \"consumer-group-id\"
\"enable.auto.commit\" \"false\"
\"auto.offset.reset\" \"earliest\")
:serde #'babel:octets-to-string)))
(kf:subscribe consumer '(\"topic-name\"))
(loop
for message = (kf:poll consumer 2000)
while message
for key = (kf:key message)
for value = (kf:value message)
collect (list key value)
do (kf:commit consumer)))"))
(defgeneric subscribe (consumer topics))
(defgeneric unsubscribe (consumer))
(defgeneric subscription (consumer))
(defgeneric poll (consumer timeout-ms))
(defgeneric seek (consumer topic partition offset timeout-ms))
(defgeneric seek-to-beginning (consumer topic partition timeout-ms))
(defgeneric seek-to-end (consumer topic partition timeout-ms))
(defgeneric commit (consumer &key offsets asyncp))
(defgeneric committed (consumer partitions timeout-ms))
(defgeneric assignment (consumer &key offsetsp))
(defgeneric assign (consumer partitions))
(defgeneric member-id (consumer))
(defgeneric pause (consumer partitions))
(defgeneric resume (consumer partitions))
(defgeneric watermarks (consumer topic partition timeout-ms))
(defgeneric offsets-for-times (consumer timestamps timeout-ms))
(defgeneric positions (consumer partitions))
(defgeneric close (consumer))
(defun get-good-commits-and-assert-no-bad-commits (rd-kafka-event)
(let (goodies baddies)
(foreach-toppar
(cl-rdkafka/ll:rd-kafka-event-topic-partition-list rd-kafka-event)
(topic partition offset metadata metadata-size err)
(let ((toppar (cons topic partition)))
(if (eq err 'cl-rdkafka/ll:rd-kafka-resp-err-no-error)
(let* ((meta (pointer->bytes metadata metadata-size))
(offset+meta (cons offset meta)))
(push (cons toppar offset+meta) goodies))
(let ((rdkafka-error (make-rdkafka-error err)))
(push (cons toppar rdkafka-error) baddies)))))
(when baddies
(error 'partial-error
:description "Commit failed"
:baddies (nreverse baddies)
:goodies (nreverse goodies)))
(nreverse goodies)))
(defun process-commit-event (rd-kafka-event queue)
(assert-expected-event rd-kafka-event cl-rdkafka/ll:rd-kafka-event-offset-commit)
(let ((err (cl-rdkafka/ll:rd-kafka-event-error rd-kafka-event))
(promise (lparallel.queue:pop-queue queue)))
(handler-case
(cond
((eq err 'cl-rdkafka/ll:rd-kafka-resp-err--no-offset)
(lparallel:fulfill promise))
((eq err 'cl-rdkafka/ll:rd-kafka-resp-err-no-error)
(lparallel:fulfill promise
(get-good-commits-and-assert-no-bad-commits rd-kafka-event)))
(t (error (make-rdkafka-error err))))
(condition (c)
(lparallel:fulfill promise c)))))
(defun make-consumer-finalizer (rd-kafka-consumer rd-kafka-queue)
(lambda ()
(deregister-rd-kafka-queue rd-kafka-queue)
(cl-rdkafka/ll:rd-kafka-queue-destroy rd-kafka-queue)
(cl-rdkafka/ll:rd-kafka-destroy rd-kafka-consumer)))
(defmethod initialize-instance :after
((consumer consumer)
&key conf (serde #'identity) (key-serde serde) (value-serde serde))
(with-slots (rd-kafka-consumer
rd-kafka-queue
(ks key-serde)
(vs value-serde))
consumer
(with-conf rd-kafka-conf conf
(cl-rdkafka/ll:rd-kafka-conf-set-events
rd-kafka-conf
cl-rdkafka/ll:rd-kafka-event-offset-commit)
(cffi:with-foreign-object (errstr :char +errstr-len+)
(setf rd-kafka-consumer (cl-rdkafka/ll:rd-kafka-new
cl-rdkafka/ll:rd-kafka-consumer
rd-kafka-conf
errstr
+errstr-len+))
(when (cffi:null-pointer-p rd-kafka-consumer)
(error 'allocation-error
:name "consumer"
:description (cffi:foreign-string-to-lisp
errstr :max-chars +errstr-len+)))))
(setf rd-kafka-queue (cl-rdkafka/ll:rd-kafka-queue-new rd-kafka-consumer))
(when (cffi:null-pointer-p rd-kafka-queue)
(cl-rdkafka/ll:rd-kafka-destroy rd-kafka-consumer)
(error 'allocation-error :name "queue"))
(handler-case
(register-rd-kafka-queue rd-kafka-queue #'process-commit-event)
(condition (c)
(cl-rdkafka/ll:rd-kafka-queue-destroy rd-kafka-queue)
(cl-rdkafka/ll:rd-kafka-destroy rd-kafka-consumer)
(error c)))
(setf ks (make-instance 'deserializer
:name "key-serde"
:function key-serde)
vs (make-instance 'deserializer
:name "value-serde"
:function value-serde))
(tg:finalize consumer (make-consumer-finalizer rd-kafka-consumer rd-kafka-queue))))
(defmethod subscribe ((consumer consumer) (topics sequence))
"Subscribe CONSUMER to TOPICS.
Any topic prefixed with '^' will be regex-matched with the cluster's
topics."
(with-slots (rd-kafka-consumer) consumer
(with-toppar-list toppar-list (alloc-toppar-list topics)
(let ((err (cl-rdkafka/ll:rd-kafka-subscribe rd-kafka-consumer
toppar-list)))
(unless (eq err 'cl-rdkafka/ll:rd-kafka-resp-err-no-error)
(error (make-rdkafka-error err)))))))
(defmethod subscribe ((consumer consumer) (topic string))
"Subscribe CONSUMER to TOPIC.
If TOPIC starts with '^', then it will be regex-matched with the
cluster's topics."
(subscribe consumer (list topic)))
(defmethod unsubscribe ((consumer consumer))
"Unsubscribe CONSUMER from its current topic subscription."
(with-slots (rd-kafka-consumer) consumer
(let ((err (cl-rdkafka/ll:rd-kafka-unsubscribe rd-kafka-consumer)))
(unless (eq err 'cl-rdkafka/ll:rd-kafka-resp-err-no-error)
(error (make-rdkafka-error err))))))
(defun %subscription (rd-kafka-consumer)
(cffi:with-foreign-object (rd-list :pointer)
(let ((err (cl-rdkafka/ll:rd-kafka-subscription
rd-kafka-consumer
rd-list)))
(unless (eq err 'cl-rdkafka/ll:rd-kafka-resp-err-no-error)
(error (make-rdkafka-error err)))
(cffi:mem-ref rd-list :pointer))))
(defmethod subscription ((consumer consumer))
"Return a list of topic names that CONSUMER is subscribed to."
(with-slots (rd-kafka-consumer) consumer
(with-toppar-list toppar-list (%subscription rd-kafka-consumer)
(let (topics)
(foreach-toppar toppar-list (topic)
(push topic topics))
(nreverse topics)))))
(defmethod poll ((consumer consumer) (timeout-ms integer))
"Block for up to TIMEOUT-MS milliseconds and return a MESSAGE or nil.
May signal PARTITION-ERROR or condition from CONSUMER's serde. A
STORE-FUNCTION restart will be provided if it's a serde condition."
(with-slots (rd-kafka-consumer key-serde value-serde) consumer
(let ((rd-kafka-message (cl-rdkafka/ll:rd-kafka-consumer-poll
rd-kafka-consumer
timeout-ms)))
(unwind-protect
(unless (cffi:null-pointer-p rd-kafka-message)
(rd-kafka-message->message rd-kafka-message
(lambda (bytes)
(apply-serde key-serde bytes))
(lambda (bytes)
(apply-serde value-serde bytes))))
(unless (cffi:null-pointer-p rd-kafka-message)
(cl-rdkafka/ll:rd-kafka-message-destroy rd-kafka-message))))))
(defun %seek (consumer topic partition offset timeout-ms)
(with-slots (rd-kafka-consumer) consumer
(let ((rkt (cl-rdkafka/ll:rd-kafka-topic-new
rd-kafka-consumer
topic
(cffi:null-pointer))))
(when (cffi:null-pointer-p rkt)
(error (make-rdkafka-error (cl-rdkafka/ll:rd-kafka-last-error))))
(unwind-protect
(let ((err (cl-rdkafka/ll:rd-kafka-seek
rkt
partition
offset
timeout-ms)))
(unless (eq err 'cl-rdkafka/ll:rd-kafka-resp-err-no-error)
(error (make-rdkafka-error err))))
(cl-rdkafka/ll:rd-kafka-topic-destroy rkt)))))
(defmethod seek
((consumer consumer)
(topic string)
(partition integer)
(offset integer)
(timeout-ms integer))
"Block for up to TIMEOUT-MS milliseconds and seek CONSUMER to OFFSET."
(%seek consumer topic partition offset timeout-ms))
(defmethod seek-to-beginning
((consumer consumer)
(topic string)
(partition integer)
(timeout-ms integer))
"Block for up to TIMEOUT-MS milliseconds and seek CONSUMER to beginning of PARTITION."
(%seek consumer topic partition cl-rdkafka/ll:rd-kafka-offset-beginning timeout-ms))
(defmethod seek-to-end
((consumer consumer)
(topic string)
(partition integer)
(timeout-ms integer))
"Block for up to TIMEOUT-MS milliseconds and seek CONSUMER to end of PARTITION."
(%seek consumer topic partition cl-rdkafka/ll:rd-kafka-offset-end timeout-ms))
(defun %commit (rd-kafka-consumer toppar-list rd-kafka-queue)
(bt:with-lock-held (+address->queue-lock+)
(let ((err (cl-rdkafka/ll:rd-kafka-commit-queue
rd-kafka-consumer
toppar-list
rd-kafka-queue
(cffi:null-pointer)
(cffi:null-pointer))))
(unless (eq err 'cl-rdkafka/ll:rd-kafka-resp-err-no-error)
(error (make-rdkafka-error err)))
(let ((promise (lparallel:promise)))
(enqueue-payload rd-kafka-queue promise)
promise))))
(defmethod commit ((consumer consumer) &key offsets asyncp)
"Commit OFFSETS to broker.
otherwise, OFFSETS should be an alist mapping (topic . partition) cons
cells to either (offset . metadata) cons cells or lone offset values.
On success, an alist of committed offsets is returned, mapping
(topic . partition) to (offset . metadata).
On failure, either an RDKAFKA-ERROR or PARTIAL-ERROR is signalled.
The PARTIAL-ERROR will have the slots:
* GOODIES: Same format as successful return value
* BADDIES: An alist mapping (topic . partition) to RDKAFKA-ERROR
If ASYNCP is true, then a FUTURE will be returned instead."
(with-slots (rd-kafka-consumer rd-kafka-queue) consumer
(with-toppar-list
toppar-list
(if (null offsets)
(cffi:null-pointer)
(alloc-toppar-list-from-alist offsets))
(let* ((promise (%commit rd-kafka-consumer toppar-list rd-kafka-queue))
(future (make-instance 'future :promise promise :client consumer)))
(if asyncp
future
(value future))))))
(defun %assignment (rd-kafka-consumer)
(cffi:with-foreign-object (rd-list :pointer)
(let ((err (cl-rdkafka/ll:rd-kafka-assignment rd-kafka-consumer rd-list)))
(unless (eq err 'cl-rdkafka/ll:rd-kafka-resp-err-no-error)
(error (make-rdkafka-error err)))
(cffi:mem-ref rd-list :pointer))))
(defmethod assignment ((consumer consumer) &key offsetsp)
"Return a list of partitions assigned to CONSUMER.
The elements of the returned list will be either:
* (topic . partition) cons cells if OFFSETSP is nil
* ((topic . partition) . offset) cons cells otherwise"
(with-slots (rd-kafka-consumer) consumer
(with-toppar-list toppar-list (%assignment rd-kafka-consumer)
(let (partitions)
(if offsetsp
(foreach-toppar toppar-list (topic partition offset)
(push (cons (cons topic partition) offset) partitions))
(foreach-toppar toppar-list (topic partition)
(push (cons topic partition) partitions)))
(nreverse partitions)))))
(defmethod committed
((consumer consumer) (partitions sequence) (timeout-ms integer))
"Block for up to TIMEOUT-MS milliseconds and return committed offsets for PARTITIONS.
PARTITIONS should be a sequence of (topic . partition) cons cells.
On success, an alist of committed offsets is returned, mapping
(topic . partition) to (offset . metadata).
On failure, either an RDKAFKA-ERROR or PARTIAL-ERROR is signalled.
The PARTIAL-ERROR will have the slots:
* GOODIES: Same format as successful return value
* BADDIES: An alist mapping (topic . partition) to RDKAFKA-ERROR"
(with-slots (rd-kafka-consumer) consumer
(with-toppar-list
toppar-list
(alloc-toppar-list partitions :topic #'car :partition #'cdr)
(let ((err (cl-rdkafka/ll:rd-kafka-committed
rd-kafka-consumer
toppar-list
timeout-ms))
goodies
baddies)
(unless (eq err 'cl-rdkafka/ll:rd-kafka-resp-err-no-error)
(error (make-rdkafka-error err)))
(foreach-toppar
toppar-list
(topic partition offset metadata metadata-size err)
(let ((toppar (cons topic partition)))
(if (eq err 'cl-rdkafka/ll:rd-kafka-resp-err-no-error)
(let* ((meta (pointer->bytes metadata metadata-size))
(offset+meta (cons offset meta)))
(push (cons toppar offset+meta) goodies))
(let ((rdkafka-error (make-rdkafka-error err)))
(push (cons toppar rdkafka-error) baddies)))))
(when baddies
(error 'partial-error
:description "Committed failed"
:baddies (nreverse baddies)
:goodies (nreverse goodies)))
(nreverse goodies)))))
(defmethod assign ((consumer consumer) (partitions sequence))
"Assign PARTITIONS to CONSUMER.
PARTITIONS should be a sequence of either:
* (topic . partition) cons cells
* ((topic . partition) . offset) cons cells"
(with-slots (rd-kafka-consumer) consumer
(with-toppar-list
toppar-list
(alloc-toppar-list
partitions
:topic (lambda (cons)
(let ((car (car cons)))
(if (consp car)
(car car)
car)))
:partition (lambda (cons)
(let ((car (car cons)))
(if (consp car)
(cdr car)
(cdr cons))))
:offset (lambda (cons)
(let ((car (car cons)))
(if (consp car)
(cdr cons)
cl-rdkafka/ll:rd-kafka-offset-invalid))))
(let ((err (cl-rdkafka/ll:rd-kafka-assign rd-kafka-consumer toppar-list)))
(unless (eq err 'cl-rdkafka/ll:rd-kafka-resp-err-no-error)
(error (make-rdkafka-error err)))))))
(defmethod member-id ((consumer consumer))
"Return CONSUMER's broker-assigned group member-id."
(with-slots (rd-kafka-consumer) consumer
(cl-rdkafka/ll:rd-kafka-memberid rd-kafka-consumer)))
(defmethod pause ((consumer consumer) (partitions sequence))
"Pause consumption from PARTITIONS.
PARTITIONS should be a sequence of (topic . partition) cons cells.
PARTITIONS is returned on success.
On failure, either an RDKAFKA-ERROR or PARTIAL-ERROR is signalled.
The PARTIAL-ERROR will have the slots:
* GOODIES: A list of (topic . partition) cons cells
* BADDIES: An alist mapping (topic . partition) to RDKAFKA-ERROR"
(with-slots (rd-kafka-consumer) consumer
(with-toppar-list
toppar-list
(alloc-toppar-list partitions :topic #'car :partition #'cdr)
(let ((err (cl-rdkafka/ll:rd-kafka-pause-partitions
rd-kafka-consumer
toppar-list))
goodies
baddies)
(unless (eq err 'cl-rdkafka/ll:rd-kafka-resp-err-no-error)
(error (make-rdkafka-error err)))
(foreach-toppar toppar-list (err topic partition)
(let ((toppar (cons topic partition)))
(if (eq err 'cl-rdkafka/ll:rd-kafka-resp-err-no-error)
(push toppar goodies)
(let ((rdkafka-error (make-rdkafka-error err)))
(push (cons toppar rdkafka-error) baddies)))))
(when baddies
(error 'partial-error
:description "Pause failed"
:baddies (nreverse baddies)
:goodies (nreverse goodies)))
partitions))))
(defmethod resume ((consumer consumer) (partitions sequence))
"Resume consumption from PARTITIONS.
PARTITIONS should be a sequence of (topic . partition) cons cells.
PARTITIONS is returned on success.
On failure, either an RDKAFKA-ERROR or PARTIAL-ERROR is signalled.
The PARTIAL-ERROR will have the slots:
* GOODIES: A list of (topic . partition) cons cells
* BADDIES: An alist mapping (topic . partition) to RDKAFKA-ERROR"
(with-slots (rd-kafka-consumer) consumer
(with-toppar-list
toppar-list
(alloc-toppar-list partitions :topic #'car :partition #'cdr)
(let ((err (cl-rdkafka/ll:rd-kafka-resume-partitions
rd-kafka-consumer
toppar-list))
goodies
baddies)
(unless (eq err 'cl-rdkafka/ll:rd-kafka-resp-err-no-error)
(error (make-rdkafka-error err)))
(foreach-toppar toppar-list (err topic partition)
(let ((toppar (cons topic partition)))
(if (eq err 'cl-rdkafka/ll:rd-kafka-resp-err-no-error)
(push toppar goodies)
(let ((rdkafka-error (make-rdkafka-error err)))
(push (cons toppar rdkafka-error) baddies)))))
(when baddies
(error 'partial-error
:description "Resume failed"
:baddies (nreverse baddies)
:goodies (nreverse goodies)))
partitions))))
(defmethod watermarks
((consumer consumer)
(topic string)
(partition integer)
(timeout-ms integer))
"Query broker for low (oldest/beginning) and high (newest/end) offsets.
A (low . high) cons cell is returned."
(cffi:with-foreign-objects ((low :int64) (high :int64))
(with-slots (rd-kafka-consumer) consumer
(let ((err (cl-rdkafka/ll:rd-kafka-query-watermark-offsets
rd-kafka-consumer
topic
partition
low
high
timeout-ms)))
(unless (eq err 'cl-rdkafka/ll:rd-kafka-resp-err-no-error)
(error (make-partition-error err topic partition)))
(cons (cffi:mem-ref low :int64)
(cffi:mem-ref high :int64))))))
(defmethod offsets-for-times
((consumer consumer)
(timestamps list)
(timeout-ms integer))
"Look up the offsets for the given partitions by timestamp.
The returned offset for each partition is the earliest offset whose
timestamp is greater than or equal to the given timestamp in
TIMESTAMPS.
TIMESTAMPS should be an alist mapping (topic . partition) cons cells
to timestamp values.
On success, an alist of offsets is returned, mapping
(topic . partition) cons cells to offset values.
On failure, either an RDKAFKA-ERROR or PARTIAL-ERROR is signalled.
The PARTIAL-ERROR will have the slots:
* GOODIES: Same format as successful return value
* BADDIES: An alist mapping (topic . partition) to RDKAFKA-ERROR"
(with-slots (rd-kafka-consumer) consumer
(with-toppar-list
toppar-list
(alloc-toppar-list timestamps :topic #'caar :partition #'cdar :offset #'cdr)
(let ((err (cl-rdkafka/ll:rd-kafka-offsets-for-times
rd-kafka-consumer
toppar-list
timeout-ms))
goodies
baddies)
(unless (eq err 'cl-rdkafka/ll:rd-kafka-resp-err-no-error)
(error (make-rdkafka-error err)))
(foreach-toppar toppar-list (topic partition offset err)
(let ((toppar (cons topic partition)))
(if (eq err 'cl-rdkafka/ll:rd-kafka-resp-err-no-error)
(push (cons toppar offset) goodies)
(let ((rdkafka-error (make-rdkafka-error err)))
(push (cons toppar rdkafka-error) baddies)))))
(when baddies
(error 'partial-error
:description "Offsets for times error"
:baddies (nreverse baddies)
:goodies (nreverse goodies)))
(nreverse goodies)))))
(defmethod positions ((consumer consumer) (partitions sequence))
"Retrieve current positions (offsets) for PARTITIONS.
PARTITIONS should be a sequence of (topic . partition) cons cells.
On success, an alist of positions is returned, mapping
(topic . partition) to one of either:
* 1 plus the last consumed message offset
* nil if there was no previous message.
On failure, either an RDKAFKA-ERROR or PARTIAL-ERROR is signalled.
The PARTIAL-ERROR will have the slots:
* GOODIES: Same format as successful return value
* BADDIES: An alist mapping (topic . partition) to RDKAFKA-ERROR"
(with-slots (rd-kafka-consumer) consumer
(with-toppar-list
toppar-list
(alloc-toppar-list partitions :topic #'car :partition #'cdr)
(let ((err (cl-rdkafka/ll:rd-kafka-position
rd-kafka-consumer
toppar-list))
goodies
baddies)
(unless (eq err 'cl-rdkafka/ll:rd-kafka-resp-err-no-error)
(error (make-rdkafka-error err)))
(foreach-toppar toppar-list (topic partition offset err)
(let ((toppar (cons topic partition)))
(if (eq err 'cl-rdkafka/ll:rd-kafka-resp-err-no-error)
(let ((position (unless (= offset cl-rdkafka/ll:rd-kafka-offset-invalid)
offset)))
(push (cons toppar position) goodies))
(let ((rdkafka-error (make-rdkafka-error err)))
(push (cons toppar rdkafka-error) baddies)))))
(when baddies
(error 'partial-error
:description "Positions error"
:baddies (nreverse baddies)
:goodies (nreverse goodies)))
(nreverse goodies)))))
(defmethod close ((consumer consumer))
"Close CONSUMER after revoking assignment, committing offsets, and leaving group.
this method is provided if closing needs to occur at a well-defined
time."
(with-slots (rd-kafka-consumer) consumer
(let ((err (cl-rdkafka/ll:rd-kafka-consumer-close rd-kafka-consumer)))
(unless (eq err 'cl-rdkafka/ll:rd-kafka-resp-err-no-error)
(error (make-rdkafka-error err))))))
|
8377d3ea07484104b6f8b38996b7df85e8cbcc269310724ef3a40e4cdf7745f3 | racket/compiler | embed-me26.rkt | #lang racket/base
(module+ main
12)
(module submod racket/base
11)
10
(require (submod "embed-me27.rkt" other-submod))
| null | https://raw.githubusercontent.com/racket/compiler/88acf8a1ec81fec0fbcb6035af1d994d2fec4154/compiler-test/tests/compiler/embed/embed-me26.rkt | racket | #lang racket/base
(module+ main
12)
(module submod racket/base
11)
10
(require (submod "embed-me27.rkt" other-submod))
|
|
873bfb5b02972b5b5a60bd456ccfe30d67c26b82705bdb03a2e4e5734b4965d9 | erldb/erldb | erldb_ets.erl | %%%-------------------------------------------------------------------
@author < >
( C ) 2013 ,
%%% @doc
%%%
%%% @end
Created : 23 Jul 2013 by < >
%%%-------------------------------------------------------------------
-module(erldb_ets).
-behaviour(gen_server).
%% API
-export([start_link/1]).
%% gen_server callbacks
-export([init/1, handle_call/3, handle_cast/2, handle_info/2,
terminate/2, code_change/3]).
-define(SERVER, ?MODULE).
-record(state, {
tabs = []
}).
%%%===================================================================
%%% API
%%%===================================================================
%%--------------------------------------------------------------------
%% @doc
%% Starts the server
%%
( ) - > { ok , Pid } | ignore | { error , Error }
%% @end
%%--------------------------------------------------------------------
start_link(Args) ->
gen_server:start_link(?MODULE, Args, []).
%%%===================================================================
%%% gen_server callbacks
%%%===================================================================
%%--------------------------------------------------------------------
@private
%% @doc
%% Initializes the server
%%
) - > { ok , State } |
{ ok , State , Timeout } |
%% ignore |
%% {stop, Reason}
%% @end
%%--------------------------------------------------------------------
init(Args) ->
[ gen_server:cast(self(), {init_table, Model, []}) || Model <- proplists:get_value(models, Args, []) ],
{ok, #state{}}.
%%--------------------------------------------------------------------
@private
%% @doc
%% Handling call messages
%%
, From , State ) - >
%% {reply, Reply, State} |
{ reply , Reply , State , Timeout } |
{ noreply , State } |
{ noreply , State , Timeout } |
%% {stop, Reason, Reply, State} |
%% {stop, Reason, State}
%% @end
%%--------------------------------------------------------------------
handle_call({save, Object}, _From, State) ->
Model = element(1, Object),
Fields = get_fields(Model),
[PrimaryKeyPos|_] = [ Pos || {_Fieldname, Pos, _Type, Opt} <- Fields,
proplists:get_value(primary_key, Opt) /= undefined ],
UpdatedObject =
case ets:last(Model) of
'$end_of_table' ->
erlang:setelement(PrimaryKeyPos, Object, 1);
Number when is_integer(Number) ->
erlang:setelement(PrimaryKeyPos, Object, Number+1);
_ ->
%% We don't do anything with this since we don't know what kind of scheme we're running at
Object
end,
true = ets:insert_new(Model, UpdatedObject),
{reply, UpdatedObject, State};
handle_call({update, Object}, _From, State) ->
Model = element(1, Object),
Fields = get_fields(Model),
[PrimaryKeyPos|_] = [ Pos || {_Fieldname, Pos, _Type, Opt} <- Fields,
proplists:get_value(primary_key, Opt) /= undefined ],
UpdatedObject =
case ets:last(Model) of
'$end_of_table' ->
erlang:setelement(PrimaryKeyPos, Object, 1);
Number when is_integer(Number) ->
erlang:setelement(PrimaryKeyPos, Object, Number+1);
_ ->
Object
end,
true = ets:insert(Model, UpdatedObject),
{reply, UpdatedObject, State};
handle_call({find, Model, Conditions, _Options}, _From, State) ->
case ets:info(Model) of
undefined ->
{reply, {error, tab_not_found}, State};
_Info ->
Fields = get_fields(Model),
Match = build_match_q(Conditions, Fields),
Object = ets:match_object(Model, Match),
{reply, {ok, Object}, State}
end;
handle_call({delete, Object}, _From, State) ->
Model = element(1, Object),
Match = build_match_q_from_object(Object),
ObjectList = ets:match_object(Model, Match),
lists:foreach(
fun(Obj) ->
true = ets:delete_object(Model, Obj)
end, ObjectList),
{reply, ok, State};
handle_call({supported_condition, Conditions}, _From, State) ->
Supported = ['equals'],
List = [Operators || {_, Operators, _} <- Conditions,
lists:member(Operators, Supported) == false],
Reply =
case List of
[] -> {ok, supported};
List -> {error, not_supported, List}
end,
{reply, Reply, State};
handle_call(_Request, _From, State) ->
Reply = ok,
{reply, Reply, State}.
%%--------------------------------------------------------------------
@private
%% @doc
%% Handling cast messages
%%
@spec handle_cast(Msg , State ) - > { noreply , State } |
{ noreply , State , Timeout } |
%% {stop, Reason, State}
%% @end
%%--------------------------------------------------------------------
handle_cast({init_table, Model, Args}, State) ->
Options = proplists:get_value(worker_options, Args, []),
Fields = get_fields(Model),
[PrimaryKeyPos|_] = [ Pos || {_Fieldname, Pos, _Type, Opt} <- Fields,
proplists:get_value(primary_key, Opt) /= undefined ],
ets:new(Model, [named_table, public, {keypos, PrimaryKeyPos}|Options]),
{noreply, State};
handle_cast(_Msg, State) ->
{noreply, State}.
%%--------------------------------------------------------------------
@private
%% @doc
%% Handling all non call/cast messages
%%
, State ) - > { noreply , State } |
{ noreply , State , Timeout } |
%% {stop, Reason, State}
%% @end
%%--------------------------------------------------------------------
handle_info(_Info, State) ->
{noreply, State}.
%%--------------------------------------------------------------------
@private
%% @doc
%% This function is called by a gen_server when it is about to
%% terminate. It should be the opposite of Module:init/1 and do any
%% necessary cleaning up. When it returns, the gen_server terminates
with . The return value is ignored .
%%
, State ) - > void ( )
%% @end
%%--------------------------------------------------------------------
terminate(_Reason, _State) ->
ok.
%%--------------------------------------------------------------------
@private
%% @doc
%% Convert process state when code is changed
%%
, State , Extra ) - > { ok , NewState }
%% @end
%%--------------------------------------------------------------------
code_change(_OldVsn, State, _Extra) ->
{ok, State}.
%%%===================================================================
Internal functions
%%%===================================================================
build_match_q(QueryFields, Fields) ->
Query = ['_'|lists:map(fun({Fieldname, _, _, _}) ->
case lists:keyfind(Fieldname, 1, QueryFields) of
false ->
'_';
Match ->
build_col_query(Match)
end
end, Fields)],
erlang:list_to_tuple(Query).
build_col_query({_Fieldname, 'equals', Value}) ->
Value.
get_fields(Model) ->
[ Y || {Z,[Y]} <- Model:module_info(attributes),
Z =:= field ].
build_match_q_from_object(Object) ->
Model = element(1, Object),
Fields = get_fields(Model),
Query = [Model|lists:map(fun({_Fieldname, Pos, _, _}) ->
element(Pos, Object)
end, Fields)],
erlang:list_to_tuple(Query).
| null | https://raw.githubusercontent.com/erldb/erldb/d014c29ab5efa00c26847d637ce09be83b10cc19/src/erldb_ets.erl | erlang | -------------------------------------------------------------------
@doc
@end
-------------------------------------------------------------------
API
gen_server callbacks
===================================================================
API
===================================================================
--------------------------------------------------------------------
@doc
Starts the server
@end
--------------------------------------------------------------------
===================================================================
gen_server callbacks
===================================================================
--------------------------------------------------------------------
@doc
Initializes the server
ignore |
{stop, Reason}
@end
--------------------------------------------------------------------
--------------------------------------------------------------------
@doc
Handling call messages
{reply, Reply, State} |
{stop, Reason, Reply, State} |
{stop, Reason, State}
@end
--------------------------------------------------------------------
We don't do anything with this since we don't know what kind of scheme we're running at
--------------------------------------------------------------------
@doc
Handling cast messages
{stop, Reason, State}
@end
--------------------------------------------------------------------
--------------------------------------------------------------------
@doc
Handling all non call/cast messages
{stop, Reason, State}
@end
--------------------------------------------------------------------
--------------------------------------------------------------------
@doc
This function is called by a gen_server when it is about to
terminate. It should be the opposite of Module:init/1 and do any
necessary cleaning up. When it returns, the gen_server terminates
@end
--------------------------------------------------------------------
--------------------------------------------------------------------
@doc
Convert process state when code is changed
@end
--------------------------------------------------------------------
===================================================================
=================================================================== | @author < >
( C ) 2013 ,
Created : 23 Jul 2013 by < >
-module(erldb_ets).
-behaviour(gen_server).
-export([start_link/1]).
-export([init/1, handle_call/3, handle_cast/2, handle_info/2,
terminate/2, code_change/3]).
-define(SERVER, ?MODULE).
-record(state, {
tabs = []
}).
( ) - > { ok , Pid } | ignore | { error , Error }
start_link(Args) ->
gen_server:start_link(?MODULE, Args, []).
@private
) - > { ok , State } |
{ ok , State , Timeout } |
init(Args) ->
[ gen_server:cast(self(), {init_table, Model, []}) || Model <- proplists:get_value(models, Args, []) ],
{ok, #state{}}.
@private
, From , State ) - >
{ reply , Reply , State , Timeout } |
{ noreply , State } |
{ noreply , State , Timeout } |
handle_call({save, Object}, _From, State) ->
Model = element(1, Object),
Fields = get_fields(Model),
[PrimaryKeyPos|_] = [ Pos || {_Fieldname, Pos, _Type, Opt} <- Fields,
proplists:get_value(primary_key, Opt) /= undefined ],
UpdatedObject =
case ets:last(Model) of
'$end_of_table' ->
erlang:setelement(PrimaryKeyPos, Object, 1);
Number when is_integer(Number) ->
erlang:setelement(PrimaryKeyPos, Object, Number+1);
_ ->
Object
end,
true = ets:insert_new(Model, UpdatedObject),
{reply, UpdatedObject, State};
handle_call({update, Object}, _From, State) ->
Model = element(1, Object),
Fields = get_fields(Model),
[PrimaryKeyPos|_] = [ Pos || {_Fieldname, Pos, _Type, Opt} <- Fields,
proplists:get_value(primary_key, Opt) /= undefined ],
UpdatedObject =
case ets:last(Model) of
'$end_of_table' ->
erlang:setelement(PrimaryKeyPos, Object, 1);
Number when is_integer(Number) ->
erlang:setelement(PrimaryKeyPos, Object, Number+1);
_ ->
Object
end,
true = ets:insert(Model, UpdatedObject),
{reply, UpdatedObject, State};
handle_call({find, Model, Conditions, _Options}, _From, State) ->
case ets:info(Model) of
undefined ->
{reply, {error, tab_not_found}, State};
_Info ->
Fields = get_fields(Model),
Match = build_match_q(Conditions, Fields),
Object = ets:match_object(Model, Match),
{reply, {ok, Object}, State}
end;
handle_call({delete, Object}, _From, State) ->
Model = element(1, Object),
Match = build_match_q_from_object(Object),
ObjectList = ets:match_object(Model, Match),
lists:foreach(
fun(Obj) ->
true = ets:delete_object(Model, Obj)
end, ObjectList),
{reply, ok, State};
handle_call({supported_condition, Conditions}, _From, State) ->
Supported = ['equals'],
List = [Operators || {_, Operators, _} <- Conditions,
lists:member(Operators, Supported) == false],
Reply =
case List of
[] -> {ok, supported};
List -> {error, not_supported, List}
end,
{reply, Reply, State};
handle_call(_Request, _From, State) ->
Reply = ok,
{reply, Reply, State}.
@private
@spec handle_cast(Msg , State ) - > { noreply , State } |
{ noreply , State , Timeout } |
handle_cast({init_table, Model, Args}, State) ->
Options = proplists:get_value(worker_options, Args, []),
Fields = get_fields(Model),
[PrimaryKeyPos|_] = [ Pos || {_Fieldname, Pos, _Type, Opt} <- Fields,
proplists:get_value(primary_key, Opt) /= undefined ],
ets:new(Model, [named_table, public, {keypos, PrimaryKeyPos}|Options]),
{noreply, State};
handle_cast(_Msg, State) ->
{noreply, State}.
@private
, State ) - > { noreply , State } |
{ noreply , State , Timeout } |
handle_info(_Info, State) ->
{noreply, State}.
@private
with . The return value is ignored .
, State ) - > void ( )
terminate(_Reason, _State) ->
ok.
@private
, State , Extra ) - > { ok , NewState }
code_change(_OldVsn, State, _Extra) ->
{ok, State}.
Internal functions
build_match_q(QueryFields, Fields) ->
Query = ['_'|lists:map(fun({Fieldname, _, _, _}) ->
case lists:keyfind(Fieldname, 1, QueryFields) of
false ->
'_';
Match ->
build_col_query(Match)
end
end, Fields)],
erlang:list_to_tuple(Query).
build_col_query({_Fieldname, 'equals', Value}) ->
Value.
get_fields(Model) ->
[ Y || {Z,[Y]} <- Model:module_info(attributes),
Z =:= field ].
build_match_q_from_object(Object) ->
Model = element(1, Object),
Fields = get_fields(Model),
Query = [Model|lists:map(fun({_Fieldname, Pos, _, _}) ->
element(Pos, Object)
end, Fields)],
erlang:list_to_tuple(Query).
|
5b80176c02209da269eee57a15fd5f8ea8d43895d334911ad6975967d2439a77 | dys-bigwig/racket-ncurses | main.rkt | #lang racket/base
(require "api.rkt")
(provide (all-from-out "api.rkt"))
| null | https://raw.githubusercontent.com/dys-bigwig/racket-ncurses/864af9e67a1daab7796f7e06b95aefea2168bd6f/main.rkt | racket | #lang racket/base
(require "api.rkt")
(provide (all-from-out "api.rkt"))
|
|
bb46804ace7883fa5caceede09b114b2e1f5d3177bac3d0a5641874d190cfccb | agentultra/adventure-engine | Main.hs | module Main where
import Maker
main :: IO ()
main = run
| null | https://raw.githubusercontent.com/agentultra/adventure-engine/a7fc9722fb7771dd6c7d2c953910eb9e971a31f9/adventure-maker/Main.hs | haskell | module Main where
import Maker
main :: IO ()
main = run
|
|
88534abe01f6841ef464a478d32732eda3da1296c5547056a94be67566b24d3b | buntine/Simply-Scheme-Exercises | 18-5.scm | ; Write prune, a procedure that takes a tree as argument and returns a copy of the
; tree, but with all the leaf nodes of the original tree removed. (If the argument to prune
is a one - node tree , in which the root node has no children , then prune should return
; #f because the result of removing the root node wouldn’t be a tree.)
; My solution does not use the filter operator. Instead if the node is leaf then it just skips over it.
; Also I prefer using make-node instead of cons for data encapsulation.
(define (prune tree)
(cond ((leaf? tree) #f)
(else (make-node (datum tree) (prune-forest (children tree))))))
(define (prune-forest forest)
(cond ((null? forest) '())
((leaf? (car forest)) (prune-forest (cdr forest)))
(else (make-node (prune (car forest)) (prune-forest (cdr forest))))))
| null | https://raw.githubusercontent.com/buntine/Simply-Scheme-Exercises/c6cbf0bd60d6385b506b8df94c348ac5edc7f646/18-trees/18-5.scm | scheme | Write prune, a procedure that takes a tree as argument and returns a copy of the
tree, but with all the leaf nodes of the original tree removed. (If the argument to prune
#f because the result of removing the root node wouldn’t be a tree.)
My solution does not use the filter operator. Instead if the node is leaf then it just skips over it.
Also I prefer using make-node instead of cons for data encapsulation. | is a one - node tree , in which the root node has no children , then prune should return
(define (prune tree)
(cond ((leaf? tree) #f)
(else (make-node (datum tree) (prune-forest (children tree))))))
(define (prune-forest forest)
(cond ((null? forest) '())
((leaf? (car forest)) (prune-forest (cdr forest)))
(else (make-node (prune (car forest)) (prune-forest (cdr forest))))))
|
dee0c581f484fc3fa0ab921b58c80a524adcc092d090e3aabbe5dd64664bb481 | triffon/fp-2019-20 | first n members.rkt | (define (take n xs)
(cond
((< n 0) "error")
((> n (length xs)) xs)
((= n 0) '())
((if (= n 1) (list (car xs))
може да се напише ( cons ( car xs ) ( take ... ) )
(take 2134 '(9 7 2 3))
(take 0 '(2 9 2))
(take 2 '(1 2 3))
(take 3 '(1 2 3 4 5 6 7)) | null | https://raw.githubusercontent.com/triffon/fp-2019-20/7efb13ff4de3ea13baa2c5c59eb57341fac15641/exercises/computer-science-3/exercises/04.lists/solutions/first%20n%20members.rkt | racket | (define (take n xs)
(cond
((< n 0) "error")
((> n (length xs)) xs)
((= n 0) '())
((if (= n 1) (list (car xs))
може да се напише ( cons ( car xs ) ( take ... ) )
(take 2134 '(9 7 2 3))
(take 0 '(2 9 2))
(take 2 '(1 2 3))
(take 3 '(1 2 3 4 5 6 7)) |
|
2bacd1e0a4253c6b97bb7e2da0740ab51ee63952eb98f155af7619cb6c4371f3 | brendanhay/amazonka | ChoiceStatus.hs | # LANGUAGE DeriveGeneric #
# LANGUAGE DerivingStrategies #
# LANGUAGE GeneralizedNewtypeDeriving #
# LANGUAGE LambdaCase #
{-# LANGUAGE OverloadedStrings #-}
# LANGUAGE PatternSynonyms #
{-# LANGUAGE StrictData #-}
# LANGUAGE NoImplicitPrelude #
# OPTIONS_GHC -fno - warn - unused - imports #
Derived from AWS service descriptions , licensed under Apache 2.0 .
-- |
Module : Amazonka . WellArchitected . Types . ChoiceStatus
Copyright : ( c ) 2013 - 2023
License : Mozilla Public License , v. 2.0 .
Maintainer : < brendan.g.hay+ >
-- Stability : auto-generated
Portability : non - portable ( GHC extensions )
module Amazonka.WellArchitected.Types.ChoiceStatus
( ChoiceStatus
( ..,
ChoiceStatus_NOT_APPLICABLE,
ChoiceStatus_SELECTED,
ChoiceStatus_UNSELECTED
),
)
where
import qualified Amazonka.Core as Core
import qualified Amazonka.Data as Data
import qualified Amazonka.Prelude as Prelude
newtype ChoiceStatus = ChoiceStatus'
{ fromChoiceStatus ::
Data.Text
}
deriving stock
( Prelude.Show,
Prelude.Read,
Prelude.Eq,
Prelude.Ord,
Prelude.Generic
)
deriving newtype
( Prelude.Hashable,
Prelude.NFData,
Data.FromText,
Data.ToText,
Data.ToByteString,
Data.ToLog,
Data.ToHeader,
Data.ToQuery,
Data.FromJSON,
Data.FromJSONKey,
Data.ToJSON,
Data.ToJSONKey,
Data.FromXML,
Data.ToXML
)
pattern ChoiceStatus_NOT_APPLICABLE :: ChoiceStatus
pattern ChoiceStatus_NOT_APPLICABLE = ChoiceStatus' "NOT_APPLICABLE"
pattern ChoiceStatus_SELECTED :: ChoiceStatus
pattern ChoiceStatus_SELECTED = ChoiceStatus' "SELECTED"
pattern ChoiceStatus_UNSELECTED :: ChoiceStatus
pattern ChoiceStatus_UNSELECTED = ChoiceStatus' "UNSELECTED"
# COMPLETE
ChoiceStatus_NOT_APPLICABLE ,
ChoiceStatus_SELECTED ,
ChoiceStatus_UNSELECTED ,
ChoiceStatus '
#
ChoiceStatus_NOT_APPLICABLE,
ChoiceStatus_SELECTED,
ChoiceStatus_UNSELECTED,
ChoiceStatus'
#-}
| null | https://raw.githubusercontent.com/brendanhay/amazonka/09f52b75d2cfdff221b439280d3279d22690d6a6/lib/services/amazonka-wellarchitected/gen/Amazonka/WellArchitected/Types/ChoiceStatus.hs | haskell | # LANGUAGE OverloadedStrings #
# LANGUAGE StrictData #
|
Stability : auto-generated | # LANGUAGE DeriveGeneric #
# LANGUAGE DerivingStrategies #
# LANGUAGE GeneralizedNewtypeDeriving #
# LANGUAGE LambdaCase #
# LANGUAGE PatternSynonyms #
# LANGUAGE NoImplicitPrelude #
# OPTIONS_GHC -fno - warn - unused - imports #
Derived from AWS service descriptions , licensed under Apache 2.0 .
Module : Amazonka . WellArchitected . Types . ChoiceStatus
Copyright : ( c ) 2013 - 2023
License : Mozilla Public License , v. 2.0 .
Maintainer : < brendan.g.hay+ >
Portability : non - portable ( GHC extensions )
module Amazonka.WellArchitected.Types.ChoiceStatus
( ChoiceStatus
( ..,
ChoiceStatus_NOT_APPLICABLE,
ChoiceStatus_SELECTED,
ChoiceStatus_UNSELECTED
),
)
where
import qualified Amazonka.Core as Core
import qualified Amazonka.Data as Data
import qualified Amazonka.Prelude as Prelude
newtype ChoiceStatus = ChoiceStatus'
{ fromChoiceStatus ::
Data.Text
}
deriving stock
( Prelude.Show,
Prelude.Read,
Prelude.Eq,
Prelude.Ord,
Prelude.Generic
)
deriving newtype
( Prelude.Hashable,
Prelude.NFData,
Data.FromText,
Data.ToText,
Data.ToByteString,
Data.ToLog,
Data.ToHeader,
Data.ToQuery,
Data.FromJSON,
Data.FromJSONKey,
Data.ToJSON,
Data.ToJSONKey,
Data.FromXML,
Data.ToXML
)
pattern ChoiceStatus_NOT_APPLICABLE :: ChoiceStatus
pattern ChoiceStatus_NOT_APPLICABLE = ChoiceStatus' "NOT_APPLICABLE"
pattern ChoiceStatus_SELECTED :: ChoiceStatus
pattern ChoiceStatus_SELECTED = ChoiceStatus' "SELECTED"
pattern ChoiceStatus_UNSELECTED :: ChoiceStatus
pattern ChoiceStatus_UNSELECTED = ChoiceStatus' "UNSELECTED"
# COMPLETE
ChoiceStatus_NOT_APPLICABLE ,
ChoiceStatus_SELECTED ,
ChoiceStatus_UNSELECTED ,
ChoiceStatus '
#
ChoiceStatus_NOT_APPLICABLE,
ChoiceStatus_SELECTED,
ChoiceStatus_UNSELECTED,
ChoiceStatus'
#-}
|
19189cca6406352451cd4bf14681193b33abced6a06d36986fd55ff895c4cb2b | kmi/irs | namespaces.lisp | Copyright © 2008
(in-package #:irs)
(defvar *source-namespaces*
'(("grnd" "-grounding#")
("rfc2616" "#")
("rio" "-in-ocml#")))
(defun register-source-namespace (prefix url)
(push (list prefix url) *source-namespaces*)
(setup-namespaces-for-source))
In here are definitions of OCML namespace prefixes that we need at
;;; compile time.
(defun setup-namespaces-for-source ()
"Set the current namespaces for building source."
;; XXX We should (but are not!) careful that we don't break the
;; bindings of the current ontology.
(dolist (namespace *source-namespaces*)
(ocml:register-namespace (first namespace) (second namespace))))
(eval-when (:load-toplevel)
(setup-namespaces-for-source))
| null | https://raw.githubusercontent.com/kmi/irs/e1b8d696f61c6b6878c0e92d993ed549fee6e7dd/src/kernel/namespaces.lisp | lisp | compile time.
XXX We should (but are not!) careful that we don't break the
bindings of the current ontology. | Copyright © 2008
(in-package #:irs)
(defvar *source-namespaces*
'(("grnd" "-grounding#")
("rfc2616" "#")
("rio" "-in-ocml#")))
(defun register-source-namespace (prefix url)
(push (list prefix url) *source-namespaces*)
(setup-namespaces-for-source))
In here are definitions of OCML namespace prefixes that we need at
(defun setup-namespaces-for-source ()
"Set the current namespaces for building source."
(dolist (namespace *source-namespaces*)
(ocml:register-namespace (first namespace) (second namespace))))
(eval-when (:load-toplevel)
(setup-namespaces-for-source))
|
b38b0b1a3f9b375ac9b8bd2a82ce4ba7e4bc5ea8af05aedca39c08d676756582 | jakubfijalkowski/hsass | Compilation.hs | -- | Compilation of sass source or sass files.
{-# LANGUAGE BangPatterns #-}
{-# LANGUAGE CPP #-}
# LANGUAGE FlexibleInstances #
# LANGUAGE PatternGuards #
{-# LANGUAGE TypeSynonymInstances #-}
module Text.Sass.Compilation
(
-- * Compilation
compileFile
, compileString
, compileByteString
-- * Results
, SassExtendedResult
, StringResult
, ExtendedResult
, ExtendedResultBS
, resultString
, resultIncludes
, resultSourcemap
-- * Error reporting
, SassError
, errorStatus
, errorJson
, errorText
, errorMessage
, errorFile
, errorSource
, errorLine
, errorColumn
) where
import qualified Bindings.Libsass as Lib
import Data.ByteString (ByteString)
import qualified Data.ByteString as B
import qualified Data.ByteString.Char8 as B.C8
import qualified Data.ByteString.Unsafe as B
import Data.List (stripPrefix)
import Data.Maybe (fromMaybe)
#if !MIN_VERSION_base(4,8,0)
import Control.Applicative ((<$>))
#endif
import Control.Monad (forM, (>=>))
import Foreign
import Foreign.C
import Text.Sass.Internal
import Text.Sass.Options
-- | Represents compilation error.
data SassError = SassError {
errorStatus :: Int, -- ^ Compilation status code.
errorContext :: ForeignPtr Lib.SassContext
}
-- | Represents extended result - compiled string (or other string-like type,
-- e.g. 'ByteString') with a list of includes and a source map.
--
-- Subject to name change in future.
data SassExtendedResult a = SassExtendedResult {
resultString :: a, -- ^ Compiled string.
resultContext :: ForeignPtr Lib.SassContext
}
-- | Result of compilation - 'Either' 'SassError' or a compiled string.
--
-- Subject to name change in future.
type StringResult = IO (Either SassError String)
-- | Result of compilation - 'Either' 'SassError' or extended results - a
-- compiled string with a list of included files and a source map.
--
-- Subject to name change in future.
type ExtendedResult = IO (Either SassError (SassExtendedResult String))
--
-- | Result of compilation - 'Either' 'SassError' or extended results - a
-- compiled 'ByteString' with a list of included files and a source map.
--
-- Subject to name change in future.
type ExtendedResultBS = IO (Either SassError (SassExtendedResult ByteString))
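-- Minimal usage sketch (not part of this module): the annotation on the
-- binding selects the 'SassResult' instance, so the same call can yield a
-- plain 'String' or an extended result. @opts@ stands for whatever
-- 'SassOptions' value the caller has built.
--
-- > compileScss :: SassOptions -> StringResult
-- > compileScss opts = compileString "foo { margin: 1px + 2px; }" opts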
-- | Typeclass that allows multiple results from compilation functions.
--
-- Currently, only three types are supported - 'String', 'ByteString' and
-- 'SassExtendedResult' @a@ (where a is something that is an instance of
-- 'SassResult'). The first provides only a compiled string, the latter one
-- gives access to a list of included files and a source map (if available).
class SassResult a where
toSassResult :: Bool -> ForeignPtr Lib.SassContext -> IO a
instance Show SassError where
show (SassError s _) =
"SassError: cannot compile provided source, error status: " ++ show s
instance Eq SassError where
(SassError s1 _) == (SassError s2 _) = s1 == s2
instance Show (SassExtendedResult a) where
show _ = "SassExtendedResult"
-- | Only compiled code.
instance SassResult String where
toSassResult stripEncoding ptr = withForeignPtr ptr $ \ctx -> do
result <- Lib.sass_context_get_output_string ctx
!result' <- peekUTF8CString result
return $ if stripEncoding then strip result' else result'
where
strip s
| Just stripped <- stripPrefix "@charset \"UTF-8\";\n" s = stripped
| Just stripped <- stripPrefix "\65279" s = stripped
| otherwise = s
-- | Only compiled code (UTF-8 encoding).
instance SassResult ByteString where
toSassResult stripEncoding ptr = withForeignPtr ptr $ \ctx -> do
result <- Lib.sass_context_get_output_string ctx
!result' <- B.packCString result
return $ if stripEncoding then strip result' else result'
where
strip s
| Just stripped <- stripCharset s = stripped
| Just stripped <- stripBom s = stripped
| otherwise = s
stripCharset = stripPrefixBS (B.C8.pack "@charset \"UTF-8\";\n")
stripBom = stripPrefixBS (B.C8.pack "\239\187\191")
stripPrefixBS bs1 bs2
| bs1 `B.C8.isPrefixOf` bs2 = Just (B.unsafeDrop (B.length bs1) bs2)
| otherwise = Nothing
-- | Compiled code with includes and a source map.
instance (SassResult a) => SassResult (SassExtendedResult a) where
toSassResult stripEncoding ptr = do
str <- toSassResult stripEncoding ptr
return $ SassExtendedResult str ptr
-- | Loads specified property from a context and converts it to desired type.
loadFromError :: (Ptr Lib.SassContext -> IO a) -- ^ Accessor function.
-> (a -> IO b) -- ^ Conversion method.
-> SassError -- ^ Pointer to context.
-> IO b -- ^ Result.
loadFromError get conv err = withForeignPtr ptr $ get >=> conv
where ptr = errorContext err
-- | Equivalent of @'loadFromError' 'get' 'peekUTF8CString 'err'@.
loadStringFromError
:: (Ptr Lib.SassContext -> IO CString) -- ^ Accessor function.
-> SassError -- ^ Pointer to context.
-> IO String -- ^ Result.
loadStringFromError get = loadFromError get peekUTF8CString
-- | Equivalent of @'loadFromError' 'get' 'fromInteger' 'err'@.
loadIntFromError :: (Integral a)
=> (Ptr Lib.SassContext -> IO a) -- ^ Accessor function.
-> SassError -- ^ Pointer to context.
-> IO Int -- ^ Result.
loadIntFromError get = loadFromError get (return.fromIntegral)
-- | Loads information about an error as JSON.
errorJson :: SassError -> IO String
errorJson = loadStringFromError Lib.sass_context_get_error_json
-- | Loads an error text.
errorText :: SassError -> IO String
errorText = loadStringFromError Lib.sass_context_get_error_text
-- | Loads a user-friendly error message.
errorMessage :: SassError -> IO String
errorMessage = loadStringFromError Lib.sass_context_get_error_message
-- | Loads a filename where the problem occurred.
errorFile :: SassError -> IO String
errorFile = loadStringFromError Lib.sass_context_get_error_file
-- | Loads an error source.
errorSource :: SassError -> IO String
errorSource = loadStringFromError Lib.sass_context_get_error_src
-- | Loads the line in the file where the problem occurred.
errorLine :: SassError -> IO Int
errorLine = loadIntFromError Lib.sass_context_get_error_line
-- | Loads the column in the file where the problem occurred.
errorColumn :: SassError -> IO Int
errorColumn = loadIntFromError Lib.sass_context_get_error_column
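-- Error-reporting sketch (hypothetical helper, not part of this module):
-- the individual accessors above are usually combined into one diagnostic.
--
-- > describeError :: SassError -> IO String
-- > describeError err = do
-- >   msg  <- errorMessage err
-- >   file <- errorFile err
-- >   line <- errorLine err
-- >   col  <- errorColumn err
-- >   return (file ++ ":" ++ show line ++ ":" ++ show col ++ ": " ++ msg)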
-- | Loads a list of files that have been included during compilation.
resultIncludes :: SassExtendedResult a -> IO [String]
resultIncludes ex = withForeignPtr (resultContext ex) $ \ctx -> do
lst <- Lib.sass_context_get_included_files ctx
len <- Lib.sass_context_get_included_files_size ctx
forM (arrayRange $ fromIntegral len) (peekElemOff lst >=> peekUTF8CString)
-- | Loads a source map if it was generated by libsass.
resultSourcemap :: SassExtendedResult a -> IO (Maybe String)
resultSourcemap ex = withForeignPtr (resultContext ex) $ \ctx -> do
cstr <- Lib.sass_context_get_source_map_string ctx
if cstr == nullPtr
then return Nothing
else Just <$> peekUTF8CString cstr
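-- Sketch of consuming an extended result (hypothetical helper, not part of
-- this module): the compiled output, the include list and the optional source
-- map all come from the same 'SassExtendedResult'.
--
-- > dumpExtended :: SassExtendedResult String -> IO ()
-- > dumpExtended ex = do
-- >   putStrLn (resultString ex)
-- >   resultIncludes ex >>= mapM_ putStrLn
-- >   resultSourcemap ex >>= mapM_ putStrLn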
-- | Common code for 'compileFile' and 'compileString'.
compileInternal :: (SassResult b)
=> CString -- ^ String that will be passed to 'make context'.
-> SassOptions
-> (CString -> IO (Ptr a)) -- ^ Make context.
-> (Ptr a -> IO CInt) -- ^ Compile context.
-> FinalizerPtr a -- ^ Context finalizer.
-> IO (Either SassError b)
compileInternal str opts make compile finalizer = do
-- Makes an assumption, that Sass_*_Context inherits from
-- and Sass_Options.
context <- make str
let opts' = castPtr context
copyOptionsToNative opts opts'
status <- withFunctions opts opts' $ compile context
fptr <- castForeignPtr <$> newForeignPtr finalizer context
if status /= 0
then return $ Left $
SassError (fromIntegral status) fptr
else do
result <- toSassResult (sassStripEncodingInfo opts) fptr
return $ Right result
-- | Compiles a file using specified options.
compileFile :: SassResult a
=> FilePath -- ^ Path to the file.
-> SassOptions -- ^ Compilation options.
-> IO (Either SassError a) -- ^ Error or output string.
compileFile path opts = withUTF8CString path $ \cpath ->
compileInternal cpath opts
Lib.sass_make_file_context
Lib.sass_compile_file_context
Lib.p_sass_delete_file_context
-- | Compiles raw content using specified options.
compileString :: SassResult a
=> String -- ^ String to compile.
-> SassOptions -- ^ Compilation options.
-> IO (Either SassError a) -- ^ Error or output string.
compileString str opts = do
cdata <- newUTF8CString str
compileInternal cdata opts
Lib.sass_make_data_context
Lib.sass_compile_data_context
Lib.p_sass_delete_data_context
-- | Compiles raw content using specified options.
compileByteString :: SassResult a
=> ByteString -- ^ String to compile.
-> SassOptions -- ^ Compilation options.
-> IO (Either SassError a) -- ^ Error or output string.
compileByteString str opts = do
cdata <- newCStringFromBS str
compileInternal cdata opts
Lib.sass_make_data_context
Lib.sass_compile_data_context
Lib.p_sass_delete_data_context
| null | https://raw.githubusercontent.com/jakubfijalkowski/hsass/afbce06f13edcc1ec16eb8f93cc730bc735dfdaf/Text/Sass/Compilation.hs | haskell | | Compilation of sass source or sass files.
|
3febc0cdf3ecf11879768c77952289efc98a14c1dbd0c0d16472f60817ea71d8 | kazu-yamamoto/wai-app-file-cgi | Path.hs | # LANGUAGE OverloadedStrings , BangPatterns #
module Network.Wai.Application.Classic.Path (
Path
, pathString
, fromString
, (</>), (<\>), (<.>)
, breakAtSeparator, hasLeadingPathSeparator, hasTrailingPathSeparator
, isSuffixOf
) where
import Data.ByteString (ByteString)
import qualified Data.ByteString as BS
import qualified Data.ByteString.Char8 as B8
import Data.String
import Data.Word
----------------------------------------------------------------
-- | File path.
type Path = ByteString
pathString :: Path -> String
pathString = B8.unpack
{-# INLINE pathString #-}
----------------------------------------------------------------
pathDot :: Word8
pathDot = 46
pathDotBS :: ByteString
pathDotBS = "."
pathSep :: Word8
pathSep = 47
pathSepBS :: ByteString
pathSepBS = "/"
{-|
Checking if the path begins with the path separator.
>>> hasLeadingPathSeparator "/foo/bar"
True
>>> hasLeadingPathSeparator "foo/bar"
False
-}
hasLeadingPathSeparator :: Path -> Bool
hasLeadingPathSeparator bs
| BS.null bs = False
| BS.head bs == pathSep = True
| otherwise = False
{-# INLINE hasLeadingPathSeparator #-}
{-|
Checking if the path ends with the path separator.
>>> hasTrailingPathSeparator "/foo/bar/"
True
>>> hasTrailingPathSeparator "/foo/bar"
False
-}
hasTrailingPathSeparator :: Path -> Bool
hasTrailingPathSeparator bs
| BS.null bs = False
| BS.last bs == pathSep = True
| otherwise = False
{-# INLINE hasTrailingPathSeparator #-}
{-|
Appending with the file separator.
>>> "/foo" </> "bar"
"/foo/bar"
>>> "/foo/" </> "bar"
"/foo/bar"
>>> "/foo" </> "/bar"
"/foo/bar"
>>> "/foo/" </> "/bar"
"/foo/bar"
-}
(</>) :: Path -> Path -> Path
p1 </> p2 = p
where
!has1 = hasTrailingPathSeparator p1
!has2 = hasLeadingPathSeparator p2
!p | has1 && not has2 = p1 `BS.append` p2
| not has1 && has2 = p1 `BS.append` p2
| has1 = p1 `BS.append` BS.tail p2
| otherwise = BS.concat [p1,pathSepBS,p2]
{-# INLINE (</>) #-}
{-|
Removing prefix. The prefix of the second argument is removed
from the first argument.
>>> "foobar" <\> "foo"
"bar"
>>> "foo" <\> "foobar"
""
>>> "foobar" <\> "baz"
"bar"
-}
(<\>) :: Path -> Path -> Path
p1 <\> p2 = p
where
!p = BS.drop (BS.length p2) p1
{-# INLINE (<\>) #-}
{-|
Adding suffix.
-}
(<.>) :: Path -> Path -> Path
p1 <.> p2 = p
where
!p = BS.concat [p1,pathDotBS,p2]
{-# INLINE (<.>) #-}
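-- Usage sketch (not part of this module; relies on OverloadedStrings, which
-- is enabled here): building a candidate file path for a request with the
-- combinators above. '</>' inserts the separator only when needed and '<.>'
-- appends an extension.
--
-- >>> ("/var/www" </> "static/app") <.> "css"
-- "/var/www/static/app.css"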
{-|
Breaking at the first path separator.
>>> breakAtSeparator "/foo/bar/baz"
("","/foo/bar/baz")
>>> breakAtSeparator "foo/bar/baz"
("foo","/bar/baz")
>>> breakAtSeparator "foo"
("foo","")
-}
breakAtSeparator :: Path -> (Path,Path)
breakAtSeparator p = BS.break (== pathSep) p
{-# INLINE breakAtSeparator #-}
isSuffixOf :: Path -> Path -> Bool
isSuffixOf = BS.isSuffixOf
{-# INLINE isSuffixOf #-}
| null | https://raw.githubusercontent.com/kazu-yamamoto/wai-app-file-cgi/2a096b810ded557bd5e0d2bfe0e1b7736d7823ab/Network/Wai/Application/Classic/Path.hs | haskell | --------------------------------------------------------------
| File path.
--------------------------------------------------------------
pathDot :: Word8
|
Checking if the path ends with the path separator.
>>> hasLeadingPathSeparator "/foo/bar"
True
>>> hasLeadingPathSeparator "foo/bar"
False
|
Checking if the path ends with the path separator.
>>> hasTrailingPathSeparator "/foo/bar/"
True
>>> hasTrailingPathSeparator "/foo/bar"
False
|
Appending with the file separator.
>>> "/foo" </> "bar"
"/foo/bar"
>>> "/foo/" </> "bar"
"/foo/bar"
>>> "/foo" </> "/bar"
"/foo/bar"
>>> "/foo/" </> "/bar"
"/foo/bar"
# INLINE (</>) #
# INLINE (<\>) #
|
Adding suffix.
# INLINE (<.>) # | # LANGUAGE OverloadedStrings , BangPatterns #
module Network.Wai.Application.Classic.Path (
Path
, pathString
, fromString
, (</>), (<\>), (<.>)
, breakAtSeparator, hasLeadingPathSeparator, hasTrailingPathSeparator
, isSuffixOf
) where
import Data.ByteString (ByteString)
import qualified Data.ByteString as BS
import qualified Data.ByteString.Char8 as B8
import Data.String
import Data.Word
type Path = ByteString
pathString :: Path -> String
pathString = B8.unpack
# INLINE pathString #
pathDot = 46
pathDotBS :: ByteString
pathDotBS = "."
pathSep :: Word8
pathSep = 47
pathSepBS :: ByteString
pathSepBS = "/"
hasLeadingPathSeparator :: Path -> Bool
hasLeadingPathSeparator bs
| BS.null bs = False
| BS.head bs == pathSep = True
| otherwise = False
# INLINE hasLeadingPathSeparator #
hasTrailingPathSeparator :: Path -> Bool
hasTrailingPathSeparator bs
| BS.null bs = False
| BS.last bs == pathSep = True
| otherwise = False
# INLINE hasTrailingPathSeparator #
(</>) :: Path -> Path -> Path
p1 </> p2 = p
where
!has1 = hasTrailingPathSeparator p1
!has2 = hasLeadingPathSeparator p2
!p | has1 && not has2 = p1 `BS.append` p2
| not has1 && has2 = p1 `BS.append` p2
| has1 = p1 `BS.append` BS.tail p2
| otherwise = BS.concat [p1,pathSepBS,p2]
|
Removing prefix . The prefix of the second argument is removed
from the first argument .
> > > " foobar " < \ > " foo "
" bar "
> > > " foo " < \ > " foobar "
" "
> > > " foobar " < \ > " baz "
" bar "
Removing prefix. The prefix of the second argument is removed
from the first argument.
>>> "foobar" <\> "foo"
"bar"
>>> "foo" <\> "foobar"
""
>>> "foobar" <\> "baz"
"bar"
-}
(<\>) :: Path -> Path -> Path
p1 <\> p2 = p
where
!p = BS.drop (BS.length p2) p1
(<.>) :: Path -> Path -> Path
p1 <.> p2 = p
where
!p = BS.concat [p1,pathDotBS,p2]
|
Breaking at the first path separator .
> > > breakAtSeparator " /foo / bar / baz "
( " " , " /foo / bar / baz " )
> > > breakAtSeparator " foo / bar / baz "
( " / baz " )
> > > breakAtSeparator " foo "
( " foo " , " " )
Breaking at the first path separator.
>>> breakAtSeparator "/foo/bar/baz"
("","/foo/bar/baz")
>>> breakAtSeparator "foo/bar/baz"
("foo","/bar/baz")
>>> breakAtSeparator "foo"
("foo","")
-}
breakAtSeparator :: Path -> (Path,Path)
breakAtSeparator p = BS.break (== pathSep) p
# INLINE breakAtSeparator #
isSuffixOf :: Path -> Path -> Bool
isSuffixOf = BS.isSuffixOf
# INLINE isSuffixOf #
|
017894963be3a3c87c469586bf73a7f6ca7db2878809f089cce2201109d1992a | dgiot/dgiot | emqx_connection.erl | %%--------------------------------------------------------------------
%% Copyright (c) 2018-2022 EMQ Technologies Co., Ltd. All Rights Reserved.
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
%% You may obtain a copy of the License at
%%
%%     http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing, software
%% distributed under the License is distributed on an "AS IS" BASIS,
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
%%--------------------------------------------------------------------
%% MQTT/TCP|TLS Connection
-module(emqx_connection).
-include("emqx.hrl").
-include("emqx_mqtt.hrl").
-include("logger.hrl").
-include("types.hrl").
-include_lib("snabbkaffe/include/snabbkaffe.hrl").
-logger_header("[MQTT]").
-ifdef(TEST).
-compile(export_all).
-compile(nowarn_export_all).
-endif.
%% API
-export([ start_link/3
, stop/1
]).
-export([ info/1
, stats/1
]).
-export([ async_set_keepalive/3
, async_set_keepalive/4
, async_set_socket_options/2
]).
-export([ call/2
, call/3
, cast/2
]).
%% Callback
-export([init/4]).
%% Sys callbacks
-export([ system_continue/3
, system_terminate/4
, system_code_change/4
, system_get_state/1
]).
%% Internal callback
-export([wakeup_from_hib/2, recvloop/2, get_state/1]).
%% Export for CT
-export([set_field/3]).
-import(emqx_misc,
[ maybe_apply/2
, start_timer/2
]).
-record(state, {
%% TCP/TLS Transport
transport :: esockd:transport(),
%% TCP/TLS Socket
socket :: esockd:socket(),
%% Peername of the connection
peername :: emqx_types:peername(),
%% Sockname of the connection
sockname :: emqx_types:peername(),
%% Sock State
sockstate :: emqx_types:sockstate(),
%% The {active, N} option
active_n :: pos_integer(),
%% Limiter
limiter :: maybe(emqx_limiter:limiter()),
%% Limit Timer
limit_timer :: maybe(reference()),
%% Parse State
parse_state :: emqx_frame:parse_state(),
%% Serialize options
serialize :: emqx_frame:serialize_opts(),
%% Channel State
channel :: emqx_channel:channel(),
%% GC State
gc_state :: maybe(emqx_gc:gc_state()),
%% Stats Timer
stats_timer :: disabled | maybe(reference()),
%% Idle Timeout
idle_timeout :: integer(),
%% Idle Timer
idle_timer :: maybe(reference())
}).
-type(state() :: #state{}).
-define(ACTIVE_N, 100).
-define(ENABLED(X), (X =/= undefined)).
-define(ALARM_TCP_CONGEST(Channel),
list_to_binary(io_lib:format("mqtt_conn/congested/~s/~s",
[emqx_channel:info(clientid, Channel),
emqx_channel:info(username, Channel)]))).
-define(INFO_KEYS,
[ socktype
, peername
, sockname
, sockstate
, active_n
]).
-define(SOCK_STATS,
[ recv_oct
, recv_cnt
, send_oct
, send_cnt
, send_pend
]).
-define(ALARM_CONN_INFO_KEYS,
[ socktype
, sockname
, peername
, clientid
, username
, proto_name
, proto_ver
, connected_at
]).
-define(ALARM_SOCK_STATS_KEYS,
[ send_pend
, recv_cnt
, recv_oct
, send_cnt
, send_oct
]).
-define(ALARM_SOCK_OPTS_KEYS,
[ high_watermark
, high_msgq_watermark
, sndbuf
, recbuf
, buffer
]).
-dialyzer({no_match, [info/2]}).
-spec(start_link(esockd:transport(), esockd:socket(), proplists:proplist())
-> {ok, pid()}).
start_link(Transport, Socket, Options) ->
Args = [self(), Transport, Socket, Options],
CPid = proc_lib:spawn_link(?MODULE, init, Args),
{ok, CPid}.
%%--------------------------------------------------------------------
%% API
%%--------------------------------------------------------------------
%% @doc Get infos of the connection/channel.
-spec(info(pid() | state()) -> emqx_types:infos()).
info(CPid) when is_pid(CPid) ->
call(CPid, info);
info(State = #state{channel = Channel}) ->
ChanInfo = emqx_channel:info(Channel),
SockInfo = maps:from_list(
info(?INFO_KEYS, State)),
ChanInfo#{sockinfo => SockInfo}.
info(Keys, State) when is_list(Keys) ->
[{Key, info(Key, State)} || Key <- Keys];
info(socktype, #state{transport = Transport, socket = Socket}) ->
Transport:type(Socket);
info(peername, #state{peername = Peername}) ->
Peername;
info(sockname, #state{sockname = Sockname}) ->
Sockname;
info(sockstate, #state{sockstate = SockSt}) ->
SockSt;
info(active_n, #state{active_n = ActiveN}) ->
ActiveN;
info(stats_timer, #state{stats_timer = StatsTimer}) ->
StatsTimer;
info(limit_timer, #state{limit_timer = LimitTimer}) ->
LimitTimer;
info(limiter, #state{limiter = Limiter}) ->
maybe_apply(fun emqx_limiter:info/1, Limiter).
%% @doc Get stats of the connection/channel.
-spec(stats(pid() | state()) -> emqx_types:stats()).
stats(CPid) when is_pid(CPid) ->
call(CPid, stats);
stats(#state{transport = Transport,
socket = Socket,
channel = Channel}) ->
SockStats = case Transport:getstat(Socket, ?SOCK_STATS) of
{ok, Ss} -> Ss;
{error, _} -> []
end,
ChanStats = emqx_channel:stats(Channel),
ProcStats = emqx_misc:proc_stats(),
lists:append([SockStats, ChanStats, ProcStats]).
%% @doc Set TCP keepalive socket options to override system defaults.
%% Idle: The number of seconds a connection needs to be idle before
%% TCP begins sending out keep-alive probes (Linux default 7200).
%% Interval: The number of seconds between TCP keep-alive probes
%% (Linux default 75).
%% Probes: The maximum number of TCP keep-alive probes to send before
%% giving up and killing the connection if no response is
%% obtained from the other end (Linux default 9).
%%
%% NOTE: This API sets TCP socket options, which has nothing to do with
%% the MQTT layer's keepalive (PINGREQ and PINGRESP).
async_set_keepalive(Idle, Interval, Probes) ->
async_set_keepalive(self(), Idle, Interval, Probes).
async_set_keepalive(Pid, Idle, Interval, Probes) ->
Options = [ {keepalive, true}
, {raw, 6, 4, <<Idle:32/native>>}
, {raw, 6, 5, <<Interval:32/native>>}
, {raw, 6, 6, <<Probes:32/native>>}
],
async_set_socket_options(Pid, Options).
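%% Usage sketch (hypothetical values, not taken from this module): ask the
%% kernel to probe a connection after 60s of idleness, every 30s, and to give
%% up after 5 unanswered probes. The raw options above map to TCP_KEEPIDLE,
%% TCP_KEEPINTVL and TCP_KEEPCNT, so they only take effect on platforms that
%% expose those socket options (e.g. Linux).
%%
%% enable_keepalive(ConnPid) ->
%%     emqx_connection:async_set_keepalive(ConnPid, 60, 30, 5).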
%% @doc Set custom socket options.
%% This API is made async because the call might be originated from
%% a hookpoint callback (otherwise deadlock).
%% If failed to set, the error message is logged.
async_set_socket_options(Pid, Options) ->
cast(Pid, {async_set_socket_options, Options}).
cast(Pid, Req) ->
gen_server:cast(Pid, Req).
call(Pid, Req) ->
call(Pid, Req, infinity).
call(Pid, Req, Timeout) ->
gen_server:call(Pid, Req, Timeout).
stop(Pid) ->
gen_server:stop(Pid).
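%% Console sketch (hypothetical ConnPid bound to a live connection process):
%% info/1 merges channel info with socket info, stats/1 merges socket, channel
%% and process statistics, and custom socket options can be applied without
%% blocking the caller.
%%
%% > Info = emqx_connection:info(ConnPid),
%% > Stats = emqx_connection:stats(ConnPid),
%% > ok = emqx_connection:async_set_socket_options(ConnPid, [{nodelay, true}]).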
%%--------------------------------------------------------------------
%% callbacks
%%--------------------------------------------------------------------
init(Parent, Transport, RawSocket, Options) ->
case Transport:wait(RawSocket) of
{ok, Socket} ->
run_loop(Parent, init_state(Transport, Socket, Options));
{error, Reason} ->
ok = Transport:fast_close(RawSocket),
exit_on_sock_error(Reason)
end.
init_state(Transport, Socket, Options) ->
{ok, Peername} = Transport:ensure_ok_or_exit(peername, [Socket]),
{ok, Sockname} = Transport:ensure_ok_or_exit(sockname, [Socket]),
Peercert = Transport:ensure_ok_or_exit(peercert, [Socket]),
ConnInfo = #{socktype => Transport:type(Socket),
peername => Peername,
sockname => Sockname,
peercert => Peercert,
conn_mod => ?MODULE
},
Zone = proplists:get_value(zone, Options),
ActiveN = proplists:get_value(active_n, Options, ?ACTIVE_N),
PubLimit = emqx_zone:publish_limit(Zone),
BytesIn = proplists:get_value(rate_limit, Options),
RateLimit = emqx_zone:ratelimit(Zone),
Limiter = emqx_limiter:init(Zone, PubLimit, BytesIn, RateLimit),
FrameOpts = emqx_zone:mqtt_frame_options(Zone),
ParseState = emqx_frame:initial_parse_state(FrameOpts),
Serialize = emqx_frame:serialize_opts(),
Channel = emqx_channel:init(ConnInfo, Options),
GcState = emqx_zone:init_gc_state(Zone),
StatsTimer = emqx_zone:stats_timer(Zone),
IdleTimeout = emqx_zone:idle_timeout(Zone),
IdleTimer = start_timer(IdleTimeout, idle_timeout),
#state{transport = Transport,
socket = Socket,
peername = Peername,
sockname = Sockname,
sockstate = idle,
active_n = ActiveN,
limiter = Limiter,
parse_state = ParseState,
serialize = Serialize,
channel = Channel,
gc_state = GcState,
stats_timer = StatsTimer,
idle_timeout = IdleTimeout,
idle_timer = IdleTimer
}.
run_loop(Parent, State = #state{transport = Transport,
socket = Socket,
peername = Peername,
channel = Channel}) ->
emqx_logger:set_metadata_peername(esockd:format(Peername)),
_ = emqx_misc:tune_heap_size(emqx_zone:oom_policy(
emqx_channel:info(zone, Channel))),
case activate_socket(State) of
{ok, NState} -> hibernate(Parent, NState);
{error, Reason} ->
ok = Transport:fast_close(Socket),
exit_on_sock_error(Reason)
end.
-spec exit_on_sock_error(any()) -> no_return().
exit_on_sock_error(Reason) when Reason =:= einval;
Reason =:= enotconn;
Reason =:= closed ->
erlang:exit(normal);
exit_on_sock_error(timeout) ->
erlang:exit({shutdown, ssl_upgrade_timeout});
exit_on_sock_error(Reason) ->
erlang:exit({shutdown, Reason}).
%%--------------------------------------------------------------------
%% Recv Loop
recvloop(Parent, State = #state{idle_timeout = IdleTimeout}) ->
receive
Msg ->
handle_recv(Msg, Parent, State)
after
IdleTimeout + 100 ->
hibernate(Parent, cancel_stats_timer(State))
end.
handle_recv({system, From, Request}, Parent, State) ->
sys:handle_system_msg(Request, From, Parent, ?MODULE, [], State);
handle_recv({'EXIT', Parent, Reason}, Parent, State) ->
%% FIXME: it's not trapping exit, should never receive an EXIT
terminate(Reason, State);
handle_recv(Msg, Parent, State = #state{idle_timeout = IdleTimeout}) ->
case process_msg([Msg], ensure_stats_timer(IdleTimeout, State)) of
{ok, NewState} ->
?MODULE:recvloop(Parent, NewState);
{stop, Reason, NewSate} ->
terminate(Reason, NewSate)
end.
hibernate(Parent, State) ->
proc_lib:hibernate(?MODULE, wakeup_from_hib, [Parent, State]).
%% Maybe do something here later.
wakeup_from_hib(Parent, State) ->
?MODULE:recvloop(Parent, State).
%%--------------------------------------------------------------------
%% Ensure/cancel stats timer
-compile({inline, [ensure_stats_timer/2]}).
ensure_stats_timer(Timeout, State = #state{stats_timer = undefined}) ->
State#state{stats_timer = start_timer(Timeout, emit_stats)};
ensure_stats_timer(_Timeout, State) -> State.
-compile({inline, [cancel_stats_timer/1]}).
cancel_stats_timer(State = #state{stats_timer = TRef}) when is_reference(TRef) ->
?tp(debug, cancel_stats_timer, #{}),
ok = emqx_misc:cancel_timer(TRef),
State#state{stats_timer = undefined};
cancel_stats_timer(State) -> State.
%%--------------------------------------------------------------------
%% Process next Msg
process_msg([], State) ->
{ok, State};
process_msg([Msg | More], State) ->
try
case handle_msg(Msg, State) of
ok ->
process_msg(More, State);
{ok, NState} ->
process_msg(More, NState);
{ok, Msgs, NState} ->
process_msg(append_msg(More, Msgs), NState);
{stop, Reason, NState} ->
{stop, Reason, NState}
end
catch
exit:normal ->
{stop, normal, State};
exit:shutdown ->
{stop, shutdown, State};
exit:{shutdown, _} = Shutdown ->
{stop, Shutdown, State};
Exception:Context:Stack ->
{stop, #{exception => Exception,
context => Context,
stacktrace => Stack}, State}
end.
-compile({inline, [append_msg/2]}).
append_msg([], Msgs) when is_list(Msgs) ->
Msgs;
append_msg([], Msg) -> [Msg];
append_msg(Q, Msgs) when is_list(Msgs) ->
lists:append(Q, Msgs);
append_msg(Q, Msg) ->
lists:append(Q, [Msg]).
%%--------------------------------------------------------------------
%% Handle a Msg
handle_msg({'$gen_call', From, Req}, State) ->
case handle_call(From, Req, State) of
{reply, Reply, NState} ->
gen_server:reply(From, Reply),
{ok, NState};
{stop, Reason, Reply, NState} ->
gen_server:reply(From, Reply),
stop(Reason, NState)
end;
handle_msg({'$gen_cast', Req}, State) ->
NewState = handle_cast(Req, State),
{ok, NewState};
handle_msg({Inet, _Sock, Data}, State) when Inet == tcp; Inet == ssl ->
?LOG(debug, "RECV ~0p", [Data]),
Oct = iolist_size(Data),
inc_counter(incoming_bytes, Oct),
ok = emqx_metrics:inc('bytes.received', Oct),
parse_incoming(Data, State);
handle_msg({incoming, Packet = ?CONNECT_PACKET(ConnPkt)},
State = #state{idle_timer = IdleTimer}) ->
ok = emqx_misc:cancel_timer(IdleTimer),
Serialize = emqx_frame:serialize_opts(ConnPkt),
NState = State#state{serialize = Serialize,
idle_timer = undefined
},
handle_incoming(Packet, NState);
handle_msg({incoming, Packet}, State) ->
handle_incoming(Packet, State);
handle_msg({outgoing, Packets}, State) ->
handle_outgoing(Packets, State);
handle_msg({Error, _Sock, Reason}, State)
when Error == tcp_error; Error == ssl_error ->
handle_info({sock_error, Reason}, State);
handle_msg({Closed, _Sock}, State)
when Closed == tcp_closed; Closed == ssl_closed ->
handle_info({sock_closed, Closed}, close_socket(State));
handle_msg({Passive, _Sock}, State)
when Passive == tcp_passive; Passive == ssl_passive ->
%% In Stats
Pubs = emqx_pd:reset_counter(incoming_pubs),
Bytes = emqx_pd:reset_counter(incoming_bytes),
InStats = #{cnt => Pubs, oct => Bytes},
%% Ensure Rate Limit
NState = ensure_rate_limit(InStats, State),
%% Run GC and Check OOM
NState1 = check_oom(run_gc(InStats, NState)),
handle_info(activate_socket, NState1);
handle_msg(Deliver = {deliver, _Topic, _Msg},
#state{active_n = ActiveN} = State) ->
Delivers = [Deliver | emqx_misc:drain_deliver(ActiveN)],
with_channel(handle_deliver, [Delivers], State);
%% Something sent
handle_msg({inet_reply, _Sock, ok}, State = #state{active_n = ActiveN}) ->
case emqx_pd:get_counter(outgoing_pubs) > ActiveN of
true ->
Pubs = emqx_pd:reset_counter(outgoing_pubs),
Bytes = emqx_pd:reset_counter(outgoing_bytes),
OutStats = #{cnt => Pubs, oct => Bytes},
{ok, check_oom(run_gc(OutStats, State))};
false -> ok
end;
handle_msg({inet_reply, _Sock, {error, Reason}}, State) ->
handle_info({sock_error, Reason}, State);
handle_msg({connack, ConnAck}, State) ->
handle_outgoing(ConnAck, State);
handle_msg({close, Reason}, State) ->
?LOG(debug, "Force to close the socket due to ~p", [Reason]),
handle_info({sock_closed, Reason}, close_socket(State));
handle_msg({event, connected}, State = #state{channel = Channel}) ->
ClientId = emqx_channel:info(clientid, Channel),
emqx_cm:insert_channel_info(ClientId, info(State), stats(State));
handle_msg({event, disconnected}, State = #state{channel = Channel}) ->
ClientId = emqx_channel:info(clientid, Channel),
emqx_cm:set_chan_info(ClientId, info(State)),
emqx_cm:connection_closed(ClientId),
{ok, State};
handle_msg({event, _Other}, State = #state{channel = Channel}) ->
ClientId = emqx_channel:info(clientid, Channel),
emqx_cm:set_chan_info(ClientId, info(State)),
emqx_cm:set_chan_stats(ClientId, stats(State)),
{ok, State};
handle_msg({timeout, TRef, TMsg}, State) ->
handle_timeout(TRef, TMsg, State);
handle_msg(Shutdown = {shutdown, _Reason}, State) ->
stop(Shutdown, State);
handle_msg(Msg, State) ->
handle_info(Msg, State).
%%--------------------------------------------------------------------
%% Terminate
-spec terminate(any(), state()) -> no_return().
terminate(Reason, State = #state{channel = Channel, transport = Transport,
socket = Socket}) ->
try
Channel1 = emqx_channel:set_conn_state(disconnected, Channel),
emqx_congestion:cancel_alarms(Socket, Transport, Channel1),
emqx_channel:terminate(Reason, Channel1),
close_socket_ok(State)
catch
E:C:S ->
?tp(warning, unclean_terminate, #{exception => E, context => C, stacktrace => S})
end,
?tp(info, terminate, #{reason => Reason}),
maybe_raise_exception(Reason).
%% close socket, discard new state, always return ok.
close_socket_ok(State) ->
_ = close_socket(State),
ok.
%% tell truth about the original exception
maybe_raise_exception(#{exception := Exception,
context := Context,
stacktrace := Stacktrace
}) ->
erlang:raise(Exception, Context, Stacktrace);
maybe_raise_exception(Reason) ->
exit(Reason).
%%--------------------------------------------------------------------
%% Sys callbacks
system_continue(Parent, _Debug, State) ->
?MODULE:recvloop(Parent, State).
system_terminate(Reason, _Parent, _Debug, State) ->
terminate(Reason, State).
system_code_change(State, _Mod, _OldVsn, _Extra) ->
{ok, State}.
system_get_state(State) -> {ok, State}.
%%--------------------------------------------------------------------
%% Handle call
handle_call(_From, info, State) ->
{reply, info(State), State};
handle_call(_From, stats, State) ->
{reply, stats(State), State};
handle_call(_From, {ratelimit, Policy}, State = #state{channel = Channel}) ->
Zone = emqx_channel:info(zone, Channel),
Limiter = emqx_limiter:init(Zone, Policy),
{reply, ok, State#state{limiter = Limiter}};
handle_call(_From, Req, State = #state{channel = Channel}) ->
case emqx_channel:handle_call(Req, Channel) of
{reply, Reply, NChannel} ->
{reply, Reply, State#state{channel = NChannel}};
{shutdown, Reason, Reply, NChannel} ->
shutdown(Reason, Reply, State#state{channel = NChannel});
{shutdown, Reason, Reply, OutPacket, NChannel} ->
NState = State#state{channel = NChannel},
ok = handle_outgoing(OutPacket, NState),
shutdown(Reason, Reply, NState)
end.
%%--------------------------------------------------------------------
%% Handle timeout
handle_timeout(_TRef, idle_timeout, State) ->
shutdown(idle_timeout, State);
handle_timeout(_TRef, limit_timeout, State) ->
NState = State#state{sockstate = idle,
limit_timer = undefined
},
handle_info(activate_socket, NState);
handle_timeout(_TRef, emit_stats, State = #state{channel = Channel, transport = Transport,
socket = Socket}) ->
emqx_congestion:maybe_alarm_conn_congestion(Socket, Transport, Channel),
ClientId = emqx_channel:info(clientid, Channel),
emqx_cm:set_chan_stats(ClientId, stats(State)),
{ok, State#state{stats_timer = undefined}};
handle_timeout(TRef, keepalive, State = #state{transport = Transport,
socket = Socket,
channel = Channel})->
case emqx_channel:info(conn_state, Channel) of
disconnected -> {ok, State};
_ ->
case Transport:getstat(Socket, [recv_oct]) of
{ok, [{recv_oct, RecvOct}]} ->
handle_timeout(TRef, {keepalive, RecvOct}, State);
{error, Reason} ->
handle_info({sock_error, Reason}, State)
end
end;
handle_timeout(TRef, Msg, State) ->
with_channel(handle_timeout, [TRef, Msg], State).
%%--------------------------------------------------------------------
%% Parse incoming data
-compile({inline, [parse_incoming/2]}).
parse_incoming(Data, State) ->
{Packets, NState} = parse_incoming(Data, [], State),
{ok, next_incoming_msgs(Packets), NState}.
parse_incoming(<<>>, Packets, State) ->
{Packets, State};
parse_incoming(Data, Packets, State = #state{parse_state = ParseState}) ->
try emqx_frame:parse(Data, ParseState) of
{more, NParseState} ->
{Packets, State#state{parse_state = NParseState}};
{ok, Packet, Rest, NParseState} ->
NState = State#state{parse_state = NParseState},
parse_incoming(Rest, [Packet | Packets], NState)
catch
error:proxy_protocol_config_disabled:_Stk ->
?LOG(error,
"~nMalformed packet, "
"please check proxy_protocol config for specific listeners and zones~n"),
{[{frame_error, proxy_protocol_config_disabled} | Packets], State};
error:Reason:Stk ->
?LOG(error, "~nParse failed for ~0p~n~0p~nFrame data:~0p",
[Reason, Stk, Data]),
{[{frame_error, Reason} | Packets], State}
end.
-compile({inline, [next_incoming_msgs/1]}).
next_incoming_msgs([Packet]) ->
{incoming, Packet};
next_incoming_msgs(Packets) ->
[{incoming, Packet} || Packet <- lists:reverse(Packets)].
%%--------------------------------------------------------------------
%% Handle incoming packet
handle_incoming(Packet, State) when is_record(Packet, mqtt_packet) ->
ok = inc_incoming_stats(Packet),
?LOG(debug, "RECV ~s", [emqx_packet:format(Packet)]),
with_channel(handle_in, [Packet], State);
handle_incoming(FrameError, State) ->
with_channel(handle_in, [FrameError], State).
%%--------------------------------------------------------------------
%% With Channel
with_channel(Fun, Args, State = #state{channel = Channel}) ->
case erlang:apply(emqx_channel, Fun, Args ++ [Channel]) of
ok -> {ok, State};
{ok, NChannel} ->
{ok, State#state{channel = NChannel}};
{ok, Replies, NChannel} ->
{ok, next_msgs(Replies), State#state{channel = NChannel}};
{shutdown, Reason, NChannel} ->
shutdown(Reason, State#state{channel = NChannel});
{shutdown, Reason, Packet, NChannel} ->
NState = State#state{channel = NChannel},
ok = handle_outgoing(Packet, NState),
shutdown(Reason, NState)
end.
%%--------------------------------------------------------------------
%% Handle outgoing packets
handle_outgoing(Packets, State) when is_list(Packets) ->
send(lists:map(serialize_and_inc_stats_fun(State), Packets), State);
handle_outgoing(Packet, State) ->
send((serialize_and_inc_stats_fun(State))(Packet), State).
serialize_and_inc_stats_fun(#state{serialize = Serialize}) ->
fun(Packet) ->
case emqx_frame:serialize_pkt(Packet, Serialize) of
<<>> -> ?LOG(warning, "~s is discarded due to the frame is too large!",
[emqx_packet:format(Packet)]),
ok = emqx_metrics:inc('delivery.dropped.too_large'),
ok = emqx_metrics:inc('delivery.dropped'),
ok = inc_outgoing_stats({error, message_too_large}),
<<>>;
Data -> ?LOG(debug, "SEND ~s", [emqx_packet:format(Packet)]),
ok = inc_outgoing_stats(Packet),
Data
end
end.
%%--------------------------------------------------------------------
%% Send data
-spec(send(iodata(), state()) -> ok).
send(IoData, #state{transport = Transport, socket = Socket, channel = Channel}) ->
Oct = iolist_size(IoData),
ok = emqx_metrics:inc('bytes.sent', Oct),
inc_counter(outgoing_bytes, Oct),
emqx_congestion:maybe_alarm_conn_congestion(Socket, Transport, Channel),
case Transport:async_send(Socket, IoData, []) of
ok -> ok;
Error = {error, _Reason} ->
%% Send an inet_reply to postpone handling the error
self() ! {inet_reply, Socket, Error},
ok
end.
%%--------------------------------------------------------------------
%% Handle Info
handle_info(activate_socket, State = #state{sockstate = OldSst}) ->
case activate_socket(State) of
{ok, NState = #state{sockstate = NewSst}} ->
case OldSst =/= NewSst of
true -> {ok, {event, NewSst}, NState};
false -> {ok, NState}
end;
{error, Reason} ->
handle_info({sock_error, Reason}, State)
end;
handle_info({sock_error, Reason}, State) ->
case Reason =/= closed andalso Reason =/= einval of
true -> ?LOG(warning, "socket_error: ~p", [Reason]);
false -> ok
end,
handle_info({sock_closed, Reason}, close_socket(State));
handle_info(Info, State) ->
with_channel(handle_info, [Info], State).
%%--------------------------------------------------------------------
%% Handle Info
handle_cast({async_set_socket_options, Opts},
State = #state{transport = Transport,
socket = Socket
}) ->
case Transport:setopts(Socket, Opts) of
ok -> ?tp(info, "custom_socket_options_successfully", #{opts => Opts});
Err -> ?tp(error, "failed_to_set_custom_socket_optionn", #{reason => Err})
end,
State;
handle_cast(Req, State) ->
?tp(error, "received_unknown_cast", #{cast => Req}),
State.
%%--------------------------------------------------------------------
%% Ensure rate limit
ensure_rate_limit(Stats, State = #state{limiter = Limiter}) ->
case ?ENABLED(Limiter) andalso emqx_limiter:check(Stats, Limiter) of
false -> State;
{ok, Limiter1} ->
State#state{limiter = Limiter1};
{pause, Time, Limiter1} ->
?LOG(notice, "Pause ~pms due to rate limit", [Time]),
TRef = start_timer(Time, limit_timeout),
State#state{sockstate = blocked,
limiter = Limiter1,
limit_timer = TRef
}
end.
%%--------------------------------------------------------------------
%% Run GC and Check OOM
run_gc(Stats, State = #state{gc_state = GcSt}) ->
case ?ENABLED(GcSt) andalso emqx_gc:run(Stats, GcSt) of
false -> State;
{_IsGC, GcSt1} ->
State#state{gc_state = GcSt1}
end.
check_oom(State = #state{channel = Channel}) ->
Zone = emqx_channel:info(zone, Channel),
OomPolicy = emqx_zone:oom_policy(Zone),
?tp(debug, check_oom, #{policy => OomPolicy}),
case ?ENABLED(OomPolicy) andalso emqx_misc:check_oom(OomPolicy) of
{shutdown, Reason} ->
%% triggers terminate/2 callback immediately
erlang:exit({shutdown, Reason});
_Other ->
ok
end,
State.
%%--------------------------------------------------------------------
%% Activate Socket
-compile({inline, [activate_socket/1]}).
activate_socket(State = #state{sockstate = closed}) ->
{ok, State};
activate_socket(State = #state{sockstate = blocked}) ->
{ok, State};
activate_socket(State = #state{transport = Transport,
socket = Socket,
active_n = N}) ->
case Transport:setopts(Socket, [{active, N}]) of
ok -> {ok, State#state{sockstate = running}};
Error -> Error
end.
%%--------------------------------------------------------------------
%% Close Socket
close_socket(State = #state{sockstate = closed}) -> State;
close_socket(State = #state{transport = Transport, socket = Socket}) ->
ok = Transport:fast_close(Socket),
State#state{sockstate = closed}.
%%--------------------------------------------------------------------
%% Inc incoming/outgoing stats
-compile({inline, [inc_incoming_stats/1]}).
inc_incoming_stats(Packet = ?PACKET(Type)) ->
inc_counter(recv_pkt, 1),
case Type =:= ?PUBLISH of
true ->
inc_counter(recv_msg, 1),
inc_qos_stats(recv_msg, Packet),
inc_counter(incoming_pubs, 1);
false ->
ok
end,
emqx_metrics:inc_recv(Packet).
-compile({inline, [inc_outgoing_stats/1]}).
inc_outgoing_stats({error, message_too_large}) ->
inc_counter('send_msg.dropped', 1),
inc_counter('send_msg.dropped.too_large', 1);
inc_outgoing_stats(Packet = ?PACKET(Type)) ->
inc_counter(send_pkt, 1),
case Type =:= ?PUBLISH of
true ->
inc_counter(send_msg, 1),
inc_counter(outgoing_pubs, 1),
inc_qos_stats(send_msg, Packet);
false ->
ok
end,
emqx_metrics:inc_sent(Packet).
inc_qos_stats(Type, #mqtt_packet{header = #mqtt_packet_header{qos = QoS}}) when ?IS_QOS(QoS) ->
inc_counter(inc_qos_stats_key(Type, QoS), 1);
inc_qos_stats(_, _) -> ok.
inc_qos_stats_key(send_msg, ?QOS_0) -> 'send_msg.qos0';
inc_qos_stats_key(send_msg, ?QOS_1) -> 'send_msg.qos1';
inc_qos_stats_key(send_msg, ?QOS_2) -> 'send_msg.qos2';
inc_qos_stats_key(recv_msg, ?QOS_0) -> 'recv_msg.qos0';
inc_qos_stats_key(recv_msg, ?QOS_1) -> 'recv_msg.qos1';
inc_qos_stats_key(recv_msg, ?QOS_2) -> 'recv_msg.qos2'.
%%--------------------------------------------------------------------
%% Helper functions
-compile({inline, [next_msgs/1]}).
next_msgs(Packet) when is_record(Packet, mqtt_packet) ->
{outgoing, Packet};
next_msgs(Event) when is_tuple(Event) ->
Event;
next_msgs(More) when is_list(More) ->
More.
-compile({inline, [shutdown/2, shutdown/3]}).
shutdown(Reason, State) ->
stop({shutdown, Reason}, State).
shutdown(Reason, Reply, State) ->
stop({shutdown, Reason}, Reply, State).
-compile({inline, [stop/2, stop/3]}).
stop(Reason, State) ->
{stop, Reason, State}.
stop(Reason, Reply, State) ->
{stop, Reason, Reply, State}.
inc_counter(Key, Inc) ->
_ = emqx_pd:inc_counter(Key, Inc),
ok.
%%--------------------------------------------------------------------
%% For CT tests
%%--------------------------------------------------------------------
set_field(Name, Value, State) ->
Pos = emqx_misc:index_of(Name, record_info(fields, state)),
setelement(Pos+1, State, Value).
get_state(Pid) ->
State = sys:get_state(Pid),
maps:from_list(lists:zip(record_info(fields, state),
tl(tuple_to_list(State)))).
| null | https://raw.githubusercontent.com/dgiot/dgiot/c601555e45f38d02aafc308b18a9e28c543b6f2c/src/emqx_connection.erl | erlang | --------------------------------------------------------------------
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
-2.0
Unless required by applicable law or agreed to in writing, software
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
--------------------------------------------------------------------
MQTT/TCP|TLS Connection
API
Callback
Internal callback
Export for CT
TCP/TLS Socket
Peername of the connection
Sock State
The {active, N} option
Limit Timer
Stats Timer
Idle Timeout
Idle Timer
--------------------------------------------------------------------
API
--------------------------------------------------------------------
@doc Get infos of the connection/channel.
@doc Get stats of the connection/channel.
@doc Set TCP keepalive socket options to override system defaults.
Interval: The number of seconds between TCP keep-alive probes
Probes: The maximum number of TCP keep-alive probes to send before
giving up and killing the connection if no response is
NOTE: This API sets TCP socket options, which has nothing to do with
the MQTT layer's keepalive (PINGREQ and PINGRESP).
@doc Set custom socket options.
This API is made async because the call might be originated from
a hookpoint callback (otherwise deadlock).
If failed to set, the error message is logged.
--------------------------------------------------------------------
callbacks
--------------------------------------------------------------------
--------------------------------------------------------------------
Maybe do something here later.
--------------------------------------------------------------------
Ensure/cancel stats timer
--------------------------------------------------------------------
--------------------------------------------------------------------
Handle a Msg
In Stats
Ensure Rate Limit
Something sent
--------------------------------------------------------------------
Terminate
close socket, discard new state, always return ok.
tell truth about the original exception
--------------------------------------------------------------------
--------------------------------------------------------------------
Handle call
--------------------------------------------------------------------
Handle timeout
--------------------------------------------------------------------
--------------------------------------------------------------------
Handle incoming packet
--------------------------------------------------------------------
With Channel
--------------------------------------------------------------------
Handle outgoing packets
--------------------------------------------------------------------
Send data
Send an inet_reply to postpone handling the error
--------------------------------------------------------------------
Handle Info
--------------------------------------------------------------------
Handle Info
--------------------------------------------------------------------
Ensure rate limit
--------------------------------------------------------------------
triggers terminate/2 callback immediately
--------------------------------------------------------------------
--------------------------------------------------------------------
Close Socket
--------------------------------------------------------------------
Inc incoming/outgoing stats
--------------------------------------------------------------------
Helper functions
--------------------------------------------------------------------
For CT tests
-------------------------------------------------------------------- | Copyright ( c ) 2018 - 2022 EMQ Technologies Co. , Ltd. All Rights Reserved .
Licensed under the Apache License , Version 2.0 ( the " License " ) ;
distributed under the License is distributed on an " AS IS " BASIS ,
-module(emqx_connection).
-include("emqx.hrl").
-include("emqx_mqtt.hrl").
-include("logger.hrl").
-include("types.hrl").
-include_lib("snabbkaffe/include/snabbkaffe.hrl").
-logger_header("[MQTT]").
-ifdef(TEST).
-compile(export_all).
-compile(nowarn_export_all).
-endif.
-export([ start_link/3
, stop/1
]).
-export([ info/1
, stats/1
]).
-export([ async_set_keepalive/3
, async_set_keepalive/4
, async_set_socket_options/2
]).
-export([ call/2
, call/3
, cast/2
]).
-export([init/4]).
Sys callbacks
-export([ system_continue/3
, system_terminate/4
, system_code_change/4
, system_get_state/1
]).
-export([wakeup_from_hib/2, recvloop/2, get_state/1]).
-export([set_field/3]).
-import(emqx_misc,
[ maybe_apply/2
, start_timer/2
]).
-record(state, {
TCP / TLS Transport
transport :: esockd:transport(),
socket :: esockd:socket(),
peername :: emqx_types:peername(),
of the connection
sockname :: emqx_types:peername(),
sockstate :: emqx_types:sockstate(),
active_n :: pos_integer(),
Limiter
limiter :: maybe(emqx_limiter:limiter()),
limit_timer :: maybe(reference()),
Parse State
parse_state :: emqx_frame:parse_state(),
Serialize options
serialize :: emqx_frame:serialize_opts(),
Channel State
channel :: emqx_channel:channel(),
GC State
gc_state :: maybe(emqx_gc:gc_state()),
stats_timer :: disabled | maybe(reference()),
idle_timeout :: integer(),
idle_timer :: maybe(reference())
}).
-type(state() :: #state{}).
-define(ACTIVE_N, 100).
-define(ENABLED(X), (X =/= undefined)).
-define(ALARM_TCP_CONGEST(Channel),
list_to_binary(io_lib:format("mqtt_conn/congested/~s/~s",
[emqx_channel:info(clientid, Channel),
emqx_channel:info(username, Channel)]))).
-define(INFO_KEYS,
[ socktype
, peername
, sockname
, sockstate
, active_n
]).
-define(SOCK_STATS,
[ recv_oct
, recv_cnt
, send_oct
, send_cnt
, send_pend
]).
-define(ALARM_CONN_INFO_KEYS,
[ socktype
, sockname
, peername
, clientid
, username
, proto_name
, proto_ver
, connected_at
]).
-define(ALARM_SOCK_STATS_KEYS,
[ send_pend
, recv_cnt
, recv_oct
, send_cnt
, send_oct
]).
-define(ALARM_SOCK_OPTS_KEYS,
[ high_watermark
, high_msgq_watermark
, sndbuf
, recbuf
, buffer
]).
-dialyzer({no_match, [info/2]}).
-spec(start_link(esockd:transport(), esockd:socket(), proplists:proplist())
-> {ok, pid()}).
start_link(Transport, Socket, Options) ->
Args = [self(), Transport, Socket, Options],
CPid = proc_lib:spawn_link(?MODULE, init, Args),
{ok, CPid}.
-spec(info(pid() | state()) -> emqx_types:infos()).
info(CPid) when is_pid(CPid) ->
call(CPid, info);
info(State = #state{channel = Channel}) ->
ChanInfo = emqx_channel:info(Channel),
SockInfo = maps:from_list(
info(?INFO_KEYS, State)),
ChanInfo#{sockinfo => SockInfo}.
info(Keys, State) when is_list(Keys) ->
[{Key, info(Key, State)} || Key <- Keys];
info(socktype, #state{transport = Transport, socket = Socket}) ->
Transport:type(Socket);
info(peername, #state{peername = Peername}) ->
Peername;
info(sockname, #state{sockname = Sockname}) ->
Sockname;
info(sockstate, #state{sockstate = SockSt}) ->
SockSt;
info(active_n, #state{active_n = ActiveN}) ->
ActiveN;
info(stats_timer, #state{stats_timer = StatsTimer}) ->
StatsTimer;
info(limit_timer, #state{limit_timer = LimitTimer}) ->
LimitTimer;
info(limiter, #state{limiter = Limiter}) ->
maybe_apply(fun emqx_limiter:info/1, Limiter).
-spec(stats(pid() | state()) -> emqx_types:stats()).
stats(CPid) when is_pid(CPid) ->
call(CPid, stats);
stats(#state{transport = Transport,
socket = Socket,
channel = Channel}) ->
SockStats = case Transport:getstat(Socket, ?SOCK_STATS) of
{ok, Ss} -> Ss;
{error, _} -> []
end,
ChanStats = emqx_channel:stats(Channel),
ProcStats = emqx_misc:proc_stats(),
lists:append([SockStats, ChanStats, ProcStats]).
%% Idle: The number of seconds a connection needs to be idle before
%% TCP begins sending out keep-alive probes (Linux default 7200).
%% Interval: The number of seconds between keep-alive probes
%% (Linux default 75).
%% Probes: The number of unacknowledged probes to send before the
%% connection is considered dead, if no response is
%% obtained from the other end (Linux default 9).
async_set_keepalive(Idle, Interval, Probes) ->
async_set_keepalive(self(), Idle, Interval, Probes).
async_set_keepalive(Pid, Idle, Interval, Probes) ->
Options = [ {keepalive, true}
, {raw, 6, 4, <<Idle:32/native>>}
, {raw, 6, 5, <<Interval:32/native>>}
, {raw, 6, 6, <<Probes:32/native>>}
],
async_set_socket_options(Pid, Options).
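%% Usage sketch (illustrative only; the ConnPid variable and the 60/30/3
%% values are made-up examples, not taken from this module):
%%
%%   ok = emqx_connection:async_set_keepalive(ConnPid, 60, 30, 3),
%%
%% which asks the connection process to enable TCP keepalive with a 60s idle
%% time, probes every 30s, and at most 3 unanswered probes. The options are
%% applied asynchronously through async_set_socket_options/2, so the caller
%% gets no synchronous confirmation that Transport:setopts/2 succeeded.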
async_set_socket_options(Pid, Options) ->
cast(Pid, {async_set_socket_options, Options}).
cast(Pid, Req) ->
gen_server:cast(Pid, Req).
call(Pid, Req) ->
call(Pid, Req, infinity).
call(Pid, Req, Timeout) ->
gen_server:call(Pid, Req, Timeout).
stop(Pid) ->
gen_server:stop(Pid).
init(Parent, Transport, RawSocket, Options) ->
case Transport:wait(RawSocket) of
{ok, Socket} ->
run_loop(Parent, init_state(Transport, Socket, Options));
{error, Reason} ->
ok = Transport:fast_close(RawSocket),
exit_on_sock_error(Reason)
end.
init_state(Transport, Socket, Options) ->
{ok, Peername} = Transport:ensure_ok_or_exit(peername, [Socket]),
{ok, Sockname} = Transport:ensure_ok_or_exit(sockname, [Socket]),
Peercert = Transport:ensure_ok_or_exit(peercert, [Socket]),
ConnInfo = #{socktype => Transport:type(Socket),
peername => Peername,
sockname => Sockname,
peercert => Peercert,
conn_mod => ?MODULE
},
Zone = proplists:get_value(zone, Options),
ActiveN = proplists:get_value(active_n, Options, ?ACTIVE_N),
PubLimit = emqx_zone:publish_limit(Zone),
BytesIn = proplists:get_value(rate_limit, Options),
RateLimit = emqx_zone:ratelimit(Zone),
Limiter = emqx_limiter:init(Zone, PubLimit, BytesIn, RateLimit),
FrameOpts = emqx_zone:mqtt_frame_options(Zone),
ParseState = emqx_frame:initial_parse_state(FrameOpts),
Serialize = emqx_frame:serialize_opts(),
Channel = emqx_channel:init(ConnInfo, Options),
GcState = emqx_zone:init_gc_state(Zone),
StatsTimer = emqx_zone:stats_timer(Zone),
IdleTimeout = emqx_zone:idle_timeout(Zone),
IdleTimer = start_timer(IdleTimeout, idle_timeout),
#state{transport = Transport,
socket = Socket,
peername = Peername,
sockname = Sockname,
sockstate = idle,
active_n = ActiveN,
limiter = Limiter,
parse_state = ParseState,
serialize = Serialize,
channel = Channel,
gc_state = GcState,
stats_timer = StatsTimer,
idle_timeout = IdleTimeout,
idle_timer = IdleTimer
}.
run_loop(Parent, State = #state{transport = Transport,
socket = Socket,
peername = Peername,
channel = Channel}) ->
emqx_logger:set_metadata_peername(esockd:format(Peername)),
_ = emqx_misc:tune_heap_size(emqx_zone:oom_policy(
emqx_channel:info(zone, Channel))),
case activate_socket(State) of
{ok, NState} -> hibernate(Parent, NState);
{error, Reason} ->
ok = Transport:fast_close(Socket),
exit_on_sock_error(Reason)
end.
-spec exit_on_sock_error(any()) -> no_return().
exit_on_sock_error(Reason) when Reason =:= einval;
Reason =:= enotconn;
Reason =:= closed ->
erlang:exit(normal);
exit_on_sock_error(timeout) ->
erlang:exit({shutdown, ssl_upgrade_timeout});
exit_on_sock_error(Reason) ->
erlang:exit({shutdown, Reason}).
%% Recv Loop
recvloop(Parent, State = #state{idle_timeout = IdleTimeout}) ->
receive
Msg ->
handle_recv(Msg, Parent, State)
after
IdleTimeout + 100 ->
hibernate(Parent, cancel_stats_timer(State))
end.
handle_recv({system, From, Request}, Parent, State) ->
sys:handle_system_msg(Request, From, Parent, ?MODULE, [], State);
handle_recv({'EXIT', Parent, Reason}, Parent, State) ->
%% FIXME: it's not trapping exit, should never receive an EXIT
terminate(Reason, State);
handle_recv(Msg, Parent, State = #state{idle_timeout = IdleTimeout}) ->
case process_msg([Msg], ensure_stats_timer(IdleTimeout, State)) of
{ok, NewState} ->
?MODULE:recvloop(Parent, NewState);
{stop, Reason, NewSate} ->
terminate(Reason, NewSate)
end.
hibernate(Parent, State) ->
proc_lib:hibernate(?MODULE, wakeup_from_hib, [Parent, State]).
wakeup_from_hib(Parent, State) ->
?MODULE:recvloop(Parent, State).
-compile({inline, [ensure_stats_timer/2]}).
ensure_stats_timer(Timeout, State = #state{stats_timer = undefined}) ->
State#state{stats_timer = start_timer(Timeout, emit_stats)};
ensure_stats_timer(_Timeout, State) -> State.
-compile({inline, [cancel_stats_timer/1]}).
cancel_stats_timer(State = #state{stats_timer = TRef}) when is_reference(TRef) ->
?tp(debug, cancel_stats_timer, #{}),
ok = emqx_misc:cancel_timer(TRef),
State#state{stats_timer = undefined};
cancel_stats_timer(State) -> State.
%% Process next Msg
process_msg([], State) ->
{ok, State};
process_msg([Msg | More], State) ->
try
case handle_msg(Msg, State) of
ok ->
process_msg(More, State);
{ok, NState} ->
process_msg(More, NState);
{ok, Msgs, NState} ->
process_msg(append_msg(More, Msgs), NState);
{stop, Reason, NState} ->
{stop, Reason, NState}
end
catch
exit:normal ->
{stop, normal, State};
exit:shutdown ->
{stop, shutdown, State};
exit:{shutdown, _} = Shutdown ->
{stop, Shutdown, State};
Exception:Context:Stack ->
{stop, #{exception => Exception,
context => Context,
stacktrace => Stack}, State}
end.
-compile({inline, [append_msg/2]}).
append_msg([], Msgs) when is_list(Msgs) ->
Msgs;
append_msg([], Msg) -> [Msg];
append_msg(Q, Msgs) when is_list(Msgs) ->
lists:append(Q, Msgs);
append_msg(Q, Msg) ->
lists:append(Q, [Msg]).
handle_msg({'$gen_call', From, Req}, State) ->
case handle_call(From, Req, State) of
{reply, Reply, NState} ->
gen_server:reply(From, Reply),
{ok, NState};
{stop, Reason, Reply, NState} ->
gen_server:reply(From, Reply),
stop(Reason, NState)
end;
handle_msg({'$gen_cast', Req}, State) ->
NewState = handle_cast(Req, State),
{ok, NewState};
handle_msg({Inet, _Sock, Data}, State) when Inet == tcp; Inet == ssl ->
?LOG(debug, "RECV ~0p", [Data]),
Oct = iolist_size(Data),
inc_counter(incoming_bytes, Oct),
ok = emqx_metrics:inc('bytes.received', Oct),
parse_incoming(Data, State);
handle_msg({incoming, Packet = ?CONNECT_PACKET(ConnPkt)},
State = #state{idle_timer = IdleTimer}) ->
ok = emqx_misc:cancel_timer(IdleTimer),
Serialize = emqx_frame:serialize_opts(ConnPkt),
NState = State#state{serialize = Serialize,
idle_timer = undefined
},
handle_incoming(Packet, NState);
handle_msg({incoming, Packet}, State) ->
handle_incoming(Packet, State);
handle_msg({outgoing, Packets}, State) ->
handle_outgoing(Packets, State);
handle_msg({Error, _Sock, Reason}, State)
when Error == tcp_error; Error == ssl_error ->
handle_info({sock_error, Reason}, State);
handle_msg({Closed, _Sock}, State)
when Closed == tcp_closed; Closed == ssl_closed ->
handle_info({sock_closed, Closed}, close_socket(State));
handle_msg({Passive, _Sock}, State)
when Passive == tcp_passive; Passive == ssl_passive ->
Pubs = emqx_pd:reset_counter(incoming_pubs),
Bytes = emqx_pd:reset_counter(incoming_bytes),
InStats = #{cnt => Pubs, oct => Bytes},
NState = ensure_rate_limit(InStats, State),
%% Run GC and Check OOM
NState1 = check_oom(run_gc(InStats, NState)),
handle_info(activate_socket, NState1);
handle_msg(Deliver = {deliver, _Topic, _Msg},
#state{active_n = ActiveN} = State) ->
Delivers = [Deliver | emqx_misc:drain_deliver(ActiveN)],
with_channel(handle_deliver, [Delivers], State);
handle_msg({inet_reply, _Sock, ok}, State = #state{active_n = ActiveN}) ->
case emqx_pd:get_counter(outgoing_pubs) > ActiveN of
true ->
Pubs = emqx_pd:reset_counter(outgoing_pubs),
Bytes = emqx_pd:reset_counter(outgoing_bytes),
OutStats = #{cnt => Pubs, oct => Bytes},
{ok, check_oom(run_gc(OutStats, State))};
false -> ok
end;
handle_msg({inet_reply, _Sock, {error, Reason}}, State) ->
handle_info({sock_error, Reason}, State);
handle_msg({connack, ConnAck}, State) ->
handle_outgoing(ConnAck, State);
handle_msg({close, Reason}, State) ->
?LOG(debug, "Force to close the socket due to ~p", [Reason]),
handle_info({sock_closed, Reason}, close_socket(State));
handle_msg({event, connected}, State = #state{channel = Channel}) ->
ClientId = emqx_channel:info(clientid, Channel),
emqx_cm:insert_channel_info(ClientId, info(State), stats(State));
handle_msg({event, disconnected}, State = #state{channel = Channel}) ->
ClientId = emqx_channel:info(clientid, Channel),
emqx_cm:set_chan_info(ClientId, info(State)),
emqx_cm:connection_closed(ClientId),
{ok, State};
handle_msg({event, _Other}, State = #state{channel = Channel}) ->
ClientId = emqx_channel:info(clientid, Channel),
emqx_cm:set_chan_info(ClientId, info(State)),
emqx_cm:set_chan_stats(ClientId, stats(State)),
{ok, State};
handle_msg({timeout, TRef, TMsg}, State) ->
handle_timeout(TRef, TMsg, State);
handle_msg(Shutdown = {shutdown, _Reason}, State) ->
stop(Shutdown, State);
handle_msg(Msg, State) ->
handle_info(Msg, State).
-spec terminate(any(), state()) -> no_return().
terminate(Reason, State = #state{channel = Channel, transport = Transport,
socket = Socket}) ->
try
Channel1 = emqx_channel:set_conn_state(disconnected, Channel),
emqx_congestion:cancel_alarms(Socket, Transport, Channel1),
emqx_channel:terminate(Reason, Channel1),
close_socket_ok(State)
catch
E:C:S ->
?tp(warning, unclean_terminate, #{exception => E, context => C, stacktrace => S})
end,
?tp(info, terminate, #{reason => Reason}),
maybe_raise_exception(Reason).
close_socket_ok(State) ->
_ = close_socket(State),
ok.
maybe_raise_exception(#{exception := Exception,
context := Context,
stacktrace := Stacktrace
}) ->
erlang:raise(Exception, Context, Stacktrace);
maybe_raise_exception(Reason) ->
exit(Reason).
%% Sys callbacks
system_continue(Parent, _Debug, State) ->
?MODULE:recvloop(Parent, State).
system_terminate(Reason, _Parent, _Debug, State) ->
terminate(Reason, State).
system_code_change(State, _Mod, _OldVsn, _Extra) ->
{ok, State}.
system_get_state(State) -> {ok, State}.
handle_call(_From, info, State) ->
{reply, info(State), State};
handle_call(_From, stats, State) ->
{reply, stats(State), State};
handle_call(_From, {ratelimit, Policy}, State = #state{channel = Channel}) ->
Zone = emqx_channel:info(zone, Channel),
Limiter = emqx_limiter:init(Zone, Policy),
{reply, ok, State#state{limiter = Limiter}};
handle_call(_From, Req, State = #state{channel = Channel}) ->
case emqx_channel:handle_call(Req, Channel) of
{reply, Reply, NChannel} ->
{reply, Reply, State#state{channel = NChannel}};
{shutdown, Reason, Reply, NChannel} ->
shutdown(Reason, Reply, State#state{channel = NChannel});
{shutdown, Reason, Reply, OutPacket, NChannel} ->
NState = State#state{channel = NChannel},
ok = handle_outgoing(OutPacket, NState),
shutdown(Reason, Reply, NState)
end.
handle_timeout(_TRef, idle_timeout, State) ->
shutdown(idle_timeout, State);
handle_timeout(_TRef, limit_timeout, State) ->
NState = State#state{sockstate = idle,
limit_timer = undefined
},
handle_info(activate_socket, NState);
handle_timeout(_TRef, emit_stats, State = #state{channel = Channel, transport = Transport,
socket = Socket}) ->
emqx_congestion:maybe_alarm_conn_congestion(Socket, Transport, Channel),
ClientId = emqx_channel:info(clientid, Channel),
emqx_cm:set_chan_stats(ClientId, stats(State)),
{ok, State#state{stats_timer = undefined}};
handle_timeout(TRef, keepalive, State = #state{transport = Transport,
socket = Socket,
channel = Channel})->
case emqx_channel:info(conn_state, Channel) of
disconnected -> {ok, State};
_ ->
case Transport:getstat(Socket, [recv_oct]) of
{ok, [{recv_oct, RecvOct}]} ->
handle_timeout(TRef, {keepalive, RecvOct}, State);
{error, Reason} ->
handle_info({sock_error, Reason}, State)
end
end;
handle_timeout(TRef, Msg, State) ->
with_channel(handle_timeout, [TRef, Msg], State).
%% Parse incoming data
-compile({inline, [parse_incoming/2]}).
parse_incoming(Data, State) ->
{Packets, NState} = parse_incoming(Data, [], State),
{ok, next_incoming_msgs(Packets), NState}.
parse_incoming(<<>>, Packets, State) ->
{Packets, State};
parse_incoming(Data, Packets, State = #state{parse_state = ParseState}) ->
try emqx_frame:parse(Data, ParseState) of
{more, NParseState} ->
{Packets, State#state{parse_state = NParseState}};
{ok, Packet, Rest, NParseState} ->
NState = State#state{parse_state = NParseState},
parse_incoming(Rest, [Packet | Packets], NState)
catch
error:proxy_protocol_config_disabled:_Stk ->
?LOG(error,
"~nMalformed packet, "
"please check proxy_protocol config for specific listeners and zones~n"),
{[{frame_error, proxy_protocol_config_disabled} | Packets], State};
error:Reason:Stk ->
?LOG(error, "~nParse failed for ~0p~n~0p~nFrame data:~0p",
[Reason, Stk, Data]),
{[{frame_error, Reason} | Packets], State}
end.
-compile({inline, [next_incoming_msgs/1]}).
next_incoming_msgs([Packet]) ->
{incoming, Packet};
next_incoming_msgs(Packets) ->
[{incoming, Packet} || Packet <- lists:reverse(Packets)].
handle_incoming(Packet, State) when is_record(Packet, mqtt_packet) ->
ok = inc_incoming_stats(Packet),
?LOG(debug, "RECV ~s", [emqx_packet:format(Packet)]),
with_channel(handle_in, [Packet], State);
handle_incoming(FrameError, State) ->
with_channel(handle_in, [FrameError], State).
with_channel(Fun, Args, State = #state{channel = Channel}) ->
case erlang:apply(emqx_channel, Fun, Args ++ [Channel]) of
ok -> {ok, State};
{ok, NChannel} ->
{ok, State#state{channel = NChannel}};
{ok, Replies, NChannel} ->
{ok, next_msgs(Replies), State#state{channel = NChannel}};
{shutdown, Reason, NChannel} ->
shutdown(Reason, State#state{channel = NChannel});
{shutdown, Reason, Packet, NChannel} ->
NState = State#state{channel = NChannel},
ok = handle_outgoing(Packet, NState),
shutdown(Reason, NState)
end.
handle_outgoing(Packets, State) when is_list(Packets) ->
send(lists:map(serialize_and_inc_stats_fun(State), Packets), State);
handle_outgoing(Packet, State) ->
send((serialize_and_inc_stats_fun(State))(Packet), State).
serialize_and_inc_stats_fun(#state{serialize = Serialize}) ->
fun(Packet) ->
case emqx_frame:serialize_pkt(Packet, Serialize) of
<<>> -> ?LOG(warning, "~s is discarded due to the frame is too large!",
[emqx_packet:format(Packet)]),
ok = emqx_metrics:inc('delivery.dropped.too_large'),
ok = emqx_metrics:inc('delivery.dropped'),
ok = inc_outgoing_stats({error, message_too_large}),
<<>>;
Data -> ?LOG(debug, "SEND ~s", [emqx_packet:format(Packet)]),
ok = inc_outgoing_stats(Packet),
Data
end
end.
-spec(send(iodata(), state()) -> ok).
send(IoData, #state{transport = Transport, socket = Socket, channel = Channel}) ->
Oct = iolist_size(IoData),
ok = emqx_metrics:inc('bytes.sent', Oct),
inc_counter(outgoing_bytes, Oct),
emqx_congestion:maybe_alarm_conn_congestion(Socket, Transport, Channel),
case Transport:async_send(Socket, IoData, []) of
ok -> ok;
Error = {error, _Reason} ->
self() ! {inet_reply, Socket, Error},
ok
end.
handle_info(activate_socket, State = #state{sockstate = OldSst}) ->
case activate_socket(State) of
{ok, NState = #state{sockstate = NewSst}} ->
case OldSst =/= NewSst of
true -> {ok, {event, NewSst}, NState};
false -> {ok, NState}
end;
{error, Reason} ->
handle_info({sock_error, Reason}, State)
end;
handle_info({sock_error, Reason}, State) ->
case Reason =/= closed andalso Reason =/= einval of
true -> ?LOG(warning, "socket_error: ~p", [Reason]);
false -> ok
end,
handle_info({sock_closed, Reason}, close_socket(State));
handle_info(Info, State) ->
with_channel(handle_info, [Info], State).
handle_cast({async_set_socket_options, Opts},
State = #state{transport = Transport,
socket = Socket
}) ->
case Transport:setopts(Socket, Opts) of
ok -> ?tp(info, "custom_socket_options_successfully", #{opts => Opts});
Err -> ?tp(error, "failed_to_set_custom_socket_optionn", #{reason => Err})
end,
State;
handle_cast(Req, State) ->
?tp(error, "received_unknown_cast", #{cast => Req}),
State.
ensure_rate_limit(Stats, State = #state{limiter = Limiter}) ->
case ?ENABLED(Limiter) andalso emqx_limiter:check(Stats, Limiter) of
false -> State;
{ok, Limiter1} ->
State#state{limiter = Limiter1};
{pause, Time, Limiter1} ->
?LOG(notice, "Pause ~pms due to rate limit", [Time]),
TRef = start_timer(Time, limit_timeout),
State#state{sockstate = blocked,
limiter = Limiter1,
limit_timer = TRef
}
end.
%% Run GC and Check OOM
run_gc(Stats, State = #state{gc_state = GcSt}) ->
case ?ENABLED(GcSt) andalso emqx_gc:run(Stats, GcSt) of
false -> State;
{_IsGC, GcSt1} ->
State#state{gc_state = GcSt1}
end.
check_oom(State = #state{channel = Channel}) ->
Zone = emqx_channel:info(zone, Channel),
OomPolicy = emqx_zone:oom_policy(Zone),
?tp(debug, check_oom, #{policy => OomPolicy}),
case ?ENABLED(OomPolicy) andalso emqx_misc:check_oom(OomPolicy) of
{shutdown, Reason} ->
erlang:exit({shutdown, Reason});
_Other ->
ok
end,
State.
%% Activate Socket
-compile({inline, [activate_socket/1]}).
activate_socket(State = #state{sockstate = closed}) ->
{ok, State};
activate_socket(State = #state{sockstate = blocked}) ->
{ok, State};
activate_socket(State = #state{transport = Transport,
socket = Socket,
active_n = N}) ->
case Transport:setopts(Socket, [{active, N}]) of
ok -> {ok, State#state{sockstate = running}};
Error -> Error
end.
close_socket(State = #state{sockstate = closed}) -> State;
close_socket(State = #state{transport = Transport, socket = Socket}) ->
ok = Transport:fast_close(Socket),
State#state{sockstate = closed}.
-compile({inline, [inc_incoming_stats/1]}).
inc_incoming_stats(Packet = ?PACKET(Type)) ->
inc_counter(recv_pkt, 1),
case Type =:= ?PUBLISH of
true ->
inc_counter(recv_msg, 1),
inc_qos_stats(recv_msg, Packet),
inc_counter(incoming_pubs, 1);
false ->
ok
end,
emqx_metrics:inc_recv(Packet).
-compile({inline, [inc_outgoing_stats/1]}).
inc_outgoing_stats({error, message_too_large}) ->
inc_counter('send_msg.dropped', 1),
inc_counter('send_msg.dropped.too_large', 1);
inc_outgoing_stats(Packet = ?PACKET(Type)) ->
inc_counter(send_pkt, 1),
case Type =:= ?PUBLISH of
true ->
inc_counter(send_msg, 1),
inc_counter(outgoing_pubs, 1),
inc_qos_stats(send_msg, Packet);
false ->
ok
end,
emqx_metrics:inc_sent(Packet).
inc_qos_stats(Type, #mqtt_packet{header = #mqtt_packet_header{qos = QoS}}) when ?IS_QOS(QoS) ->
inc_counter(inc_qos_stats_key(Type, QoS), 1);
inc_qos_stats(_, _) -> ok.
inc_qos_stats_key(send_msg, ?QOS_0) -> 'send_msg.qos0';
inc_qos_stats_key(send_msg, ?QOS_1) -> 'send_msg.qos1';
inc_qos_stats_key(send_msg, ?QOS_2) -> 'send_msg.qos2';
inc_qos_stats_key(recv_msg, ?QOS_0) -> 'recv_msg.qos0';
inc_qos_stats_key(recv_msg, ?QOS_1) -> 'recv_msg.qos1';
inc_qos_stats_key(recv_msg, ?QOS_2) -> 'recv_msg.qos2'.
-compile({inline, [next_msgs/1]}).
next_msgs(Packet) when is_record(Packet, mqtt_packet) ->
{outgoing, Packet};
next_msgs(Event) when is_tuple(Event) ->
Event;
next_msgs(More) when is_list(More) ->
More.
-compile({inline, [shutdown/2, shutdown/3]}).
shutdown(Reason, State) ->
stop({shutdown, Reason}, State).
shutdown(Reason, Reply, State) ->
stop({shutdown, Reason}, Reply, State).
-compile({inline, [stop/2, stop/3]}).
stop(Reason, State) ->
{stop, Reason, State}.
stop(Reason, Reply, State) ->
{stop, Reason, Reply, State}.
inc_counter(Key, Inc) ->
_ = emqx_pd:inc_counter(Key, Inc),
ok.
set_field(Name, Value, State) ->
Pos = emqx_misc:index_of(Name, record_info(fields, state)),
setelement(Pos+1, State, Value).
get_state(Pid) ->
State = sys:get_state(Pid),
maps:from_list(lists:zip(record_info(fields, state),
tl(tuple_to_list(State)))).
%%--------------------------------------------------------------------
%% File: replace_os_vars_tests.erl (from erlware/relx)
%%--------------------------------------------------------------------
-module(replace_os_vars_tests).
-export([]).

(* Ocsigen, from ocsigen/ocsigenserver (outputfilter.ml)
* Module outputfilter.ml
* Copyright ( C ) 2008
*
* This program is free software ; you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published by
* the Free Software Foundation , with linking exception ;
* either version 2.1 of the License , or ( at your option ) any later version .
*
* This program is distributed in the hope that it will be useful ,
* but WITHOUT ANY WARRANTY ; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE . See the
* GNU Lesser General Public License for more details .
*
* You should have received a copy of the GNU Lesser General Public License
* along with this program ; if not , write to the Free Software
* Foundation , Inc. , 59 Temple Place - Suite 330 , Boston , MA 02111 - 1307 , USA .
*
* Module outputfilter.ml
* Copyright (C) 2008 Vincent Balat
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published by
* the Free Software Foundation, with linking exception;
* either version 2.1 of the License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with this program; if not, write to the Free Software
* Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
*)
(* This module enables rewritting the server output *)
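(* Configuration sketch (illustrative only; the header name and values below
   are invented, not taken from this file). Inside a <host> block of the
   server configuration one could write:
     <outputfilter header="Cache-Control" regexp=".*" dest="no-cache"/>
   to rewrite a response header, or
     <sethttpcode code="404"/>
   to force the HTTP status code of served responses. *)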
type header_filter =
[ `Rewrite of Ocsigen_header.Name.t * Pcre.regexp * string
| `Add of Ocsigen_header.Name.t * string * bool option ]
let gen filter = function
| Ocsigen_extensions.Req_not_found (code, _) ->
Lwt.return (Ocsigen_extensions.Ext_next code)
| Ocsigen_extensions.Req_found (_ri, res) ->
Lwt.return
@@ Ocsigen_extensions.Ext_found
(fun () ->
Lwt.return
@@
match filter with
| `Rewrite (header, regexp, dest) -> (
try
let l =
List.map
(Ocsigen_lib.Netstring_pcre.global_replace regexp dest)
(Ocsigen_response.header_multi res header)
and a = Ocsigen_response.remove_header res header in
Ocsigen_response.add_header_multi a header l
with Not_found -> res)
| `Add (header, dest, replace) -> (
match replace with
| None -> (
match Ocsigen_response.header res header with
| Some _ -> res
| None -> Ocsigen_response.add_header res header dest)
| Some false -> Ocsigen_response.add_header res header dest
| Some true -> Ocsigen_response.replace_header res header dest))
let gen_code code = function
| Ocsigen_extensions.Req_not_found (code, _) ->
Lwt.return (Ocsigen_extensions.Ext_next code)
| Ocsigen_extensions.Req_found (_ri, res) ->
Lwt.return
@@ Ocsigen_extensions.Ext_found
(fun () -> Lwt.return (Ocsigen_response.set_status res code))
let parse_config config_elem =
let header = ref None in
let regexp = ref None in
let dest = ref None in
let replace = ref None in
let code = ref None in
Ocsigen_extensions.(
Configuration.process_element ~in_tag:"host"
~other_elements:(fun t _ _ -> raise (Bad_config_tag_for_extension t))
~elements:
[ Configuration.element ~name:"outputfilter"
~attributes:
[ Configuration.attribute ~name:"header" (fun s ->
header := Some s)
; Configuration.attribute ~name:"regexp" (fun s ->
regexp := Some (Ocsigen_lib.Netstring_pcre.regexp s))
; Configuration.attribute ~name:"dest" (fun s -> dest := Some s)
; Configuration.attribute ~name:"replace" (fun s ->
try replace := Some (bool_of_string s)
with Invalid_argument _ ->
badconfig
"Wrong value for attribute replace of <outputfilter/>: %s. It should be true or false"
s) ]
()
; Configuration.element ~name:"sethttpcode"
~attributes:
[ Configuration.attribute ~name:"code" (fun s ->
try
match Cohttp.Code.status_of_code (int_of_string s) with
| #Cohttp.Code.status as status -> code := Some status
| `Code _ -> failwith "Invalid code"
with Failure _ ->
badconfig "Invalid code attribute in <sethttpcode>") ]
() ]
config_elem);
match !code with
| None -> (
match !header, !regexp, !dest, !replace with
| _, Some _, _, Some _ ->
Ocsigen_extensions.badconfig
"Wrong attributes for <outputfilter/>: attributes regexp and replace can't be set simultaneously"
| Some h, Some r, Some d, None ->
gen (`Rewrite (Ocsigen_header.Name.of_string h, r, d))
| Some h, None, Some d, rep ->
gen (`Add (Ocsigen_header.Name.of_string h, d, rep))
| _ ->
Ocsigen_extensions.badconfig
"Wrong attributes for <outputfilter header=... dest=... (regexp=... / replace=...)/>"
)
| Some code -> gen_code code
let () =
Ocsigen_extensions.register ~name:"outputfilter"
~fun_site:(fun _ _ _ _ _ _ -> parse_config)
()
let mode = Ocsigen_server.Site.Config.key ()
let extension =
Ocsigen_server.Site.create_extension
(fun {Ocsigen_server.Site.Config.accessor} ->
match accessor mode with
| Some (`Code c) -> gen_code c
| Some (#header_filter as f) -> gen f
| None -> failwith "Outputfilter.mode not set")
-- File: Utils.hs (from talw/crisp-compiler)
module Utils where
import Data.Word
import Data.List (elemIndex)
tr :: [Char] -> [Char] -> String -> String
tr as bs str = flip map str $
\c -> maybe c (\ix -> bs !! ix) $ elemIndex c as
readBinary :: String -> Word32
readBinary = bin2dec . read
bin2dec :: Word32 -> Word32
bin2dec = convertBase 2 10
dec2bin :: Word32
dec2bin = convertBase 10 2
convertBase :: Integral a => a -> a -> a -> a
convertBase fromBase toBase = convertDec 10 toBase . convertDec fromBase 10
where convertDec fb tb n = go n 1
where go 0 _ = 0
go x fac = if lsb `elem` [0..min fb tb - 1]
then addition + go (x `div` tb) (fac*fb)
else error "convertBase - invalid character"
where lsb = x `mod` tb
addition = lsb*fac
-- File: Tests.hs (from exercism/haskell)
{-# LANGUAGE TupleSections #-}
import Data.Foldable (for_)
import Data.Function (on)
import Data.Tree (Tree(Node), rootLabel)
import Data.List (sort)
import Test.Hspec (Spec, describe, it, shouldBe)
import Test.Hspec.Runner (configFastFail, defaultConfig, hspecWith)
import POV (fromPOV, tracePathBetween)
main :: IO ()
main = hspecWith defaultConfig {configFastFail = True} specs
specs :: Spec
specs = do
describe "fromPOV" $ do
let cases =
[ ("reparenting singleton" , singleton , Just singleton')
, ("reparenting with sibling" , simple , Just simple' )
, ("reparenting flat" , flat , Just flat' )
, ("reparenting nested" , nested , Just nested' )
, ("reparenting kids" , kids , Just kids' )
, ("reparenting cousins" , cousins , Just cousins' )
, ("from POV of non-existent node", leaf "foo", Nothing ) ]
rootShouldMatch = shouldBe `on` fmap rootLabel
edgesShouldMatch = shouldBe `on` fmap (sort . toEdges)
test (name, input, output) = describe name $ do
it "correct root" $ fromPOV "x" input `rootShouldMatch` output
it "correct edges" $ fromPOV "x" input `edgesShouldMatch` output
in for_ cases test
describe "Should not be able to find a missing node" $
let cases = [ ("singleton", singleton)
, ("flat" , flat )
, ("kids" , kids )
, ("nested" , nested )
, ("cousins" , cousins ) ]
test (name, g) = it name $ fromPOV "NOT THERE" g `shouldBe` Nothing
in for_ cases test
describe "tracePathBetween" $ do
it "Can find path from x -> parent" $
tracePathBetween "x" "parent" simple
`shouldBe` Just [ "x"
, "parent" ]
it "Can find path from x -> sibling" $
tracePathBetween "x" "b" flat
`shouldBe` Just [ "x"
, "root"
, "b" ]
it "Can trace a path from x -> cousin" $
tracePathBetween "x" "cousin-1" cousins
`shouldBe` Just [ "x"
, "parent"
, "grandparent"
, "uncle"
, "cousin-1" ]
it "Can find path from nodes other than x" $
tracePathBetween "a" "c" flat
`shouldBe` Just [ "a"
, "root"
, "c" ]
it "Can find path not involving root" $
tracePathBetween "x" "sibling-1" rootNotNeeded
`shouldBe` Just [ "x"
, "parent"
, "sibling-1" ]
it "Cannot trace if destination does not exist" $
tracePathBetween "x" "NOT THERE" cousins
`shouldBe` Nothing
it "Cannot trace if source does not exist" $
tracePathBetween "NOT THERE" "x" cousins
`shouldBe` Nothing
-- Functions used in the tests.
leaf :: a -> Tree a
leaf v = Node v []
-- In the trees we're making, we don't care about the ordering of children.
-- This is significant when rerooting on nodes that have a parent and children.
-- The former parent can go either before or after the former children.
-- Either choice would be correct in the context of this problem.
-- So all we need to check is:
-- 1) The graph is actually rooted on the requested node.
-- 2) The sorted edge list is correct.
-- This function helps check the second condition.
toEdges :: Ord a => Tree a -> [(a, a)]
toEdges (Node r ts) = map ((r,) . rootLabel) ts ++ concatMap toEdges ts
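-- A tiny illustrative example (not part of the original test-suite):
--   toEdges (Node "r" [leaf "a", leaf "b"]) == [("r","a"), ("r","b")]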
-- Trees used in the tests.
singleton , simple , flat , kids , nested , cousins :: Tree String
singleton', simple', flat', kids', nested', cousins' :: Tree String
singleton = leaf "x"
singleton' = leaf "x"
simple = Node "parent"
[ leaf "x"
, leaf "sibling"
]
simple' = Node "x"
[ Node "parent"
[ leaf "sibling"
]
]
flat = Node "root"
[ leaf "a"
, leaf "b"
, leaf "x"
, leaf "c"
]
flat' = Node "x"
[ Node "root"
[ leaf "a"
, leaf "b"
, leaf "c"
]
]
kids = Node "root"
[ Node "x"
[ leaf "kid-0"
, leaf "kid-1"
]
]
kids' = Node "x"
[ leaf "kid-0"
, leaf "kid-1"
, leaf "root"
]
nested = Node "level-0"
[ Node "level-1"
[ Node "level-2"
[ Node "level-3"
[ leaf "x"
]
]
]
]
nested' = Node "x"
[ Node "level-3"
[ Node "level-2"
[ Node "level-1"
[ leaf "level-0"
]
]
]
]
cousins = Node "grandparent"
[ Node "parent"
[ Node "x"
[ leaf "kid-a"
, leaf "kid-b"
]
, leaf "sibling-0"
, leaf "sibling-1"
]
, Node "uncle"
[ leaf "cousin-0"
, leaf "cousin-1"
]
]
cousins' = Node "x"
[ leaf "kid-a"
, leaf "kid-b"
, Node "parent"
[ leaf "sibling-0"
, leaf "sibling-1"
, Node "grandparent"
[ Node "uncle"
[ leaf "cousin-0"
, leaf "cousin-1"
]
]
]
]
rootNotNeeded :: Tree String
rootNotNeeded = Node "grandparent"
[ Node "parent"
[ leaf "x"
, leaf "sibling-0"
, leaf "sibling-1"
]
]
| null | https://raw.githubusercontent.com/exercism/haskell/f81ee7dc338294b3dbefb7bd39fc193546fcec26/exercises/practice/pov/test/Tests.hs | haskell | Functions used in the tests.
In the trees we're making, we don't care about the ordering of children.
This is significant when rerooting on nodes that have a parent and children.
The former parent can go either before or after the former children.
Either choice would be correct in the context of this problem.
So all we need to check is:
Trees used in the tests. | # LANGUAGE TupleSections #
import Data.Foldable (for_)
import Data.Function (on)
import Data.Tree (Tree(Node), rootLabel)
import Data.List (sort)
import Test.Hspec (Spec, describe, it, shouldBe)
import Test.Hspec.Runner (configFastFail, defaultConfig, hspecWith)
import POV (fromPOV, tracePathBetween)
main :: IO ()
main = hspecWith defaultConfig {configFastFail = True} specs
specs :: Spec
specs = do
describe "fromPOV" $ do
let cases =
[ ("reparenting singleton" , singleton , Just singleton')
, ("reparenting with sibling" , simple , Just simple' )
, ("reparenting flat" , flat , Just flat' )
, ("reparenting nested" , nested , Just nested' )
, ("reparenting kids" , kids , Just kids' )
, ("reparenting cousins" , cousins , Just cousins' )
, ("from POV of non-existent node", leaf "foo", Nothing ) ]
rootShouldMatch = shouldBe `on` fmap rootLabel
edgesShouldMatch = shouldBe `on` fmap (sort . toEdges)
test (name, input, output) = describe name $ do
it "correct root" $ fromPOV "x" input `rootShouldMatch` output
it "correct edges" $ fromPOV "x" input `edgesShouldMatch` output
in for_ cases test
describe "Should not be able to find a missing node" $
let cases = [ ("singleton", singleton)
, ("flat" , flat )
, ("kids" , kids )
, ("nested" , nested )
, ("cousins" , cousins ) ]
test (name, g) = it name $ fromPOV "NOT THERE" g `shouldBe` Nothing
in for_ cases test
describe "tracePathBetween" $ do
it "Can find path from x -> parent" $
tracePathBetween "x" "parent" simple
`shouldBe` Just [ "x"
, "parent" ]
it "Can find path from x -> sibling" $
tracePathBetween "x" "b" flat
`shouldBe` Just [ "x"
, "root"
, "b" ]
it "Can trace a path from x -> cousin" $
tracePathBetween "x" "cousin-1" cousins
`shouldBe` Just [ "x"
, "parent"
, "grandparent"
, "uncle"
, "cousin-1" ]
it "Can find path from nodes other than x" $
tracePathBetween "a" "c" flat
`shouldBe` Just [ "a"
, "root"
, "c" ]
it "Can find path not involving root" $
tracePathBetween "x" "sibling-1" rootNotNeeded
`shouldBe` Just [ "x"
, "parent"
, "sibling-1" ]
it "Cannot trace if destination does not exist" $
tracePathBetween "x" "NOT THERE" cousins
`shouldBe` Nothing
it "Cannot trace if source does not exist" $
tracePathBetween "NOT THERE" "x" cousins
`shouldBe` Nothing
leaf :: a -> Tree a
leaf v = Node v []
1 ) The graph is actually rooted on the requested node .
2 ) The sorted edge list is correct .
This function helps check the second condition .
toEdges :: Ord a => Tree a -> [(a, a)]
toEdges (Node r ts) = map ((r,) . rootLabel) ts ++ concatMap toEdges ts
singleton , simple , flat , kids , nested , cousins :: Tree String
singleton', simple', flat', kids', nested', cousins' :: Tree String
singleton = leaf "x"
singleton' = leaf "x"
simple = Node "parent"
[ leaf "x"
, leaf "sibling"
]
simple' = Node "x"
[ Node "parent"
[ leaf "sibling"
]
]
flat = Node "root"
[ leaf "a"
, leaf "b"
, leaf "x"
, leaf "c"
]
flat' = Node "x"
[ Node "root"
[ leaf "a"
, leaf "b"
, leaf "c"
]
]
kids = Node "root"
[ Node "x"
[ leaf "kid-0"
, leaf "kid-1"
]
]
kids' = Node "x"
[ leaf "kid-0"
, leaf "kid-1"
, leaf "root"
]
nested = Node "level-0"
[ Node "level-1"
[ Node "level-2"
[ Node "level-3"
[ leaf "x"
]
]
]
]
nested' = Node "x"
[ Node "level-3"
[ Node "level-2"
[ Node "level-1"
[ leaf "level-0"
]
]
]
]
cousins = Node "grandparent"
[ Node "parent"
[ Node "x"
[ leaf "kid-a"
, leaf "kid-b"
]
, leaf "sibling-0"
, leaf "sibling-1"
]
, Node "uncle"
[ leaf "cousin-0"
, leaf "cousin-1"
]
]
cousins' = Node "x"
[ leaf "kid-a"
, leaf "kid-b"
, Node "parent"
[ leaf "sibling-0"
, leaf "sibling-1"
, Node "grandparent"
[ Node "uncle"
[ leaf "cousin-0"
, leaf "cousin-1"
]
]
]
]
rootNotNeeded :: Tree String
rootNotNeeded = Node "grandparent"
[ Node "parent"
[ leaf "x"
, leaf "sibling-0"
, leaf "sibling-1"
]
]
-- File: Css.hs (from Abbath/Calculator)
{-# LANGUAGE OverloadedStrings #-}
module Calculator.Css where
import Clay
getCss :: Css
getCss = do
h1 ? fontSize (px 24)
body ? textAlign (alignSide sideCenter)
input # "type=\"input\"" ? do
width (px 600)
height (px 50)
fontSize (px 18)
postCss :: Css
postCss = do
h1 ? fontSize (px 24)
body ? textAlign (alignSide sideCenter)
input # "type=\"input\"" ? do
width (px 600)
height (px 50)
fontSize (px 18)
input # "type=\"submit\"" ? do
fontFamily ["Tahoma"] [sansSerif]
color white
background red
borderStyle none
ul ? do
listStyleType none
fontFamily ["Tahoma"] [sansSerif]
table ? do
width (px 600)
"table-layout" -: "fixed"
textAlign (alignSide sideCenter)
marginRight auto
marginLeft auto
td ? ("word-wrap" -: "break-word")
tr # ":nth-child(even)" ? backgroundColor "#c0c0c0"
tr # ":nth-child(odd)" ? backgroundColor "#e0e0e0"
;;;; File: api.lisp (from sjl/flax)
(in-package :flax.drawing)
;;;; Parameters ---------------------------------------------------------------
(defparameter *black* (rgb 0 0 0))
(defparameter *white* (rgb 1 1 1))
;;;; Canvas -------------------------------------------------------------------
(defclass* canvas ()
((width :type (integer 1))
(height :type (integer 1))
(padding :type (single-float 0.0 0.5) :initform 0.03)
(output-transformation :type mat3)))
(defun recompute-output-transformation (canvas)
(setf (output-transformation canvas)
(transformation
(place (vec 0 0)
(vec (coerce (width canvas) 'single-float)
(coerce (height canvas) 'single-float))
:padding (padding canvas)))))
(defmethod initialize-instance :after ((canvas canvas) &key)
(recompute-output-transformation canvas))
(define-with-macro canvas width height)
(defgeneric make-canvas (type &key &allow-other-keys))
;;;; Utils --------------------------------------------------------------------
(defun-inline homogenize (v)
(vec3 (vx v) (vy v) 1))
(defun convert-coordinate (canvas coordinate)
(let ((c (m* (output-transformation canvas) coordinate)))
(values (vx3 c) (vy3 c))))
(defun convert-magnitude (canvas magnitude)
(ntransform magnitude (output-transformation canvas)))
(defmacro with-coordinate (canvas-symbol binding &body body)
(ecase (length binding)
(2 (destructuring-bind (magnitude-symbol value) binding
`(let ((,magnitude-symbol (convert-magnitude ,canvas-symbol ,value)))
,@body)))
(3 (destructuring-bind (x-symbol y-symbol value) binding
`(multiple-value-bind (,x-symbol ,y-symbol)
(convert-coordinate ,canvas-symbol ,value)
,@body)))))
(defmacro with-coordinates (canvas bindings &body body)
(once-only (canvas)
`(nest
,@(mapcar (lambda (binding)
`(with-coordinate ,canvas ,binding))
bindings)
(progn ,@body))))
(defun coord-to-string (c)
(format nil "(~A, ~A)" (vx c) (vy c)))
(defun coord-to-pair (canvas c)
(with-coordinates canvas ((x y c))
(cons x y)))
(defun coords-to-pairs (canvas cs)
(loop :for c :in cs :collect (coord-to-pair canvas c)))
;;;; Drawables ----------------------------------------------------------------
(defclass* drawable ()
((opacity :type (double-float 0.0d0 1.0d0))
(color :type color)))
(defgeneric draw (canvas drawing-object))
;;;; Paths --------------------------------------------------------------------
(defclass* path (drawable)
((points :type list)))
(defun normalize-point (point)
(if (listp point)
point
(list point)))
(defun normalize-points (points)
(mapcar #'normalize-point points))
(defun path (points &key (opacity 1.0d0) (color *black*))
(make-instance 'path
:points (mapcar-curried #'mapcar #'homogenize (normalize-points points))
:color color
:opacity (coerce opacity 'double-float)))
(defmethod print-object ((o path) s)
(print-unreadable-object (o s :type t :identity nil)
(format s "~{~A~^ -> ~}"
(mapcar (compose #'coord-to-string #'first) (points o)))))
(defmethod ntransform ((path path) transformation)
(dolist (ps (points path))
(dolist (p ps)
(ntransform p transformation)))
path)
;;;; Triangles ----------------------------------------------------------------
(defclass* triangle (drawable)
((a :type vec3)
(b :type vec3)
(c :type vec3)))
(defun triangle (a b c &key (opacity 1.0d0) (color *black*))
(make-instance 'triangle :a (homogenize a) :b (homogenize b) :c (homogenize c)
:color color
:opacity (coerce opacity 'double-float)))
(defmethod print-object ((o triangle) s)
(print-unreadable-object (o s :type t :identity nil)
(format s "(~D, ~D) (~D, ~D) (~D, ~D)"
(vx (a o))
(vy (a o))
(vx (b o))
(vy (b o))
(vx (c o))
(vy (c o)))))
(defmethod ntransform ((triangle triangle) transformation)
(ntransform (a triangle) transformation)
(ntransform (b triangle) transformation)
(ntransform (c triangle) transformation)
triangle)
;;;; Rectangles ---------------------------------------------------------------
(defclass* rectangle (drawable)
((a :type vec3)
(b :type vec3)
(round-corners :type float :initform 0.0)))
(defun rectangle (a b &key (opacity 1.0d0) (color *black*) round-corners)
(make-instance 'rectangle :a (homogenize a) :b (homogenize b)
:color color
:opacity (coerce opacity 'double-float)
:round-corners (or round-corners 0.0)))
(defmethod print-object ((o rectangle) s)
(print-unreadable-object (o s :type t :identity nil)
(format s "(~D, ~D) (~D, ~D)"
(vx (a o))
(vy (a o))
(vx (b o))
(vy (b o)))))
(defun compute-corner-rounding (canvas rect)
(if-let ((rounding (round-corners rect)))
(with-canvas (canvas)
(* rounding
(* (- 1.0 (* 2 (padding canvas)))
(min height width))))
0))
(defmethod ntransform ((rectangle rectangle) transformation)
(ntransform (a rectangle) transformation)
(ntransform (b rectangle) transformation)
(zapf (round-corners rectangle) (ntransform % transformation))
rectangle)
;;;; Circles ------------------------------------------------------------------
(defclass* circle (drawable)
((center :type vec3)
(radius :type single-float)))
(defun circle (center radius &key (opacity 1.0d0) (color *black*))
(make-instance 'circle :center (homogenize center) :radius radius
:color color
:opacity (coerce opacity 'double-float)))
(defmethod print-object ((o circle) s)
(print-unreadable-object (o s :type t :identity nil)
(format s "(~D, ~D) radius ~D"
(vx (center o))
(vy (center o))
(radius o))))
(defmethod ntransform ((circle circle) transformation)
(ntransform (center circle) transformation)
;; For non-aspect-ratio-preserving transformations, we want to keep circles
;; as circles, but ensure they fit within the new bounding box. So we take
;; the smaller of the two possible radius transformations.
(let ((a (vec 0 0 1))
(b (vec 1 1 1)))
(ntransform a transformation)
(ntransform b transformation)
(let ((c (v- a b)))
(mulf (radius circle) (min (abs (vx c)) (abs (vy c))))))
circle)
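;; Worked example (values made up): under a transformation that scales x by
;; 2.0 and y by 0.5, the points (0,0) and (1,1) map to (0,0) and (2.0,0.5),
;; so the candidate factors are 2.0 and 0.5 and the radius is multiplied by
;; the smaller one, 0.5, keeping the circle inside the transformed box.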
;;;; Points -------------------------------------------------------------------
(defclass* point (drawable)
((location :type vec3)))
(defun point (location &key (opacity 1.0d0) (color *black*))
(make-instance 'point :location (homogenize location)
:color color
:opacity (coerce opacity 'double-float)))
(defmethod print-object ((o point) s)
(print-unreadable-object (o s :type t :identity nil)
(format s "(~D, ~D)"
(vx (location o))
(vy (location o)))))
(defmethod ntransform ((point point) transformation)
(ntransform (location point) transformation)
point)
;;;; Glyph --------------------------------------------------------------------
(defclass* glyph (drawable)
((pos :type vec3)
(width :type single-float)
(ch :type character)
(paths :type list)))
(defun glyph (position width character &key (opacity 1.0d0) (color *black*))
(make-instance 'glyph
:pos (homogenize position)
:width (coerce width 'single-float)
:ch character
:color color
:opacity (coerce opacity 'double-float)))
(defun recompute-glyph-paths (glyph)
(let ((paths (letter-paths (ch glyph)))
(size (* 2 (width glyph))))
(ntransform paths (transformation
(scale size size)
(translate (vx (pos glyph))
(vy (pos glyph)))))
(setf (paths glyph) paths)))
(defmethod initialize-instance :after ((glyph glyph) &key)
(recompute-glyph-paths glyph))
(defmethod print-object ((o glyph) s)
(print-unreadable-object (o s :type t :identity nil)
(format s "~A ~A" (ch o) (pos o))))
(defmethod ntransform ((glyph glyph) transformation)
(ntransform (pos glyph) transformation)
(ntransformf (width glyph) transformation)
(ntransformf (paths glyph) transformation)
;; (recompute-glyph-paths glyph)
glyph)
(defmethod draw (canvas (glyph glyph))
(map-curried #'draw canvas (paths glyph)))
;;;; Text ---------------------------------------------------------------------
(defclass* text (drawable)
((pos :type vec3)
(letter-width :type single-float)
(letter-spacing :type single-float)
(content :type string)
(glyphs :type list)))
(defun rebuild-glyphs (text)
(setf (glyphs text)
(iterate
(with pos = (pos text))
(with y = (vy (pos text)))
(with space = (+ (letter-width text) (letter-spacing text)))
(with scale = (/ (letter-width text) 0.5))
(for ch :in-string (content text))
(for pch :previous ch)
(for x :from (vx pos) :by space)
(incf x (* (kern pch ch) scale))
(collect (glyph (vec x y) (letter-width text) ch
:opacity (opacity text)
:color (color text))))))
(defun text (position letter-width content &key (letter-spacing 0.0) (opacity 1.0d0) (color *black*))
(make-instance 'text
:pos (homogenize position)
:letter-width (coerce letter-width 'single-float)
:letter-spacing (coerce letter-spacing 'single-float)
:content content
:color color
:opacity (coerce opacity 'double-float)))
(defmethod initialize-instance :after ((text text) &key)
(rebuild-glyphs text))
(defmethod print-object ((o text) s)
(print-unreadable-object (o s :type t :identity nil)
(format s "~S ~A"
(content o)
(pos o))))
(defmethod draw (canvas (text text))
(map-curried #'draw canvas (glyphs text)))
(defmethod ntransform ((text text) transformation)
(ntransform (pos text) transformation)
(ntransformf (letter-width text) transformation)
(rebuild-glyphs text)
text)
;;;; Rendering ----------------------------------------------------------------
(defgeneric render-object (canvas object))
(defun render (canvas objects)
(map-curried #'render-object canvas objects))
;;;; File Writing -------------------------------------------------------------
(defgeneric write-file (canvas filename))
;;;; File Extensions ----------------------------------------------------------
(defgeneric file-extension (type))
(defmethod file-extension (type)
(string-downcase (symbol-name type)))
;;;; Toplevel -----------------------------------------------------------------
(defun full-filename (filename canvas-type)
(format nil "~A.~A" filename (file-extension canvas-type)))
(defmacro with-rendering
((canvas-symbol canvas-type filename width height &key
(padding 0.03)
(background '(rgb 1 1 1)))
&body body)
(once-only (canvas-type)
`(progn
#+sbcl (sb-ext:gc :full t)
(let ((,canvas-symbol (make-canvas ,canvas-type
:height ,height
:width ,width
:padding ,padding
:background ,background)))
(multiple-value-prog1 ,@body
(write-file ,canvas-symbol (full-filename ,filename ,canvas-type)))))))
;;;; Usage --------------------------------------------------------------------
;;;; Implementations ----------------------------------------------------------
;;; To implement a new type of canvas, you'll need to:
;;;
;;; * Add a new subclass of canvas.
;;; * Implement make-canvas.
;;; * Implement all the drawing methods for the various shapes.
;;; * Implement render-object (which should call draw and maybe do other stuff).
;;; * Implement write-file.
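;;;
;;; A minimal sketch of such a backend (illustrative only: the DUMMY-CANVAS
;;; name and the list-of-elements representation are assumptions, not part
;;; of this library):
;;;
;;;   (defclass dummy-canvas (canvas)
;;;     ((elements :initform nil :accessor elements)))
;;;
;;;   (defmethod make-canvas ((type (eql :dummy)) &key height width &allow-other-keys)
;;;     (make-instance 'dummy-canvas :height height :width width))
;;;
;;;   (defmethod draw ((canvas dummy-canvas) (object point))
;;;     (push (cons :point (location object)) (elements canvas)))
;;;
;;;   (defmethod render-object ((canvas dummy-canvas) object)
;;;     (draw canvas object))
;;;
;;;   (defmethod write-file ((canvas dummy-canvas) filename)
;;;     (with-open-file (out filename :direction :output :if-exists :supersede)
;;;       (prin1 (elements canvas) out)))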
| null | https://raw.githubusercontent.com/sjl/flax/0dc4e7c0d096cd01a1009c8fbd0d96174ed48090/src/drawing/api.lisp | lisp | Parameters ---------------------------------------------------------------
Utils --------------------------------------------------------------------
Paths --------------------------------------------------------------------
Triangles ----------------------------------------------------------------
Rectangles ---------------------------------------------------------------
Circles ------------------------------------------------------------------
For non-aspect-ratio-preserving transformations, we want to keep circles
as circles, but ensure they fit within the new bounding box. So we take
Points -------------------------------------------------------------------
Glyph --------------------------------------------------------------------
(recompute-glyph-paths glyph)
Text ---------------------------------------------------------------------
Rendering ----------------------------------------------------------------
File Writing -------------------------------------------------------------
File Extensions ----------------------------------------------------------
Usage --------------------------------------------------------------------
Implementations ----------------------------------------------------------
To implement a new type of canvas, you'll need to:
* Add a new subclass of canvas.
* Implement make-canvas.
* Implement all the drawing methods for the various shapes.
* Implement render-object (which should call draw and maybe do other stuff).
* Implement write-file. | (in-package :flax.drawing)
(defparameter *black* (rgb 0 0 0))
(defparameter *white* (rgb 1 1 1))
Canvas -------------------------------------------------------------------
(defclass* canvas ()
((width :type (integer 1))
(height :type (integer 1))
(padding :type (single-float 0.0 0.5) :initform 0.03)
(output-transformation :type mat3)))
(defun recompute-output-transformation (canvas)
(setf (output-transformation canvas)
(transformation
(place (vec 0 0)
(vec (coerce (width canvas) 'single-float)
(coerce (height canvas) 'single-float))
:padding (padding canvas)))))
(defmethod initialize-instance :after ((canvas canvas) &key)
(recompute-output-transformation canvas))
(define-with-macro canvas width height)
(defgeneric make-canvas (type &key &allow-other-keys))
(defun-inline homogenize (v)
(vec3 (vx v) (vy v) 1))
(defun convert-coordinate (canvas coordinate)
(let ((c (m* (output-transformation canvas) coordinate)))
(values (vx3 c) (vy3 c))))
(defun convert-magnitude (canvas magnitude)
(ntransform magnitude (output-transformation canvas)))
(defmacro with-coordinate (canvas-symbol binding &body body)
(ecase (length binding)
(2 (destructuring-bind (magnitude-symbol value) binding
`(let ((,magnitude-symbol (convert-magnitude ,canvas-symbol ,value)))
,@body)))
(3 (destructuring-bind (x-symbol y-symbol value) binding
`(multiple-value-bind (,x-symbol ,y-symbol)
(convert-coordinate ,canvas-symbol ,value)
,@body)))))
(defmacro with-coordinates (canvas bindings &body body)
(once-only (canvas)
`(nest
,@(mapcar (lambda (binding)
`(with-coordinate ,canvas ,binding))
bindings)
(progn ,@body))))
(defun coord-to-string (c)
(format nil "(~A, ~A)" (vx c) (vy c)))
(defun coord-to-pair (canvas c)
(with-coordinates canvas ((x y c))
(cons x y)))
(defun coords-to-pairs (canvas cs)
(loop :for c :in cs :collect (coord-to-pair canvas c)))
;;;; Drawables ----------------------------------------------------------------
(defclass* drawable ()
((opacity :type (double-float 0.0d0 1.0d0))
(color :type color)))
(defgeneric draw (canvas drawing-object))
(defclass* path (drawable)
((points :type list)))
(defun normalize-point (point)
(if (listp point)
point
(list point)))
(defun normalize-points (points)
(mapcar #'normalize-point points))
(defun path (points &key (opacity 1.0d0) (color *black*))
(make-instance 'path
:points (mapcar-curried #'mapcar #'homogenize (normalize-points points))
:color color
:opacity (coerce opacity 'double-float)))
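;; Usage sketch: each element of POINTS may be a single coordinate or a list of
;; coordinates (normalize-point wraps bare points in a list); how multi-point
;; elements are interpreted (e.g. as curve control points) is left to the
;; canvas backend.  Coordinates here are assumed to lie in the unit square.
;;
;;   (path (list (vec 0 0) (vec 0.5 1) (vec 1 0)) :color *black* :opacity 0.8)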
(defmethod print-object ((o path) s)
(print-unreadable-object (o s :type t :identity nil)
(format s "~{~A~^ -> ~}"
(mapcar (compose #'coord-to-string #'first) (points o)))))
(defmethod ntransform ((path path) transformation)
(dolist (ps (points path))
(dolist (p ps)
(ntransform p transformation)))
path)
(defclass* triangle (drawable)
((a :type vec3)
(b :type vec3)
(c :type vec3)))
(defun triangle (a b c &key (opacity 1.0d0) (color *black*))
(make-instance 'triangle :a (homogenize a) :b (homogenize b) :c (homogenize c)
:color color
:opacity (coerce opacity 'double-float)))
(defmethod print-object ((o triangle) s)
(print-unreadable-object (o s :type t :identity nil)
(format s "(~D, ~D) (~D, ~D) (~D, ~D)"
(vx (a o))
(vy (a o))
(vx (b o))
(vy (b o))
(vx (c o))
(vy (c o)))))
(defmethod ntransform ((triangle triangle) transformation)
(ntransform (a triangle) transformation)
(ntransform (b triangle) transformation)
(ntransform (c triangle) transformation)
triangle)
(defclass* rectangle (drawable)
((a :type vec3)
(b :type vec3)
(round-corners :type float :initform 0.0)))
(defun rectangle (a b &key (opacity 1.0d0) (color *black*) round-corners)
(make-instance 'rectangle :a (homogenize a) :b (homogenize b)
:color color
:opacity (coerce opacity 'double-float)
:round-corners (or round-corners 0.0)))
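;; Usage sketch (assumed unit-square coordinates): an axis-aligned rectangle
;; with slightly rounded corners.
;;
;;   (rectangle (vec 0.1 0.1) (vec 0.9 0.9) :color *black* :round-corners 0.01)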
(defmethod print-object ((o rectangle) s)
(print-unreadable-object (o s :type t :identity nil)
(format s "(~D, ~D) (~D, ~D)"
(vx (a o))
(vy (a o))
(vx (b o))
(vy (b o)))))
(defun compute-corner-rounding (canvas rect)
(if-let ((rounding (round-corners rect)))
(with-canvas (canvas)
(* rounding
(* (- 1.0 (* 2 (padding canvas)))
(min height width))))
0))
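;; Worked example (assumed canvas size): for a 1000x800 canvas with padding
;; 0.03 and :round-corners 0.02, this yields 0.02 * (1 - 0.06) * 800 = 15.04
;; output units of corner radius.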
(defmethod ntransform ((rectangle rectangle) transformation)
(ntransform (a rectangle) transformation)
(ntransform (b rectangle) transformation)
(zapf (round-corners rectangle) (ntransform % transformation))
rectangle)
(defclass* circle (drawable)
((center :type vec3)
(radius :type single-float)))
(defun circle (center radius &key (opacity 1.0d0) (color *black*))
(make-instance 'circle :center (homogenize center) :radius radius
:color color
:opacity (coerce opacity 'double-float)))
(defmethod print-object ((o circle) s)
(print-unreadable-object (o s :type t :identity nil)
(format s "(~D, ~D) radius ~D"
(vx (center o))
(vy (center o))
(radius o))))
(defmethod ntransform ((circle circle) transformation)
(ntransform (center circle) transformation)
  ;; For non-aspect-ratio-preserving transformations, we want to keep circles
  ;; as circles, but ensure they fit within the new bounding box.  So we take
  ;; the smaller of the two possible radius transformations.
(let ((a (vec 0 0 1))
(b (vec 1 1 1)))
(ntransform a transformation)
(ntransform b transformation)
(let ((c (v- a b)))
(mulf (radius circle) (min (abs (vx c)) (abs (vy c))))))
circle)
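;; Example: under a transformation that scales x by 2 and y by 3, the unit
;; offset (1, 1) maps to lengths |dx| = 2 and |dy| = 3 (translation cancels in
;; the subtraction), so the radius is multiplied by (min 2 3) = 2 -- the circle
;; stays a circle and still fits the narrower dimension of the transformed box.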
(defclass* point (drawable)
((location :type vec3)))
(defun point (location &key (opacity 1.0d0) (color *black*))
(make-instance 'point :location (homogenize location)
:color color
:opacity (coerce opacity 'double-float)))
(defmethod print-object ((o point) s)
(print-unreadable-object (o s :type t :identity nil)
(format s "(~D, ~D)"
(vx (location o))
(vy (location o)))))
(defmethod ntransform ((point point) transformation)
(ntransform (location point) transformation)
point)
(defclass* glyph (drawable)
((pos :type vec3)
(width :type single-float)
(ch :type character)
(paths :type list)))
(defun glyph (position width character &key (opacity 1.0d0) (color *black*))
(make-instance 'glyph
:pos (homogenize position)
:width (coerce width 'single-float)
:ch character
:color color
:opacity (coerce opacity 'double-float)))
(defun recompute-glyph-paths (glyph)
(let ((paths (letter-paths (ch glyph)))
(size (* 2 (width glyph))))
(ntransform paths (transformation
(scale size size)
(translate (vx (pos glyph))
(vy (pos glyph)))))
(setf (paths glyph) paths)))
(defmethod initialize-instance :after ((glyph glyph) &key)
(recompute-glyph-paths glyph))
(defmethod print-object ((o glyph) s)
(print-unreadable-object (o s :type t :identity nil)
(format s "~A ~A" (ch o) (pos o))))
(defmethod ntransform ((glyph glyph) transformation)
(ntransform (pos glyph) transformation)
(ntransformf (width glyph) transformation)
(ntransformf (paths glyph) transformation)
glyph)
(defmethod draw (canvas (glyph glyph))
(map-curried #'draw canvas (paths glyph)))
(defclass* text (drawable)
((pos :type vec3)
(letter-width :type single-float)
(letter-spacing :type single-float)
(content :type string)
(glyphs :type list)))
(defun rebuild-glyphs (text)
(setf (glyphs text)
(iterate
(with pos = (pos text))
(with y = (vy (pos text)))
(with space = (+ (letter-width text) (letter-spacing text)))
(with scale = (/ (letter-width text) 0.5))
(for ch :in-string (content text))
(for pch :previous ch)
(for x :from (vx pos) :by space)
(incf x (* (kern pch ch) scale))
(collect (glyph (vec x y) (letter-width text) ch
:opacity (opacity text)
:color (color text))))))
(defun text (position letter-width content &key (letter-spacing 0.0) (opacity 1.0d0) (color *black*))
(make-instance 'text
:pos (homogenize position)
:letter-width (coerce letter-width 'single-float)
:letter-spacing (coerce letter-spacing 'single-float)
:content content
:color color
:opacity (coerce opacity 'double-float)))
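;; Usage sketch (assumed position and sizes): a short label near the left edge,
;; with a letter width of 0.05 units and a little extra spacing.
;;
;;   (text (vec 0.1 0.5) 0.05 "HELLO" :letter-spacing 0.01 :color *black*)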
(defmethod initialize-instance :after ((text text) &key)
(rebuild-glyphs text))
(defmethod print-object ((o text) s)
(print-unreadable-object (o s :type t :identity nil)
(format s "~S ~A"
(content o)
(pos o))))
(defmethod draw (canvas (text text))
(map-curried #'draw canvas (glyphs text)))
(defmethod ntransform ((text text) transformation)
(ntransform (pos text) transformation)
(ntransformf (letter-width text) transformation)
(rebuild-glyphs text)
text)
(defgeneric render-object (canvas object))
(defun render (canvas objects)
(map-curried #'render-object canvas objects))
(defgeneric write-file (canvas filename))
(defgeneric file-extension (type))
(defmethod file-extension (type)
(string-downcase (symbol-name type)))
;;;; Toplevel -----------------------------------------------------------------
(defun full-filename (filename canvas-type)
(format nil "~A.~A" filename (file-extension canvas-type)))
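;; Example (assuming a canvas type named :svg): (full-filename "sketch" :svg)
;; returns "sketch.svg", since the default FILE-EXTENSION method just downcases
;; the type's symbol name unless a backend defines a more specific method.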
(defmacro with-rendering
((canvas-symbol canvas-type filename width height &key
(padding 0.03)
(background '(rgb 1 1 1)))
&body body)
(once-only (canvas-type)
`(progn
#+sbcl (sb-ext:gc :full t)
(let ((,canvas-symbol (make-canvas ,canvas-type
:height ,height
:width ,width
:padding ,padding
:background ,background)))
(multiple-value-prog1 ,@body
(write-file ,canvas-symbol (full-filename ,filename ,canvas-type)))))))
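;; Usage sketch: the canvas type keyword and output path below are assumptions;
;; substitute whichever canvas implementations this system actually provides.
;;
;;   (with-rendering (canvas :svg "out/example" 800 600 :padding 0.05)
;;     (render canvas (list (rectangle (vec 0.1 0.1) (vec 0.9 0.9) :round-corners 0.02)
;;                          (circle (vec 0.5 0.5) 0.25 :opacity 0.5d0))))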