_id
stringlengths 64
64
| repository
stringlengths 6
84
| name
stringlengths 4
110
| content
stringlengths 0
248k
| license
null | download_url
stringlengths 89
454
| language
stringclasses 7
values | comments
stringlengths 0
74.6k
| code
stringlengths 0
248k
|
---|---|---|---|---|---|---|---|---|
58a535a326d722bd1daa24079188c2d2ef886ca93523cc5d494bc0b4a600aab9 | PacktWorkshops/The-Clojure-Workshop | repl.clj | (in-ns 'garden)
(def vegetables ["cucumber" "carrot"])
(def fruits ["orange" "apple" "melon"])
(in-ns 'shops)
(clojure.core/refer 'garden :rename '{fruits owoce})
(clojure.core/refer 'clojure.test :only '(are deftest is run-tests))
(clojure.core/refer 'clojure.core)
(deftest vegetables-test
(is (= vegetables
["cucumber" "carrot"])))
(deftest owoce-test
(is (= owoce
["orange" "apple" "melon"])))
(use '[clojure.string :only [split]])
(deftest split-test
(are [expected actual] (= expected actual)
["Clojure" "workshop"] (split "Clojure workshop" #" ")
["fruits" "garden"] (split "fruits garden" #" ")))
(use '[clojure.edn :rename {read-string string-read}])
(deftest class-test
(is (= java.util.Date
(class (string-read "#inst \"1989-02-06T13:20:50.52Z\"")))))
(run-tests) | null | https://raw.githubusercontent.com/PacktWorkshops/The-Clojure-Workshop/3d309bb0e46a41ce2c93737870433b47ce0ba6a2/Chapter08/tests/Exercise8.06/repl.clj | clojure | (in-ns 'garden)
(def vegetables ["cucumber" "carrot"])
(def fruits ["orange" "apple" "melon"])
(in-ns 'shops)
(clojure.core/refer 'garden :rename '{fruits owoce})
(clojure.core/refer 'clojure.test :only '(are deftest is run-tests))
(clojure.core/refer 'clojure.core)
(deftest vegetables-test
(is (= vegetables
["cucumber" "carrot"])))
(deftest owoce-test
(is (= owoce
["orange" "apple" "melon"])))
(use '[clojure.string :only [split]])
(deftest split-test
(are [expected actual] (= expected actual)
["Clojure" "workshop"] (split "Clojure workshop" #" ")
["fruits" "garden"] (split "fruits garden" #" ")))
(use '[clojure.edn :rename {read-string string-read}])
(deftest class-test
(is (= java.util.Date
(class (string-read "#inst \"1989-02-06T13:20:50.52Z\"")))))
(run-tests) |
|
faaca021c91da43bff951e9fad165bf865d8b71084aa4494280c79c061dab367 | ruhler/smten | Name.hs |
module Smten.Plugin.Name (
nameCG, qnameCG,
tynameCG, qtynameCG,
guardnmCG, qguardnmCG,
fieldnmCG, qfieldnmCG,
nullnmCG, qnullnmCG,
connmCG, qconnmCG,
denewtynmCG, qdenewtynmCG,
qislitnmCG,
) where
import Data.Char
import Data.Functor
import GhcPlugins
import Smten.Plugin.CG
import qualified Smten.Plugin.Output.Syntax as S
nameis :: String -> Name -> Bool
nameis str nm = str == (occNameString $ nameOccName nm)
-- dename ty nm
-- Extract the module name, occurence name, and unique name for the given
-- name.
-- ty - true if this is a type constructor name.
-- nm - the name.
dename :: Bool -> Name -> (Maybe String, String, String)
dename ty nm
| nameis "[]" nm = (Just "Smten.Smten.List",
if ty then "List__" else "Nil__",
error "uniqnm for []")
| nameis ":" nm = (Just "Smten.Smten.List", "Cons__", error "uniqnm for :")
| nameis "()" nm = (Just "Smten.Smten.Unit", "Unit__", error "uniqnm for ()")
| nameis "(,)" nm = (Just "Smten.Smten.Tuple", "Tuple2__", error "uniqnm for (,)")
| nameis "(,,)" nm = (Just "Smten.Smten.Tuple", "Tuple3__", error "uniqnm for (,,)")
| nameis "(,,,)" nm = (Just "Smten.Smten.Tuple", "Tuple4__", error "uniqnm for (,,,)")
| otherwise =
let modnm = moduleNameString . moduleName <$> nameModule_maybe nm
occnm = occNameString $ nameOccName nm
unqnm = show $ nameUnique nm
in (modnm, occnm, unqnm)
-- For class instances, symbol names are generated with the type embedded.
Here we remap those symbols for wired types so we can use ghc auto - deriving
-- to auto-derive the class instances by auto-deriving for the
-- unwired counterpart.
--
For example , to derive for ( , ) , we can now say :
deriving instance ( Prelude . Eq a , Prelude . Eq b ) = > Prelude . Eq ( Tuple2 _ _ a b )
And anyone using the Eq instance for ( , ) will use the symbols defined
-- by the auto-derivation.
dewire :: String -> String
dewire ('[':']':xs) = "List__" ++ dewire xs
dewire ('(':')':xs) = "Unit__" ++ dewire xs
dewire ('(':',':')':xs) = "Tuple2__" ++ dewire xs
dewire ('(':',':',':')':xs) = "Tuple3__" ++ dewire xs
dewire ('(':',':',':',':')':xs) = "Tuple4__" ++ dewire xs
dewire (x:xs) = x : dewire xs
dewire [] = []
Given a base name , turn it into an acceptable name .
-- Returns 'True' if the resulting name is symbolic, false otherwise.
--
-- If 'nosym' is True, then never return a symbolic name.
resym :: Bool -> String -> (Bool, String)
resym nosym nm =
let issym :: Char -> Bool
issym c = c `elem` "!#$%&*+./<=>?@\\^|-~:"
desym :: Char -> Char
desym c | isAlphaNum c = c
desym c | c == '#' = c
desym c | c == '_' = c
desym c | c == ':' = toEnum $ fromEnum 'A' + (fromEnum c `mod` 26)
desym c = toEnum $ fromEnum 'a' + (fromEnum c `mod` 26)
in case (nosym, nm) of
(False, _) | all issym nm -> (True, nm)
_ -> (False, map desym nm)
-- nmCG ty f qlf nm
-- translate a name to a smten name.
-- ty - True if this is a type constructor name.
-- f - transformation to perform on the base of the name.
-- qlf - True to generate a qualified version of the name.
-- False to generate an unqualified version of the name.
-- nm - The name to transform.
nmCG :: Bool -> (String -> String) -> Bool -> Name -> CG S.Name
nmCG ty f qlf nm
| nameis "(->)" nm = return "(->)"
| nameis "(#,#)" nm = return "(#,#)"
| otherwise = do
let (modnm, occnm, unqnm) = dename ty nm
useuniq = (not $ isExternalName nm)
|| (occnm == "main" && modnm /= Just ":Main")
isconsym = (head occnm == ':')
(issym, occnm') = resym (useuniq || isconsym) (dewire occnm)
-- Append the unique to the given name.
-- addunqnm "foo" "bar" = "foo_bar"
-- addunqnm "foo#" "bar" = "foo_bar#"
addunqnm :: String -> String -> String
addunqnm [] nm = "_" ++ unqnm
addunqnm "#" nm = "_" ++ unqnm ++ "#"
addunqnm (x:xs) nm = x : addunqnm xs unqnm
unqlf = f $ if useuniq
then addunqnm occnm' unqnm
else occnm'
full <- case (qlf, modnm) of
(True, Just ":Main") -> return unqlf
(True, Just v) -> usequalified (toGenMod v) unqlf
_ -> return unqlf
return $ if issym then "(" ++ full ++ ")" else full
-- Generate code for an unqualified name.
nameCG :: Name -> CG S.Name
nameCG = nmCG False id False
-- Generate code for a qualified name.
qnameCG :: Name -> CG S.Name
qnameCG = nmCG False id True
tynameCG :: Name -> CG S.Name
tynameCG = nmCG True id False
qtynameCG :: Name -> CG S.Name
qtynameCG = nmCG True id True
guardnmCG :: Name -> CG S.Name
guardnmCG = nmCG False ("gd" ++) False
qguardnmCG :: Name -> CG S.Name
qguardnmCG = nmCG False ("gd" ++) True
fieldnmCG :: Int -> Name -> CG S.Name
fieldnmCG i = nmCG False (("fl" ++ show i) ++ ) False
qfieldnmCG :: Int -> Name -> CG S.Name
qfieldnmCG i = nmCG False (("fl" ++ show i) ++) True
nullnmCG :: Name -> CG S.Name
nullnmCG = nmCG True ("__Null" ++) False
qnullnmCG :: Name -> CG S.Name
qnullnmCG = nmCG True ("__Null" ++) True
-- Name of the constructor function for a given constructor.
connmCG :: Name -> CG S.Name
connmCG = nmCG False ("__" ++) False
qconnmCG :: Name -> CG S.Name
qconnmCG = nmCG False ("__" ++) True
denewtynmCG :: Name -> CG S.Name
denewtynmCG = nmCG True ("__deNewTy" ++) False
qdenewtynmCG :: Name -> CG S.Name
qdenewtynmCG = nmCG True ("__deNewTy" ++) True
Name of isLitXXX function for Int # , Char # types .
qislitnmCG :: Name -> CG S.Name
qislitnmCG = nmCG True ("__isLit" ++) True
| null | https://raw.githubusercontent.com/ruhler/smten/16dd37fb0ee3809408803d4be20401211b6c4027/smten/Smten/Plugin/Name.hs | haskell | dename ty nm
Extract the module name, occurence name, and unique name for the given
name.
ty - true if this is a type constructor name.
nm - the name.
For class instances, symbol names are generated with the type embedded.
to auto-derive the class instances by auto-deriving for the
unwired counterpart.
by the auto-derivation.
Returns 'True' if the resulting name is symbolic, false otherwise.
If 'nosym' is True, then never return a symbolic name.
nmCG ty f qlf nm
translate a name to a smten name.
ty - True if this is a type constructor name.
f - transformation to perform on the base of the name.
qlf - True to generate a qualified version of the name.
False to generate an unqualified version of the name.
nm - The name to transform.
Append the unique to the given name.
addunqnm "foo" "bar" = "foo_bar"
addunqnm "foo#" "bar" = "foo_bar#"
Generate code for an unqualified name.
Generate code for a qualified name.
Name of the constructor function for a given constructor. |
module Smten.Plugin.Name (
nameCG, qnameCG,
tynameCG, qtynameCG,
guardnmCG, qguardnmCG,
fieldnmCG, qfieldnmCG,
nullnmCG, qnullnmCG,
connmCG, qconnmCG,
denewtynmCG, qdenewtynmCG,
qislitnmCG,
) where
import Data.Char
import Data.Functor
import GhcPlugins
import Smten.Plugin.CG
import qualified Smten.Plugin.Output.Syntax as S
nameis :: String -> Name -> Bool
nameis str nm = str == (occNameString $ nameOccName nm)
dename :: Bool -> Name -> (Maybe String, String, String)
dename ty nm
| nameis "[]" nm = (Just "Smten.Smten.List",
if ty then "List__" else "Nil__",
error "uniqnm for []")
| nameis ":" nm = (Just "Smten.Smten.List", "Cons__", error "uniqnm for :")
| nameis "()" nm = (Just "Smten.Smten.Unit", "Unit__", error "uniqnm for ()")
| nameis "(,)" nm = (Just "Smten.Smten.Tuple", "Tuple2__", error "uniqnm for (,)")
| nameis "(,,)" nm = (Just "Smten.Smten.Tuple", "Tuple3__", error "uniqnm for (,,)")
| nameis "(,,,)" nm = (Just "Smten.Smten.Tuple", "Tuple4__", error "uniqnm for (,,,)")
| otherwise =
let modnm = moduleNameString . moduleName <$> nameModule_maybe nm
occnm = occNameString $ nameOccName nm
unqnm = show $ nameUnique nm
in (modnm, occnm, unqnm)
Here we remap those symbols for wired types so we can use ghc auto - deriving
For example , to derive for ( , ) , we can now say :
deriving instance ( Prelude . Eq a , Prelude . Eq b ) = > Prelude . Eq ( Tuple2 _ _ a b )
And anyone using the Eq instance for ( , ) will use the symbols defined
dewire :: String -> String
dewire ('[':']':xs) = "List__" ++ dewire xs
dewire ('(':')':xs) = "Unit__" ++ dewire xs
dewire ('(':',':')':xs) = "Tuple2__" ++ dewire xs
dewire ('(':',':',':')':xs) = "Tuple3__" ++ dewire xs
dewire ('(':',':',':',':')':xs) = "Tuple4__" ++ dewire xs
dewire (x:xs) = x : dewire xs
dewire [] = []
Given a base name , turn it into an acceptable name .
resym :: Bool -> String -> (Bool, String)
resym nosym nm =
let issym :: Char -> Bool
issym c = c `elem` "!#$%&*+./<=>?@\\^|-~:"
desym :: Char -> Char
desym c | isAlphaNum c = c
desym c | c == '#' = c
desym c | c == '_' = c
desym c | c == ':' = toEnum $ fromEnum 'A' + (fromEnum c `mod` 26)
desym c = toEnum $ fromEnum 'a' + (fromEnum c `mod` 26)
in case (nosym, nm) of
(False, _) | all issym nm -> (True, nm)
_ -> (False, map desym nm)
nmCG :: Bool -> (String -> String) -> Bool -> Name -> CG S.Name
nmCG ty f qlf nm
| nameis "(->)" nm = return "(->)"
| nameis "(#,#)" nm = return "(#,#)"
| otherwise = do
let (modnm, occnm, unqnm) = dename ty nm
useuniq = (not $ isExternalName nm)
|| (occnm == "main" && modnm /= Just ":Main")
isconsym = (head occnm == ':')
(issym, occnm') = resym (useuniq || isconsym) (dewire occnm)
addunqnm :: String -> String -> String
addunqnm [] nm = "_" ++ unqnm
addunqnm "#" nm = "_" ++ unqnm ++ "#"
addunqnm (x:xs) nm = x : addunqnm xs unqnm
unqlf = f $ if useuniq
then addunqnm occnm' unqnm
else occnm'
full <- case (qlf, modnm) of
(True, Just ":Main") -> return unqlf
(True, Just v) -> usequalified (toGenMod v) unqlf
_ -> return unqlf
return $ if issym then "(" ++ full ++ ")" else full
nameCG :: Name -> CG S.Name
nameCG = nmCG False id False
qnameCG :: Name -> CG S.Name
qnameCG = nmCG False id True
tynameCG :: Name -> CG S.Name
tynameCG = nmCG True id False
qtynameCG :: Name -> CG S.Name
qtynameCG = nmCG True id True
guardnmCG :: Name -> CG S.Name
guardnmCG = nmCG False ("gd" ++) False
qguardnmCG :: Name -> CG S.Name
qguardnmCG = nmCG False ("gd" ++) True
fieldnmCG :: Int -> Name -> CG S.Name
fieldnmCG i = nmCG False (("fl" ++ show i) ++ ) False
qfieldnmCG :: Int -> Name -> CG S.Name
qfieldnmCG i = nmCG False (("fl" ++ show i) ++) True
nullnmCG :: Name -> CG S.Name
nullnmCG = nmCG True ("__Null" ++) False
qnullnmCG :: Name -> CG S.Name
qnullnmCG = nmCG True ("__Null" ++) True
connmCG :: Name -> CG S.Name
connmCG = nmCG False ("__" ++) False
qconnmCG :: Name -> CG S.Name
qconnmCG = nmCG False ("__" ++) True
denewtynmCG :: Name -> CG S.Name
denewtynmCG = nmCG True ("__deNewTy" ++) False
qdenewtynmCG :: Name -> CG S.Name
qdenewtynmCG = nmCG True ("__deNewTy" ++) True
Name of isLitXXX function for Int # , Char # types .
qislitnmCG :: Name -> CG S.Name
qislitnmCG = nmCG True ("__isLit" ++) True
|
bcf586ba77a51eeb55aca3ca57eee9f62ee6d4357ea8de1d42ba74859021f724 | ruricolist/serapeum | vectors.lisp | (in-package #:serapeum)
(defsubst ensure-vector (x)
"If X is a vector, return it.
Otherwise, return a vector with X as its sole element."
(if (vectorp x) x
(vector x)))
(-> vect (&rest t) (vector t *))
(defun vect (&rest initial-contents)
"Succinct constructor for adjustable vectors with fill pointers.
(vect 1 2 3)
≡ (make-array 3
:adjustable t
:fill-pointer 3
:initial-contents (list 1 2 3))
The fill pointer is placed after the last element in INITIAL-CONTENTS.
As a constructor this also has a matching definition as a Trivia
pattern for destructing."
(declare (dynamic-extent initial-contents))
(let ((len (length initial-contents)))
(make-array len
:element-type t
:adjustable t
:fill-pointer len
:initial-contents initial-contents)))
(defmacro generate-values-vector-case (vec)
TODO This should use ` tree - case ' , but it would need to be a
;; different file.
`(case (length ,vec)
,@(loop for i from 0 below 20
collect `(,i
(values ,@(loop for j from 0 below i
collect `(aref ,vec ,j)))))
(t (values-list (coerce ,vec 'list)))))
(defun values-vector (vec)
"Return the elements of VEC, a vector, as multiple values.
This is to vectors what `values-list' is to lists."
(declare (type vector vec))
(generate-values-vector-case vec))
(define-compiler-macro vect (&rest inits)
(let ((len (length inits)))
`(make-array ,len
:element-type t
:adjustable t
:fill-pointer ,len
:initial-contents
NB We use to stack - allocate the list of inits , but
;; that could result in junk in the vector; see issue
# 14 . Note that SBCL does not actually allocate
the list below ; see array-tran.lisp .
(list ,@inits))))
;;; This differs from the default Trivia `vector' pattern in that it
;;; works for adjustable vectors with fill pointers.
(defpattern vect (&rest elts)
(with-unique-names (it)
`(trivia:guard (and ,it (trivia:vector* ,@elts))
(= (length ,it) ,(length elts)))))
(eval-when (:compile-toplevel :load-toplevel :execute)
(defun expand-pad-x (call fn env vec len pad)
"Auxiliary function for `pad-X' compiler macros.
Optimizes some cases where PAD is a constant sequence."
(if (not (typep pad 'sequence)) call
(case (length pad)
(0
(cond ((constantp len env)
vec)
((constantp vec env)
;; We don't have to worry about evaluation order.
`(progn ,len ,vec))
(t
Evaluate VEC , then LEN , then return STRING .
(with-unique-names (temp)
;; Ensure LEN
`(let ((,temp ,vec))
,len
,temp)))))
(1 `(,fn ,vec ,len ',(aref pad 0)))
(t call)))))
(define-compiler-macro pad-start (&whole call vec len
&optional (pad #\Space)
&environment env)
(expand-pad-x call 'pad-start env vec len pad))
(-> pad-start (vector array-length &optional t)
vector)
(defun pad-start (vec length &optional (pad #\Space))
"Pad VEC, a vector, to LENGTH, using PAD.
If VEC is already the same length, or longer, than LENGTH, return VEC
unchanged.
(pad-start \"abc\" 3)
=> \"abc\"
If PAD is a sequence, then it is repeated before VEC to make up LENGTH.
(pad-start \"abc\" 9 \"def\")
=> \"defdefabc\"
If PAD is not a sequence, it is used to fill the remainder of VEC.
(pad-start \"abc\" 6 #\x)
=> \"xxxabc\"
PAD defaults to the space character.
This function is most useful for strings, but it can be used with any
vector. Note that the vector returned has the same element type as
VEC, so PAD must satisfy that element type.
Loosely inspired by ECMA."
(declare (vector vec)
(array-length length))
(cond ((>= (length vec) length) vec)
((typep pad 'sequence)
(let ((pad-len (length pad)))
(cond ((= pad-len 0) vec)
((= pad-len 1)
(pad-start vec length (elt pad 0)))
(t
(lret* ((offset (- length (length vec)))
(element-type (array-element-type vec))
(out (make-array length :element-type element-type)))
(replace out vec :start1 offset)
(loop for i from 0 below offset by pad-len do
(replace out pad :start1 i :end1 offset)))))))
(t
(lret* ((offset (- length (length vec)))
(element-type (array-element-type vec))
(out (make-array length :element-type element-type)))
(replace out vec :start1 offset)
(fill out pad :end offset)
out))))
(define-compiler-macro pad-end (&whole call vec len
&optional (pad #\Space)
&environment env)
(expand-pad-x call 'pad-end env vec len pad))
(-> pad-end (vector array-length &optional t)
vector)
(defun pad-end (vec length &optional (pad #\Space))
"Pad VEC, a vector, to LENGTH, using PAD.
Like `pad-start', but padding is addded to the end, rather than the
beginning."
(declare (vector vec)
(array-length length))
(cond ((>= (length vec) length) vec)
((typep pad 'sequence)
(let ((pad-len (length pad)))
(cond ((= pad-len 0) vec)
((= pad-len 1)
(pad-end vec length (elt pad 0)))
(t
(lret* ((element-type (array-element-type vec))
(out (make-array length :element-type element-type)))
(replace out vec)
(loop for i from (length vec) below length by pad-len do
(replace out pad :start1 i)))))))
(t
(lret* ((element-type (array-element-type vec))
(out (make-array length :element-type element-type)))
(replace out vec)
(fill out pad :start (length vec))
out))))
(defun vector-conc-extend (vector new-elements &optional (extension 0))
"Add NEW-ELEMENTS to the end of VECTOR, an adjustable array with a fill-pointer.
This is the practical equivalent to calling `vector-push-extend' on
each element on NEW-ELEMENTS, but should be faster.
Returns VECTOR."
(declare (type array-length extension))
(cond ((emptyp new-elements))
((single new-elements)
(vector-push-extend (elt new-elements 0) vector))
(t (let* ((size (array-dimension vector 0))
(len1 (length vector))
(len2 (length new-elements))
(diff (- size len1 len2)))
(when (minusp diff)
(adjust-array vector (max extension (- size diff))))
(incf (fill-pointer vector) len2)
(replace vector new-elements :start1 len1))))
vector)
| null | https://raw.githubusercontent.com/ruricolist/serapeum/712cc0fdf5cca9ae2bc82e086f3ee609f99e8d78/vectors.lisp | lisp | different file.
that could result in junk in the vector; see issue
see array-tran.lisp .
This differs from the default Trivia `vector' pattern in that it
works for adjustable vectors with fill pointers.
We don't have to worry about evaluation order.
Ensure LEN | (in-package #:serapeum)
(defsubst ensure-vector (x)
"If X is a vector, return it.
Otherwise, return a vector with X as its sole element."
(if (vectorp x) x
(vector x)))
(-> vect (&rest t) (vector t *))
(defun vect (&rest initial-contents)
"Succinct constructor for adjustable vectors with fill pointers.
(vect 1 2 3)
≡ (make-array 3
:adjustable t
:fill-pointer 3
:initial-contents (list 1 2 3))
The fill pointer is placed after the last element in INITIAL-CONTENTS.
As a constructor this also has a matching definition as a Trivia
pattern for destructing."
(declare (dynamic-extent initial-contents))
(let ((len (length initial-contents)))
(make-array len
:element-type t
:adjustable t
:fill-pointer len
:initial-contents initial-contents)))
(defmacro generate-values-vector-case (vec)
TODO This should use ` tree - case ' , but it would need to be a
`(case (length ,vec)
,@(loop for i from 0 below 20
collect `(,i
(values ,@(loop for j from 0 below i
collect `(aref ,vec ,j)))))
(t (values-list (coerce ,vec 'list)))))
(defun values-vector (vec)
"Return the elements of VEC, a vector, as multiple values.
This is to vectors what `values-list' is to lists."
(declare (type vector vec))
(generate-values-vector-case vec))
(define-compiler-macro vect (&rest inits)
(let ((len (length inits)))
`(make-array ,len
:element-type t
:adjustable t
:fill-pointer ,len
:initial-contents
NB We use to stack - allocate the list of inits , but
# 14 . Note that SBCL does not actually allocate
(list ,@inits))))
(defpattern vect (&rest elts)
(with-unique-names (it)
`(trivia:guard (and ,it (trivia:vector* ,@elts))
(= (length ,it) ,(length elts)))))
(eval-when (:compile-toplevel :load-toplevel :execute)
(defun expand-pad-x (call fn env vec len pad)
"Auxiliary function for `pad-X' compiler macros.
Optimizes some cases where PAD is a constant sequence."
(if (not (typep pad 'sequence)) call
(case (length pad)
(0
(cond ((constantp len env)
vec)
((constantp vec env)
`(progn ,len ,vec))
(t
Evaluate VEC , then LEN , then return STRING .
(with-unique-names (temp)
`(let ((,temp ,vec))
,len
,temp)))))
(1 `(,fn ,vec ,len ',(aref pad 0)))
(t call)))))
(define-compiler-macro pad-start (&whole call vec len
&optional (pad #\Space)
&environment env)
(expand-pad-x call 'pad-start env vec len pad))
(-> pad-start (vector array-length &optional t)
vector)
(defun pad-start (vec length &optional (pad #\Space))
"Pad VEC, a vector, to LENGTH, using PAD.
If VEC is already the same length, or longer, than LENGTH, return VEC
unchanged.
(pad-start \"abc\" 3)
=> \"abc\"
If PAD is a sequence, then it is repeated before VEC to make up LENGTH.
(pad-start \"abc\" 9 \"def\")
=> \"defdefabc\"
If PAD is not a sequence, it is used to fill the remainder of VEC.
(pad-start \"abc\" 6 #\x)
=> \"xxxabc\"
PAD defaults to the space character.
This function is most useful for strings, but it can be used with any
vector. Note that the vector returned has the same element type as
VEC, so PAD must satisfy that element type.
Loosely inspired by ECMA."
(declare (vector vec)
(array-length length))
(cond ((>= (length vec) length) vec)
((typep pad 'sequence)
(let ((pad-len (length pad)))
(cond ((= pad-len 0) vec)
((= pad-len 1)
(pad-start vec length (elt pad 0)))
(t
(lret* ((offset (- length (length vec)))
(element-type (array-element-type vec))
(out (make-array length :element-type element-type)))
(replace out vec :start1 offset)
(loop for i from 0 below offset by pad-len do
(replace out pad :start1 i :end1 offset)))))))
(t
(lret* ((offset (- length (length vec)))
(element-type (array-element-type vec))
(out (make-array length :element-type element-type)))
(replace out vec :start1 offset)
(fill out pad :end offset)
out))))
(define-compiler-macro pad-end (&whole call vec len
&optional (pad #\Space)
&environment env)
(expand-pad-x call 'pad-end env vec len pad))
(-> pad-end (vector array-length &optional t)
vector)
(defun pad-end (vec length &optional (pad #\Space))
"Pad VEC, a vector, to LENGTH, using PAD.
Like `pad-start', but padding is addded to the end, rather than the
beginning."
(declare (vector vec)
(array-length length))
(cond ((>= (length vec) length) vec)
((typep pad 'sequence)
(let ((pad-len (length pad)))
(cond ((= pad-len 0) vec)
((= pad-len 1)
(pad-end vec length (elt pad 0)))
(t
(lret* ((element-type (array-element-type vec))
(out (make-array length :element-type element-type)))
(replace out vec)
(loop for i from (length vec) below length by pad-len do
(replace out pad :start1 i)))))))
(t
(lret* ((element-type (array-element-type vec))
(out (make-array length :element-type element-type)))
(replace out vec)
(fill out pad :start (length vec))
out))))
(defun vector-conc-extend (vector new-elements &optional (extension 0))
"Add NEW-ELEMENTS to the end of VECTOR, an adjustable array with a fill-pointer.
This is the practical equivalent to calling `vector-push-extend' on
each element on NEW-ELEMENTS, but should be faster.
Returns VECTOR."
(declare (type array-length extension))
(cond ((emptyp new-elements))
((single new-elements)
(vector-push-extend (elt new-elements 0) vector))
(t (let* ((size (array-dimension vector 0))
(len1 (length vector))
(len2 (length new-elements))
(diff (- size len1 len2)))
(when (minusp diff)
(adjust-array vector (max extension (- size diff))))
(incf (fill-pointer vector) len2)
(replace vector new-elements :start1 len1))))
vector)
|
a34c268d3e7161d8110f8c4f3c0fe328b9638a95798b0a89ebc07f5dcf345a61 | helium/gateway-config | gateway_config_cli_registry.erl | -module(gateway_config_cli_registry).
-define(CLI_MODULES, [
gateway_config_cli_advertise,
gateway_config_cli_lights,
gateway_config_cli_wifi,
gateway_config_cli_ble
]).
-export([register_cli/0, command/1]).
register_cli() ->
clique:register(?CLI_MODULES).
-spec command([string()]) -> rpc_ok | {rpc_error, non_neg_integer()}.
command(Cmd) ->
%% this is the contents of clique:run but
%% we want to figure out if the command worked
%% or not
M0 = clique_command:match(Cmd),
M1 = clique_parser:parse(M0),
M2 = clique_parser:extract_global_flags(M1),
M3 = clique_parser:validate(M2),
M4 = clique_command:run(M3),
clique:print(M4, Cmd),
case M4 of
{error, {no_matching_spec, _Spec}} ->
{rpc_error, 1};
{_Status, ExitCode, _} when ExitCode == 0 ->
rpc_ok;
{_Status, ExitCode, _} ->
{rpc_error, ExitCode}
end.
| null | https://raw.githubusercontent.com/helium/gateway-config/50a86f7a66e67a8fa175a2a44655920d975617f3/src/cli/gateway_config_cli_registry.erl | erlang | this is the contents of clique:run but
we want to figure out if the command worked
or not | -module(gateway_config_cli_registry).
-define(CLI_MODULES, [
gateway_config_cli_advertise,
gateway_config_cli_lights,
gateway_config_cli_wifi,
gateway_config_cli_ble
]).
-export([register_cli/0, command/1]).
register_cli() ->
clique:register(?CLI_MODULES).
-spec command([string()]) -> rpc_ok | {rpc_error, non_neg_integer()}.
command(Cmd) ->
M0 = clique_command:match(Cmd),
M1 = clique_parser:parse(M0),
M2 = clique_parser:extract_global_flags(M1),
M3 = clique_parser:validate(M2),
M4 = clique_command:run(M3),
clique:print(M4, Cmd),
case M4 of
{error, {no_matching_spec, _Spec}} ->
{rpc_error, 1};
{_Status, ExitCode, _} when ExitCode == 0 ->
rpc_ok;
{_Status, ExitCode, _} ->
{rpc_error, ExitCode}
end.
|
fc621f884343a1840fde6b708e97877fe6e3ab1d4fa4e1d37123b5e79e6f7bf4 | atlas-engineer/nyxt | editor.lisp | SPDX - FileCopyrightText : Atlas Engineer LLC
SPDX - License - Identifier : BSD-3 - Clause
(nyxt:define-package :nyxt/editor-mode
(:documentation "Mode for editors."))
(in-package :nyxt/editor-mode)
(define-mode editor-mode ()
"Mode for editor modes to extend.
Importantly, it is required to implement the methods `get-content',
`set-content', `markup' for each editor-mode. This will allow your mode
to get/set content from/to the file (which is necessary for operation).
To install the mode implementing the following, add this snippet to your config
(define-configuration nyxt/editor-mode::editor-buffer
((default-modes (cons 'your-editor-mode %slot-value%))))"
((keyscheme-map
(define-keyscheme-map "editor-mode" ()
keyscheme:default
(list
"C-r" 'reload-current-buffer
"f11" 'toggle-fullscreen)
keyscheme:cua
(list
"C-o" 'editor-open-file
"C-s" 'editor-write-file
"C-q" 'delete-current-buffer
"C-tab" 'switch-buffer)
keyscheme:emacs
(list
"C-x C-f" 'editor-open-file
"C-x C-s" 'editor-write-file
"C-x C-k" 'delete-current-buffer
"C-x b" 'switch-buffer)
keyscheme:vi-normal
(list
"C-o" 'editor-open-file
"w" 'editor-write-file
"R" 'reload-current-buffer
"g b" 'switch-buffer
"D" 'delete-current-buffer))))
(:toggler-command-p nil))
;; IMPORTANT: Implement this method specializing on your class extending editor-mode.
(export-always 'get-content)
(defgeneric get-content (editor-submode)
(:method ((editor editor-mode))
(declare (ignore editor))
(echo-warning "Editor buffer cannot edit files without configured editor mode."))
(:documentation "Get the content of the editor."))
;; IMPORTANT: Implement this method specializing on your class extending editor-mode.
(export-always 'set-content)
(defgeneric set-content (editor-submode content)
(:method ((editor editor-mode) (content t))
(declare (ignore editor))
(echo-warning "Editor buffer cannot edit files without configured editor mode.
See `describe-class editor-mode' for details."))
(:documentation "Set the content of the editor."))
;; IMPORTANT: Implement this method specializing on your class extending editor-mode.
(export-always 'markup)
(defgeneric markup (editor-submode)
(:method ((editor editor-mode))
(spinneret:with-html-string
(:head
(:nstyle (style (buffer editor))))
(:body
(:p "Please configure an editor mode to use an editor buffer. See "
(:code "describe-class") " for " (:code "editor-buffer")
" to see the list of functions to implement."))))
(:documentation "Produce:
- A string/byte-array of the initial buffer contents.
- (optional, \"text/html\" is not provided) content type text."))
(define-class editor-buffer (network-buffer ; Questionable, but needed for `buffer-load'.
context-buffer modable-buffer document-buffer input-buffer)
((nyxt:title "*Editor*"))
(:export-class-name-p t)
(:export-accessor-names-p t)
(:export-predicate-name-p t)
(:accessor-name-transformer (class*:make-name-transformer name))
(:metaclass user-class)
(:documentation "Each editor buffer matches a file. Each editor buffer
contains an `nyxt/editor-mode:editor-mode' instance (or a subclass thereof)."))
(defmethod nyxt:default-modes :around ((buffer editor-buffer))
;; REVIEW: Really remove document-mode from editor-buffer?
;; FIXME: How to disable the annoying base-mode bindings in the editor!?
(set-difference (call-next-method) '(document-mode base-mode)))
(defmethod file ((buffer editor-buffer))
(uiop:parse-native-namestring (quri:uri-path (url buffer))))
(define-internal-scheme "editor"
(lambda (url buffer)
(let ((mode (find-submode 'editor-mode buffer))
(file (quri:uri-path (quri:uri url))))
(uiop:chdir (uiop:pathname-directory-pathname file))
(run-thread "editor content setting"
(sleep 2)
(set-content mode (uiop:read-file-string file)))
(markup mode))))
(defmethod editor ((editor-buffer editor-buffer))
(let ((mode (find-submode 'editor-mode editor-buffer)))
(unless (eq 'editor-mode (sera:class-name-of mode))
mode)))
(defmethod write-file-with-editor ((buffer editor-buffer) &key (if-exists :error))
(cond
((editor buffer)
(handler-case
(alexandria:write-string-into-file (get-content (editor buffer))
(file buffer)
:if-exists if-exists)
(file-error (e)
(echo-warning "Cannot write ~a: ~a" (file buffer) e)
nil)))
(t
(echo-warning "Editor buffer cannot write file without configured editor mode.")
nil)))
(defun prompt-for-editor-file ()
(uiop:native-namestring
(pathname
(prompt1
:prompt "Open file"
:extra-modes 'nyxt/file-manager-mode:file-manager-mode
:input (uiop:native-namestring (uiop:getcwd))
:sources
(list (make-instance 'nyxt/file-manager-mode:file-source
:name "Existing file"
:actions-on-return #'identity)
(make-instance 'prompter:raw-source
:name "Create new file"))))))
(define-command editor-open-file (&key (buffer (current-buffer)) (file (prompt-for-editor-file)))
"Open a file in the internal editor."
(buffer-load (quri:make-uri :scheme "editor" :path file) :buffer buffer))
(define-command editor-write-file (&key (buffer (current-buffer)) (if-exists :error))
"Write the FILE of the BUFFER to storage."
(if (uiop:file-exists-p (file buffer))
(if-confirm ((format nil "Overwrite ~s?" (file buffer))
:yes "overwrite" :no "cancel")
(echo "File ~s ~:[not ~;~]saved." (file buffer)
(write-file-with-editor buffer :if-exists :overwrite))
(echo "File ~s not saved." (file buffer)))
(echo "File ~s ~:[not ~;~]saved." (file buffer)
(write-file-with-editor buffer :if-exists if-exists))))
(define-command-global edit-file (&optional (file (prompt-for-editor-file)))
"Open a new editor and query a FILE to edit in it."
(let ((buffer (make-instance 'editor-buffer
:url (quri:make-uri :scheme "editor" :path file))))
(set-current-buffer buffer)))
| null | https://raw.githubusercontent.com/atlas-engineer/nyxt/a90478daeec2e8843c971cd9a22255cab6bfaab7/source/mode/editor.lisp | lisp | IMPORTANT: Implement this method specializing on your class extending editor-mode.
IMPORTANT: Implement this method specializing on your class extending editor-mode.
IMPORTANT: Implement this method specializing on your class extending editor-mode.
Questionable, but needed for `buffer-load'.
REVIEW: Really remove document-mode from editor-buffer?
FIXME: How to disable the annoying base-mode bindings in the editor!? | SPDX - FileCopyrightText : Atlas Engineer LLC
SPDX - License - Identifier : BSD-3 - Clause
(nyxt:define-package :nyxt/editor-mode
(:documentation "Mode for editors."))
(in-package :nyxt/editor-mode)
(define-mode editor-mode ()
  "Mode for editor modes to extend.
Importantly, it is required to implement the methods `get-content',
`set-content', `markup' for each editor-mode. This will allow your mode
to get/set content from/to the file (which is necessary for operation).
To install the mode implementing the following, add this snippet to your config
(define-configuration nyxt/editor-mode::editor-buffer
  ((default-modes (cons 'your-editor-mode %slot-value%))))"
  ;; Key bindings common to every concrete editor mode, one list per keyscheme.
  ((keyscheme-map
    (define-keyscheme-map "editor-mode" ()
      keyscheme:default
      (list
       "C-r" 'reload-current-buffer
       "f11" 'toggle-fullscreen)
      keyscheme:cua
      (list
       "C-o" 'editor-open-file
       "C-s" 'editor-write-file
       "C-q" 'delete-current-buffer
       "C-tab" 'switch-buffer)
      keyscheme:emacs
      (list
       "C-x C-f" 'editor-open-file
       "C-x C-s" 'editor-write-file
       "C-x C-k" 'delete-current-buffer
       "C-x b" 'switch-buffer)
      keyscheme:vi-normal
      (list
       "C-o" 'editor-open-file
       "w" 'editor-write-file
       "R" 'reload-current-buffer
       "g b" 'switch-buffer
       "D" 'delete-current-buffer))))
  ;; This abstract mode is not meant to be toggled directly by the user.
  (:toggler-command-p nil))
(export-always 'get-content)
;; Protocol hook: concrete editor modes must override the default method,
;; which only warns that no editor is configured.
(defgeneric get-content (editor-submode)
  (:method ((editor editor-mode))
    (declare (ignore editor))
    (echo-warning "Editor buffer cannot edit files without configured editor mode."))
  (:documentation "Get the content of the editor."))
(export-always 'set-content)
;; Protocol hook: concrete editor modes must override the default method,
;; which only warns that no editor is configured.
(defgeneric set-content (editor-submode content)
  (:method ((editor editor-mode) (content t))
    (declare (ignore editor))
    (echo-warning "Editor buffer cannot edit files without configured editor mode.
See `describe-class editor-mode' for details."))
  (:documentation "Set the content of the editor."))
(export-always 'markup)
;; Protocol hook: the default method renders a help page telling the user to
;; configure a concrete editor mode; subclasses return the real editor markup.
(defgeneric markup (editor-submode)
  (:method ((editor editor-mode))
    (spinneret:with-html-string
      (:head
       (:nstyle (style (buffer editor))))
      (:body
       (:p "Please configure an editor mode to use an editor buffer. See "
           (:code "describe-class") " for " (:code "editor-buffer")
           " to see the list of functions to implement."))))
  (:documentation "Produce:
- A string/byte-array of the initial buffer contents.
- (optional, \"text/html\" is not provided) content type text."))
context-buffer modable-buffer document-buffer input-buffer)
((nyxt:title "*Editor*"))
(:export-class-name-p t)
(:export-accessor-names-p t)
(:export-predicate-name-p t)
(:accessor-name-transformer (class*:make-name-transformer name))
(:metaclass user-class)
(:documentation "Each editor buffer matches a file. Each editor buffer
contains an `nyxt/editor-mode:editor-mode' instance (or a subclass thereof)."))
;; Editor buffers do not want the usual document browsing behavior, so strip
;; document-mode and base-mode from the inherited default modes.
(defmethod nyxt:default-modes :around ((buffer editor-buffer))
  (set-difference (call-next-method) '(document-mode base-mode)))
;; The backing file of an editor buffer is carried in the path component of
;; its editor: URL.
(defmethod file ((buffer editor-buffer))
  (uiop:parse-native-namestring (quri:uri-path (url buffer))))
;; Handler for editor: URLs.  It returns the editor markup immediately and
;; fills in the file contents from a background thread.
;; NOTE(review): the fixed (sleep 2) looks like a workaround for the page not
;; being ready when set-content runs -- confirm whether a readiness hook
;; could replace it.
(define-internal-scheme "editor"
    (lambda (url buffer)
      (let ((mode (find-submode 'editor-mode buffer))
            (file (quri:uri-path (quri:uri url))))
        ;; Make relative paths in the editor resolve next to the edited file.
        (uiop:chdir (uiop:pathname-directory-pathname file))
        (run-thread "editor content setting"
          (sleep 2)
          (set-content mode (uiop:read-file-string file)))
        (markup mode))))
;; Return the concrete editor submode of EDITOR-BUFFER, or NIL when only the
;; abstract `editor-mode' is installed (i.e. no usable editor is configured).
(defmethod editor ((editor-buffer editor-buffer))
  (let ((mode (find-submode 'editor-mode editor-buffer)))
    (unless (eq 'editor-mode (sera:class-name-of mode))
      mode)))
(defmethod write-file-with-editor ((buffer editor-buffer) &key (if-exists :error))
(cond
((editor buffer)
(handler-case
(alexandria:write-string-into-file (get-content (editor buffer))
(file buffer)
:if-exists if-exists)
(file-error (e)
(echo-warning "Cannot write ~a: ~a" (file buffer) e)
nil)))
(t
(echo-warning "Editor buffer cannot write file without configured editor mode.")
nil)))
(defun prompt-for-editor-file ()
(uiop:native-namestring
(pathname
(prompt1
:prompt "Open file"
:extra-modes 'nyxt/file-manager-mode:file-manager-mode
:input (uiop:native-namestring (uiop:getcwd))
:sources
(list (make-instance 'nyxt/file-manager-mode:file-source
:name "Existing file"
:actions-on-return #'identity)
(make-instance 'prompter:raw-source
:name "Create new file"))))))
(define-command editor-open-file (&key (buffer (current-buffer)) (file (prompt-for-editor-file)))
"Open a file in the internal editor."
(buffer-load (quri:make-uri :scheme "editor" :path file) :buffer buffer))
(define-command editor-write-file (&key (buffer (current-buffer)) (if-exists :error))
"Write the FILE of the BUFFER to storage."
(if (uiop:file-exists-p (file buffer))
(if-confirm ((format nil "Overwrite ~s?" (file buffer))
:yes "overwrite" :no "cancel")
(echo "File ~s ~:[not ~;~]saved." (file buffer)
(write-file-with-editor buffer :if-exists :overwrite))
(echo "File ~s not saved." (file buffer)))
(echo "File ~s ~:[not ~;~]saved." (file buffer)
(write-file-with-editor buffer :if-exists if-exists))))
(define-command-global edit-file (&optional (file (prompt-for-editor-file)))
"Open a new editor and query a FILE to edit in it."
(let ((buffer (make-instance 'editor-buffer
:url (quri:make-uri :scheme "editor" :path file))))
(set-current-buffer buffer)))
|
5edcf03b477d4675583cdad76cf1e3a7789a3d1959128fe7bde126ad623e3826 | xtdb/xtdb | psql.clj | (ns ^:no-doc xtdb.jdbc.psql
(:require [clojure.tools.logging :as log]
[xtdb.jdbc :as j]
[xtdb.system :as sys]
[juxt.clojars-mirrors.nextjdbc.v1v2v674.next.jdbc :as jdbc]
[juxt.clojars-mirrors.nextjdbc.v1v2v674.next.jdbc.result-set :as jdbcr]))
;; Warn when tx_events.tx_time is not `timestamp with time zone`, which
;; indicates a schema created before the UTC migration.
;; NOTE(review): the migration-guide URL in the warning text appears
;; truncated in this copy ("-1.12.1") -- verify against upstream.
(defn- check-tx-time-col [pool]
  (when-not (= "timestamp with time zone"
               (-> (jdbc/execute-one! pool
                                      ["SELECT DATA_TYPE FROM INFORMATION_SCHEMA.COLUMNS WHERE TABLE_NAME = 'tx_events' AND COLUMN_NAME = 'tx_time'"]
                                      {:builder-fn jdbcr/as-unqualified-lower-maps})
                   :data_type))
    (log/warn (str "`tx_time` column not in UTC format. "
                   "See -1.12.1 for more details."))))
;; Constructs the PostgreSQL implementation of the XTDB JDBC Dialect.
;; `:drop-table?` (default false) drops any existing tx_events table before
;; setup -- useful for tests, destructive in production.
(defn ->dialect {::sys/args {:drop-table? {:spec ::sys/boolean, :default false}}}
  [{:keys [drop-table?]}]
  (reify j/Dialect
    (db-type [_] :postgresql)
    ;; Idempotent schema setup: create the events table, swap the old
    ;; event_key index for its v2 replacement, then sanity-check tx_time.
    (setup-schema! [_ pool]
      (when drop-table?
        (jdbc/execute! pool ["DROP TABLE IF EXISTS tx_events"]))
      (doto pool
        (jdbc/execute! ["
CREATE TABLE IF NOT EXISTS tx_events (
  event_offset SERIAL PRIMARY KEY,
  event_key VARCHAR,
  tx_time TIMESTAMPTZ DEFAULT CURRENT_TIMESTAMP,
  topic VARCHAR NOT NULL,
  v BYTEA NOT NULL,
  compacted INTEGER NOT NULL)"])
        (jdbc/execute! ["DROP INDEX IF EXISTS tx_events_event_key_idx"])
        (jdbc/execute! ["CREATE INDEX IF NOT EXISTS tx_events_event_key_idx_2 ON tx_events(event_key)"])
        (check-tx-time-col)))
    (ensure-serializable-identity-seq! [_ tx table-name]
      ;; Postgres auto-increments aren't guaranteed monotonic even under
      ;; 'serializable' isolation, so take an explicit table write lock.
      ;; `table-name` is trusted (never user input), so format is safe here.
      (jdbc/execute! tx [(format "LOCK TABLE %s IN SHARE ROW EXCLUSIVE MODE" table-name)]))))
| null | https://raw.githubusercontent.com/xtdb/xtdb/c285d76d9f5d1b7e872a3ec6b9dd33be8e641867/modules/jdbc/src/xtdb/jdbc/psql.clj | clojure | `table-name` is trusted | (ns ^:no-doc xtdb.jdbc.psql
(:require [clojure.tools.logging :as log]
[xtdb.jdbc :as j]
[xtdb.system :as sys]
[juxt.clojars-mirrors.nextjdbc.v1v2v674.next.jdbc :as jdbc]
[juxt.clojars-mirrors.nextjdbc.v1v2v674.next.jdbc.result-set :as jdbcr]))
(defn- check-tx-time-col [pool]
(when-not (= "timestamp with time zone"
(-> (jdbc/execute-one! pool
["SELECT DATA_TYPE FROM INFORMATION_SCHEMA.COLUMNS WHERE TABLE_NAME = 'tx_events' AND COLUMN_NAME = 'tx_time'"]
{:builder-fn jdbcr/as-unqualified-lower-maps})
:data_type))
(log/warn (str "`tx_time` column not in UTC format. "
"See -1.12.1 for more details."))))
(defn ->dialect {::sys/args {:drop-table? {:spec ::sys/boolean, :default false}}}
[{:keys [drop-table?]}]
(reify j/Dialect
(db-type [_] :postgresql)
(setup-schema! [_ pool]
(when drop-table?
(jdbc/execute! pool ["DROP TABLE IF EXISTS tx_events"]))
(doto pool
(jdbc/execute! ["
CREATE TABLE IF NOT EXISTS tx_events (
event_offset SERIAL PRIMARY KEY,
event_key VARCHAR,
tx_time TIMESTAMPTZ DEFAULT CURRENT_TIMESTAMP,
topic VARCHAR NOT NULL,
v BYTEA NOT NULL,
compacted INTEGER NOT NULL)"])
(jdbc/execute! ["DROP INDEX IF EXISTS tx_events_event_key_idx"])
(jdbc/execute! ["CREATE INDEX IF NOT EXISTS tx_events_event_key_idx_2 ON tx_events(event_key)"])
(check-tx-time-col)))
(ensure-serializable-identity-seq! [_ tx table-name]
we have to take a table write lock in Postgres , because auto - increments are n't guaranteed to be increasing , even between transactions with ' serializable ' isolation level
(jdbc/execute! tx [(format "LOCK TABLE %s IN SHARE ROW EXCLUSIVE MODE" table-name)]))))
|
24c0797e981947814ac64609f9efdf5c5143bf64035b180922ed3164390ca4b7 | exercism/babashka | space_age_test.clj | (ns space-age-test
(:require [clojure.test :refer [deftest is]]
space-age))
(defn- rounds-to
  "Asserts (via clojure.test/is) that expected and actual agree once both
  are rounded to two decimal places."
  [expected actual]
  (let [cents #(Math/round (* 100.0 %))]
    (is (= (cents expected)
           (cents actual)))))
;; Each case fixes a number of seconds and checks the earth-equivalent age
;; and (where applicable) the planet-specific age, to two decimal places.
(deftest age-in-earth-years
  (rounds-to 31.69 (space-age/on-earth 1000000000)))

(deftest age-in-mercury-years
  (let [seconds 2134835688]
    (rounds-to 67.65 (space-age/on-earth seconds))
    (rounds-to 280.88 (space-age/on-mercury seconds))))

(deftest age-in-venus-years
  (let [seconds 189839836]
    (rounds-to 6.02 (space-age/on-earth seconds))
    (rounds-to 9.78 (space-age/on-venus seconds))))

(deftest age-on-mars
  (let [seconds 2329871239]
    (rounds-to 73.83 (space-age/on-earth seconds))
    (rounds-to 39.25 (space-age/on-mars seconds))))

(deftest age-on-jupiter
  (let [seconds 901876382]
    (rounds-to 28.58 (space-age/on-earth seconds))
    (rounds-to 2.41 (space-age/on-jupiter seconds))))

(deftest age-on-saturn
  (let [seconds 3000000000]
    (rounds-to 95.06 (space-age/on-earth seconds))
    (rounds-to 3.23 (space-age/on-saturn seconds))))

(deftest age-on-uranus
  (let [seconds 3210123456]
    (rounds-to 101.72 (space-age/on-earth seconds))
    (rounds-to 1.21 (space-age/on-uranus seconds))))

(deftest age-on-neptune
  (let [seconds 8210123456]
    (rounds-to 260.16 (space-age/on-earth seconds))
    (rounds-to 1.58 (space-age/on-neptune seconds))))
| null | https://raw.githubusercontent.com/exercism/babashka/7375f1938ff95b242320313eaeedb8eca31a1b5b/exercises/practice/space-age/test/space_age_test.clj | clojure | (ns space-age-test
(:require [clojure.test :refer [deftest is]]
space-age))
(defn- rounds-to
[expected actual]
(is (= (Math/round (* 100.0 expected))
(Math/round (* 100.0 actual)))))
(deftest age-in-earth-years
(rounds-to 31.69 (space-age/on-earth 1000000000)))
(deftest age-in-mercury-years
(let [seconds 2134835688]
(rounds-to 67.65 (space-age/on-earth seconds))
(rounds-to 280.88 (space-age/on-mercury seconds))))
(deftest age-in-venus-years
(let [seconds 189839836]
(rounds-to 6.02 (space-age/on-earth seconds))
(rounds-to 9.78 (space-age/on-venus seconds))))
(deftest age-on-mars
(let [seconds 2329871239]
(rounds-to 73.83 (space-age/on-earth seconds))
(rounds-to 39.25 (space-age/on-mars seconds))))
(deftest age-on-jupiter
(let [seconds 901876382]
(rounds-to 28.58 (space-age/on-earth seconds))
(rounds-to 2.41 (space-age/on-jupiter seconds))))
(deftest age-on-saturn
(let [seconds 3000000000]
(rounds-to 95.06 (space-age/on-earth seconds))
(rounds-to 3.23 (space-age/on-saturn seconds))))
(deftest age-on-uranus
(let [seconds 3210123456]
(rounds-to 101.72 (space-age/on-earth seconds))
(rounds-to 1.21 (space-age/on-uranus seconds))))
(deftest age-on-neptune
(let [seconds 8210123456]
(rounds-to 260.16 (space-age/on-earth seconds))
(rounds-to 1.58 (space-age/on-neptune seconds))))
|
|
b2d6b7cc630d42645c00cd1e3eb1be2ee275bd08673b244a3c15a77e41712c89 | philnguyen/soft-contract | fact.rkt | #lang racket
(require soft-contract/fake-contract)
;; Product of the integers 1..x; 0! is 1 by definition.
(define (factorial x)
  (cond
    [(zero? x) 1]
    [else (* x (factorial (sub1 x)))]))
;; Export factorial under a soft contract: non-negative input must yield a
;; non-negative result (checked by the soft-contract verifier).
(provide
 (contract-out
  [factorial (-> (>=/c 0) (>=/c 0))]))
| null | https://raw.githubusercontent.com/philnguyen/soft-contract/5e07dc2d622ee80b961f4e8aebd04ce950720239/soft-contract/test/programs/safe/sym-exe/fact.rkt | racket | #lang racket
(require soft-contract/fake-contract)
(define (factorial x)
(if (zero? x)
1
(* x (factorial (sub1 x)))))
(provide
(contract-out
[factorial (-> (>=/c 0) (>=/c 0))]))
|
|
13b8ebcdd1f7215d28361722d3fdc1655c9bf651bb6dc8b67f196ce2bf18cf16 | blancas/kern | i18n.clj | Copyright ( c ) 2013 . All rights reserved .
;; The use and distribution terms for this software are covered by the
Eclipse Public License 1.0 ( -1.0.php )
;; which can be found in the file epl-v10.html at the root of this distribution.
;; By using this software in any fashion, you are agreeing to be bound by
;; the terms of this license.
;; You must not remove this notice, or any other, from this software.
(ns ^{:doc "Support for a simple i18n scheme."
:author "Armando Blancas"}
blancas.kern.i18n)
;; Default English message templates, keyed by message id.  Values are
;; java.lang.String/format templates (%s/%d placeholders) consumed by `fmt'.
(def ^:private default
  { :unexpected "unexpected %s"
    :expecting "expecting %s"
    :comma ", "
    :or " or %s"
    :err-pos "%sline %d column %d\n"
    :eof "end of input"
    :letter "letter"
    :lower "lowercase letter"
    :upper "uppercase letter"
    :whitespace "whitespace"
    :space "space"
    :new-line "new line"
    :tab "tab"
    :digit "digit"
    :hex-digit "hexadecimal digit"
    :oct-digit "octal digit"
    :alpha-num "letter or digit"
    :end-comment "end of comment"
    :char-lit "character literal"
    :end-char "end of character literal"
    :esc-code-b "escaped code: b, t, n, f, r, ', \\"
    :esc-code-c "escaped code: b, t, n, f, r, ', \\, ?, a, v, 0, ooo, uhhhh, xhh"
    :esc-code-j "escaped code: b, t, n, f, r, ', \\, ooo, hhhh"
    :esc-code-h "escaped code: b, t, n, f, r, ', \\, ?, a, v, 0, nnn, onnn, xnnnn"
    :string-lit "string literal"
    :end-string "end of string literal"
    :end-of "end of "
    :dec-lit "decimal literal"
    :oct-lit "octal literal"
    :hex-lit "hex literal"
    :float-lit "floating-point literal"
    :reserved "%s is a reserved name"
  })
(def ^:private text (atom default))
(defn i18n-merge
  "Merges m into the text map for customization."
  [m]
  (swap! text (fn [current] (merge current m))))
(defn i18n
  "Gets or sets the value for the supplied key."
  ([k]
   (k @text))
  ([k v]
   (swap! text assoc k v)))
(defn fmt
  "Formats a string with a key and more arguments."
  [k & more]
  (let [template (i18n k)]
    (apply format template more)))
(defn di18n
  "Returns a Delay instance with the value for the supplied key.
  Useful in (def)'ed expressions that evaluate too soon."
  [k]
  (delay (k @text)))
(defn dfmt
  "Returns a Delay instance with a string formatted with a key and more
  arguments. Useful in (def)'ed expressions that evaluate too soon."
  [k & more]
  (delay
    (let [template (i18n k)]
      (apply format template more))))
| null | https://raw.githubusercontent.com/blancas/kern/3ef65e559658c06a321a9ca7c85a541edc7b9ff2/src/main/clojure/blancas/kern/i18n.clj | clojure | The use and distribution terms for this software are covered by the
which can be found in the file epl-v10.html at the root of this distribution.
By using this software in any fashion, you are agreeing to be bound by
the terms of this license.
You must not remove this notice, or any other, from this software. | Copyright ( c ) 2013 . All rights reserved .
Eclipse Public License 1.0 ( -1.0.php )
(ns ^{:doc "Support for a simple i18n scheme."
:author "Armando Blancas"}
blancas.kern.i18n)
(def ^:private default
{ :unexpected "unexpected %s"
:expecting "expecting %s"
:comma ", "
:or " or %s"
:err-pos "%sline %d column %d\n"
:eof "end of input"
:letter "letter"
:lower "lowercase letter"
:upper "uppercase letter"
:whitespace "whitespace"
:space "space"
:new-line "new line"
:tab "tab"
:digit "digit"
:hex-digit "hexadecimal digit"
:oct-digit "octal digit"
:alpha-num "letter or digit"
:end-comment "end of comment"
:char-lit "character literal"
:end-char "end of character literal"
:esc-code-b "escaped code: b, t, n, f, r, ', \\"
:esc-code-c "escaped code: b, t, n, f, r, ', \\, ?, a, v, 0, ooo, uhhhh, xhh"
:esc-code-j "escaped code: b, t, n, f, r, ', \\, ooo, hhhh"
:esc-code-h "escaped code: b, t, n, f, r, ', \\, ?, a, v, 0, nnn, onnn, xnnnn"
:string-lit "string literal"
:end-string "end of string literal"
:end-of "end of "
:dec-lit "decimal literal"
:oct-lit "octal literal"
:hex-lit "hex literal"
:float-lit "floating-point literal"
:reserved "%s is a reserved name"
})
(def ^:private text (atom default))
(defn i18n-merge
"Merges m into the text map for customization."
[m] (swap! text merge m))
(defn i18n
"Gets or sets the value for the supplied key."
([k] (k (deref text)))
([k v] (swap! text assoc k v)))
(defn fmt
"Formats a string with a key and more arguments."
[k & more]
(apply format (i18n k) more))
(defn di18n
"Returns a Delay instance with the value for the supplied key.
Useful in (def)'ed expressions that evaluate too soon."
[k] (delay (k (deref text))))
(defn dfmt
"Returns a Delay instance with a string formatted with a key and more
arguments. Useful in (def)'ed expressions that evaluate too soon."
[k & more]
(delay (apply format (i18n k) more)))
|
05e416a57d52b39cd2ba42b66e43f57e79a7ca0cda90393a051f04741fdc19f4 | ocaml/ocaml | odoc_comments.ml | (**************************************************************************)
(* *)
(* OCaml *)
(* *)
, projet Cristal , INRIA Rocquencourt
(* *)
Copyright 2001 Institut National de Recherche en Informatique et
(* en Automatique. *)
(* *)
(* All rights reserved. This file is distributed under the terms of *)
the GNU Lesser General Public License version 2.1 , with the
(* special exception on linking described in the file LICENSE. *)
(* *)
(**************************************************************************)
(** Analysis of comments. *)
open Odoc_types
(** This variable contains the regular expression representing a blank but not a '\n'.*)
(* Matches space, CR (\013), TAB (\009) and FF (\012). *)
let simple_blank = "[ \013\009\012]"
(* Abstraction over the concrete text parser, so [Info_retriever] can be
   instantiated with different text back-ends. *)
module type Texter =
  sig
    (** Return a text structure from a string. *)
    val text_of_string : string -> text
  end
module Info_retriever =
  functor (MyTexter : Texter) ->
  struct
    (* Parse the argument of an @see tag into (reference, description text).
       Parse problems are re-raised as [Failure] with a readable message. *)
    let create_see _file s =
      try
        let lexbuf = Lexing.from_string s in
        let (see_ref, s) = Odoc_parser.see_info Odoc_see_lexer.main lexbuf in
        (see_ref, MyTexter.text_of_string s)
      with
      | Odoc_text.Text_syntax (l, c, s) ->
          raise (Failure (Odoc_messages.text_parse_error l c s))
      | _ ->
          raise (Failure ("Unknown error while parsing @see tag: "^s))

    (* Run [fun_lex] over [s] looking for a comment.  Returns
       (number of characters consumed, parsed info option); (0, None) when no
       comment is found or a parse error occurs (errors are printed and
       counted in [Odoc_global.errors]). *)
    let retrieve_info fun_lex file (s : string) =
      try
        Odoc_comments_global.init ();
        Odoc_lexer.comments_level := 0;
        let lexbuf = Lexing.from_string s in
        match Odoc_parser.main fun_lex lexbuf with
          None ->
            (0, None)
        | Some (desc, remain_opt) ->
            let mem_nb_chars = !Odoc_comments_global.nb_chars in
            (* The part after the description holds the @-tags; parse it to
               fill the Odoc_comments_global accumulators read below. *)
            begin match remain_opt with
              None ->
                ()
            | Some s ->
                let lexbuf2 = Lexing.from_string s in
                Odoc_parser.info_part2 Odoc_lexer.elements lexbuf2
            end;
            (mem_nb_chars,
             Some
               {
                 i_desc = (match desc with "" -> None | _ -> Some (MyTexter.text_of_string desc));
                 i_authors = !Odoc_comments_global.authors;
                 i_version = !Odoc_comments_global.version;
                 i_sees = (List.map (create_see file) !Odoc_comments_global.sees) ;
                 i_since = !Odoc_comments_global.since;
                 i_before = Odoc_merge.merge_before_tags
                     (List.map (fun (n, s) ->
                        (n, MyTexter.text_of_string s)) !Odoc_comments_global.before)
                 ;
                 i_deprecated =
                   (match !Odoc_comments_global.deprecated with
                      None -> None | Some s -> Some (MyTexter.text_of_string s));
                 i_params =
                   (List.map (fun (n, s) ->
                      (n, MyTexter.text_of_string s)) !Odoc_comments_global.params);
                 i_raised_exceptions =
                   (List.map (fun (n, s) ->
                      (n, MyTexter.text_of_string s)) !Odoc_comments_global.raised_exceptions);
                 i_return_value =
                   (match !Odoc_comments_global.return_value with
                      None -> None | Some s -> Some (MyTexter.text_of_string s)) ;
                 i_custom = (List.map
                               (fun (tag, s) -> (tag, MyTexter.text_of_string s))
                               !Odoc_comments_global.customs) ;
                 i_alerts = [] ;
               }
            )
      with e ->
        (* Turn any parse failure into a printed error with location and a
           neutral (0, None) result so analysis can continue. *)
        let (l, c, message) = match e with
        | Failure s -> (!Odoc_lexer.line_number + 1, 0, s)
        | Odoc_text.Text_syntax (l, c, s) -> (l, c, Odoc_messages.text_parse_error l c s)
        | _other -> (0, 0, Odoc_messages.parse_error)
        in begin
          incr Odoc_global.errors;
          prerr_endline (Odoc_messages.error_location file l c ^ message);
          (0, None)
        end

    (** Return true if the given string contains a blank line. *)
    let blank_line s =
      try
        let _ = Str.search_forward (Str.regexp ("['\n']"^simple_blank^"*['\n']")) s 0 in
        (* a blank line was before the comment *)
        true
      with
        Not_found ->
          false

    (* Look for a special (**-style) comment in [s]. *)
    let retrieve_info_special file (s : string) =
      retrieve_info Odoc_lexer.main file s

    (* Look for a simple (*-style) comment in [s]; only the consumed length
       matters, the returned info is a dummy. *)
    let retrieve_info_simple _file (s : string) =
      Odoc_comments_global.init ();
      Odoc_lexer.comments_level := 0;
      let lexbuf = Lexing.from_string s in
      match Odoc_parser.main Odoc_lexer.simple lexbuf with
        None ->
          (0, None)
      | Some _ ->
          (!Odoc_comments_global.nb_chars, Some Odoc_types.dummy_info)

    (** Return true if the given string contains a blank line outside a simple comment. *)
    let blank_line_outside_simple file s =
      let rec iter s2 =
        match retrieve_info_simple file s2 with
          (_, None) ->
            blank_line s2
        | (len, Some _) ->
            try
              let pos = Str.search_forward (Str.regexp_string "(*") s2 0 in
              let s_before = String.sub s2 0 pos in
              let s_after = String.sub s2 len ((String.length s2) - len) in
              (blank_line s_before) || (iter s_after)
            with
              Not_found ->
                (* we shouldn't get here *)
                false
      in
      iter s

    (* Collect every special comment in [s], in order.  Returns the total
       number of characters consumed and the list of parsed infos. *)
    let all_special file s =
      let rec iter acc n s2 =
        match retrieve_info_special file s2 with
          (_, None) ->
            (n, acc)
        | (n2, Some i) ->
            let new_s = String.sub s2 n2 ((String.length s2) - n2) in
            iter (acc @ [i]) (n + n2) new_s
      in
      iter [] 0 s

    (* Return the special comment at the very start of [s], if any, provided
       no simple comment or blank line precedes it. *)
    let just_after_special file s =
      match retrieve_info_special file s with
        (_, None) ->
          (0, None)
      | (len, Some d) ->
          (* we must not have a simple comment or a blank line before. *)
          match retrieve_info_simple file (String.sub s 0 len) with
            (_, None) ->
              (
               try
                 (* if the special comment is the stop comment (**/**),
                    then we must not associate it. *)
                 let pos = Str.search_forward (Str.regexp_string "(**") s 0 in
                 if blank_line (String.sub s 0 pos) ||
                   d.Odoc_types.i_desc = Some [Odoc_types.Raw "/*"]
                 then
                   (0, None)
                 else
                   (len, Some d)
               with
                 Not_found ->
                   (* should not occur *)
                   (0, None)
              )
          | (_, Some _) ->
              (0, None)

    let first_special file s =
      retrieve_info_special file s

    (* Split the special comments of [s] into the one associated with the
       following element (if adjacent, i.e. no blank line and not the stop
       comment) and standalone comment elements built with [f_create_ele]. *)
    let get_comments f_create_ele file s =
      let (assoc_com, ele_coms) =
        (* get the comments *)
        let (len, special_coms) = all_special file s in
        (* if there is no blank line after the special comments, and
           if the last special comment is not the stop special comment, then the
           last special comments must be associated to the element. *)
        match List.rev special_coms with
          [] ->
            (None, [])
        | h :: q ->
            if (blank_line_outside_simple file
                  (String.sub s len ((String.length s) - len)) )
              || h.Odoc_types.i_desc = Some [Odoc_types.Raw "/*"]
            then
              (None, special_coms)
            else
              (Some h, List.rev q)
      in
      let ele_comments =
        List.fold_left
          (fun acc -> fun sc ->
            match sc.Odoc_types.i_desc with
              None ->
                acc
            | Some t ->
                acc @ [f_create_ele t])
          []
          ele_coms
      in
      (assoc_com, ele_comments)
  end
module Basic_info_retriever = Info_retriever (Odoc_text.Texter)
(* Parse [s] as the body of a special comment and return the resulting info,
   falling back to [Odoc_types.dummy_info] when nothing can be parsed. *)
let info_of_string s =
  let wrapped = Printf.sprintf "(** %s *)" s in
  match Basic_info_retriever.first_special "-" wrapped with
  | (_, Some info) -> info
  | (_, None) -> Odoc_types.dummy_info
(* Read file [f], parse its whole contents as comment info, and resolve
   cross-references against [modlist].  I/O problems ([Sys_error]) are
   re-raised as [Failure] with the system message. *)
let info_of_comment_file modlist f =
  try
    let s = Odoc_misc.input_file_as_string f in
    let i = info_of_string s in
    Odoc_cross.assoc_comments_info "" modlist i
  with
    Sys_error s ->
      failwith s
| null | https://raw.githubusercontent.com/ocaml/ocaml/1e52236624bad1c80b3c46857723a35c43974297/ocamldoc/odoc_comments.ml | ocaml | ************************************************************************
OCaml
en Automatique.
All rights reserved. This file is distributed under the terms of
special exception on linking described in the file LICENSE.
************************************************************************
* Analysis of comments.
* This variable contains the regular expression representing a blank but not a '\n'.
* Return a text structure from a string.
* Return true if the given string contains a blank line.
a blank line was before the comment
* Return true if the given string contains a blank line outside a simple comment.
we shouldn't get here
we must not have a simple comment or a blank line before.
if the special comment is the stop comment (**/*
should not occur
get the comments
if there is no blank line after the special comments, and
if the last special comment is not the stop special comment, then the
last special comments must be associated to the element. | , projet Cristal , INRIA Rocquencourt
Copyright 2001 Institut National de Recherche en Informatique et
the GNU Lesser General Public License version 2.1 , with the
open Odoc_types
let simple_blank = "[ \013\009\012]"
module type Texter =
sig
val text_of_string : string -> text
end
module Info_retriever =
functor (MyTexter : Texter) ->
struct
let create_see _file s =
try
let lexbuf = Lexing.from_string s in
let (see_ref, s) = Odoc_parser.see_info Odoc_see_lexer.main lexbuf in
(see_ref, MyTexter.text_of_string s)
with
| Odoc_text.Text_syntax (l, c, s) ->
raise (Failure (Odoc_messages.text_parse_error l c s))
| _ ->
raise (Failure ("Unknown error while parsing @see tag: "^s))
let retrieve_info fun_lex file (s : string) =
try
Odoc_comments_global.init ();
Odoc_lexer.comments_level := 0;
let lexbuf = Lexing.from_string s in
match Odoc_parser.main fun_lex lexbuf with
None ->
(0, None)
| Some (desc, remain_opt) ->
let mem_nb_chars = !Odoc_comments_global.nb_chars in
begin match remain_opt with
None ->
()
| Some s ->
let lexbuf2 = Lexing.from_string s in
Odoc_parser.info_part2 Odoc_lexer.elements lexbuf2
end;
(mem_nb_chars,
Some
{
i_desc = (match desc with "" -> None | _ -> Some (MyTexter.text_of_string desc));
i_authors = !Odoc_comments_global.authors;
i_version = !Odoc_comments_global.version;
i_sees = (List.map (create_see file) !Odoc_comments_global.sees) ;
i_since = !Odoc_comments_global.since;
i_before = Odoc_merge.merge_before_tags
(List.map (fun (n, s) ->
(n, MyTexter.text_of_string s)) !Odoc_comments_global.before)
;
i_deprecated =
(match !Odoc_comments_global.deprecated with
None -> None | Some s -> Some (MyTexter.text_of_string s));
i_params =
(List.map (fun (n, s) ->
(n, MyTexter.text_of_string s)) !Odoc_comments_global.params);
i_raised_exceptions =
(List.map (fun (n, s) ->
(n, MyTexter.text_of_string s)) !Odoc_comments_global.raised_exceptions);
i_return_value =
(match !Odoc_comments_global.return_value with
None -> None | Some s -> Some (MyTexter.text_of_string s)) ;
i_custom = (List.map
(fun (tag, s) -> (tag, MyTexter.text_of_string s))
!Odoc_comments_global.customs) ;
i_alerts = [] ;
}
)
with e ->
let (l, c, message) = match e with
| Failure s -> (!Odoc_lexer.line_number + 1, 0, s)
| Odoc_text.Text_syntax (l, c, s) -> (l, c, Odoc_messages.text_parse_error l c s)
| _other -> (0, 0, Odoc_messages.parse_error)
in begin
incr Odoc_global.errors;
prerr_endline (Odoc_messages.error_location file l c ^ message);
(0, None)
end
let blank_line s =
try
let _ = Str.search_forward (Str.regexp ("['\n']"^simple_blank^"*['\n']")) s 0 in
true
with
Not_found ->
false
let retrieve_info_special file (s : string) =
retrieve_info Odoc_lexer.main file s
let retrieve_info_simple _file (s : string) =
Odoc_comments_global.init ();
Odoc_lexer.comments_level := 0;
let lexbuf = Lexing.from_string s in
match Odoc_parser.main Odoc_lexer.simple lexbuf with
None ->
(0, None)
| Some _ ->
(!Odoc_comments_global.nb_chars, Some Odoc_types.dummy_info)
let blank_line_outside_simple file s =
let rec iter s2 =
match retrieve_info_simple file s2 with
(_, None) ->
blank_line s2
| (len, Some _) ->
try
let pos = Str.search_forward (Str.regexp_string "(*") s2 0 in
let s_before = String.sub s2 0 pos in
let s_after = String.sub s2 len ((String.length s2) - len) in
(blank_line s_before) || (iter s_after)
with
Not_found ->
false
in
iter s
let all_special file s =
let rec iter acc n s2 =
match retrieve_info_special file s2 with
(_, None) ->
(n, acc)
| (n2, Some i) ->
let new_s = String.sub s2 n2 ((String.length s2) - n2) in
iter (acc @ [i]) (n + n2) new_s
in
iter [] 0 s
let just_after_special file s =
match retrieve_info_special file s with
(_, None) ->
(0, None)
| (len, Some d) ->
match retrieve_info_simple file (String.sub s 0 len) with
(_, None) ->
(
try
then we must not associate it. *)
let pos = Str.search_forward (Str.regexp_string "(**") s 0 in
if blank_line (String.sub s 0 pos) ||
d.Odoc_types.i_desc = Some [Odoc_types.Raw "/*"]
then
(0, None)
else
(len, Some d)
with
Not_found ->
(0, None)
)
| (_, Some _) ->
(0, None)
let first_special file s =
retrieve_info_special file s
let get_comments f_create_ele file s =
let (assoc_com, ele_coms) =
let (len, special_coms) = all_special file s in
match List.rev special_coms with
[] ->
(None, [])
| h :: q ->
if (blank_line_outside_simple file
(String.sub s len ((String.length s) - len)) )
|| h.Odoc_types.i_desc = Some [Odoc_types.Raw "/*"]
then
(None, special_coms)
else
(Some h, List.rev q)
in
let ele_comments =
List.fold_left
(fun acc -> fun sc ->
match sc.Odoc_types.i_desc with
None ->
acc
| Some t ->
acc @ [f_create_ele t])
[]
ele_coms
in
(assoc_com, ele_comments)
end
module Basic_info_retriever = Info_retriever (Odoc_text.Texter)
let info_of_string s =
let dummy = Odoc_types.dummy_info in
let s2 = Printf.sprintf "(** %s *)" s in
let (_, i_opt) = Basic_info_retriever.first_special "-" s2 in
match i_opt with
None -> dummy
| Some i -> i
let info_of_comment_file modlist f =
try
let s = Odoc_misc.input_file_as_string f in
let i = info_of_string s in
Odoc_cross.assoc_comments_info "" modlist i
with
Sys_error s ->
failwith s
|
4a6e88615cdeabd269f13ce397e8b58bef61b6deb37ac1ce3572a8f53d5b02fd | 8c6794b6/guile-tjit | t-br-if-eq-01.scm | ;; Simple loop with `br-if-eq'.
;; Count N down to zero, bumping ACC once per iteration; the (eq? n 0) test
;; is deliberate so the compiler emits the `br-if-eq' bytecode under test.
(define (loop n acc)
  (cond
   ((eq? n 0) acc)
   (else (loop (- n 1) (+ acc 1)))))

(loop 1000 0)
| null | https://raw.githubusercontent.com/8c6794b6/guile-tjit/9566e480af2ff695e524984992626426f393414f/test-suite/tjit/t-br-if-eq-01.scm | scheme | Simple loop with `br-if-eq'. |
(define (loop n acc)
(if (eq? n 0)
acc
(loop (- n 1) (+ acc 1))))
(loop 1000 0)
|
f7c9946c9d4fb36beb2db4e23afa19c9321687aaccd524fcc430175832435f0c | arbor/antiope | MessagesSpec.hs | module Antiope.SQS.MessagesSpec
( spec
) where
import HaskellWorks.Hspec.Hedgehog
import Hedgehog
import Test.Hspec
# ANN module ( " HLint : ignore Redundant do " : : String ) #
-- | Placeholder spec for the SQS messages module: the single property is a
-- tautology kept so the suite compiles and registers the describe block.
-- TODO(review): replace with real properties once behaviour is specified.
spec :: Spec
spec = describe "Antiope.SQS.MessagesSpec" $ do
  it "Implement me" $ require $ property $ do
    True === True
| null | https://raw.githubusercontent.com/arbor/antiope/86ad3df07b8d3fd5d2c8bef4111a73b85850e1ba/antiope-sqs/test/Antiope/SQS/MessagesSpec.hs | haskell | module Antiope.SQS.MessagesSpec
( spec
) where
import HaskellWorks.Hspec.Hedgehog
import Hedgehog
import Test.Hspec
# ANN module ( " HLint : ignore Redundant do " : : String ) #
spec :: Spec
spec = describe "Antiope.SQS.MessagesSpec" $ do
it "Implement me" $ require $ property $ do
True === True
|
|
0f310d0338a570ad3bebadbffa55382d81f38ec515a09da2b225268cd9504c3b | Simre1/haskell-game | Scene.hs | # LANGUAGE DataKinds #
# LANGUAGE TypeOperators #
module Scene.Level.Scene where
import Control.Applicative ((<|>))
import Control.Monad.IO.Class (liftIO)
import Data.Bool (bool)
import Polysemy (Embed, Sem, Members)
import Polysemy.Input (Input)
import ECS.Apecs (ApecsSystem, executeApecsSystem, runApecs)
import Sigma (Signal, liftAction, withInitialization)
import GameInput
import MyWindow (MyWindow)
import Scene.Level.Initialize.CollisionHandler (initializeCollisionHandlers)
import Scene.Level.Initialize.Player (initializePlayer)
import Scene.Level.Step.Delete (deleteOutOfBounds)
import Scene.Level.Step.Enemy (stepEnemies)
import Scene.Level.Step.Physics (stepPhysics)
import Scene.Level.Step.Player (stepPlayer)
import Scene.Level.Step.Scenario (scenarioSignal, level1Waves)
import Scene.Level.World (World, initWorld)
import Scene.Level.WorldAccessors (isPlayerAlive)
import Scene.Scenes (Scene(..))
-- | Run the level scene.  Initialises the ECS world, collision handlers and
-- the player once, then every frame advances the simulation and runs the
-- supplied render signal.  Produces @Just scene@ when the level requests a
-- scene switch (scenario finished, or the player died).
levelScene :: Members [Embed IO, Input GameInput, MyWindow] r => Signal (Sem (ApecsSystem World : r)) () -> Signal (Sem r) (Maybe Scene)
levelScene render = runApecs (liftIO initWorld) $ withInitialization (executeApecsSystem $ initializeCollisionHandlers *> initializePlayer) $ \_ ->
  -- Keep the step result; render is run only for its drawing effects.
  const <$> step <*> render
  where
    -- One simulation tick: the scenario signal may request a transition;
    -- otherwise step player/enemies/cleanup/physics, and when the player is
    -- no longer alive fall back to the StartGame scene.
    step :: Members [Embed IO, Input GameInput, ApecsSystem World] r => Signal (Sem r) (Maybe Scene)
    step = (<|>) <$> scenarioSignal (level1Waves, StartGame) <*> liftAction (executeApecsSystem $ stepPlayer *> stepEnemies *> deleteOutOfBounds *> stepPhysics *> (bool (Just StartGame) Nothing <$> isPlayerAlive))
| null | https://raw.githubusercontent.com/Simre1/haskell-game/272a0674157aedc7b0e0ee00da8d3a464903dc67/app/Scene/Level/Scene.hs | haskell | # LANGUAGE DataKinds #
# LANGUAGE TypeOperators #
module Scene.Level.Scene where
import Control.Applicative ((<|>))
import Control.Monad.IO.Class (liftIO)
import Data.Bool (bool)
import Polysemy (Embed, Sem, Members)
import Polysemy.Input (Input)
import ECS.Apecs (ApecsSystem, executeApecsSystem, runApecs)
import Sigma (Signal, liftAction, withInitialization)
import GameInput
import MyWindow (MyWindow)
import Scene.Level.Initialize.CollisionHandler (initializeCollisionHandlers)
import Scene.Level.Initialize.Player (initializePlayer)
import Scene.Level.Step.Delete (deleteOutOfBounds)
import Scene.Level.Step.Enemy (stepEnemies)
import Scene.Level.Step.Physics (stepPhysics)
import Scene.Level.Step.Player (stepPlayer)
import Scene.Level.Step.Scenario (scenarioSignal, level1Waves)
import Scene.Level.World (World, initWorld)
import Scene.Level.WorldAccessors (isPlayerAlive)
import Scene.Scenes (Scene(..))
levelScene :: Members [Embed IO, Input GameInput, MyWindow] r => Signal (Sem (ApecsSystem World : r)) () -> Signal (Sem r) (Maybe Scene)
levelScene render = runApecs (liftIO initWorld) $ withInitialization (executeApecsSystem $ initializeCollisionHandlers *> initializePlayer) $ \_ ->
const <$> step <*> render
where
step :: Members [Embed IO, Input GameInput, ApecsSystem World] r => Signal (Sem r) (Maybe Scene)
step = (<|>) <$> scenarioSignal (level1Waves, StartGame) <*> liftAction (executeApecsSystem $ stepPlayer *> stepEnemies *> deleteOutOfBounds *> stepPhysics *> (bool (Just StartGame) Nothing <$> isPlayerAlive))
|
|
88ac3c24fe99c0218d2a76d166daa11c72f019d27db438ee41ec182e652194a7 | yrashk/erlang | ex_grid.erl | %%
%% %CopyrightBegin%
%%
Copyright Ericsson AB 2009 . All Rights Reserved .
%%
The contents of this file are subject to the Erlang Public License ,
Version 1.1 , ( the " License " ) ; you may not use this file except in
%% compliance with the License. You should have received a copy of the
%% Erlang Public License along with this software. If not, it can be
%% retrieved online at /.
%%
Software distributed under the License is distributed on an " AS IS "
%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
%% the License for the specific language governing rights and limitations
%% under the License.
%%
%% %CopyrightEnd%
-module(ex_grid).
-behavoiur(wx_object).
%% Client API
-export([start/1]).
%% wx_object callbacks
-export([init/1, terminate/2, code_change/3,
handle_info/2, handle_call/3, handle_event/2]).
-include_lib("wx/include/wx.hrl").
-record(state,
{
parent,
config
}).
start(Config) ->
wx_object:start_link(?MODULE, Config, []).
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
init(Config) ->
wx:batch(fun() -> do_init(Config) end).
do_init(Config) ->
Parent = proplists:get_value(parent, Config),
Panel = wxPanel:new(Parent, []),
%% Setup sizers
MainSizer = wxBoxSizer:new(?wxVERTICAL),
Sizer = wxStaticBoxSizer:new(?wxVERTICAL, Panel,
[{label, "wxGrid"}]),
Grid1 = create_grid1(Panel),
%% Add to sizers
Options = [{flag, ?wxEXPAND}, {proportion, 1}],
wxSizer:add(Sizer, Grid1, Options),
wxSizer:add(MainSizer, Sizer, Options),
wxPanel:setSizer(Panel, MainSizer),
{Panel, #state{parent=Panel, config=Config}}.
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%% Callbacks handled as normal gen_server callbacks
handle_info(Msg, State) ->
demo:format(State#state.config, "Got Info ~p\n", [Msg]),
{noreply, State}.
handle_call(Msg, _From, State) ->
demo:format(State#state.config, "Got Call ~p\n", [Msg]),
{reply,{error, nyi}, State}.
%% Async Events are handled in handle_event as in handle_info
handle_event(Ev = #wx{}, State = #state{}) ->
demo:format(State#state.config, "Got Event ~p\n", [Ev]),
{noreply, State}.
code_change(_, _, State) ->
{stop, ignore, State}.
terminate(_Reason, _State) ->
ok.
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%% Local functions
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
create_grid1(Panel) ->
Grid = wxGrid:new(Panel, 2, []),
wxGrid:createGrid(Grid, 100, 5),
Font = wxFont:new(16, ?wxFONTFAMILY_SWISS,
?wxFONTSTYLE_NORMAL,
?wxFONTWEIGHT_NORMAL, []),
Fun =
fun(Int) ->
wxGrid:setCellValue(Grid, Int, 0, "Value"),
wxGrid:setCellValue(Grid, Int, 1, "Value"),
wxGrid:setCellValue(Grid, Int, 2, "Value"),
wxGrid:setCellValue(Grid, Int, 3, "Read only"),
wxGrid:setCellTextColour(Grid, Int, 3, ?wxWHITE),
wxGrid:setReadOnly(Grid, Int, 3, [{isReadOnly,true}]),
wxGrid:setCellValue(Grid, Int, 4, "Value"),
case Int rem 4 of
0 -> wxGrid:setCellBackgroundColour(Grid, Int, 3, ?wxRED);
1 -> wxGrid:setCellBackgroundColour(Grid, Int, 3, ?wxGREEN),
wxGrid:setCellTextColour(Grid, Int, 2, {255,215,0,255});
2 -> wxGrid:setCellBackgroundColour(Grid, Int, 3, ?wxBLUE);
_ -> wxGrid:setCellBackgroundColour(Grid, Int, 1, ?wxCYAN),
wxGrid:setCellValue(Grid, Int, 1,
"Centered\nhorizontally"),
wxGrid:setCellAlignment(Grid, Int, 4,
0,?wxALIGN_CENTER),
wxGrid:setCellValue(Grid, Int, 4,
"Centered\nvertically"),
wxGrid:setCellAlignment(Grid, Int, 1,
?wxALIGN_CENTER,0),
wxGrid:setCellTextColour(Grid, Int, 3, ?wxBLACK),
wxGrid:setCellAlignment(Grid, Int, 2,
?wxALIGN_CENTER,
?wxALIGN_CENTER),
wxGrid:setCellFont(Grid, Int, 0, Font),
wxGrid:setCellValue(Grid, Int, 2,
"Centered vertically\nand horizontally"),
wxGrid:setRowSize(Grid, Int, 80)
end
end,
wx:foreach(Fun, lists:seq(0,99)),
wxGrid:setColSize(Grid, 2, 150),
wxGrid:connect(Grid, grid_cell_change),
Grid.
| null | https://raw.githubusercontent.com/yrashk/erlang/e1282325ed75e52a98d58f5bd9fb0fa27896173f/lib/wx/examples/demo/ex_grid.erl | erlang |
%CopyrightBegin%
compliance with the License. You should have received a copy of the
Erlang Public License along with this software. If not, it can be
retrieved online at /.
basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
the License for the specific language governing rights and limitations
under the License.
%CopyrightEnd%
Client API
wx_object callbacks
Setup sizers
Add to sizers
Callbacks handled as normal gen_server callbacks
Async Events are handled in handle_event as in handle_info
Local functions
| Copyright Ericsson AB 2009 . All Rights Reserved .
The contents of this file are subject to the Erlang Public License ,
Version 1.1 , ( the " License " ) ; you may not use this file except in
Software distributed under the License is distributed on an " AS IS "
-module(ex_grid).
-behavoiur(wx_object).
-export([start/1]).
-export([init/1, terminate/2, code_change/3,
handle_info/2, handle_call/3, handle_event/2]).
-include_lib("wx/include/wx.hrl").
-record(state,
{
parent,
config
}).
start(Config) ->
wx_object:start_link(?MODULE, Config, []).
init(Config) ->
wx:batch(fun() -> do_init(Config) end).
do_init(Config) ->
Parent = proplists:get_value(parent, Config),
Panel = wxPanel:new(Parent, []),
MainSizer = wxBoxSizer:new(?wxVERTICAL),
Sizer = wxStaticBoxSizer:new(?wxVERTICAL, Panel,
[{label, "wxGrid"}]),
Grid1 = create_grid1(Panel),
Options = [{flag, ?wxEXPAND}, {proportion, 1}],
wxSizer:add(Sizer, Grid1, Options),
wxSizer:add(MainSizer, Sizer, Options),
wxPanel:setSizer(Panel, MainSizer),
{Panel, #state{parent=Panel, config=Config}}.
handle_info(Msg, State) ->
demo:format(State#state.config, "Got Info ~p\n", [Msg]),
{noreply, State}.
handle_call(Msg, _From, State) ->
demo:format(State#state.config, "Got Call ~p\n", [Msg]),
{reply,{error, nyi}, State}.
handle_event(Ev = #wx{}, State = #state{}) ->
demo:format(State#state.config, "Got Event ~p\n", [Ev]),
{noreply, State}.
code_change(_, _, State) ->
{stop, ignore, State}.
terminate(_Reason, _State) ->
ok.
create_grid1(Panel) ->
Grid = wxGrid:new(Panel, 2, []),
wxGrid:createGrid(Grid, 100, 5),
Font = wxFont:new(16, ?wxFONTFAMILY_SWISS,
?wxFONTSTYLE_NORMAL,
?wxFONTWEIGHT_NORMAL, []),
Fun =
fun(Int) ->
wxGrid:setCellValue(Grid, Int, 0, "Value"),
wxGrid:setCellValue(Grid, Int, 1, "Value"),
wxGrid:setCellValue(Grid, Int, 2, "Value"),
wxGrid:setCellValue(Grid, Int, 3, "Read only"),
wxGrid:setCellTextColour(Grid, Int, 3, ?wxWHITE),
wxGrid:setReadOnly(Grid, Int, 3, [{isReadOnly,true}]),
wxGrid:setCellValue(Grid, Int, 4, "Value"),
case Int rem 4 of
0 -> wxGrid:setCellBackgroundColour(Grid, Int, 3, ?wxRED);
1 -> wxGrid:setCellBackgroundColour(Grid, Int, 3, ?wxGREEN),
wxGrid:setCellTextColour(Grid, Int, 2, {255,215,0,255});
2 -> wxGrid:setCellBackgroundColour(Grid, Int, 3, ?wxBLUE);
_ -> wxGrid:setCellBackgroundColour(Grid, Int, 1, ?wxCYAN),
wxGrid:setCellValue(Grid, Int, 1,
"Centered\nhorizontally"),
wxGrid:setCellAlignment(Grid, Int, 4,
0,?wxALIGN_CENTER),
wxGrid:setCellValue(Grid, Int, 4,
"Centered\nvertically"),
wxGrid:setCellAlignment(Grid, Int, 1,
?wxALIGN_CENTER,0),
wxGrid:setCellTextColour(Grid, Int, 3, ?wxBLACK),
wxGrid:setCellAlignment(Grid, Int, 2,
?wxALIGN_CENTER,
?wxALIGN_CENTER),
wxGrid:setCellFont(Grid, Int, 0, Font),
wxGrid:setCellValue(Grid, Int, 2,
"Centered vertically\nand horizontally"),
wxGrid:setRowSize(Grid, Int, 80)
end
end,
wx:foreach(Fun, lists:seq(0,99)),
wxGrid:setColSize(Grid, 2, 150),
wxGrid:connect(Grid, grid_cell_change),
Grid.
|
77bf3c9cb25fb65810bedb86b9344c0cd02d02430345217650bf51490745ddf3 | arttuka/reagent-material-ui | browse_gallery_two_tone.cljs | (ns reagent-mui.icons.browse-gallery-two-tone
"Imports @mui/icons-material/BrowseGalleryTwoTone as a Reagent component."
(:require-macros [reagent-mui.util :refer [create-svg-icon e]])
(:require [react :as react]
["@mui/material/SvgIcon" :as SvgIcon]
[reagent-mui.util]))
(def browse-gallery-two-tone (create-svg-icon [(e "path" #js {"d" "M9 5c-3.86 0-7 3.14-7 7s3.14 7 7 7 7-3.14 7-7-3.14-7-7-7zm2.79 11.21L8 12.41V7h2v4.59l3.21 3.21-1.42 1.41z", "opacity" ".3"}) (e "path" #js {"d" "M9 3c-4.97 0-9 4.03-9 9s4.03 9 9 9 9-4.03 9-9-4.03-9-9-9zm0 16c-3.86 0-7-3.14-7-7s3.14-7 7-7 7 3.14 7 7-3.14 7-7 7z"}) (e "path" #js {"d" "M10 7H8v5.41l3.79 3.8 1.42-1.42-3.21-3.2zm7.99-3.48v2.16C20.36 6.8 22 9.21 22 12c0 2.79-1.64 5.2-4.01 6.32v2.16C21.48 19.24 24 15.91 24 12s-2.52-7.24-6.01-8.48z"})]
"BrowseGalleryTwoTone"))
| null | https://raw.githubusercontent.com/arttuka/reagent-material-ui/c7cd0d7c661ab9df5b0aed0213a6653a9a3f28ea/src/icons/reagent_mui/icons/browse_gallery_two_tone.cljs | clojure | (ns reagent-mui.icons.browse-gallery-two-tone
"Imports @mui/icons-material/BrowseGalleryTwoTone as a Reagent component."
(:require-macros [reagent-mui.util :refer [create-svg-icon e]])
(:require [react :as react]
["@mui/material/SvgIcon" :as SvgIcon]
[reagent-mui.util]))
(def browse-gallery-two-tone (create-svg-icon [(e "path" #js {"d" "M9 5c-3.86 0-7 3.14-7 7s3.14 7 7 7 7-3.14 7-7-3.14-7-7-7zm2.79 11.21L8 12.41V7h2v4.59l3.21 3.21-1.42 1.41z", "opacity" ".3"}) (e "path" #js {"d" "M9 3c-4.97 0-9 4.03-9 9s4.03 9 9 9 9-4.03 9-9-4.03-9-9-9zm0 16c-3.86 0-7-3.14-7-7s3.14-7 7-7 7 3.14 7 7-3.14 7-7 7z"}) (e "path" #js {"d" "M10 7H8v5.41l3.79 3.8 1.42-1.42-3.21-3.2zm7.99-3.48v2.16C20.36 6.8 22 9.21 22 12c0 2.79-1.64 5.2-4.01 6.32v2.16C21.48 19.24 24 15.91 24 12s-2.52-7.24-6.01-8.48z"})]
"BrowseGalleryTwoTone"))
|
|
3d018e886cc219734f13f8d0f02a68001bf1b4db339fcea6d709742a12749b45 | input-output-hk/offchain-metadata-tools | Transform.hs | module Test.Cardano.Metadata.Transform
( tests
) where
import Data.Validation ( Validation (Failure, Success) )
import Data.Word
import Hedgehog ( forAll, property, unOpaque, (===) )
import qualified Hedgehog as H ( Property )
import qualified Hedgehog.Gen as Gen
import qualified Hedgehog.Range as Range
import Test.Tasty ( TestTree, testGroup )
import Test.Tasty.HUnit ( Assertion, testCase, (@?=) )
import Test.Tasty.Hedgehog
-- import Test.Cardano.Helpers (prop_functor_laws)
import Cardano.Metadata.Transform
import qualified Test.Cardano.Metadata.Generators as Gen
tests :: TestTree
tests = testGroup "Transform algebra tests"
[ testProperty "Transform/functor/law-abiding" prop_functor_laws
, testProperty "Transform/applicative/law-abiding" prop_applicative_laws
, testProperty "Transform/monad/law-abiding" prop_monad_laws
, testCase "Transform/desirable-properties" unit_desirable_properties
]
unit_desirable_properties :: Assertion
unit_desirable_properties = do
-- Can make transforms
let t1 = mkTransform (const (Left "fail"))
t2 = mkTransform Right
-- Combine them using the applicative instance
let tSum = t1 *> t2
-- And get the expected result
tSum `apply` "dummy" @?= Left "fail"
-- Additionally, we can do the same with Validation
let v1 = mkTransform (const (Failure ["fail1"]))
v2 = mkTransform (const (Failure ["fail2"]))
v3 = mkTransform Success
let vSum = v1 *> v2 *> v3
vSum `apply` "dummy" @?= Failure ["fail1", "fail2"]
prop_functor_laws :: H.Property
prop_functor_laws = property $ do
ofa <- forAll (Gen.transform Gen.eitherWord8)
let fa = unOpaque ofa
let obs = flip apply (4 :: Word8)
-- Identity
obs (fmap id fa) === obs (id fa)
-- Composition
let plusTwo = (+ 2)
mulThree = (* 3)
obs (fmap (mulThree . plusTwo) fa) === obs ((fmap mulThree . fmap plusTwo) fa)
prop_applicative_laws :: H.Property
prop_applicative_laws = property $ do
fa <- unOpaque <$> forAll (Gen.transform Gen.eitherWord8)
ffb <- unOpaque <$> forAll (Gen.transform (fmap (+) <$> Gen.eitherWord8))
ffc <- unOpaque <$> forAll (Gen.transform (fmap (*) <$> Gen.eitherWord8))
let obs = flip apply (4 :: Word8)
-- Identity
obs (pure id <*> fa) === obs (fa)
-- Composition
obs (pure (.) <*> ffc <*> ffb <*> fa) === obs (ffc <*> (ffb <*> fa))
Homomorphism
a <- forAll (Gen.word8 Range.constantBounded)
b <- forAll (Gen.word8 Range.constantBounded)
let f = (+)
obs (pure (f a) <*> pure b) === (obs (pure (f a b) :: Transform r (Either Word8) Word8))
-- Interchange
y <- forAll (Gen.word8 Range.constantBounded)
z <- forAll (Gen.word8 Range.constantBounded)
let u = pure (+ z)
obs (u <*> pure y) === obs (pure ($ y) <*> u :: Transform Word8 (Either Word8) Word8)
prop_monad_laws :: H.Property
prop_monad_laws = property $ do
a <- forAll (Gen.word8 Range.constantBounded)
let k = pure
m <- unOpaque <$> forAll (Gen.transform Gen.eitherWord8)
mh <- unOpaque <$> forAll (Gen.transform Gen.eitherWord8)
let h = const mh
let obs = flip apply (4 :: Word8)
-- Left identity
obs (return a >>= k) === obs (k a)
-- Right identity
obs (m >>= return) === obs (m)
Associativity
obs (m >>= (\x -> k x >>= h)) === obs ((m >>= k) >>= h)
| null | https://raw.githubusercontent.com/input-output-hk/offchain-metadata-tools/794f08cedbf555e9d207bccc45c08abbcf98add9/metadata-lib/test/Test/Cardano/Metadata/Transform.hs | haskell | import Test.Cardano.Helpers (prop_functor_laws)
Can make transforms
Combine them using the applicative instance
And get the expected result
Additionally, we can do the same with Validation
Identity
Composition
Identity
Composition
Interchange
Left identity
Right identity | module Test.Cardano.Metadata.Transform
( tests
) where
import Data.Validation ( Validation (Failure, Success) )
import Data.Word
import Hedgehog ( forAll, property, unOpaque, (===) )
import qualified Hedgehog as H ( Property )
import qualified Hedgehog.Gen as Gen
import qualified Hedgehog.Range as Range
import Test.Tasty ( TestTree, testGroup )
import Test.Tasty.HUnit ( Assertion, testCase, (@?=) )
import Test.Tasty.Hedgehog
import Cardano.Metadata.Transform
import qualified Test.Cardano.Metadata.Generators as Gen
tests :: TestTree
tests = testGroup "Transform algebra tests"
[ testProperty "Transform/functor/law-abiding" prop_functor_laws
, testProperty "Transform/applicative/law-abiding" prop_applicative_laws
, testProperty "Transform/monad/law-abiding" prop_monad_laws
, testCase "Transform/desirable-properties" unit_desirable_properties
]
unit_desirable_properties :: Assertion
unit_desirable_properties = do
let t1 = mkTransform (const (Left "fail"))
t2 = mkTransform Right
let tSum = t1 *> t2
tSum `apply` "dummy" @?= Left "fail"
let v1 = mkTransform (const (Failure ["fail1"]))
v2 = mkTransform (const (Failure ["fail2"]))
v3 = mkTransform Success
let vSum = v1 *> v2 *> v3
vSum `apply` "dummy" @?= Failure ["fail1", "fail2"]
prop_functor_laws :: H.Property
prop_functor_laws = property $ do
ofa <- forAll (Gen.transform Gen.eitherWord8)
let fa = unOpaque ofa
let obs = flip apply (4 :: Word8)
obs (fmap id fa) === obs (id fa)
let plusTwo = (+ 2)
mulThree = (* 3)
obs (fmap (mulThree . plusTwo) fa) === obs ((fmap mulThree . fmap plusTwo) fa)
prop_applicative_laws :: H.Property
prop_applicative_laws = property $ do
fa <- unOpaque <$> forAll (Gen.transform Gen.eitherWord8)
ffb <- unOpaque <$> forAll (Gen.transform (fmap (+) <$> Gen.eitherWord8))
ffc <- unOpaque <$> forAll (Gen.transform (fmap (*) <$> Gen.eitherWord8))
let obs = flip apply (4 :: Word8)
obs (pure id <*> fa) === obs (fa)
obs (pure (.) <*> ffc <*> ffb <*> fa) === obs (ffc <*> (ffb <*> fa))
Homomorphism
a <- forAll (Gen.word8 Range.constantBounded)
b <- forAll (Gen.word8 Range.constantBounded)
let f = (+)
obs (pure (f a) <*> pure b) === (obs (pure (f a b) :: Transform r (Either Word8) Word8))
y <- forAll (Gen.word8 Range.constantBounded)
z <- forAll (Gen.word8 Range.constantBounded)
let u = pure (+ z)
obs (u <*> pure y) === obs (pure ($ y) <*> u :: Transform Word8 (Either Word8) Word8)
prop_monad_laws :: H.Property
prop_monad_laws = property $ do
a <- forAll (Gen.word8 Range.constantBounded)
let k = pure
m <- unOpaque <$> forAll (Gen.transform Gen.eitherWord8)
mh <- unOpaque <$> forAll (Gen.transform Gen.eitherWord8)
let h = const mh
let obs = flip apply (4 :: Word8)
obs (return a >>= k) === obs (k a)
obs (m >>= return) === obs (m)
Associativity
obs (m >>= (\x -> k x >>= h)) === obs ((m >>= k) >>= h)
|
5ca80e9e06e72706027951ac4e92edd4e3b00a08f6cd3419a49acf8f6751fb54 | tonyfloatersu/solution-haskell-craft-of-FP | Chapter7.hs | -------------------------------------------------------------------------
--
: The Craft of Functional Programming , 3e
( c ) Addison - Wesley , 1996 - 2011 .
--
Chapter 7
--
-------------------------------------------------------------------------
module Chapter7 where
-- Defining functions over lists
-- ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
-- For pedagogical reasons, this chapter repeats many of the definitions in the
-- standard Prelude. They are repeated in this file, and so the original
definitions have to be hidden when the Prelude is imported :
import Prelude hiding (Word,id,head,tail,null,sum,concat,(++),zip,take,getLine)
import qualified Prelude
import Chapter5 (digits,isEven)
import Test.QuickCheck
-- Pattern matching revisited
-- ^^^^^^^^^^^^^^^^^^^^^^^^^^
-- An example function using guards ...
mystery :: Integer -> Integer -> Integer
mystery x y
| x==0 = y
| otherwise = x
-- ... or pattern matching
mystery' :: Integer -> Integer -> Integer
mystery' 0 y = y
mystery' x _ = x
To join two strings
joinStrings :: (String,String) -> String
joinStrings (st1,st2) = st1 ++ "\t" ++ st2
-- Lists and list patterns
-- ^^^^^^^^^^^^^^^^^^^^^^^
From the Prelude ...
head :: [a] -> a
head (x:_) = x
tail :: [a] -> [a]
tail (_:xs) = xs
null :: [a] -> Bool
null [] = True
null (_:_) = False
-- The case construction
-- ^^^^^^^^^^^^^^^^^^^^^
Return the first digit in a string .
firstDigit :: String -> Char
firstDigit st
= case (digits st) of
[] -> '\0'
(x:_) -> x
-- Primitive recursion over lists
-- ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
The sum of a list of Ints .
sum :: [Integer] -> Integer
sum [] = 0
sum (x:xs) = x + sum xs
-- Property to test the re-implementation of sum
-- against the version in the prelude.
prop_sum :: [Integer] -> Bool
prop_sum xs = sum xs == Prelude.sum xs
-- Finding primitive recursive definitions
-- ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
a list of lists .
concat :: [[a]] -> [a]
concat [] = []
concat (x:xs) = x ++ concat xs
Joining two lists
(++) :: [a] -> [a] -> [a]
[] ++ ys = ys
(x:xs) ++ ys = x:(xs++ys)
-- Testing whether something is a member of a list.
Renamed to elem ' as we use the elem from Prelude
-- elsewhere in the file.
elem' :: Integer -> [Integer] -> Bool
elem' x [] = False
elem' x (y:ys) = (x==y) || (elem' x ys)
-- To double every element of an integer list
doubleAll :: [Integer] -> [Integer]
doubleAll xs = [ 2*x | x<-xs ]
doubleAll' [] = []
doubleAll' (x:xs) = 2*x : doubleAll' xs
-- To select the even elements from an integer list.
selectEven :: [Integer] -> [Integer]
selectEven xs = [ x | x<-xs , isEven x ]
selectEven' [] = []
selectEven' (x:xs)
| isEven x = x : selectEven' xs
| otherwise = selectEven' xs
-- To sort a list of numbers into ascending order.
iSort :: [Integer] -> [Integer]
iSort [] = []
iSort (x:xs) = ins x (iSort xs)
-- To insert an element at the right place into a sorted list.
ins :: Integer -> [Integer] -> [Integer]
ins x [] = [x]
ins x (y:ys)
| x <= y = x:(y:ys)
| otherwise = y : ins x ys
-- General recursions over lists
-- ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
Zipping together two lists .
zip :: [a] -> [b] -> [(a,b)]
zip (x:xs) (y:ys) = (x,y) : zip xs ys
zip (x:xs) [] = []
zip [] zs = []
-- Taking a given number of elements from a list.
take :: Int -> [a] -> [a]
take 0 _ = []
take _ [] = []
take n (x:xs)
| n>0 = x : take (n-1) xs
take _ _ = error "PreludeList.take: negative argument"
-- Quicksort over lists.
qSort :: [Integer] -> [Integer]
qSort [] = []
qSort (x:xs)
= qSort [ y | y<-xs , y<=x] ++ [x] ++ qSort [ y | y<-xs , y>x]
-- Example: Text Processing
^^^^^^^^^^^^^^^^^^^^^^^^
-- The `whitespace' characters.
whitespace :: String
whitespace = ['\n','\t',' ']
-- Get a word from the front of a string.
getWord :: String -> String
getWord [] = []
getWord (x:xs)
| elem x whitespace = []
| otherwise = x : getWord xs
In a similar way , the first word of a string can be dropped .
dropWord :: String -> String
dropWord [] = []
dropWord (x:xs)
| elem x whitespace = (x:xs)
| otherwise = dropWord xs
-- To remove the whitespace character(s) from the front of a string.
dropSpace :: String -> String
dropSpace [] = []
dropSpace (x:xs)
| elem x whitespace = dropSpace xs
| otherwise = (x:xs)
-- A word is a string.
type Word = String
-- Splitting a string into words.
splitWords :: String -> [Word]
splitWords st = split (dropSpace st)
split :: String -> [Word]
split [] = []
split st
= (getWord st) : split (dropSpace (dropWord st))
-- Splitting into lines of length at most lineLen
lineLen :: Int
lineLen = 80
-- A line is a list of words.
type Line = [Word]
-- Getting a line from a list of words.
getLine :: Int -> [Word] -> Line
getLine len [] = []
getLine len (w:ws)
| length w <= len = w : restOfLine
| otherwise = []
where
newlen = len - (length w + 1)
restOfLine = getLine newlen ws
Dropping the first line from a list of words .
dropLine :: Int -> [Word] -> Line
dropLine = dropLine -- DUMMY DEFINITION
-- Splitting into lines.
splitLines :: [Word] -> [Line]
splitLines [] = []
splitLines ws
= getLine lineLen ws
: splitLines (dropLine lineLen ws)
-- To fill a text string into lines, we write
fill :: String -> [Line]
fill = splitLines . splitWords
| null | https://raw.githubusercontent.com/tonyfloatersu/solution-haskell-craft-of-FP/0d4090ef28417c82a7b01e4a764f657641cb83f3/Chapter7.hs | haskell | -----------------------------------------------------------------------
-----------------------------------------------------------------------
Defining functions over lists
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
For pedagogical reasons, this chapter repeats many of the definitions in the
standard Prelude. They are repeated in this file, and so the original
Pattern matching revisited
^^^^^^^^^^^^^^^^^^^^^^^^^^
An example function using guards ...
... or pattern matching
Lists and list patterns
^^^^^^^^^^^^^^^^^^^^^^^
The case construction
^^^^^^^^^^^^^^^^^^^^^
Primitive recursion over lists
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
Property to test the re-implementation of sum
against the version in the prelude.
Finding primitive recursive definitions
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
Testing whether something is a member of a list.
elsewhere in the file.
To double every element of an integer list
To select the even elements from an integer list.
To sort a list of numbers into ascending order.
To insert an element at the right place into a sorted list.
General recursions over lists
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
Taking a given number of elements from a list.
Quicksort over lists.
Example: Text Processing
The `whitespace' characters.
Get a word from the front of a string.
To remove the whitespace character(s) from the front of a string.
A word is a string.
Splitting a string into words.
Splitting into lines of length at most lineLen
A line is a list of words.
Getting a line from a list of words.
DUMMY DEFINITION
Splitting into lines.
To fill a text string into lines, we write | : The Craft of Functional Programming , 3e
( c ) Addison - Wesley , 1996 - 2011 .
Chapter 7
module Chapter7 where
definitions have to be hidden when the Prelude is imported :
import Prelude hiding (Word,id,head,tail,null,sum,concat,(++),zip,take,getLine)
import qualified Prelude
import Chapter5 (digits,isEven)
import Test.QuickCheck
mystery :: Integer -> Integer -> Integer
mystery x y
| x==0 = y
| otherwise = x
mystery' :: Integer -> Integer -> Integer
mystery' 0 y = y
mystery' x _ = x
To join two strings
joinStrings :: (String,String) -> String
joinStrings (st1,st2) = st1 ++ "\t" ++ st2
From the Prelude ...
head :: [a] -> a
head (x:_) = x
tail :: [a] -> [a]
tail (_:xs) = xs
null :: [a] -> Bool
null [] = True
null (_:_) = False
Return the first digit in a string .
firstDigit :: String -> Char
firstDigit st
= case (digits st) of
[] -> '\0'
(x:_) -> x
The sum of a list of Ints .
sum :: [Integer] -> Integer
sum [] = 0
sum (x:xs) = x + sum xs
prop_sum :: [Integer] -> Bool
prop_sum xs = sum xs == Prelude.sum xs
a list of lists .
concat :: [[a]] -> [a]
concat [] = []
concat (x:xs) = x ++ concat xs
Joining two lists
(++) :: [a] -> [a] -> [a]
[] ++ ys = ys
(x:xs) ++ ys = x:(xs++ys)
Renamed to elem ' as we use the elem from Prelude
elem' :: Integer -> [Integer] -> Bool
elem' x [] = False
elem' x (y:ys) = (x==y) || (elem' x ys)
doubleAll :: [Integer] -> [Integer]
doubleAll xs = [ 2*x | x<-xs ]
doubleAll' [] = []
doubleAll' (x:xs) = 2*x : doubleAll' xs
selectEven :: [Integer] -> [Integer]
selectEven xs = [ x | x<-xs , isEven x ]
selectEven' [] = []
selectEven' (x:xs)
| isEven x = x : selectEven' xs
| otherwise = selectEven' xs
iSort :: [Integer] -> [Integer]
iSort [] = []
iSort (x:xs) = ins x (iSort xs)
ins :: Integer -> [Integer] -> [Integer]
ins x [] = [x]
ins x (y:ys)
| x <= y = x:(y:ys)
| otherwise = y : ins x ys
Zipping together two lists .
zip :: [a] -> [b] -> [(a,b)]
zip (x:xs) (y:ys) = (x,y) : zip xs ys
zip (x:xs) [] = []
zip [] zs = []
take :: Int -> [a] -> [a]
take 0 _ = []
take _ [] = []
take n (x:xs)
| n>0 = x : take (n-1) xs
take _ _ = error "PreludeList.take: negative argument"
qSort :: [Integer] -> [Integer]
qSort [] = []
qSort (x:xs)
= qSort [ y | y<-xs , y<=x] ++ [x] ++ qSort [ y | y<-xs , y>x]
^^^^^^^^^^^^^^^^^^^^^^^^
whitespace :: String
whitespace = ['\n','\t',' ']
getWord :: String -> String
getWord [] = []
getWord (x:xs)
| elem x whitespace = []
| otherwise = x : getWord xs
In a similar way , the first word of a string can be dropped .
dropWord :: String -> String
dropWord [] = []
dropWord (x:xs)
| elem x whitespace = (x:xs)
| otherwise = dropWord xs
dropSpace :: String -> String
dropSpace [] = []
dropSpace (x:xs)
| elem x whitespace = dropSpace xs
| otherwise = (x:xs)
type Word = String
splitWords :: String -> [Word]
splitWords st = split (dropSpace st)
split :: String -> [Word]
split [] = []
split st
= (getWord st) : split (dropSpace (dropWord st))
lineLen :: Int
lineLen = 80
type Line = [Word]
getLine :: Int -> [Word] -> Line
getLine len [] = []
getLine len (w:ws)
| length w <= len = w : restOfLine
| otherwise = []
where
newlen = len - (length w + 1)
restOfLine = getLine newlen ws
Dropping the first line from a list of words .
dropLine :: Int -> [Word] -> Line
splitLines :: [Word] -> [Line]
splitLines [] = []
splitLines ws
= getLine lineLen ws
: splitLines (dropLine lineLen ws)
fill :: String -> [Line]
fill = splitLines . splitWords
|
e017eb462769377b95c6154a7fa53284a96ea44b22e2a11b58cbd1ac57deb6fe | jeapostrophe/remix | datalog.rkt | #lang racket/base
(require datalog/runtime
(prefix-in stx: datalog/stx)
(for-syntax racket/base
remix/stx/raw
datalog/private/compiler
datalog/parse
syntax/parse))
(define-syntax (datalog stx)
(syntax-parse stx
[(_ thy:expr s:str ...)
(with-syntax
([(stmt ...)
(compile-program
(parse-program
(syntax-strings->input-port
(syntax-source stx)
(syntax->list #'(s ...)))))])
(syntax/loc stx
(stx:datalog thy stmt ...)))]))
(provide make-theory
datalog)
| null | https://raw.githubusercontent.com/jeapostrophe/remix/982529019d12252b5f6ab49c17a1a8283ccfb9df/datalog.rkt | racket | #lang racket/base
(require datalog/runtime
(prefix-in stx: datalog/stx)
(for-syntax racket/base
remix/stx/raw
datalog/private/compiler
datalog/parse
syntax/parse))
(define-syntax (datalog stx)
(syntax-parse stx
[(_ thy:expr s:str ...)
(with-syntax
([(stmt ...)
(compile-program
(parse-program
(syntax-strings->input-port
(syntax-source stx)
(syntax->list #'(s ...)))))])
(syntax/loc stx
(stx:datalog thy stmt ...)))]))
(provide make-theory
datalog)
|
|
5595ad6e55ecd32bbf5af64d6a2e0b4c07fd0e88877b97d7dbe17ced9c21a141 | kcsongor/generic-lens | Collect.hs | {-# LANGUAGE DataKinds #-}
{-# LANGUAGE PolyKinds #-}
{-# LANGUAGE TypeFamilies #-}
# LANGUAGE TypeOperators #
# LANGUAGE UndecidableInstances #
-----------------------------------------------------------------------------
-- |
-- Module : Data.Generics.Internal.Families.Collect
Copyright : ( C ) 2020
-- License : BSD3
Maintainer : < >
-- Stability : experimental
-- Portability : non-portable
--
-----------------------------------------------------------------------------
module Data.Generics.Internal.Families.Collect
( CollectTotalType
, CollectPartialType
, CollectField
, CollectFieldsOrdered
, TypeStat (..)
, type (\\)
) where
import Data.Type.Bool (If)
import Data.Type.Equality (type (==))
import GHC.Generics
import GHC.TypeLits (Symbol, CmpSymbol)
import Data.Generics.Product.Internal.HList (type (++))
import Data.Generics.Internal.Families.Has (GTypes)
data TypeStat
= TypeStat
{ _containsNone :: [Symbol]
, _containsMultiple :: [Symbol]
, _containsOne :: [Symbol]
}
type EmptyStat = 'TypeStat '[] '[] '[]
type family CollectTotalType t f :: TypeStat where
CollectTotalType t (C1 ('MetaCons ctor _ _) f)
= AddToStat ctor (CountType t f) EmptyStat
CollectTotalType t (M1 _ _ r)
= CollectTotalType t r
CollectTotalType t (l :+: r)
= MergeStat (CollectTotalType t l) (CollectTotalType t r)
type family CollectField t f :: TypeStat where
CollectField t (C1 ('MetaCons ctor _ _) f)
= AddToStat ctor (CountField t f) EmptyStat
CollectField t (M1 _ _ r)
= CollectField t r
CollectField t (l :+: r)
= MergeStat (CollectField t l) (CollectField t r)
type family AddToStat (ctor :: Symbol) (count :: Count) (st :: TypeStat) :: TypeStat where
AddToStat ctor 'None ('TypeStat n m o) = 'TypeStat (ctor ': n) m o
AddToStat ctor 'Multiple ('TypeStat n m o) = 'TypeStat n (ctor ': m) o
AddToStat ctor 'One ('TypeStat n m o) = 'TypeStat n m (ctor ': o)
type family MergeStat (st1 :: TypeStat) (st2 :: TypeStat) :: TypeStat where
MergeStat ('TypeStat n m o) ('TypeStat n' m' o') = 'TypeStat (n ++ n') (m ++ m') (o ++ o')
type family CountType t f :: Count where
CountType t (S1 _ (Rec0 t))
= 'One
CountType t (l :*: r)
= CountType t l <|> CountType t r
CountType t _
= 'None
type family CountField (field :: Symbol) f :: Count where
CountField field (S1 ('MetaSel ('Just field) _ _ _) _)
= 'One
CountField field (l :*: r)
= CountField field l <|> CountField field r
CountField _ _
= 'None
type family CollectPartialType t f :: [Symbol] where
CollectPartialType t (l :+: r)
= CollectPartialType t l ++ CollectPartialType t r
CollectPartialType t (C1 ('MetaCons ctor _ _) f)
= If (t == GTypes f) '[ctor] '[]
CollectPartialType t (D1 _ f)
= CollectPartialType t f
data Count
= None
| One
| Multiple
type family (a :: Count) <|> (b :: Count) :: Count where
'None <|> b = b
a <|> 'None = a
a <|> b = 'Multiple
type family (a :: Count) <&> (b :: Count) :: Count where
a <&> a = a
_ <&> _ = 'Multiple
type family CollectFieldsOrdered (r :: * -> *) :: [Symbol] where
CollectFieldsOrdered (l :*: r)
= Merge (CollectFieldsOrdered l) (CollectFieldsOrdered r)
CollectFieldsOrdered (S1 ('MetaSel ('Just name) _ _ _) _)
= '[name]
CollectFieldsOrdered (M1 _ m a)
= CollectFieldsOrdered a
CollectFieldsOrdered _
= '[]
type family Merge (xs :: [Symbol]) (ys :: [Symbol]) :: [Symbol] where
Merge xs '[] = xs
Merge '[] ys = ys
Merge (x ': xs) (y ': ys) = Merge' (CmpSymbol x y) x y xs ys
type family Merge' (ord :: Ordering) (x :: Symbol) (y :: Symbol) (xs :: [Symbol]) (ys :: [Symbol]) :: [Symbol] where
Merge' 'LT x y xs ys = x ': Merge xs (y ': ys)
Merge' _ x y xs ys = y ': Merge (x ': xs) ys
type family (xs :: [Symbol]) \\ (ys :: [Symbol]) :: [Symbol] where
xs \\ '[] = xs
'[] \\ xs = '[]
(x ': xs) \\ (y ': ys) = Sub' (CmpSymbol x y) x y xs ys
infixr 5 \\
type family Sub' (ord :: Ordering) (x :: Symbol) (y :: Symbol) (xs :: [Symbol]) (ys :: [Symbol]) :: [Symbol] where
Sub' 'LT x y xs ys = x ': (xs \\ y ': ys)
Sub' 'GT x _ xs ys = (x ': xs) \\ ys
Sub' 'EQ _ _ xs ys = xs \\ ys
| null | https://raw.githubusercontent.com/kcsongor/generic-lens/8e1fc7dcf444332c474fca17110d4bc554db08c8/generic-lens-core/src/Data/Generics/Internal/Families/Collect.hs | haskell | # LANGUAGE DataKinds #
# LANGUAGE PolyKinds #
# LANGUAGE TypeFamilies #
---------------------------------------------------------------------------
|
Module : Data.Generics.Internal.Families.Collect
License : BSD3
Stability : experimental
Portability : non-portable
--------------------------------------------------------------------------- | # LANGUAGE TypeOperators #
# LANGUAGE UndecidableInstances #
Copyright : ( C ) 2020
Maintainer : < >
module Data.Generics.Internal.Families.Collect
( CollectTotalType
, CollectPartialType
, CollectField
, CollectFieldsOrdered
, TypeStat (..)
, type (\\)
) where
import Data.Type.Bool (If)
import Data.Type.Equality (type (==))
import GHC.Generics
import GHC.TypeLits (Symbol, CmpSymbol)
import Data.Generics.Product.Internal.HList (type (++))
import Data.Generics.Internal.Families.Has (GTypes)
data TypeStat
= TypeStat
{ _containsNone :: [Symbol]
, _containsMultiple :: [Symbol]
, _containsOne :: [Symbol]
}
type EmptyStat = 'TypeStat '[] '[] '[]
type family CollectTotalType t f :: TypeStat where
CollectTotalType t (C1 ('MetaCons ctor _ _) f)
= AddToStat ctor (CountType t f) EmptyStat
CollectTotalType t (M1 _ _ r)
= CollectTotalType t r
CollectTotalType t (l :+: r)
= MergeStat (CollectTotalType t l) (CollectTotalType t r)
type family CollectField t f :: TypeStat where
CollectField t (C1 ('MetaCons ctor _ _) f)
= AddToStat ctor (CountField t f) EmptyStat
CollectField t (M1 _ _ r)
= CollectField t r
CollectField t (l :+: r)
= MergeStat (CollectField t l) (CollectField t r)
type family AddToStat (ctor :: Symbol) (count :: Count) (st :: TypeStat) :: TypeStat where
AddToStat ctor 'None ('TypeStat n m o) = 'TypeStat (ctor ': n) m o
AddToStat ctor 'Multiple ('TypeStat n m o) = 'TypeStat n (ctor ': m) o
AddToStat ctor 'One ('TypeStat n m o) = 'TypeStat n m (ctor ': o)
type family MergeStat (st1 :: TypeStat) (st2 :: TypeStat) :: TypeStat where
MergeStat ('TypeStat n m o) ('TypeStat n' m' o') = 'TypeStat (n ++ n') (m ++ m') (o ++ o')
type family CountType t f :: Count where
CountType t (S1 _ (Rec0 t))
= 'One
CountType t (l :*: r)
= CountType t l <|> CountType t r
CountType t _
= 'None
type family CountField (field :: Symbol) f :: Count where
CountField field (S1 ('MetaSel ('Just field) _ _ _) _)
= 'One
CountField field (l :*: r)
= CountField field l <|> CountField field r
CountField _ _
= 'None
type family CollectPartialType t f :: [Symbol] where
CollectPartialType t (l :+: r)
= CollectPartialType t l ++ CollectPartialType t r
CollectPartialType t (C1 ('MetaCons ctor _ _) f)
= If (t == GTypes f) '[ctor] '[]
CollectPartialType t (D1 _ f)
= CollectPartialType t f
data Count
= None
| One
| Multiple
type family (a :: Count) <|> (b :: Count) :: Count where
'None <|> b = b
a <|> 'None = a
a <|> b = 'Multiple
type family (a :: Count) <&> (b :: Count) :: Count where
a <&> a = a
_ <&> _ = 'Multiple
type family CollectFieldsOrdered (r :: * -> *) :: [Symbol] where
CollectFieldsOrdered (l :*: r)
= Merge (CollectFieldsOrdered l) (CollectFieldsOrdered r)
CollectFieldsOrdered (S1 ('MetaSel ('Just name) _ _ _) _)
= '[name]
CollectFieldsOrdered (M1 _ m a)
= CollectFieldsOrdered a
CollectFieldsOrdered _
= '[]
type family Merge (xs :: [Symbol]) (ys :: [Symbol]) :: [Symbol] where
Merge xs '[] = xs
Merge '[] ys = ys
Merge (x ': xs) (y ': ys) = Merge' (CmpSymbol x y) x y xs ys
type family Merge' (ord :: Ordering) (x :: Symbol) (y :: Symbol) (xs :: [Symbol]) (ys :: [Symbol]) :: [Symbol] where
Merge' 'LT x y xs ys = x ': Merge xs (y ': ys)
Merge' _ x y xs ys = y ': Merge (x ': xs) ys
type family (xs :: [Symbol]) \\ (ys :: [Symbol]) :: [Symbol] where
xs \\ '[] = xs
'[] \\ xs = '[]
(x ': xs) \\ (y ': ys) = Sub' (CmpSymbol x y) x y xs ys
infixr 5 \\
type family Sub' (ord :: Ordering) (x :: Symbol) (y :: Symbol) (xs :: [Symbol]) (ys :: [Symbol]) :: [Symbol] where
Sub' 'LT x y xs ys = x ': (xs \\ y ': ys)
Sub' 'GT x _ xs ys = (x ': xs) \\ ys
Sub' 'EQ _ _ xs ys = xs \\ ys
|
5152a20ef71ae34e0cc5c3d44ae80aed185d81a12c73d51034201d81d616949f | andorp/bead | TestData.hs | {-# LANGUAGE OverloadedStrings #-}
module Bead.Persistence.SQL.TestData where
import Bead.Domain.Entities
import Bead.Domain.Relationships
import Bead.Domain.Shared.Evaluation
course = Course "name" "desc" TestScriptSimple
group = Group "name" "desc"
time = read "2014-06-09 12:55:27 UTC"
sbm = Submission (SimpleSubmission "submission") time
sbm2 = Submission (ZippedSubmission "submission2") time
ballot = aspectsFromList [BallotBox]
normal = aspectsFromList []
asg = Assignment "name" "desc" ballot time time binaryConfig
asg2 = Assignment "name2" "desc2" normal time time (percentageConfig 0.1)
ast = Assessment "title" "this is an assessment" time binaryConfig
ast2 = Assessment "title2" "this is an assessment 2" time (percentageConfig 0.1)
user1name = Username "USER1"
user2name = Username "USER2"
user1 = User Student user1name (Email "email2") "name2" (TimeZoneName "UTC") (Language "hu") (Uid "USR01")
user2 = User Student user2name (Email "email2") "name2" (TimeZoneName "UTC") (Language "es") (Uid "USR02")
ev = Evaluation (binaryResult Passed) "written"
ev2 = Evaluation (percentageResult 0.01) "escrito"
cmt = Comment "comment" "User" time CT_Student
scr = Score ()
reg = UserRegistration "username" "email" "name" "token" time
script = TestScript "name" "desc" "notes" "script" TestScriptSimple
script2 = TestScript "name2" "desc2" "notes2" "script2" TestScriptZipped
case1 = TestCase "name" "desc" (SimpleTestCase "blah") "info"
case2 = TestCase "name2" "desc2" (ZippedTestCase "zipped") "info"
fbTestResult = Feedback (TestResult True) time
fbMsgStudent = Feedback (MessageForStudent "student") time
fbMsgForAdmin = Feedback (MessageForAdmin "admin") time
fbEvaluated = Feedback (Evaluated (percentageResult 0.1) "eval" "author") time
| null | https://raw.githubusercontent.com/andorp/bead/280dc9c3d5cfe1b9aac0f2f802c705ae65f02ac2/src/Bead/Persistence/SQL/TestData.hs | haskell | # LANGUAGE OverloadedStrings # | module Bead.Persistence.SQL.TestData where
import Bead.Domain.Entities
import Bead.Domain.Relationships
import Bead.Domain.Shared.Evaluation
course = Course "name" "desc" TestScriptSimple
group = Group "name" "desc"
time = read "2014-06-09 12:55:27 UTC"
sbm = Submission (SimpleSubmission "submission") time
sbm2 = Submission (ZippedSubmission "submission2") time
ballot = aspectsFromList [BallotBox]
normal = aspectsFromList []
asg = Assignment "name" "desc" ballot time time binaryConfig
asg2 = Assignment "name2" "desc2" normal time time (percentageConfig 0.1)
ast = Assessment "title" "this is an assessment" time binaryConfig
ast2 = Assessment "title2" "this is an assessment 2" time (percentageConfig 0.1)
user1name = Username "USER1"
user2name = Username "USER2"
user1 = User Student user1name (Email "email2") "name2" (TimeZoneName "UTC") (Language "hu") (Uid "USR01")
user2 = User Student user2name (Email "email2") "name2" (TimeZoneName "UTC") (Language "es") (Uid "USR02")
ev = Evaluation (binaryResult Passed) "written"
ev2 = Evaluation (percentageResult 0.01) "escrito"
cmt = Comment "comment" "User" time CT_Student
scr = Score ()
reg = UserRegistration "username" "email" "name" "token" time
script = TestScript "name" "desc" "notes" "script" TestScriptSimple
script2 = TestScript "name2" "desc2" "notes2" "script2" TestScriptZipped
case1 = TestCase "name" "desc" (SimpleTestCase "blah") "info"
case2 = TestCase "name2" "desc2" (ZippedTestCase "zipped") "info"
fbTestResult = Feedback (TestResult True) time
fbMsgStudent = Feedback (MessageForStudent "student") time
fbMsgForAdmin = Feedback (MessageForAdmin "admin") time
fbEvaluated = Feedback (Evaluated (percentageResult 0.1) "eval" "author") time
|
f310e0e3c880259f181b77a1dab875ea438016b20a6a40dc4458d9823cf17428 | dyoo/whalesong | module-scoping.rkt | #lang whalesong
(require "module-scoping-helper.rkt")
0
0
x+1
1
1
x++
x
(get-x)
x=0
x
(get-x)
x++
x
(get-x)
x++
x
(get-x)
x+1
x
(get-x)
| null | https://raw.githubusercontent.com/dyoo/whalesong/636e0b4e399e4523136ab45ef4cd1f5a84e88cdc/whalesong/tests/more-tests/module-scoping.rkt | racket | #lang whalesong
(require "module-scoping-helper.rkt")
0
0
x+1
1
1
x++
x
(get-x)
x=0
x
(get-x)
x++
x
(get-x)
x++
x
(get-x)
x+1
x
(get-x)
|
|
6f783bc86c7561c69133536657d9612982363a718b65141bde850aa137a974f5 | ocaml/merlin | parenthesize.ml | module MyList = struct
[@@@ocaml.warning "-65"]
type 'a t =
| (::) of 'a * 'a t
| []
type u = ()
let (mod) = ()
let random = 1
end
let _ = MyList.
| null | https://raw.githubusercontent.com/ocaml/merlin/e576bc75f11323ec8489d2e58a701264f5a7fe0e/tests/test-dirs/completion/parenthesize.t/parenthesize.ml | ocaml | module MyList = struct
[@@@ocaml.warning "-65"]
type 'a t =
| (::) of 'a * 'a t
| []
type u = ()
let (mod) = ()
let random = 1
end
let _ = MyList.
|
|
e197380894908dae46525f52ffee571f28a4bfb10b29c1e65afdf440499964c5 | MyDataFlow/ttalk-server | toppage_handler.erl | %% Feel free to use, reuse and abuse the code in this file.
%% @doc Pastebin handler.
-module(toppage_handler).
%% Standard callbacks.
-export([init/3]).
-export([allowed_methods/2]).
-export([content_types_provided/2]).
-export([content_types_accepted/2]).
-export([resource_exists/2]).
%% Custom callbacks.
-export([create_paste/2]).
-export([paste_html/2]).
-export([paste_text/2]).
init(_Transport, _Req, []) ->
% For the random number generator:
{X, Y, Z} = now(),
random:seed(X, Y, Z),
{upgrade, protocol, cowboy_rest}.
allowed_methods(Req, State) ->
{[<<"GET">>, <<"POST">>], Req, State}.
content_types_provided(Req, State) ->
{[
{{<<"text">>, <<"plain">>, []}, paste_text},
{{<<"text">>, <<"html">>, []}, paste_html}
], Req, State}.
content_types_accepted(Req, State) ->
{[{{<<"application">>, <<"x-www-form-urlencoded">>, []}, create_paste}],
Req, State}.
resource_exists(Req, _State) ->
case cowboy_req:binding(paste_id, Req) of
{undefined, Req2} ->
{true, Req2, index};
{PasteID, Req2} ->
case valid_path(PasteID) and file_exists(PasteID) of
true -> {true, Req2, PasteID};
false -> {false, Req2, PasteID}
end
end.
create_paste(Req, State) ->
PasteID = new_paste_id(),
{ok, [{<<"paste">>, Paste}], Req3} = cowboy_req:body_qs(Req),
ok = file:write_file(full_path(PasteID), Paste),
case cowboy_req:method(Req3) of
{<<"POST">>, Req4} ->
{{true, <<$/, PasteID/binary>>}, Req4, State};
{_, Req4} ->
{true, Req4, State}
end.
paste_html(Req, index) ->
{read_file("index.html"), Req, index};
paste_html(Req, Paste) ->
{Style, Req2} = cowboy_req:qs_val(<<"lang">>, Req, plain),
{format_html(Paste, Style), Req2, Paste}.
paste_text(Req, index) ->
{read_file("index.txt"), Req, index};
paste_text(Req, Paste) ->
{Style, Req2} = cowboy_req:qs_val(<<"lang">>, Req, plain),
{format_text(Paste, Style), Req2, Paste}.
% Private
read_file(Name) ->
{ok, Binary} = file:read_file(full_path(Name)),
Binary.
full_path(Name) ->
filename:join([code:priv_dir(rest_pastebin), Name]).
file_exists(Name) ->
case file:read_file_info(full_path(Name)) of
{ok, _Info} -> true;
{error, _Reason} -> false
end.
valid_path(<<>>) -> true;
valid_path(<<$., _T/binary>>) -> false;
valid_path(<<$/, _T/binary>>) -> false;
valid_path(<<_Char, T/binary>>) -> valid_path(T).
new_paste_id() ->
Initial = random:uniform(62) - 1,
new_paste_id(<<Initial>>, 7).
new_paste_id(Bin, 0) ->
Chars = <<"abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ1234567890">>,
<< <<(binary_part(Chars, B, 1))/binary>> || <<B>> <= Bin >>;
new_paste_id(Bin, Rem) ->
Next = random:uniform(62) - 1,
new_paste_id(<<Bin/binary, Next>>, Rem - 1).
format_html(Paste, plain) ->
Text = escape_html_chars(read_file(Paste)),
<<"<!DOCTYPE html><html>",
"<head><title>paste</title></head>",
"<body><pre><code>", Text/binary, "</code></pre></body></html>\n">>;
format_html(Paste, Lang) ->
highlight(full_path(Paste), Lang, "html").
format_text(Paste, plain) ->
read_file(Paste);
format_text(Paste, Lang) ->
highlight(full_path(Paste), Lang, "ansi").
highlight(Path, Lang, Type) ->
Path1 = binary_to_list(Path),
Lang1 = binary_to_list(Lang),
os:cmd(["highlight --syntax=", Lang1,
" --doc-title=paste ",
" --out-format=", Type,
" --include-style ", Path1]).
% Escape some HTML characters that might make a fuss
escape_html_chars(Bin) ->
<< <<(escape_html_char(B))/binary>> || <<B>> <= Bin >>.
escape_html_char($<) -> <<"<">>;
escape_html_char($>) -> <<">">>;
escape_html_char($&) -> <<"&">>;
escape_html_char(C) -> <<C>>.
| null | https://raw.githubusercontent.com/MyDataFlow/ttalk-server/07a60d5d74cd86aedd1f19c922d9d3abf2ebf28d/deps/cowboy/examples/rest_pastebin/src/toppage_handler.erl | erlang | Feel free to use, reuse and abuse the code in this file.
@doc Pastebin handler.
Standard callbacks.
Custom callbacks.
For the random number generator:
Private
Escape some HTML characters that might make a fuss |
-module(toppage_handler).
-export([init/3]).
-export([allowed_methods/2]).
-export([content_types_provided/2]).
-export([content_types_accepted/2]).
-export([resource_exists/2]).
-export([create_paste/2]).
-export([paste_html/2]).
-export([paste_text/2]).
init(_Transport, _Req, []) ->
{X, Y, Z} = now(),
random:seed(X, Y, Z),
{upgrade, protocol, cowboy_rest}.
allowed_methods(Req, State) ->
{[<<"GET">>, <<"POST">>], Req, State}.
content_types_provided(Req, State) ->
{[
{{<<"text">>, <<"plain">>, []}, paste_text},
{{<<"text">>, <<"html">>, []}, paste_html}
], Req, State}.
content_types_accepted(Req, State) ->
{[{{<<"application">>, <<"x-www-form-urlencoded">>, []}, create_paste}],
Req, State}.
resource_exists(Req, _State) ->
case cowboy_req:binding(paste_id, Req) of
{undefined, Req2} ->
{true, Req2, index};
{PasteID, Req2} ->
case valid_path(PasteID) and file_exists(PasteID) of
true -> {true, Req2, PasteID};
false -> {false, Req2, PasteID}
end
end.
create_paste(Req, State) ->
PasteID = new_paste_id(),
{ok, [{<<"paste">>, Paste}], Req3} = cowboy_req:body_qs(Req),
ok = file:write_file(full_path(PasteID), Paste),
case cowboy_req:method(Req3) of
{<<"POST">>, Req4} ->
{{true, <<$/, PasteID/binary>>}, Req4, State};
{_, Req4} ->
{true, Req4, State}
end.
paste_html(Req, index) ->
{read_file("index.html"), Req, index};
paste_html(Req, Paste) ->
{Style, Req2} = cowboy_req:qs_val(<<"lang">>, Req, plain),
{format_html(Paste, Style), Req2, Paste}.
paste_text(Req, index) ->
{read_file("index.txt"), Req, index};
paste_text(Req, Paste) ->
{Style, Req2} = cowboy_req:qs_val(<<"lang">>, Req, plain),
{format_text(Paste, Style), Req2, Paste}.
read_file(Name) ->
{ok, Binary} = file:read_file(full_path(Name)),
Binary.
full_path(Name) ->
filename:join([code:priv_dir(rest_pastebin), Name]).
file_exists(Name) ->
case file:read_file_info(full_path(Name)) of
{ok, _Info} -> true;
{error, _Reason} -> false
end.
valid_path(<<>>) -> true;
valid_path(<<$., _T/binary>>) -> false;
valid_path(<<$/, _T/binary>>) -> false;
valid_path(<<_Char, T/binary>>) -> valid_path(T).
new_paste_id() ->
Initial = random:uniform(62) - 1,
new_paste_id(<<Initial>>, 7).
new_paste_id(Bin, 0) ->
Chars = <<"abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ1234567890">>,
<< <<(binary_part(Chars, B, 1))/binary>> || <<B>> <= Bin >>;
new_paste_id(Bin, Rem) ->
Next = random:uniform(62) - 1,
new_paste_id(<<Bin/binary, Next>>, Rem - 1).
format_html(Paste, plain) ->
Text = escape_html_chars(read_file(Paste)),
<<"<!DOCTYPE html><html>",
"<head><title>paste</title></head>",
"<body><pre><code>", Text/binary, "</code></pre></body></html>\n">>;
format_html(Paste, Lang) ->
highlight(full_path(Paste), Lang, "html").
format_text(Paste, plain) ->
read_file(Paste);
format_text(Paste, Lang) ->
highlight(full_path(Paste), Lang, "ansi").
highlight(Path, Lang, Type) ->
Path1 = binary_to_list(Path),
Lang1 = binary_to_list(Lang),
os:cmd(["highlight --syntax=", Lang1,
" --doc-title=paste ",
" --out-format=", Type,
" --include-style ", Path1]).
escape_html_chars(Bin) ->
<< <<(escape_html_char(B))/binary>> || <<B>> <= Bin >>.
escape_html_char($<) -> <<"<">>;
escape_html_char($>) -> <<">">>;
escape_html_char($&) -> <<"&">>;
escape_html_char(C) -> <<C>>.
|
5e140ed7f4df0b1f369ab6f4f1316001cdcecf4f7725ee0fdc89c31c1535ff7e | vikram/lisplibraries | swank-source-file-cache.lisp | ;;;; Source-file cache
;;;
To robustly find source locations in CMUCL and SBCL it 's useful to
;;; have the exact source code that the loaded code was compiled from.
;;; In this source we can accurately find the right location, and from
;;; that location we can extract a "snippet" of code to show what the
definition looks like . Emacs can use this snippet in a best - match
;;; search to locate the right definition, which works well even if
;;; the buffer has been modified.
;;;
;;; The idea is that if a definition previously started with
;;; `(define-foo bar' then it probably still does.
;;;
;;; Whenever we see that the file on disk has the same
;;; `file-write-date' as a location we're looking for we cache the
;;; whole file inside Lisp. That way we will still have the matching
;;; version even if the file is later modified on disk. If the file is
;;; later recompiled and reloaded then we replace our cache entry.
;;;
;;; This code has been placed in the Public Domain. All warranties
;;; are disclaimed.
(in-package :swank-backend)
(defvar *cache-sourcecode* t
"When true complete source files are cached.
The cache is used to keep known good copies of the source text which
correspond to the loaded code. Finding definitions is much more
reliable when the exact source is available, so we cache it in case it
gets edited on disk later.")
(defvar *source-file-cache* (make-hash-table :test 'equal)
"Cache of source file contents.
Maps from truename to source-cache-entry structure.")
(defstruct (source-cache-entry
(:conc-name source-cache-entry.)
(:constructor make-source-cache-entry (text date)))
text date)
(defimplementation buffer-first-change (filename)
"Load a file into the cache when the user modifies its buffer.
This is a win if the user then saves the file and tries to M-. into it."
(unless (source-cached-p filename)
(ignore-errors
(source-cache-get filename (file-write-date filename))))
nil)
(defun get-source-code (filename code-date)
"Return the source code for FILENAME as written on DATE in a string.
If the exact version cannot be found then return the current one from disk."
(or (source-cache-get filename code-date)
(read-file filename)))
(defun source-cache-get (filename date)
"Return the source code for FILENAME as written on DATE in a string.
Return NIL if the right version cannot be found."
(when *cache-sourcecode*
(let ((entry (gethash filename *source-file-cache*)))
(cond ((and entry (equal date (source-cache-entry.date entry)))
Cache hit .
(source-cache-entry.text entry))
((or (null entry)
(not (equal date (source-cache-entry.date entry))))
;; Cache miss.
(if (equal (file-write-date filename) date)
;; File on disk has the correct version.
(let ((source (read-file filename)))
(setf (gethash filename *source-file-cache*)
(make-source-cache-entry source date))
source)
nil))))))
(defun source-cached-p (filename)
"Is any version of FILENAME in the source cache?"
(if (gethash filename *source-file-cache*) t))
(defun read-file (filename)
"Return the entire contents of FILENAME as a string."
(with-open-file (s filename :direction :input
:external-format (or (guess-external-format filename)
(find-external-format "latin-1")
:default))
(let ((string (make-string (file-length s))))
(read-sequence string s)
string)))
;;;; Snippets
(defvar *source-snippet-size* 256
"Maximum number of characters in a snippet of source code.
Snippets at the beginning of definitions are used to tell Emacs what
the definitions looks like, so that it can accurately find them by
text search.")
(defun read-snippet (stream &optional position)
"Read a string of upto *SOURCE-SNIPPET-SIZE* characters from STREAM.
If POSITION is given, set the STREAM's file position first."
(when position
(file-position stream position))
#+sbcl (skip-comments-and-whitespace stream)
(read-upto-n-chars stream *source-snippet-size*))
(defun read-snippet-from-string (string &optional position)
(with-input-from-string (s string)
(read-snippet s position)))
(defun skip-comments-and-whitespace (stream)
(case (peek-char nil stream)
((#\Space #\Tab #\Newline #\Linefeed #\Page)
(read-char stream)
(skip-comments-and-whitespace stream))
(#\;
(read-line stream)
(skip-comments-and-whitespace stream))))
(defun read-upto-n-chars (stream n)
"Return a string of upto N chars from STREAM."
(let* ((string (make-string n))
(chars (read-sequence string stream)))
(subseq string 0 chars)))
| null | https://raw.githubusercontent.com/vikram/lisplibraries/105e3ef2d165275eb78f36f5090c9e2cdd0754dd/site/slime/swank-source-file-cache.lisp | lisp | Source-file cache
have the exact source code that the loaded code was compiled from.
In this source we can accurately find the right location, and from
that location we can extract a "snippet" of code to show what the
search to locate the right definition, which works well even if
the buffer has been modified.
The idea is that if a definition previously started with
`(define-foo bar' then it probably still does.
Whenever we see that the file on disk has the same
`file-write-date' as a location we're looking for we cache the
whole file inside Lisp. That way we will still have the matching
version even if the file is later modified on disk. If the file is
later recompiled and reloaded then we replace our cache entry.
This code has been placed in the Public Domain. All warranties
are disclaimed.
Cache miss.
File on disk has the correct version.
Snippets
| To robustly find source locations in CMUCL and SBCL it 's useful to
definition looks like . Emacs can use this snippet in a best - match
(in-package :swank-backend)
(defvar *cache-sourcecode* t
"When true complete source files are cached.
The cache is used to keep known good copies of the source text which
correspond to the loaded code. Finding definitions is much more
reliable when the exact source is available, so we cache it in case it
gets edited on disk later.")
(defvar *source-file-cache* (make-hash-table :test 'equal)
"Cache of source file contents.
Maps from truename to source-cache-entry structure.")
(defstruct (source-cache-entry
(:conc-name source-cache-entry.)
(:constructor make-source-cache-entry (text date)))
text date)
(defimplementation buffer-first-change (filename)
"Load a file into the cache when the user modifies its buffer.
This is a win if the user then saves the file and tries to M-. into it."
(unless (source-cached-p filename)
(ignore-errors
(source-cache-get filename (file-write-date filename))))
nil)
(defun get-source-code (filename code-date)
"Return the source code for FILENAME as written on DATE in a string.
If the exact version cannot be found then return the current one from disk."
(or (source-cache-get filename code-date)
(read-file filename)))
(defun source-cache-get (filename date)
"Return the source code for FILENAME as written on DATE in a string.
Return NIL if the right version cannot be found."
(when *cache-sourcecode*
(let ((entry (gethash filename *source-file-cache*)))
(cond ((and entry (equal date (source-cache-entry.date entry)))
Cache hit .
(source-cache-entry.text entry))
((or (null entry)
(not (equal date (source-cache-entry.date entry))))
(if (equal (file-write-date filename) date)
(let ((source (read-file filename)))
(setf (gethash filename *source-file-cache*)
(make-source-cache-entry source date))
source)
nil))))))
(defun source-cached-p (filename)
"Is any version of FILENAME in the source cache?"
(if (gethash filename *source-file-cache*) t))
(defun read-file (filename)
"Return the entire contents of FILENAME as a string."
(with-open-file (s filename :direction :input
:external-format (or (guess-external-format filename)
(find-external-format "latin-1")
:default))
(let ((string (make-string (file-length s))))
(read-sequence string s)
string)))
(defvar *source-snippet-size* 256
"Maximum number of characters in a snippet of source code.
Snippets at the beginning of definitions are used to tell Emacs what
the definitions looks like, so that it can accurately find them by
text search.")
(defun read-snippet (stream &optional position)
"Read a string of upto *SOURCE-SNIPPET-SIZE* characters from STREAM.
If POSITION is given, set the STREAM's file position first."
(when position
(file-position stream position))
#+sbcl (skip-comments-and-whitespace stream)
(read-upto-n-chars stream *source-snippet-size*))
(defun read-snippet-from-string (string &optional position)
(with-input-from-string (s string)
(read-snippet s position)))
(defun skip-comments-and-whitespace (stream)
(case (peek-char nil stream)
((#\Space #\Tab #\Newline #\Linefeed #\Page)
(read-char stream)
(skip-comments-and-whitespace stream))
(read-line stream)
(skip-comments-and-whitespace stream))))
(defun read-upto-n-chars (stream n)
"Return a string of upto N chars from STREAM."
(let* ((string (make-string n))
(chars (read-sequence string stream)))
(subseq string 0 chars)))
|
bf3629a2127083cc9f78bd340a4636c6fa38d9526a014f2655695cff7953ed85 | HealthSamurai/igpop | loader_test.clj | (ns igpop.loader-test
(:require [igpop.loader :as sut]
[clojure.test :refer :all]
[clojure.java.io :as io]
[matcho.core :as matcho]))
(def project-path (.getPath (io/resource "test-project")))
(deftest read-yaml-test
(let [result (sut/read-yaml (io/file project-path "ig.yaml"))]
(is (map? result))
(is (= "Test project" (:description result)))))
(deftest get-inlined-valuesets-test
(let [inlined-valueset {:id "inlined-valueset"
:concepts [{:code "test1", :display "Test1"}
{:code "test2", :display "Test2"}]}
test-profile {:basic
{:elements
{:id {:description "Test id", :type "string"}
:gender {:description "male | female | other | unknown"
:type "code"
:valueset {:id "fhir:administrative-genders"}}
:testElement {:description "Element with inlined valueset"
:type "code"
:valueset inlined-valueset}}}}
profiles {:TestProfile test-profile}
result (sut/get-inlined-valuesets {:profiles profiles, :valuesets {}})]
(is (contains? result :valuesets) "Result should contain valuesets.")
(is (not-empty (:valuesets result)) "Extracted valueset should not be empty.")
(is (contains? (:valuesets result) :inlined-valueset)
"Inlined valueset should be found among valuesets.")
(is (= (dissoc inlined-valueset :id)
(get-in result [:valuesets :inlined-valueset]))
"Extracted valueset should not contain its id.")
(is (not (contains? (:valuesets result) :fhir:administrative-genders))
"Non-inlined valueset should not be added to valuesets.")))
(deftest test-loader
(testing "parse-name"
(matcho/match
(sut/parse-name "Patient.yaml")
{:to [:source :Patient :basic]
:format :yaml})
;; (matcho/match
( sut / parse - name " pr.Patient.example.pt1.yaml " )
;; {:to [:source :Patient :basic :example :pt1]
;; :format :yaml})
(matcho/match
(sut/parse-name "Patient" "lab.yaml")
{:to [:source :Patient :lab]
:format :yaml})
(matcho/match
(sut/parse-name "vs.dic1.yaml")
{:to [:valuesets :dic1]
:format :yaml})
(matcho/match
(sut/parse-name "vs.dic1.csv")
{:to [:valuesets :dic1 :concepts]
:format :csv})
(matcho/match
(sut/parse-name "cs.system1.yaml")
{:to [:codesystems :system1]
:format :yaml})
(matcho/match
(sut/parse-name "cs.system2.csv")
{:to [:codesystems :system2 :concepts]
:format :csv}))
(def project (sut/load-project project-path))
( println ( sut / build - profiles { } " resources " ) )
(io/file project-path "ig.yaml")
(io/file project-path "igpop-fhir-4.0.0")
(comment (matcho/match
(:base project)
nil))
(matcho/match
(:source project)
{:Patient {:basic {:elements {}}}})
(matcho/match
(:Patient (:profiles project))
{:lab-report {}
:basic {}})
(second (get-in project [:profiles :Patient :basic :elements]))
(get-in project [:source :Patient :basic :description])
(get-in project [:profiles :Patient :basic :elements :gender :valueset :id])
(keys project)
(keys project)
(matcho/match
(get-in project [:valuesets :dict1])
{:concepts [{:code "male" :display "Male"}]})
(is (not (nil? (get-in project [:docs :pages :welcome]))))
(is (not (nil? (get-in project [:docs :menu]))))
(get-in project [:docs :pages]))
| null | https://raw.githubusercontent.com/HealthSamurai/igpop/d4e6f6100b78c20a1706add125e90ff7d912e3d4/test/igpop/loader_test.clj | clojure | (matcho/match
{:to [:source :Patient :basic :example :pt1]
:format :yaml}) | (ns igpop.loader-test
(:require [igpop.loader :as sut]
[clojure.test :refer :all]
[clojure.java.io :as io]
[matcho.core :as matcho]))
(def project-path (.getPath (io/resource "test-project")))
(deftest read-yaml-test
(let [result (sut/read-yaml (io/file project-path "ig.yaml"))]
(is (map? result))
(is (= "Test project" (:description result)))))
(deftest get-inlined-valuesets-test
(let [inlined-valueset {:id "inlined-valueset"
:concepts [{:code "test1", :display "Test1"}
{:code "test2", :display "Test2"}]}
test-profile {:basic
{:elements
{:id {:description "Test id", :type "string"}
:gender {:description "male | female | other | unknown"
:type "code"
:valueset {:id "fhir:administrative-genders"}}
:testElement {:description "Element with inlined valueset"
:type "code"
:valueset inlined-valueset}}}}
profiles {:TestProfile test-profile}
result (sut/get-inlined-valuesets {:profiles profiles, :valuesets {}})]
(is (contains? result :valuesets) "Result should contain valuesets.")
(is (not-empty (:valuesets result)) "Extracted valueset should not be empty.")
(is (contains? (:valuesets result) :inlined-valueset)
"Inlined valueset should be found among valuesets.")
(is (= (dissoc inlined-valueset :id)
(get-in result [:valuesets :inlined-valueset]))
"Extracted valueset should not contain its id.")
(is (not (contains? (:valuesets result) :fhir:administrative-genders))
"Non-inlined valueset should not be added to valuesets.")))
(deftest test-loader
(testing "parse-name"
(matcho/match
(sut/parse-name "Patient.yaml")
{:to [:source :Patient :basic]
:format :yaml})
( sut / parse - name " pr.Patient.example.pt1.yaml " )
(matcho/match
(sut/parse-name "Patient" "lab.yaml")
{:to [:source :Patient :lab]
:format :yaml})
(matcho/match
(sut/parse-name "vs.dic1.yaml")
{:to [:valuesets :dic1]
:format :yaml})
(matcho/match
(sut/parse-name "vs.dic1.csv")
{:to [:valuesets :dic1 :concepts]
:format :csv})
(matcho/match
(sut/parse-name "cs.system1.yaml")
{:to [:codesystems :system1]
:format :yaml})
(matcho/match
(sut/parse-name "cs.system2.csv")
{:to [:codesystems :system2 :concepts]
:format :csv}))
(def project (sut/load-project project-path))
( println ( sut / build - profiles { } " resources " ) )
(io/file project-path "ig.yaml")
(io/file project-path "igpop-fhir-4.0.0")
(comment (matcho/match
(:base project)
nil))
(matcho/match
(:source project)
{:Patient {:basic {:elements {}}}})
(matcho/match
(:Patient (:profiles project))
{:lab-report {}
:basic {}})
(second (get-in project [:profiles :Patient :basic :elements]))
(get-in project [:source :Patient :basic :description])
(get-in project [:profiles :Patient :basic :elements :gender :valueset :id])
(keys project)
(keys project)
(matcho/match
(get-in project [:valuesets :dict1])
{:concepts [{:code "male" :display "Male"}]})
(is (not (nil? (get-in project [:docs :pages :welcome]))))
(is (not (nil? (get-in project [:docs :menu]))))
(get-in project [:docs :pages]))
|
b4b38a4a7f96b76bafefab6de27667f33d976b54b4200b7c6f043c54a8826b49 | larcenists/larceny | srfi-126-test.body.scm | ;;; This doesn't test weakness, external representation, and quasiquote.
(test-begin "SRFI-126")
(test-group "constructors & inspection"
(test-group "eq"
(let ((tables (list (make-eq-hashtable)
(make-eq-hashtable 10)
(make-eq-hashtable #f #f)
(make-hashtable #f eq?)
(alist->eq-hashtable '((a . b) (c . d)))
(alist->eq-hashtable 10 '((a . b) (c . d)))
(alist->eq-hashtable #f #f '((a . b) (c . d))))))
(do ((tables tables (cdr tables))
(i 0 (+ i 1)))
((null? tables))
(let ((table (car tables))
(label (number->string i)))
(test-assert label (hashtable? table))
(test-eq label #f (hashtable-hash-function table))
(test-eq label eq? (hashtable-equivalence-function table))
(test-eq label #f (hashtable-weakness table))
(test-assert label (hashtable-mutable? table))))))
(test-group "eqv"
(let ((tables (list (make-eqv-hashtable)
(make-eqv-hashtable 10)
(make-eqv-hashtable #f #f)
(make-hashtable #f eqv?)
(alist->eqv-hashtable '((a . b) (c . d)))
(alist->eqv-hashtable 10 '((a . b) (c . d)))
(alist->eqv-hashtable #f #f '((a . b) (c . d))))))
(do ((tables tables (cdr tables))
(i 0 (+ i 1)))
((null? tables))
(let ((table (car tables))
(label (number->string i)))
(test-assert label (hashtable? table))
(test-eq label #f (hashtable-hash-function table))
(test-eq label eqv? (hashtable-equivalence-function table))
(test-eq label #f (hashtable-weakness table))
(test-assert label (hashtable-mutable? table))))))
(test-group "equal"
(let ((tables (list (make-hashtable equal-hash equal?)
(make-hashtable equal-hash equal? 10)
(make-hashtable equal-hash equal? #f #f)
(alist->hashtable equal-hash equal?
'((a . b) (c . d)))
(alist->hashtable equal-hash equal? 10
'((a . b) (c . d)))
(alist->hashtable equal-hash equal? #f #f
'((a . b) (c . d))))))
(do ((tables tables (cdr tables))
(i 0 (+ i 1)))
((null? tables))
(let ((table (car tables))
(label (number->string i)))
(test-assert label (hashtable? table))
(test-eq label equal-hash (hashtable-hash-function table))
(test-eq label equal? (hashtable-equivalence-function table))
(test-eq label #f (hashtable-weakness table))
(test-assert label (hashtable-mutable? table))))
(let ((table (make-hashtable (cons equal-hash equal-hash) equal?)))
(let ((hash (hashtable-hash-function table)))
(test-assert (or (eq? equal-hash hash)
(and (eq? equal-hash (car hash))
(eq? equal-hash (cdr hash))))))))))
(test-group "procedures"
(test-group "basics"
(let ((table (make-eq-hashtable)))
(test-group "ref"
(test-error (hashtable-ref table 'a))
(test-eq 'b (hashtable-ref table 'a 'b))
(test-assert (not (hashtable-contains? table 'a)))
(test-eqv 0 (hashtable-size table)))
(test-group "set"
(hashtable-set! table 'a 'c)
(test-eq 'c (hashtable-ref table 'a))
(test-eq 'c (hashtable-ref table 'a 'b))
(test-assert (hashtable-contains? table 'a))
(test-eqv 1 (hashtable-size table)))
(test-group "delete"
(hashtable-delete! table 'a)
(test-error (hashtable-ref table 'a))
(test-eq 'b (hashtable-ref table 'a 'b))
(test-assert (not (hashtable-contains? table 'a)))
(test-eqv 0 (hashtable-size table)))))
(test-group "advanced"
(let ((table (make-eq-hashtable)))
(test-group "lookup"
(let-values (((x found?) (hashtable-lookup table 'a)))
(test-assert (not found?))))
(test-group "update"
(test-error (hashtable-update! table 'a (lambda (x) (+ x 1))))
(hashtable-update! table 'a (lambda (x) (+ x 1)) 0)
(let-values (((x found?) (hashtable-lookup table 'a)))
(test-eqv 1 x)
(test-assert found?))
(hashtable-update! table 'a (lambda (x) (+ x 1)))
(let-values (((x found?) (hashtable-lookup table 'a)))
(test-eqv x 2)
(test-assert found?))
(hashtable-update! table 'a (lambda (x) (+ x 1)) 0)
(let-values (((x found?) (hashtable-lookup table 'a)))
(test-eqv x 3)
(test-assert found?)))
(test-group "intern"
(test-eqv 0 (hashtable-intern! table 'b (lambda () 0)))
(test-eqv 0 (hashtable-intern! table 'b (lambda () 1))))))
(test-group "copy/clear"
(let ((table (alist->hashtable equal-hash equal? '((a . b)))))
(test-group "copy"
(let ((table2 (hashtable-copy table)))
(test-eq equal-hash (hashtable-hash-function table2))
(test-eq equal? (hashtable-equivalence-function table2))
(test-eq 'b (hashtable-ref table2 'a))
(test-error (hashtable-set! table2 'a 'c)))
(let ((table2 (hashtable-copy table #f)))
(test-eq equal-hash (hashtable-hash-function table2))
(test-eq equal? (hashtable-equivalence-function table2))
(test-eq 'b (hashtable-ref table2 'a))
(test-error (hashtable-set! table2 'a 'c)))
(let ((table2 (hashtable-copy table #t)))
(test-eq equal-hash (hashtable-hash-function table2))
(test-eq equal? (hashtable-equivalence-function table2))
(test-eq 'b (hashtable-ref table2 'a))
(hashtable-set! table2 'a 'c)
(test-eq 'c (hashtable-ref table2 'a)))
(let ((table2 (hashtable-copy table #f #f)))
(test-eq equal-hash (hashtable-hash-function table2))
(test-eq equal? (hashtable-equivalence-function table2))
(test-eq #f (hashtable-weakness table2))))
(test-group "clear"
(let ((table2 (hashtable-copy table #t)))
(hashtable-clear! table2)
(test-eqv 0 (hashtable-size table2)))
(let ((table2 (hashtable-copy table #t)))
(hashtable-clear! table2 10)
(test-eqv 0 (hashtable-size table2))))
(test-group "empty-copy"
(let ((table2 (hashtable-empty-copy table)))
(test-eq equal-hash (hashtable-hash-function table2))
(test-eq equal? (hashtable-equivalence-function table2))
(test-eqv 0 (hashtable-size table2)))
(let ((table2 (hashtable-empty-copy table 10)))
(test-eq equal-hash (hashtable-hash-function table2))
(test-eq equal? (hashtable-equivalence-function table2))
(test-eqv 0 (hashtable-size table2))))))
(test-group "keys/values"
(let ((table (alist->eq-hashtable '((a . b) (c . d)))))
(test-assert (lset= eq? '(a c) (vector->list (hashtable-keys table))))
(test-assert (lset= eq? '(b d) (vector->list (hashtable-values table))))
(let-values (((keys values) (hashtable-entries table)))
(test-assert (lset= eq? '(a c) (vector->list keys)))
(test-assert (lset= eq? '(b d) (vector->list values))))
(test-assert (lset= eq? '(a c) (hashtable-key-list table)))
(test-assert (lset= eq? '(b d) (hashtable-value-list table)))
(let-values (((keys values) (hashtable-entry-lists table)))
(test-assert (lset= eq? '(a c) keys))
(test-assert (lset= eq? '(b d) values)))))
(test-group "iteration"
(test-group "walk"
(let ((keys '())
(values '()))
(hashtable-walk (alist->eq-hashtable '((a . b) (c . d)))
(lambda (k v)
(set! keys (cons k keys))
(set! values (cons v values))))
(test-assert (lset= eq? '(a c) keys))
(test-assert (lset= eq? '(b d) values))))
(test-group "update-all"
(let ((table (alist->eq-hashtable '((a . b) (c . d)))))
(hashtable-update-all! table
(lambda (k v)
(string->symbol (string-append (symbol->string v) "x"))))
(test-assert (lset= eq? '(a c) (hashtable-key-list table)))
(test-assert (lset= eq? '(bx dx) (hashtable-value-list table)))))
(test-group "prune"
(let ((table (alist->eq-hashtable '((a . b) (c . d)))))
(hashtable-prune! table (lambda (k v) (eq? k 'a)))
(test-assert (not (hashtable-contains? table 'a)))
(test-assert (hashtable-contains? table 'c))))
(test-group "merge"
(let ((table (alist->eq-hashtable '((a . b) (c . d))))
(table2 (alist->eq-hashtable '((a . x) (e . f)))))
(hashtable-merge! table table2)
(test-assert (lset= eq? '(a c e) (hashtable-key-list table)))
(test-assert (lset= eq? '(x d f) (hashtable-value-list table)))))
(test-group "sum"
(let ((table (alist->eq-hashtable '((a . b) (c . d)))))
(test-assert (lset= eq? '(a b c d)
(hashtable-sum table '()
(lambda (k v acc)
(lset-adjoin eq? acc k v)))))))
(test-group "map->lset"
(let ((table (alist->eq-hashtable '((a . b) (c . d)))))
(test-assert (lset= equal? '((a . b) (c . d))
(hashtable-map->lset table cons)))))
(test-group "find"
(let ((table (alist->eq-hashtable '((a . b) (c . d)))))
(let-values (((k v f?) (hashtable-find table
(lambda (k v)
(eq? k 'a)))))
(test-assert (and f? (eq? k 'a) (eq? v 'b))))
(let-values (((k v f?) (hashtable-find table (lambda (k v) #f))))
(test-assert (not f?)))))
(test-group "misc"
(test-group "empty?"
(test-assert (hashtable-empty? (alist->eq-hashtable '())))
(test-assert (not (hashtable-empty? (alist->eq-hashtable '((a . b)))))))
(test-group "pop!"
(test-error (hashtable-pop! (make-eq-hashtable)))
(let ((table (alist->eq-hashtable '((a . b)))))
(let-values (((k v) (hashtable-pop! table)))
(test-eq 'a k)
(test-eq 'b v)
(test-assert (hashtable-empty? table)))))
(test-group "inc!"
(let ((table (alist->eq-hashtable '((a . 0)))))
(hashtable-inc! table 'a)
(test-eqv 1 (hashtable-ref table 'a))
(hashtable-inc! table 'a 2)
(test-eqv 3 (hashtable-ref table 'a))))
(test-group "dec!"
(let ((table (alist->eq-hashtable '((a . 0)))))
(hashtable-dec! table 'a)
(test-eqv -1 (hashtable-ref table 'a))
(hashtable-dec! table 'a 2)
(test-eqv -3 (hashtable-ref table 'a)))))))
(test-group "hashing"
(test-assert (and (exact-integer? (hash-salt))))
(test-assert (not (negative? (hash-salt))))
(test-assert (= (equal-hash (list "foo" 'bar 42))
(equal-hash (list "foo" 'bar 42))))
(test-assert (= (string-hash (string-copy "foo"))
(string-hash (string-copy "foo"))))
(test-assert (= (string-ci-hash (string-copy "foo"))
(string-ci-hash (string-copy "FOO"))))
(test-assert (= (symbol-hash (string->symbol "foo"))
(symbol-hash (string->symbol "foo")))))
(test-end "SRFI-126")
The following note is commented out for Larceny .
#;
(display
(string-append
"\n"
"NOTE: On implementations using the (r6rs hashtables) library from Larceny,\n"
" 14 tests are expected to fail in relation to make-eq-hashtable and\n"
" make-eqv-hashtable returning hashtables whose hash functions are\n"
" exposed instead of being #f. We have no obvious way to detect this\n"
" within this portable test suite, hence no XFAIL results.\n"))
The following is added for Larceny .
(system "fgrep \"result-kind:\" SRFI-126.log")
(display "Done.\n")
;; Local Variables:
eval : ( put ( quote test - group ) ( quote scheme - indent - function ) 1 )
;; End:
| null | https://raw.githubusercontent.com/larcenists/larceny/fef550c7d3923deb7a5a1ccd5a628e54cf231c75/lib/SRFI/test/srfi-126-test.body.scm | scheme | This doesn't test weakness, external representation, and quasiquote.
Local Variables:
End: |
(test-begin "SRFI-126")
(test-group "constructors & inspection"
(test-group "eq"
(let ((tables (list (make-eq-hashtable)
(make-eq-hashtable 10)
(make-eq-hashtable #f #f)
(make-hashtable #f eq?)
(alist->eq-hashtable '((a . b) (c . d)))
(alist->eq-hashtable 10 '((a . b) (c . d)))
(alist->eq-hashtable #f #f '((a . b) (c . d))))))
(do ((tables tables (cdr tables))
(i 0 (+ i 1)))
((null? tables))
(let ((table (car tables))
(label (number->string i)))
(test-assert label (hashtable? table))
(test-eq label #f (hashtable-hash-function table))
(test-eq label eq? (hashtable-equivalence-function table))
(test-eq label #f (hashtable-weakness table))
(test-assert label (hashtable-mutable? table))))))
(test-group "eqv"
(let ((tables (list (make-eqv-hashtable)
(make-eqv-hashtable 10)
(make-eqv-hashtable #f #f)
(make-hashtable #f eqv?)
(alist->eqv-hashtable '((a . b) (c . d)))
(alist->eqv-hashtable 10 '((a . b) (c . d)))
(alist->eqv-hashtable #f #f '((a . b) (c . d))))))
(do ((tables tables (cdr tables))
(i 0 (+ i 1)))
((null? tables))
(let ((table (car tables))
(label (number->string i)))
(test-assert label (hashtable? table))
(test-eq label #f (hashtable-hash-function table))
(test-eq label eqv? (hashtable-equivalence-function table))
(test-eq label #f (hashtable-weakness table))
(test-assert label (hashtable-mutable? table))))))
(test-group "equal"
(let ((tables (list (make-hashtable equal-hash equal?)
(make-hashtable equal-hash equal? 10)
(make-hashtable equal-hash equal? #f #f)
(alist->hashtable equal-hash equal?
'((a . b) (c . d)))
(alist->hashtable equal-hash equal? 10
'((a . b) (c . d)))
(alist->hashtable equal-hash equal? #f #f
'((a . b) (c . d))))))
(do ((tables tables (cdr tables))
(i 0 (+ i 1)))
((null? tables))
(let ((table (car tables))
(label (number->string i)))
(test-assert label (hashtable? table))
(test-eq label equal-hash (hashtable-hash-function table))
(test-eq label equal? (hashtable-equivalence-function table))
(test-eq label #f (hashtable-weakness table))
(test-assert label (hashtable-mutable? table))))
(let ((table (make-hashtable (cons equal-hash equal-hash) equal?)))
(let ((hash (hashtable-hash-function table)))
(test-assert (or (eq? equal-hash hash)
(and (eq? equal-hash (car hash))
(eq? equal-hash (cdr hash))))))))))
(test-group "procedures"
(test-group "basics"
(let ((table (make-eq-hashtable)))
(test-group "ref"
(test-error (hashtable-ref table 'a))
(test-eq 'b (hashtable-ref table 'a 'b))
(test-assert (not (hashtable-contains? table 'a)))
(test-eqv 0 (hashtable-size table)))
(test-group "set"
(hashtable-set! table 'a 'c)
(test-eq 'c (hashtable-ref table 'a))
(test-eq 'c (hashtable-ref table 'a 'b))
(test-assert (hashtable-contains? table 'a))
(test-eqv 1 (hashtable-size table)))
(test-group "delete"
(hashtable-delete! table 'a)
(test-error (hashtable-ref table 'a))
(test-eq 'b (hashtable-ref table 'a 'b))
(test-assert (not (hashtable-contains? table 'a)))
(test-eqv 0 (hashtable-size table)))))
(test-group "advanced"
(let ((table (make-eq-hashtable)))
(test-group "lookup"
(let-values (((x found?) (hashtable-lookup table 'a)))
(test-assert (not found?))))
(test-group "update"
(test-error (hashtable-update! table 'a (lambda (x) (+ x 1))))
(hashtable-update! table 'a (lambda (x) (+ x 1)) 0)
(let-values (((x found?) (hashtable-lookup table 'a)))
(test-eqv 1 x)
(test-assert found?))
(hashtable-update! table 'a (lambda (x) (+ x 1)))
(let-values (((x found?) (hashtable-lookup table 'a)))
(test-eqv x 2)
(test-assert found?))
(hashtable-update! table 'a (lambda (x) (+ x 1)) 0)
(let-values (((x found?) (hashtable-lookup table 'a)))
(test-eqv x 3)
(test-assert found?)))
(test-group "intern"
(test-eqv 0 (hashtable-intern! table 'b (lambda () 0)))
(test-eqv 0 (hashtable-intern! table 'b (lambda () 1))))))
(test-group "copy/clear"
(let ((table (alist->hashtable equal-hash equal? '((a . b)))))
(test-group "copy"
(let ((table2 (hashtable-copy table)))
(test-eq equal-hash (hashtable-hash-function table2))
(test-eq equal? (hashtable-equivalence-function table2))
(test-eq 'b (hashtable-ref table2 'a))
(test-error (hashtable-set! table2 'a 'c)))
(let ((table2 (hashtable-copy table #f)))
(test-eq equal-hash (hashtable-hash-function table2))
(test-eq equal? (hashtable-equivalence-function table2))
(test-eq 'b (hashtable-ref table2 'a))
(test-error (hashtable-set! table2 'a 'c)))
(let ((table2 (hashtable-copy table #t)))
(test-eq equal-hash (hashtable-hash-function table2))
(test-eq equal? (hashtable-equivalence-function table2))
(test-eq 'b (hashtable-ref table2 'a))
(hashtable-set! table2 'a 'c)
(test-eq 'c (hashtable-ref table2 'a)))
(let ((table2 (hashtable-copy table #f #f)))
(test-eq equal-hash (hashtable-hash-function table2))
(test-eq equal? (hashtable-equivalence-function table2))
(test-eq #f (hashtable-weakness table2))))
(test-group "clear"
(let ((table2 (hashtable-copy table #t)))
(hashtable-clear! table2)
(test-eqv 0 (hashtable-size table2)))
(let ((table2 (hashtable-copy table #t)))
(hashtable-clear! table2 10)
(test-eqv 0 (hashtable-size table2))))
(test-group "empty-copy"
(let ((table2 (hashtable-empty-copy table)))
(test-eq equal-hash (hashtable-hash-function table2))
(test-eq equal? (hashtable-equivalence-function table2))
(test-eqv 0 (hashtable-size table2)))
(let ((table2 (hashtable-empty-copy table 10)))
(test-eq equal-hash (hashtable-hash-function table2))
(test-eq equal? (hashtable-equivalence-function table2))
(test-eqv 0 (hashtable-size table2))))))
(test-group "keys/values"
(let ((table (alist->eq-hashtable '((a . b) (c . d)))))
(test-assert (lset= eq? '(a c) (vector->list (hashtable-keys table))))
(test-assert (lset= eq? '(b d) (vector->list (hashtable-values table))))
(let-values (((keys values) (hashtable-entries table)))
(test-assert (lset= eq? '(a c) (vector->list keys)))
(test-assert (lset= eq? '(b d) (vector->list values))))
(test-assert (lset= eq? '(a c) (hashtable-key-list table)))
(test-assert (lset= eq? '(b d) (hashtable-value-list table)))
(let-values (((keys values) (hashtable-entry-lists table)))
(test-assert (lset= eq? '(a c) keys))
(test-assert (lset= eq? '(b d) values)))))
(test-group "iteration"
(test-group "walk"
(let ((keys '())
(values '()))
(hashtable-walk (alist->eq-hashtable '((a . b) (c . d)))
(lambda (k v)
(set! keys (cons k keys))
(set! values (cons v values))))
(test-assert (lset= eq? '(a c) keys))
(test-assert (lset= eq? '(b d) values))))
(test-group "update-all"
(let ((table (alist->eq-hashtable '((a . b) (c . d)))))
(hashtable-update-all! table
(lambda (k v)
(string->symbol (string-append (symbol->string v) "x"))))
(test-assert (lset= eq? '(a c) (hashtable-key-list table)))
(test-assert (lset= eq? '(bx dx) (hashtable-value-list table)))))
(test-group "prune"
(let ((table (alist->eq-hashtable '((a . b) (c . d)))))
(hashtable-prune! table (lambda (k v) (eq? k 'a)))
(test-assert (not (hashtable-contains? table 'a)))
(test-assert (hashtable-contains? table 'c))))
(test-group "merge"
(let ((table (alist->eq-hashtable '((a . b) (c . d))))
(table2 (alist->eq-hashtable '((a . x) (e . f)))))
(hashtable-merge! table table2)
(test-assert (lset= eq? '(a c e) (hashtable-key-list table)))
(test-assert (lset= eq? '(x d f) (hashtable-value-list table)))))
(test-group "sum"
(let ((table (alist->eq-hashtable '((a . b) (c . d)))))
(test-assert (lset= eq? '(a b c d)
(hashtable-sum table '()
(lambda (k v acc)
(lset-adjoin eq? acc k v)))))))
(test-group "map->lset"
(let ((table (alist->eq-hashtable '((a . b) (c . d)))))
(test-assert (lset= equal? '((a . b) (c . d))
(hashtable-map->lset table cons)))))
(test-group "find"
(let ((table (alist->eq-hashtable '((a . b) (c . d)))))
(let-values (((k v f?) (hashtable-find table
(lambda (k v)
(eq? k 'a)))))
(test-assert (and f? (eq? k 'a) (eq? v 'b))))
(let-values (((k v f?) (hashtable-find table (lambda (k v) #f))))
(test-assert (not f?)))))
(test-group "misc"
(test-group "empty?"
(test-assert (hashtable-empty? (alist->eq-hashtable '())))
(test-assert (not (hashtable-empty? (alist->eq-hashtable '((a . b)))))))
(test-group "pop!"
(test-error (hashtable-pop! (make-eq-hashtable)))
(let ((table (alist->eq-hashtable '((a . b)))))
(let-values (((k v) (hashtable-pop! table)))
(test-eq 'a k)
(test-eq 'b v)
(test-assert (hashtable-empty? table)))))
(test-group "inc!"
(let ((table (alist->eq-hashtable '((a . 0)))))
(hashtable-inc! table 'a)
(test-eqv 1 (hashtable-ref table 'a))
(hashtable-inc! table 'a 2)
(test-eqv 3 (hashtable-ref table 'a))))
(test-group "dec!"
(let ((table (alist->eq-hashtable '((a . 0)))))
(hashtable-dec! table 'a)
(test-eqv -1 (hashtable-ref table 'a))
(hashtable-dec! table 'a 2)
(test-eqv -3 (hashtable-ref table 'a)))))))
(test-group "hashing"
(test-assert (and (exact-integer? (hash-salt))))
(test-assert (not (negative? (hash-salt))))
(test-assert (= (equal-hash (list "foo" 'bar 42))
(equal-hash (list "foo" 'bar 42))))
(test-assert (= (string-hash (string-copy "foo"))
(string-hash (string-copy "foo"))))
(test-assert (= (string-ci-hash (string-copy "foo"))
(string-ci-hash (string-copy "FOO"))))
(test-assert (= (symbol-hash (string->symbol "foo"))
(symbol-hash (string->symbol "foo")))))
(test-end "SRFI-126")
The following note is commented out for Larceny .
(display
(string-append
"\n"
"NOTE: On implementations using the (r6rs hashtables) library from Larceny,\n"
" 14 tests are expected to fail in relation to make-eq-hashtable and\n"
" make-eqv-hashtable returning hashtables whose hash functions are\n"
" exposed instead of being #f. We have no obvious way to detect this\n"
" within this portable test suite, hence no XFAIL results.\n"))
The following is added for Larceny .
(system "fgrep \"result-kind:\" SRFI-126.log")
(display "Done.\n")
eval : ( put ( quote test - group ) ( quote scheme - indent - function ) 1 )
|
6b4214e25657e4227ff7541fa29d061450ea8df41d8c71f2ebbdacb4bc3b22b6 | argp/bap | fixpoint.ml | (**************************************************************************)
(* *)
: a generic graph library for OCaml
Copyright ( C ) 2004 - 2010
, and
(* *)
(* This software is free software; you can redistribute it and/or *)
modify it under the terms of the GNU Library General Public
License version 2.1 , with the special exception on linking
(* described in file LICENSE. *)
(* *)
(* This software is distributed in the hope that it will be useful, *)
(* but WITHOUT ANY WARRANTY; without even the implied warranty of *)
(* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. *)
(* *)
(**************************************************************************)
Copyright ( c ) 2010 - 2012 Technische Universitaet Muenchen
* < >
* All rights reserved .
* Markus W. Weissmann <>
* All rights reserved. *)
maximum fixpoint point calculation with the work list algorithm ;
to implement a concrete analysis , implement a module that satisfies
the Rules signature . Such a module in the Analysis functor gives a
complete analysis / optimization module that works on a CFG .
to implement a concrete analysis, implement a module that satisfies
the Rules signature. Such a module in the Analysis functor gives a
complete analysis/optimization module that works on a CFG.
*)
type direction = Forward | Backward
module type Analysis = sig
type data
type edge
type vertex
type g
val direction : direction
val join : data -> data -> data
val equal : data -> data -> bool
val analyze : edge -> data -> data
end
(** Minimal graph signature for work list algorithm *)
module type G = sig
type t
module V : Sig.COMPARABLE
module E : sig
type t
val dst : t -> V.t
val src : t -> V.t
end
val fold_vertex : (V.t -> 'a -> 'a) -> t -> 'a -> 'a
val succ_e : t -> V.t -> E.t list
val pred_e : t -> V.t -> E.t list
val succ : t -> V.t -> V.t list
val pred : t -> V.t -> V.t list
end
module Make
(G : G)
(A : Analysis with type g = G.t with type edge = G.E.t
with type vertex = G.V.t) =
struct
module M = Map.Make(G.V)
module N = Set.Make(G.V)
let analyze initial g =
let (nodes, data) =
G.fold_vertex
(fun vertex (n, m) ->
(N.add vertex n, M.add vertex (initial vertex) m))
g (N.empty, M.empty)
in
(* generate an associative map to quickly find the incoming
* (outgoing) edges of a node during the anaysis store a pair of
* a partially applied analysis function and the corresponding
* 'partner' node *)
let nodemap : ((A.data -> A.data) * G.V.t) list M.t =
let add = match A.direction with
| Forward ->
(fun n ->
let preds = G.pred_e g n in
List.map
(fun edge -> (A.analyze edge, G.E.src edge))
preds)
| Backward ->
(fun n ->
let succs = G.succ_e g n in
List.map
(fun edge -> (A.analyze edge, G.E.dst edge))
succs)
in
G.fold_vertex (fun vertex m -> M.add vertex (add vertex) m) g M.empty
in
let rec worklist (data : A.data M.t) (wl : N.t) =
(* 'meet' an arbitrary number of data-sets *)
let meet ~default = function
| [] -> default
| [x] -> x
| x::xs -> List.fold_left (fun a b -> A.join a b) x xs
in
analyze one node , creating a new data - set and node - worklist
as necessary
as necessary *)
let analyze_node analysis n d wl =
match analysis d n with
| None -> (d, wl)
| Some d' -> (d', N.add n wl)
in
try
(* get some node from the node-set -- this will eventually trigger
an exception *)
let n = N.choose wl in
(* remove the chosen node from the set *)
let wl = N.remove n wl in
let (f, ns) = match A.direction with
analyze all INCOMING edges of all SUCCESSOR nodes of the
node to be processed
node to be processed *)
| Forward ->
process one node : analyze all it 's incoming edges
and merge the resulting data ;
if the result is different to the previously stored data
for this node , return a new tuple , else None
and merge the resulting data;
if the result is different to the previously stored data
for this node, return a new tuple, else None *)
let new_node_data (data : A.data M.t) node =
let edges = M.find node nodemap in
let analysis =
List.map
(fun (f, src) -> f (M.find src data)) edges
in
let node_data = M.find node data in
let node_data' = meet ~default:node_data analysis in
if A.equal node_data node_data' then None
else Some (M.add node node_data' data)
in
(new_node_data, G.succ g n)
(* analyze all OUTGOING edges of all PREDECESSOR nodes
of the node to be processed *)
| Backward ->
let new_node_data (data : A.data M.t) node =
let edges = M.find node nodemap in
let analysis =
List.map
(fun (f, dst) -> f (M.find dst data)) edges
in
let node_data = M.find node data in
let node_data' = meet ~default:node_data analysis in
if A.equal node_data node_data' then None
else Some (M.add node node_data' data)
in
(new_node_data, G.pred g n)
in
(* analyze all successor nodes by analyzing all of their
predecessor edges *)
let (data, wl) =
List.fold_left (fun (d, wl) n -> analyze_node f n d wl)
(data, wl) ns
in
(* do a recursive call: the recursion will eventually end with a
* Not_found exception when no nodes are left in the work list *)
worklist data wl
with Not_found -> data
in
let data = worklist data nodes in
(fun n -> M.find n data)
end
| null | https://raw.githubusercontent.com/argp/bap/2f60a35e822200a1ec50eea3a947a322b45da363/ocamlgraph/src/fixpoint.ml | ocaml | ************************************************************************
This software is free software; you can redistribute it and/or
described in file LICENSE.
This software is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
************************************************************************
* Minimal graph signature for work list algorithm
generate an associative map to quickly find the incoming
* (outgoing) edges of a node during the anaysis store a pair of
* a partially applied analysis function and the corresponding
* 'partner' node
'meet' an arbitrary number of data-sets
get some node from the node-set -- this will eventually trigger
an exception
remove the chosen node from the set
analyze all OUTGOING edges of all PREDECESSOR nodes
of the node to be processed
analyze all successor nodes by analyzing all of their
predecessor edges
do a recursive call: the recursion will eventually end with a
* Not_found exception when no nodes are left in the work list | : a generic graph library for OCaml
Copyright ( C ) 2004 - 2010
, and
modify it under the terms of the GNU Library General Public
License version 2.1 , with the special exception on linking
Copyright ( c ) 2010 - 2012 Technische Universitaet Muenchen
* < >
* All rights reserved .
* Markus W. Weissmann <>
* All rights reserved. *)
maximum fixpoint point calculation with the work list algorithm ;
to implement a concrete analysis , implement a module that satisfies
the Rules signature . Such a module in the Analysis functor gives a
complete analysis / optimization module that works on a CFG .
to implement a concrete analysis, implement a module that satisfies
the Rules signature. Such a module in the Analysis functor gives a
complete analysis/optimization module that works on a CFG.
*)
type direction = Forward | Backward
module type Analysis = sig
type data
type edge
type vertex
type g
val direction : direction
val join : data -> data -> data
val equal : data -> data -> bool
val analyze : edge -> data -> data
end
module type G = sig
type t
module V : Sig.COMPARABLE
module E : sig
type t
val dst : t -> V.t
val src : t -> V.t
end
val fold_vertex : (V.t -> 'a -> 'a) -> t -> 'a -> 'a
val succ_e : t -> V.t -> E.t list
val pred_e : t -> V.t -> E.t list
val succ : t -> V.t -> V.t list
val pred : t -> V.t -> V.t list
end
module Make
(G : G)
(A : Analysis with type g = G.t with type edge = G.E.t
with type vertex = G.V.t) =
struct
module M = Map.Make(G.V)
module N = Set.Make(G.V)
let analyze initial g =
let (nodes, data) =
G.fold_vertex
(fun vertex (n, m) ->
(N.add vertex n, M.add vertex (initial vertex) m))
g (N.empty, M.empty)
in
let nodemap : ((A.data -> A.data) * G.V.t) list M.t =
let add = match A.direction with
| Forward ->
(fun n ->
let preds = G.pred_e g n in
List.map
(fun edge -> (A.analyze edge, G.E.src edge))
preds)
| Backward ->
(fun n ->
let succs = G.succ_e g n in
List.map
(fun edge -> (A.analyze edge, G.E.dst edge))
succs)
in
G.fold_vertex (fun vertex m -> M.add vertex (add vertex) m) g M.empty
in
let rec worklist (data : A.data M.t) (wl : N.t) =
let meet ~default = function
| [] -> default
| [x] -> x
| x::xs -> List.fold_left (fun a b -> A.join a b) x xs
in
analyze one node , creating a new data - set and node - worklist
as necessary
as necessary *)
let analyze_node analysis n d wl =
match analysis d n with
| None -> (d, wl)
| Some d' -> (d', N.add n wl)
in
try
let n = N.choose wl in
let wl = N.remove n wl in
let (f, ns) = match A.direction with
analyze all INCOMING edges of all SUCCESSOR nodes of the
node to be processed
node to be processed *)
| Forward ->
process one node : analyze all it 's incoming edges
and merge the resulting data ;
if the result is different to the previously stored data
for this node , return a new tuple , else None
and merge the resulting data;
if the result is different to the previously stored data
for this node, return a new tuple, else None *)
let new_node_data (data : A.data M.t) node =
let edges = M.find node nodemap in
let analysis =
List.map
(fun (f, src) -> f (M.find src data)) edges
in
let node_data = M.find node data in
let node_data' = meet ~default:node_data analysis in
if A.equal node_data node_data' then None
else Some (M.add node node_data' data)
in
(new_node_data, G.succ g n)
| Backward ->
let new_node_data (data : A.data M.t) node =
let edges = M.find node nodemap in
let analysis =
List.map
(fun (f, dst) -> f (M.find dst data)) edges
in
let node_data = M.find node data in
let node_data' = meet ~default:node_data analysis in
if A.equal node_data node_data' then None
else Some (M.add node node_data' data)
in
(new_node_data, G.pred g n)
in
let (data, wl) =
List.fold_left (fun (d, wl) n -> analyze_node f n d wl)
(data, wl) ns
in
worklist data wl
with Not_found -> data
in
let data = worklist data nodes in
(fun n -> M.find n data)
end
|
5627e673d746e5ee8d865306ef717652c17b783328dfb0cc9447ab5aca1dfd9f | edvorg/yet-another-craft | project.clj | (defproject yet-another-craft "0.1.0-SNAPSHOT"
:description "FIXME: write description"
:url ""
:license {:name "Eclipse Public License"
:url "-v10.html"}
:dependencies [[org.clojure/clojure "1.8.0"]
[org.clojure/clojurescript "1.8.51" :scope "provided"]
[org.clojure/core.async "0.2.374"]
[ring/ring-devel "1.4.0"]
[ring/ring-core "1.4.0"]
[ring/ring-defaults "0.2.0"]
[http-kit "2.1.19"]
[reagent "0.6.0-SNAPSHOT"]
[reagent-forms "0.5.23"]
[reagent-utils "0.1.8"]
[prone "1.1.1"]
[compojure "1.5.0"]
[hiccup "1.0.5"]
[environ "1.0.2"]
[secretary "1.2.3"]
[jarohen/chord "0.7.0"]
[org.clojure/tools.reader "0.10.0"]
[secretary "1.2.3"]
[com.taoensso/timbre "4.3.1"]
[mount "0.1.10"]]
:plugins [[lein-environ "1.0.2"]
[refactor-nrepl "2.3.0-SNAPSHOT"]
[lein-asset-minifier "0.2.8"]
[cider/cider-nrepl "0.13.0-SNAPSHOT"]]
:ring {:handler yet-another-craft.handler/app
:uberwar-name "yet-another-craft.war"}
:min-lein-version "2.5.0"
:uberjar-name "yet-another-craft.jar"
:main yet-another-craft.server
:clean-targets ^{:protect false} [:target-path
[:cljsbuild :builds :app :compiler :output-dir]
[:cljsbuild :builds :app :compiler :output-to]]
:source-paths ["src/clj" "src/cljc"]
:minify-assets
{:assets
{"resources/public/css/site.min.css" "resources/public/css/site.css"}}
:cljsbuild {:builds {:app {:source-paths ["src/cljs" "src/cljc"]
:compiler {:output-to "resources/public/js/app.js"
:output-dir "resources/public/js/out"
:asset-path "js/out"
:optimizations :none
:pretty-print true}}}}
:figwheel {:http-server-root "public"
:server-port 3000
:nrepl-port 7002
:nrepl-middleware ["cider.nrepl/cider-middleware"
"refactor-nrepl.middleware/wrap-refactor"
"cemerick.piggieback/wrap-cljs-repl"]
:css-dirs ["resources/public/css"]
:ring-handler yet-another-craft.handler/app}
:profiles {:dev {:repl-options {:init-ns yet-another-craft.repl
:nrepl-middleware [cemerick.piggieback/wrap-cljs-repl
cider.nrepl.middleware.apropos/wrap-apropos
cider.nrepl.middleware.classpath/wrap-classpath
cider.nrepl.middleware.complete/wrap-complete
cider.nrepl.middleware.info/wrap-info
cider.nrepl.middleware.inspect/wrap-inspect
cider.nrepl.middleware.macroexpand/wrap-macroexpand
cider.nrepl.middleware.ns/wrap-ns
cider.nrepl.middleware.resource/wrap-resource
cider.nrepl.middleware.stacktrace/wrap-stacktrace
cider.nrepl.middleware.test/wrap-test
cider.nrepl.middleware.trace/wrap-trace
cider.nrepl.middleware.undef/wrap-undef]}
:dependencies [[ring/ring-mock "0.3.0"]
[ring/ring-devel "1.4.0"]
[com.cemerick/piggieback "0.2.1"]
[org.clojure/tools.nrepl "0.2.12"]
[pjstadig/humane-test-output "0.8.0"]]
:source-paths ["env/dev/clj"]
:plugins [[lein-figwheel "0.5.2"]
[lein-cljsbuild "1.1.3"]]
:injections [(require 'pjstadig.humane-test-output)
(pjstadig.humane-test-output/activate!)]
:env {:dev true}
:cljsbuild {:builds {:app {:source-paths ["env/dev/cljs"]
:compiler {:main "yet-another-craft.dev"
:source-map true}}}}}
:uberjar {:hooks [leiningen.cljsbuild minify-assets.plugin/hooks]
:env {:production true}
:aot :all
:omit-source true
:cljsbuild {:jar true
:builds {:app
{:source-paths ["env/prod/cljs"]
:compiler
{:optimizations :advanced
:pretty-print false}}}}}})
| null | https://raw.githubusercontent.com/edvorg/yet-another-craft/bab8484135233323f253946d1f076d4f792a6326/project.clj | clojure | (defproject yet-another-craft "0.1.0-SNAPSHOT"
:description "FIXME: write description"
:url ""
:license {:name "Eclipse Public License"
:url "-v10.html"}
:dependencies [[org.clojure/clojure "1.8.0"]
[org.clojure/clojurescript "1.8.51" :scope "provided"]
[org.clojure/core.async "0.2.374"]
[ring/ring-devel "1.4.0"]
[ring/ring-core "1.4.0"]
[ring/ring-defaults "0.2.0"]
[http-kit "2.1.19"]
[reagent "0.6.0-SNAPSHOT"]
[reagent-forms "0.5.23"]
[reagent-utils "0.1.8"]
[prone "1.1.1"]
[compojure "1.5.0"]
[hiccup "1.0.5"]
[environ "1.0.2"]
[secretary "1.2.3"]
[jarohen/chord "0.7.0"]
[org.clojure/tools.reader "0.10.0"]
[secretary "1.2.3"]
[com.taoensso/timbre "4.3.1"]
[mount "0.1.10"]]
:plugins [[lein-environ "1.0.2"]
[refactor-nrepl "2.3.0-SNAPSHOT"]
[lein-asset-minifier "0.2.8"]
[cider/cider-nrepl "0.13.0-SNAPSHOT"]]
:ring {:handler yet-another-craft.handler/app
:uberwar-name "yet-another-craft.war"}
:min-lein-version "2.5.0"
:uberjar-name "yet-another-craft.jar"
:main yet-another-craft.server
:clean-targets ^{:protect false} [:target-path
[:cljsbuild :builds :app :compiler :output-dir]
[:cljsbuild :builds :app :compiler :output-to]]
:source-paths ["src/clj" "src/cljc"]
:minify-assets
{:assets
{"resources/public/css/site.min.css" "resources/public/css/site.css"}}
:cljsbuild {:builds {:app {:source-paths ["src/cljs" "src/cljc"]
:compiler {:output-to "resources/public/js/app.js"
:output-dir "resources/public/js/out"
:asset-path "js/out"
:optimizations :none
:pretty-print true}}}}
:figwheel {:http-server-root "public"
:server-port 3000
:nrepl-port 7002
:nrepl-middleware ["cider.nrepl/cider-middleware"
"refactor-nrepl.middleware/wrap-refactor"
"cemerick.piggieback/wrap-cljs-repl"]
:css-dirs ["resources/public/css"]
:ring-handler yet-another-craft.handler/app}
:profiles {:dev {:repl-options {:init-ns yet-another-craft.repl
:nrepl-middleware [cemerick.piggieback/wrap-cljs-repl
cider.nrepl.middleware.apropos/wrap-apropos
cider.nrepl.middleware.classpath/wrap-classpath
cider.nrepl.middleware.complete/wrap-complete
cider.nrepl.middleware.info/wrap-info
cider.nrepl.middleware.inspect/wrap-inspect
cider.nrepl.middleware.macroexpand/wrap-macroexpand
cider.nrepl.middleware.ns/wrap-ns
cider.nrepl.middleware.resource/wrap-resource
cider.nrepl.middleware.stacktrace/wrap-stacktrace
cider.nrepl.middleware.test/wrap-test
cider.nrepl.middleware.trace/wrap-trace
cider.nrepl.middleware.undef/wrap-undef]}
:dependencies [[ring/ring-mock "0.3.0"]
[ring/ring-devel "1.4.0"]
[com.cemerick/piggieback "0.2.1"]
[org.clojure/tools.nrepl "0.2.12"]
[pjstadig/humane-test-output "0.8.0"]]
:source-paths ["env/dev/clj"]
:plugins [[lein-figwheel "0.5.2"]
[lein-cljsbuild "1.1.3"]]
:injections [(require 'pjstadig.humane-test-output)
(pjstadig.humane-test-output/activate!)]
:env {:dev true}
:cljsbuild {:builds {:app {:source-paths ["env/dev/cljs"]
:compiler {:main "yet-another-craft.dev"
:source-map true}}}}}
:uberjar {:hooks [leiningen.cljsbuild minify-assets.plugin/hooks]
:env {:production true}
:aot :all
:omit-source true
:cljsbuild {:jar true
:builds {:app
{:source-paths ["env/prod/cljs"]
:compiler
{:optimizations :advanced
:pretty-print false}}}}}})
|
|
fa5642aa3d8e2ca8beba4255d271677c25a57a7d85f242385b312513ec900061 | input-output-hk/project-icarus-importer | Modifier.hs | # LANGUAGE AllowAmbiguousTypes #
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE TypeFamilies #-}
-- | Wallet info modifier
module Pos.Wallet.Web.Tracking.Modifier
( CAccModifier (..)
, CachedCAccModifier
, VoidModifier
, deleteAndInsertVM
, deleteAndInsertMM
, IndexedMapModifier (..)
, sortedInsertions
, indexedDeletions
, insertIMM
, deleteIMM
, deleteAndInsertIMM
) where
import Universum
import Data.DList (DList)
import Formatting (bprint, build, (%))
import Serokell.Util (listJson, listJsonIndent)
import Pos.Client.Txp.History (TxHistoryEntry (..))
import Pos.Core (Address, HeaderHash)
import Pos.Core.Txp (TxId)
import Pos.Txp.Toil (UtxoModifier)
import Pos.Util.LogSafe (BuildableSafeGen (..), deriveSafeBuildable, secretOnlyF,
secureListF)
import Pos.Util.Modifier (MapModifier)
import qualified Pos.Util.Modifier as MM
import Pos.Wallet.Web.Pending.Types (PtxBlockInfo)
import Pos.Wallet.Web.State (WAddressMeta)
-- | VoidModifier describes a difference between two states.
-- It's (set of added k, set of deleted k) essentially.
type VoidModifier a = MapModifier a ()
-- | A 'MM.MapModifier' whose insertions are stamped with a monotonically
-- increasing index, so the original insertion order can be recovered
-- (see 'sortedInsertions').
data IndexedMapModifier a = IndexedMapModifier
    { immModifier :: MM.MapModifier a Int -- ^ underlying modifier; values are insertion indices
    , immCounter :: Int -- ^ next insertion index to assign
    }
-- | All recorded insertions, in the order they were performed
-- (ascending insertion index).
sortedInsertions :: IndexedMapModifier a -> [a]
sortedInsertions imm = map fst (sortWith snd (MM.insertions (immModifier imm)))
-- | All recorded deletions (order unspecified).
indexedDeletions :: IndexedMapModifier a -> [a]
indexedDeletions imm = MM.deletions (immModifier imm)
-- Composition shifts the right operand's insertion indices by the left
-- operand's counter, so @m2@'s insertions sort after @m1@'s in
-- 'sortedInsertions'.
instance (Eq a, Hashable a) => Semigroup (IndexedMapModifier a) where
    IndexedMapModifier m1 c1 <> IndexedMapModifier m2 c2 =
        IndexedMapModifier (m1 <> fmap (+ c1) m2) (c1 + c2)
instance (Eq a, Hashable a) => Monoid (IndexedMapModifier a) where
    -- identity: no modifications recorded, counter starts at 0
    mempty = IndexedMapModifier mempty 0
    mappend = (<>)
-- | Accumulated wallet-tracking state changes: addresses, used/change
-- address sets, local utxo difference, tx history entries and pending-tx
-- candidates, gathered while processing blocks/transactions.
data CAccModifier = CAccModifier
    { camAddresses :: !(IndexedMapModifier WAddressMeta) -- ^ added/removed wallet addresses, insertion order preserved
    , camUsed :: !(VoidModifier (Address, HeaderHash)) -- ^ addresses marked as used, tagged with a block header hash
    , camChange :: !(VoidModifier (Address, HeaderHash)) -- ^ addresses marked as change, tagged with a block header hash
    , camUtxo :: !UtxoModifier -- ^ local utxo difference
    , camAddedHistory :: !(DList TxHistoryEntry) -- ^ tx history entries to add
    , camDeletedHistory :: !(DList TxHistoryEntry) -- ^ tx history entries to remove
    , camAddedPtxCandidates :: !(DList (TxId, PtxBlockInfo)) -- ^ pending-tx candidates to add (tx id with its block info)
    , camDeletedPtxCandidates :: !(DList (TxId, TxHistoryEntry)) -- ^ pending-tx candidates to drop (tx id with its history entry)
    }
instance Semigroup CAccModifier where
    -- NOTE(review): 'camAddedHistory' is combined right-before-left
    -- (@ah1 <> ah@), unlike every other field -- presumably so newer
    -- entries end up in front. Confirm the asymmetry is intentional.
    (CAccModifier a b c d ah dh aptx dptx) <> (CAccModifier a1 b1 c1 d1 ah1 dh1 aptx1 dptx1) =
        CAccModifier (a <> a1) (b <> b1) (c <> c1) (d <> d1) (ah1 <> ah)
                     (dh <> dh1) (aptx <> aptx1) (dptx <> dptx1)
instance Monoid CAccModifier where
    -- identity: every component empty
    mempty = CAccModifier mempty mempty mempty mempty mempty mempty mempty mempty
    mappend = (<>)
-- Render a 'CAccModifier' for logging: one labelled line per component.
-- List-valued fields go through 'secureListF' and the utxo diff through
-- 'secretOnlyF', so sensitive data is masked depending on the security
-- level @sl@.
instance BuildableSafeGen CAccModifier where
    buildSafeGen sl CAccModifier{..} =
        bprint
            ( "\n added addresses: "%secureListF sl (listJsonIndent 8)
            %",\n deleted addresses: "%secureListF sl (listJsonIndent 8)
            %",\n used addresses: "%secureListF sl listJson
            %",\n change addresses: "%secureListF sl listJson
            %",\n local utxo (difference): "%secretOnlyF sl build
            %",\n added history entries: "%secureListF sl (listJsonIndent 8)
            %",\n deleted history entries: "%secureListF sl (listJsonIndent 8)
            %",\n added pending candidates: "%secureListF sl listJson
            %",\n deleted pending candidates: "%secureListF sl listJson)
            (sortedInsertions camAddresses)
            (indexedDeletions camAddresses)
            (map (fst . fst) $ MM.insertions camUsed)
            (map (fst . fst) $ MM.insertions camChange)
            camUtxo
            camAddedHistory
            camDeletedHistory
            (map fst camAddedPtxCandidates)
            (map fst camDeletedPtxCandidates)
-- Template Haskell: derive logging instances from the 'BuildableSafeGen'
-- instance above.
deriveSafeBuildable ''CAccModifier

-- | `txMempoolToModifier`, once evaluated, is passed around under this type in
-- scope of single request.
type CachedCAccModifier = CAccModifier
----------------------------------------------------------------------------
-- Funcs
----------------------------------------------------------------------------
-- | This function is alternative for MapModifier's @delete@.
-- It doesn't add removable element to delete set
-- if it was inserted before (in contrast with @delete@)
deleteNotDeep :: (Eq k, Hashable k) => k -> MapModifier k v -> MapModifier k v
deleteNotDeep = MM.alter alterDelF
  where
    alterDelF :: MM.KeyState v -> MM.KeyState v
    alterDelF MM.KeyNotFound = MM.KeyDeleted -- absent before: record an explicit deletion
    alterDelF MM.KeyDeleted = MM.KeyDeleted -- already deleted: unchanged
    alterDelF (MM.KeyInserted _) = MM.KeyNotFound -- inserted by this modifier: just drop the insertion
-- | Record the insertion of @k@, stamping it with the current counter so
-- insertion order is preserved, then bump the counter.
insertIMM
    :: (Eq a, Hashable a)
    => a -> IndexedMapModifier a -> IndexedMapModifier a
insertIMM k (IndexedMapModifier modifier counter) =
    IndexedMapModifier (MM.insert k counter modifier) (counter + 1)
-- | Record the (shallow, see 'deleteNotDeep') deletion of @k@; the
-- insertion counter is left untouched.
deleteIMM
    :: (Eq a, Hashable a)
    => a -> IndexedMapModifier a -> IndexedMapModifier a
deleteIMM k (IndexedMapModifier modifier counter) =
    IndexedMapModifier (deleteNotDeep k modifier) counter
-- | Apply all deletions first (tx inputs), then all insertions
-- (tx outputs), preserving insertion order for the latter.
deleteAndInsertIMM
    :: (Eq a, Hashable a)
    => [a] -> [a] -> IndexedMapModifier a -> IndexedMapModifier a
deleteAndInsertIMM dels ins mapModifier =
    let afterDeletes = foldl' (flip deleteIMM) mapModifier dels
    in foldl' (flip insertIMM) afterDeletes ins
-- | Specialisation of 'deleteAndInsertMM' for 'VoidModifier': every
-- inserted key carries the unit value.
deleteAndInsertVM :: (Eq a, Hashable a) => [a] -> [a] -> VoidModifier a -> VoidModifier a
deleteAndInsertVM dels ins mapModifier =
    deleteAndInsertMM dels [(i, ()) | i <- ins] mapModifier
-- | Shallow-delete the @dels@ keys first, then insert the @ins@
-- key/value pairs.
deleteAndInsertMM :: (Eq k, Hashable k) => [k] -> [(k, v)] -> MM.MapModifier k v -> MM.MapModifier k v
deleteAndInsertMM dels ins mapModifier =
    -- Insert CWAddressMeta corresponding to outputs of tx (2)
    (\mm -> foldl' insertAcc mm ins) $
    -- Delete CWAddressMeta corresponding to inputs of tx (1)
    foldl' deleteAcc mapModifier dels
  where
    insertAcc :: (Hashable k, Eq k) => MapModifier k v -> (k, v) -> MapModifier k v
    insertAcc modifier (k, v) = MM.insert k v modifier
    deleteAcc :: (Hashable k, Eq k) => MapModifier k v -> k -> MapModifier k v
    deleteAcc = flip deleteNotDeep
| null | https://raw.githubusercontent.com/input-output-hk/project-icarus-importer/36342f277bcb7f1902e677a02d1ce93e4cf224f0/wallet/src/Pos/Wallet/Web/Tracking/Modifier.hs | haskell | | Wallet info modifier
It's (set of added k, set of deleted k) essentially.
| `txMempoolToModifier`, once evaluated, is passed around under this type in
scope of single request.
--------------------------------------------------------------------------
--------------------------------------------------------------------------
It doesn't add removable element to delete set
if it was inserted before (in contrast with @delete@) | # LANGUAGE AllowAmbiguousTypes #
# LANGUAGE ScopedTypeVariables #
# LANGUAGE TypeFamilies #
module Pos.Wallet.Web.Tracking.Modifier
( CAccModifier (..)
, CachedCAccModifier
, VoidModifier
, deleteAndInsertVM
, deleteAndInsertMM
, IndexedMapModifier (..)
, sortedInsertions
, indexedDeletions
, insertIMM
, deleteIMM
, deleteAndInsertIMM
) where
import Universum
import Data.DList (DList)
import Formatting (bprint, build, (%))
import Serokell.Util (listJson, listJsonIndent)
import Pos.Client.Txp.History (TxHistoryEntry (..))
import Pos.Core (Address, HeaderHash)
import Pos.Core.Txp (TxId)
import Pos.Txp.Toil (UtxoModifier)
import Pos.Util.LogSafe (BuildableSafeGen (..), deriveSafeBuildable, secretOnlyF,
secureListF)
import Pos.Util.Modifier (MapModifier)
import qualified Pos.Util.Modifier as MM
import Pos.Wallet.Web.Pending.Types (PtxBlockInfo)
import Pos.Wallet.Web.State (WAddressMeta)
VoidModifier describes a difference between two states .
type VoidModifier a = MapModifier a ()
data IndexedMapModifier a = IndexedMapModifier
{ immModifier :: MM.MapModifier a Int
, immCounter :: Int
}
sortedInsertions :: IndexedMapModifier a -> [a]
sortedInsertions = map fst . sortWith snd . MM.insertions . immModifier
indexedDeletions :: IndexedMapModifier a -> [a]
indexedDeletions = MM.deletions . immModifier
instance (Eq a, Hashable a) => Semigroup (IndexedMapModifier a) where
IndexedMapModifier m1 c1 <> IndexedMapModifier m2 c2 =
IndexedMapModifier (m1 <> fmap (+ c1) m2) (c1 + c2)
instance (Eq a, Hashable a) => Monoid (IndexedMapModifier a) where
mempty = IndexedMapModifier mempty 0
mappend = (<>)
data CAccModifier = CAccModifier
{ camAddresses :: !(IndexedMapModifier WAddressMeta)
, camUsed :: !(VoidModifier (Address, HeaderHash))
, camChange :: !(VoidModifier (Address, HeaderHash))
, camUtxo :: !UtxoModifier
, camAddedHistory :: !(DList TxHistoryEntry)
, camDeletedHistory :: !(DList TxHistoryEntry)
, camAddedPtxCandidates :: !(DList (TxId, PtxBlockInfo))
, camDeletedPtxCandidates :: !(DList (TxId, TxHistoryEntry))
}
instance Semigroup CAccModifier where
(CAccModifier a b c d ah dh aptx dptx) <> (CAccModifier a1 b1 c1 d1 ah1 dh1 aptx1 dptx1) =
CAccModifier (a <> a1) (b <> b1) (c <> c1) (d <> d1) (ah1 <> ah)
(dh <> dh1) (aptx <> aptx1) (dptx <> dptx1)
instance Monoid CAccModifier where
mempty = CAccModifier mempty mempty mempty mempty mempty mempty mempty mempty
mappend = (<>)
instance BuildableSafeGen CAccModifier where
buildSafeGen sl CAccModifier{..} =
bprint
( "\n added addresses: "%secureListF sl (listJsonIndent 8)
%",\n deleted addresses: "%secureListF sl (listJsonIndent 8)
%",\n used addresses: "%secureListF sl listJson
%",\n change addresses: "%secureListF sl listJson
%",\n local utxo (difference): "%secretOnlyF sl build
%",\n added history entries: "%secureListF sl (listJsonIndent 8)
%",\n deleted history entries: "%secureListF sl (listJsonIndent 8)
%",\n added pending candidates: "%secureListF sl listJson
%",\n deleted pending candidates: "%secureListF sl listJson)
(sortedInsertions camAddresses)
(indexedDeletions camAddresses)
(map (fst . fst) $ MM.insertions camUsed)
(map (fst . fst) $ MM.insertions camChange)
camUtxo
camAddedHistory
camDeletedHistory
(map fst camAddedPtxCandidates)
(map fst camDeletedPtxCandidates)
deriveSafeBuildable ''CAccModifier
type CachedCAccModifier = CAccModifier
Funcs
| This function is alternative for MapModifier 's @delete@.
deleteNotDeep :: (Eq k, Hashable k) => k -> MapModifier k v -> MapModifier k v
deleteNotDeep = MM.alter alterDelF
where
alterDelF :: MM.KeyState v -> MM.KeyState v
alterDelF MM.KeyNotFound = MM.KeyDeleted
alterDelF MM.KeyDeleted = MM.KeyDeleted
alterDelF (MM.KeyInserted _) = MM.KeyNotFound
insertIMM
:: (Eq a, Hashable a)
=> a -> IndexedMapModifier a -> IndexedMapModifier a
insertIMM k IndexedMapModifier {..} =
IndexedMapModifier
{ immModifier = MM.insert k immCounter immModifier
, immCounter = immCounter + 1
}
deleteIMM
:: (Eq a, Hashable a)
=> a -> IndexedMapModifier a -> IndexedMapModifier a
deleteIMM k IndexedMapModifier {..} =
IndexedMapModifier
{ immModifier = deleteNotDeep k immModifier
, ..
}
deleteAndInsertIMM
:: (Eq a, Hashable a)
=> [a] -> [a] -> IndexedMapModifier a -> IndexedMapModifier a
deleteAndInsertIMM dels ins mapModifier =
Insert CWAddressMeta coressponding to outputs of tx .
(\mm -> foldl' (flip insertIMM) mm ins) $
Delete CWAddressMeta coressponding to inputs of tx .
foldl' (flip deleteIMM) mapModifier dels
deleteAndInsertVM :: (Eq a, Hashable a) => [a] -> [a] -> VoidModifier a -> VoidModifier a
deleteAndInsertVM dels ins mapModifier = deleteAndInsertMM dels (zip ins $ repeat ()) mapModifier
deleteAndInsertMM :: (Eq k, Hashable k) => [k] -> [(k, v)] -> MM.MapModifier k v -> MM.MapModifier k v
deleteAndInsertMM dels ins mapModifier =
Insert CWAddressMeta coressponding to outputs of tx ( 2 )
(\mm -> foldl' insertAcc mm ins) $
Delete CWAddressMeta coressponding to inputs of tx ( 1 )
foldl' deleteAcc mapModifier dels
where
insertAcc :: (Hashable k, Eq k) => MapModifier k v -> (k, v) -> MapModifier k v
insertAcc modifier (k, v) = MM.insert k v modifier
deleteAcc :: (Hashable k, Eq k) => MapModifier k v -> k -> MapModifier k v
deleteAcc = flip deleteNotDeep
|
21918e9d66f3836c676cce36dda2aaa0b4474481040835599f2fdf90f4232e87 | p2pcollab/ocaml-p2p | test_vicinity_lwt.ml |
  Copyright (C) 2019 TG x Thoth

  This program is free software: you can redistribute it and/or modify
  it under the terms of the GNU Affero General Public License as published by
  the Free Software Foundation, either version 3 of the License.

  This program is distributed in the hope that it will be useful,
  but WITHOUT ANY WARRANTY; without even the implied warranty of
  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
  GNU Affero General Public License for more details.

  You should have received a copy of the GNU Affero General Public License
  along with this program. If not, see </>.
*)
open Stdint
(* Shorthands: [u64] lifts an int64 literal to Uint64; [pf]/[out] are Fmt
   helpers used for the test's stdout logging. *)
let u64 = Uint64.of_int64
let pf = Fmt.pf
let out = Fmt.stdout
module Node_id = P2p.Node_id
(* FIXME
module Make_node (Node_id: P2p.S.NODE_ID)
  : P2p.S.NODE with type nid := Node_id.t = struct
  module Node = P2p.Node.Make (Node_id)
  include Node

  (** 1 / (a - b) *)
  let sim a b =
    1. /. Uint64.to_float
            (Node_id.to_uint64
               (Node_id.distance (Node.id a) (Node.id b)))
end
*)
module Node = P2p.Node.Make (Node_id)
module View = P2p.View.Make (Node_id) (Node)
module Vicinity = P2p_vicinity.Make (Node_id) (Node) (View)
(* Test I/O implementation plugged into the Vicinity protocol: each node
   talks to its peer over a pair of Lwt_io pipe channels and logs every
   protocol callback to stdout. *)
module Io = struct
  type t = {
    node_id: Node_id.t;              (* id of the local node, for log prefixes *)
    in_chan : Lwt_io.input_channel;  (* channel fed by the remote peer *)
    out_chan : Lwt_io.output_channel;(* channel read by the remote peer *)
  }

  let init node_id in_chan out_chan =
    { node_id; in_chan; out_chan }

  (** [initiate_gossip t node xchg]
      sends [xchg] entries to node [dst]
      and returns response *)
  let initiate_gossip t dst xchg =
    pf out "%a # INITIATE_GOSSIP to node %a\n" Node_id.pp t.node_id Node.pp dst;
    pf out "xchg to send:\n%a\n" View.pp xchg;
    flush stdout;
    let%lwt _ = Lwt_io.write_value t.out_chan xchg in
    Lwt_io.read_value t.in_chan

  (** [respond_gossip t node xchg]
      sends [xchg] entries in response to node [dst] *)
  let respond_gossip t dst xchg =
    pf out "%a # RESPOND_GOSSIP to node %a\n" Node_id.pp t.node_id Node.pp dst;
    pf out "xchg to send:\n%a\n" View.pp xchg;
    flush stdout;
    let%lwt _ = Lwt_io.write_value t.out_chan xchg in
    Lwt.return_unit

  (** [gossip_recvd t node view recvd]
      is called after entries are received during a gossip exchange;
      allows rewriting [recvd] entries with the returned value. *)
  let gossip_recvd _t _src recvd _view =
    Lwt.return recvd

  (** [view_updated node view]
      is called when [view] has been updated after a gossip exchange *)
  let view_updated t node view =
    pf out "%a # VIEW_UPDATED of node %a\n%a\n" Node_id.pp t.node_id Node.pp node View.pp view;
    flush stdout;
    Lwt.return_unit

  (* No external view in this test; always report an empty one. *)
  let get_xview _t =
    View.empty
end
(* Lwt-driven Vicinity protocol instance wired to the test Io above. *)
module Vicinity_lwt =
  P2p_vicinity_lwt.Make (Node_id) (Node) (View) (Vicinity) (Io)
(* Reader loop for one end of a pipe: repeatedly receive a gossip exchange
   from [ch], let [vc] respond on behalf of [node] to the remote [rnode],
   and log the received entries and the resulting view. Loops forever;
   cancelled externally when the main timeout fires. *)
let rec read_chan ch vc node rnode =
  let%lwt recvd = Lwt_io.read_value ch in
  pf out "%a # READ_CHAN\n" Node_id.pp (Node.id node);
  pf out "recvd:\n%a\n" View.pp recvd;
  flush stdout;
  let%lwt view = Vicinity_lwt.respond vc rnode recvd in
  (* NOTE(review): [recvd] is printed a second time after responding;
     looks like intentional trace output -- confirm it isn't leftover
     debugging. *)
  pf out "recvd:\n%a\n" View.pp recvd;
  pf out "view:\n%a\n" View.pp view;
  flush stdout;
  read_chan ch vc node rnode
(* Seed nocrypto's RNG (Lwt-based entropy initialization); the returned
   thread is intentionally discarded. *)
let _ = Nocrypto_entropy_lwt.initialize ()
(* Wire two Vicinity nodes together over crossed pipes (node1 reads what
   node2 writes and vice versa), give each a disjoint initial view, run
   both protocol loops plus one reader loop per node, and shut everything
   down after a 5.5 s timeout. *)
let () =
  (* protocol parameters: view size, gossip exchange size, gossip period (s) *)
  let view_len = 8 in
  let xchg_len = 4 in
  let period = 1.0 in
  (* crossed pipes: [in_ch1] is fed by [out_ch2] and vice versa *)
  let (in_ch1, out_ch2) = Lwt_io.pipe () in
  let (in_ch2, out_ch1) = Lwt_io.pipe () in
  let node1 = Node.init (u64 100L) in
  let io1 = Io.init (Node.id node1) in_ch1 out_ch1 in
  (* node1 initially knows nodes 110..170 *)
  let view1 =
    View.add (Node.init (u64 110L))
      (View.add (Node.init (u64 120L))
         (View.add (Node.init (u64 130L))
            (View.add (Node.init (u64 140L))
               (View.add (Node.init (u64 150L))
                  (View.add (Node.init (u64 160L))
                     (View.add (Node.init (u64 170L))
                        View.empty)))))) in
  let vc1 = Vicinity_lwt.init ~me:node1 ~view:view1 ~view_len ~xchg_len ~period ~io:io1 in
  let node2 = Node.init (u64 200L) in
  let io2 = Io.init (Node.id node2) in_ch2 out_ch2 in
  (* node2 initially knows nodes 210..270 *)
  let view2 =
    View.add (Node.init (u64 210L))
      (View.add (Node.init (u64 220L))
         (View.add (Node.init (u64 230L))
            (View.add (Node.init (u64 240L))
               (View.add (Node.init (u64 250L))
                  (View.add (Node.init (u64 260L))
                     (View.add (Node.init (u64 270L))
                        View.empty)))))) in
  let vc2 = Vicinity_lwt.init ~me:node2 ~view:view2 ~view_len ~xchg_len ~period ~io:io2 in
  let timeout = Lwt_unix.sleep 5.5 in
  Lwt_main.run @@
  Lwt.choose [ Vicinity_lwt.run vc1;
               Vicinity_lwt.run vc2;
               read_chan in_ch1 vc1 node1 node2;
               read_chan in_ch2 vc2 node2 node1;
               (* once the timeout elapses, stop both protocol instances *)
               Lwt.map (fun () -> Vicinity_lwt.shutdown vc1;
                                  Vicinity_lwt.shutdown vc2) timeout ]
| null | https://raw.githubusercontent.com/p2pcollab/ocaml-p2p/121e86efff0e6edd7c498d17bf4cab7787991888/test/p2p-vicinity-lwt/test_vicinity_lwt.ml | ocaml | * 1 / (a - b)
* [initiate_gossip t node xchg]
sends [xchg] entries to node [dst]
and returns response
* [respond_gossip t node xchg]
sends [xchg] entries in response to node [dst]
* [gossip_recvd t node view recvd]
is called after entries are received during a gossip exchange;
allows rewriting [recvd] entries with the returned value.
* [view_updated node view]
is called when [view] has been updated after a gossip exchange |
Copyright ( C ) 2019 TG x
This program is free software : you can redistribute it and/or modify
it under the terms of the GNU Affero General Public License as published by
the Free Software Foundation , either version 3 of the License .
This program is distributed in the hope that it will be useful ,
but WITHOUT ANY WARRANTY ; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE . See the
GNU Affero General Public License for more details .
You should have received a copy of the GNU Affero General Public License
along with this program . If not , see < / > .
Copyright (C) 2019 TG x Thoth
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU Affero General Public License as published by
the Free Software Foundation, either version 3 of the License.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Affero General Public License for more details.
You should have received a copy of the GNU Affero General Public License
along with this program. If not, see </>.
*)
open Stdint
let u64 = Uint64.of_int64
let pf = Fmt.pf
let out = Fmt.stdout
module Node_id = P2p.Node_id
FIXME
module Make_node ( Node_id : . S.NODE_ID )
: . S.NODE with type nid : = Node_id.t = struct
module = P2p . Node . Make ( Node_id )
include Node
( * * 1 / ( a - b )
module Make_node (Node_id: P2p.S.NODE_ID)
: P2p.S.NODE with type nid := Node_id.t = struct
module Node = P2p.Node.Make (Node_id)
include Node
let sim a b =
1. /. Uint64.to_float
(Node_id.to_uint64
(Node_id.distance (Node.id a) (Node.id b)))
end
*)
module Node = P2p.Node.Make (Node_id)
module View = P2p.View.Make (Node_id) (Node)
module Vicinity = P2p_vicinity.Make (Node_id) (Node) (View)
module Io = struct
type t = {
node_id: Node_id.t;
in_chan : Lwt_io.input_channel;
out_chan : Lwt_io.output_channel;
}
let init node_id in_chan out_chan =
{ node_id; in_chan; out_chan }
let initiate_gossip t dst xchg =
pf out "%a # INITIATE_GOSSIP to node %a\n" Node_id.pp t.node_id Node.pp dst;
pf out "xchg to send:\n%a\n" View.pp xchg;
flush stdout;
let%lwt _ = Lwt_io.write_value t.out_chan xchg in
Lwt_io.read_value t.in_chan
let respond_gossip t dst xchg =
pf out "%a # RESPOND_GOSSIP to node %a\n" Node_id.pp t.node_id Node.pp dst;
pf out "xchg to send:\n%a\n" View.pp xchg;
flush stdout;
let%lwt _ = Lwt_io.write_value t.out_chan xchg in
Lwt.return_unit
let gossip_recvd _t _src recvd _view =
Lwt.return recvd
let view_updated t node view =
pf out "%a # VIEW_UPDATED of node %a\n%a\n" Node_id.pp t.node_id Node.pp node View.pp view;
flush stdout;
Lwt.return_unit
let get_xview _t =
View.empty
end
module Vicinity_lwt =
P2p_vicinity_lwt.Make (Node_id) (Node) (View) (Vicinity) (Io)
let rec read_chan ch vc node rnode =
let%lwt recvd = Lwt_io.read_value ch in
pf out "%a # READ_CHAN\n" Node_id.pp (Node.id node);
pf out "recvd:\n%a\n" View.pp recvd;
flush stdout;
let%lwt view = Vicinity_lwt.respond vc rnode recvd in
pf out "recvd:\n%a\n" View.pp recvd;
pf out "view:\n%a\n" View.pp view;
flush stdout;
read_chan ch vc node rnode
let _ = Nocrypto_entropy_lwt.initialize ()
let () =
let view_len = 8 in
let xchg_len = 4 in
let period = 1.0 in
let (in_ch1, out_ch2) = Lwt_io.pipe () in
let (in_ch2, out_ch1) = Lwt_io.pipe () in
let node1 = Node.init (u64 100L) in
let io1 = Io.init (Node.id node1) in_ch1 out_ch1 in
let view1 =
View.add (Node.init (u64 110L))
(View.add (Node.init (u64 120L))
(View.add (Node.init (u64 130L))
(View.add (Node.init (u64 140L))
(View.add (Node.init (u64 150L))
(View.add (Node.init (u64 160L))
(View.add (Node.init (u64 170L))
View.empty)))))) in
let vc1 = Vicinity_lwt.init ~me:node1 ~view:view1 ~view_len ~xchg_len ~period ~io:io1 in
let node2 = Node.init (u64 200L) in
let io2 = Io.init (Node.id node2) in_ch2 out_ch2 in
let view2 =
View.add (Node.init (u64 210L))
(View.add (Node.init (u64 220L))
(View.add (Node.init (u64 230L))
(View.add (Node.init (u64 240L))
(View.add (Node.init (u64 250L))
(View.add (Node.init (u64 260L))
(View.add (Node.init (u64 270L))
View.empty)))))) in
let vc2 = Vicinity_lwt.init ~me:node2 ~view:view2 ~view_len ~xchg_len ~period ~io:io2 in
let timeout = Lwt_unix.sleep 5.5 in
Lwt_main.run @@
Lwt.choose [ Vicinity_lwt.run vc1;
Vicinity_lwt.run vc2;
read_chan in_ch1 vc1 node1 node2;
read_chan in_ch2 vc2 node2 node1;
Lwt.map (fun () -> Vicinity_lwt.shutdown vc1;
Vicinity_lwt.shutdown vc2) timeout ]
|
db9399e38ac478b4db523b195b6459b414c19742c61831cdf6d1c306ee7e80b5 | inaka/elvis_core | pass_atom_naming_convention_elvis_attr.erl | -module(pass_atom_naming_convention_elvis_attr).
-export([for_test/0]).
-elvis([{elvis_style, atom_naming_convention, #{ regex => "^[a-zA-Z\_]+$",
enclosed_atoms => "^[a-zA-Z\_0-9' \-\\\\]+$" }},
{elvis_text_style, line_length, #{limit => 100}}]).
%% Returns a sequence of deliberately unconventional atoms: this module is
%% a positive fixture for elvis' atom_naming_convention check, so every
%% atom below must be ACCEPTED by the relaxed regexes declared in the
%% -elvis attribute above (enclosed atoms may contain digits, quotes,
%% spaces, dashes and backslashes). Do not "fix" these atoms.
for_test() ->
    this_is_not_an_OK_atom,
    'and_neither-is_this',
    'or_THIS',
    '1_of_us_is_wrong',
    '\' this nasty atom\'',
    '\'',
    '\'\'',
    '\'startswithbacktick',
    'backtick\'inside',
    'backtick at the end\''.
| null | https://raw.githubusercontent.com/inaka/elvis_core/468bd3498f1782fd74ef3d8eb1b36217b0b76c11/test/examples/pass_atom_naming_convention_elvis_attr.erl | erlang | -module(pass_atom_naming_convention_elvis_attr).
-export([for_test/0]).
-elvis([{elvis_style, atom_naming_convention, #{ regex => "^[a-zA-Z\_]+$",
enclosed_atoms => "^[a-zA-Z\_0-9' \-\\\\]+$" }},
{elvis_text_style, line_length, #{limit => 100}}]).
for_test() ->
this_is_not_an_OK_atom,
'and_neither-is_this',
'or_THIS',
'1_of_us_is_wrong',
'\' this nasty atom\'',
'\'',
'\'\'',
'\'startswithbacktick',
'backtick\'inside',
'backtick at the end\''.
|
|
ba17e23cf1f00618a35b974abdd12b7c8995ca435d15f8a0ee63847c2baa38d0 | project-oak/hafnium-verification | CiOSVersionNumbers.ml |
 * Copyright (c) Facebook, Inc. and its affiliates.
 *
 * This source code is licensed under the MIT license found in the
 * LICENSE file in the root directory of this source tree.
*)
open! IStd
(* version macros like kCFCoreFoundationVersionNumber_iOS_9_0 are
   tied to specific float values, e.g. 1240.1.
   To be found in CoreFoundation/CFBase.h *)
(* the raw CoreFoundation version constant, e.g. 1240.1 *)
type machine_readable_version = float

(* the user-facing iOS version string, e.g. "9.0" *)
type human_readable_version = string

type t = machine_readable_version * human_readable_version
(* kCFCoreFoundationVersionNumber constants paired with the iOS release
   they ship with; consumed (after [sort_versions]) by [version_of], so
   this literal does not itself need to be kept sorted. *)
let version_numbers : t list =
  [ (478.23, "2.0")
  ; (478.26, "2.1")
  ; (478.29, "2.2")
  ; (478.47, "3.0")
  ; (478.52, "3.1")
  ; (478.61, "3.2")
  ; (550.32, "4.0")
  ; (550.38, "4.1")
  ; (550.52, "4.3")
  ; (675.00, "5.0")
  ; (690.10, "5.1")
  ; (793.00, "6.1")
  ; (847.20, "7.0")
  ; (847.24, "7.1")
  ; (1140.1, "8.0")
  ; (1141.14, "8.1")
  ; (1142.16, "8.2")
  ; (1144.17, "8.3")
  ; (1145.15, "8.4")
  ; (1240.1, "9.0")
  ; (1241.11, "9.1")
  ; (1242.13, "9.3")
  ; (1280.38, "9.4")
  ; (1348.0, "10.0")
  ; (1348.22, "10.2") ]
(* Sort (cf_version, ios_version) pairs by ascending CF version number. *)
let sort_versions versions =
  List.sort versions ~compare:(fun (v1, _) (v2, _) -> Float.compare v1 v2)
(* Map a CoreFoundation version number (given as a string) to the earliest
   iOS release it can correspond to:
   - within [epsilon] of a known constant -> that constant's release;
   - strictly between two known constants -> the NEXT release (a CF value
     above constant X implies at least the following iOS release);
   - at or beyond the last known constant -> the last release;
   - unparsable input, or a number below the first constant -> [None]. *)
let version_of number_s : human_readable_version option =
  let epsilon = 0.001 in
  let rec version_of_aux version_numbers number =
    match version_numbers with
    | (version_n, version_s) :: (next_version_n, next_version_s) :: rest ->
        (* |number - version_n| < epsilon: fuzzy exact match *)
        if number -. version_n < epsilon && number -. version_n > ~-.epsilon then Some version_s
        else if number >= version_n +. epsilon && number <= next_version_n -. epsilon then
          Some next_version_s
        else version_of_aux ((next_version_n, next_version_s) :: rest) number
    | [(version_n, version_s)] ->
        if number >= version_n then Some version_s else None
    | [] ->
        None
  in
  let number_opt = try Some (float_of_string number_s) with Failure _ -> None in
  match number_opt with
  | None ->
      None
  | Some number ->
      version_of_aux (sort_versions version_numbers) number
(* Pretty-print an expected/actual pair of optional version strings,
   rendering a missing side as the empty string. *)
let pp_diff_of_version_opt fmt (expected, actual) =
  let to_string = function Some s -> s | None -> "" in
  Format.fprintf fmt "Expected: [%s] Found: [%s]" (to_string expected) (to_string actual)
| null | https://raw.githubusercontent.com/project-oak/hafnium-verification/6071eff162148e4d25a0fedaea003addac242ace/experiments/ownership-inference/infer/infer/src/al/CiOSVersionNumbers.ml | ocaml |
* Copyright ( c ) Facebook , Inc. and its affiliates .
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree .
* Copyright (c) Facebook, Inc. and its affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*)
open! IStd
version macros like kCFCoreFoundationVersionNumber_iOS_9_0 are
tied to specific float values , e.g. 1240.1 .
To be found in CoreFoundation / CFBase.h
tied to specific float values, e.g. 1240.1.
To be found in CoreFoundation/CFBase.h *)
type machine_readable_version = float
type human_readable_version = string
type t = machine_readable_version * human_readable_version
let version_numbers : t list =
[ (478.23, "2.0")
; (478.26, "2.1")
; (478.29, "2.2")
; (478.47, "3.0")
; (478.52, "3.1")
; (478.61, "3.2")
; (550.32, "4.0")
; (550.38, "4.1")
; (550.52, "4.3")
; (675.00, "5.0")
; (690.10, "5.1")
; (793.00, "6.1")
; (847.20, "7.0")
; (847.24, "7.1")
; (1140.1, "8.0")
; (1141.14, "8.1")
; (1142.16, "8.2")
; (1144.17, "8.3")
; (1145.15, "8.4")
; (1240.1, "9.0")
; (1241.11, "9.1")
; (1242.13, "9.3")
; (1280.38, "9.4")
; (1348.0, "10.0")
; (1348.22, "10.2") ]
let sort_versions versions =
let compare (version_float1, _) (version_float2, _) =
Float.compare version_float1 version_float2
in
List.sort ~compare versions
let version_of number_s : human_readable_version option =
let epsilon = 0.001 in
let rec version_of_aux version_numbers number =
match version_numbers with
| (version_n, version_s) :: (next_version_n, next_version_s) :: rest ->
if number -. version_n < epsilon && number -. version_n > ~-.epsilon then Some version_s
else if number >= version_n +. epsilon && number <= next_version_n -. epsilon then
Some next_version_s
else version_of_aux ((next_version_n, next_version_s) :: rest) number
| [(version_n, version_s)] ->
if number >= version_n then Some version_s else None
| [] ->
None
in
let number_opt = try Some (float_of_string number_s) with Failure _ -> None in
match number_opt with
| None ->
None
| Some number ->
version_of_aux (sort_versions version_numbers) number
let pp_diff_of_version_opt fmt (expected, actual) =
let option_to_string opt = Option.value ~default:"" opt in
Format.fprintf fmt "Expected: [%s] Found: [%s]" (option_to_string expected)
(option_to_string actual)
|
|
140adf8d3c0ddd996d5a4bd55abefc5edadd30dfed7043a4d349558d8db5dd1a | VERIMAG-Polyhedra/VPL | Misc.mli | (** This module defines several generic functions to handle lists or strings *)
(** [sublist l i j] returns the sublist of [l] starting at index [i] and ending at index [j-1] *)
val sublist : 'a list -> int -> int -> 'a list
(** [index s c] returns the index of the character [c] in the string [s] if it exists, -1 otherwise *)
val index : string -> char -> int
(** [substring s i] returns the substring of [s] starting at index [i] *)
val substring : string -> int -> string
* [ findi p l ] returns the index of the first element in [ l ] that satisfies predicate [ p ] .
@raise Not_found if no such element exists
@raise Not_found if no such element exists *)
val findi : ('a -> bool) -> 'a list -> int
val array_findi : (int -> 'a -> bool) -> 'a array -> int
val find_res : ('a -> (bool * 'a)) -> 'a list -> 'a
(** [popi l i] returns the list [l] where the [ith] element has been removed.
If i < 0 or i > len(l), it returns l *)
val popi : 'a list -> int -> 'a list
* [ pop eq l x ] returns the list [ l ] where the first element equal to [ x ] ( { i w.r.t } equality function [ eq ] ) is removed
val pop : ('a -> 'a -> bool) -> 'a list -> 'a -> 'a list
(** [pop eq l x] returns the list [l] where all elements equal to [x] ({i w.r.t} equality function [eq]) are removed *)
val popAll : ('a -> 'a -> bool) -> 'a list -> 'a -> 'a list
* [ list_to_string to_string l sep ] returns the string [ to_string l(0 ) ^ sep ^ ... ^ sep ^ to_string l(len(l)-1 ) ]
val list_to_string : ('a -> string) -> 'a list -> string -> string
* [ to_string l sep ] returns the string [ to_string l(0 ) ^ sep ^ ... ^ sep ^ to_string l(len(l)-1 ) ]
val array_to_string : ('a -> string) -> 'a array -> string -> string
(** [list_eq contains l1 l2] returns true if [l1] and [l2] contain the same elements.
[contains x l] returns [true] if x belongs to [l], [false] otherwise *)
val list_eq : ('a -> 'a list -> bool) -> 'a list -> 'a list -> bool
val list_eq2 : ('a -> 'b -> bool) -> 'a list -> 'b list -> bool
(** [range a b] returns the list of consecutive integers from [a] to [b]-1 *)
val range : int -> int -> int list
* [ cmp l ] returns the maximum element of [ l ] { i w.r.t } the comparison function [ cmp ]
@raise Invalid_argument if the input list is empty
@raise Invalid_argument if the input list is empty *)
val max : ('a -> 'a -> int) -> 'a list -> 'a
(** [maxi cmp l] returns the index of the maximum element of [l] {i w.r.t} the comparison function [cmp]
@raise Invalid_argument if the input list is empty *)
val maxi : ('a -> 'a -> int) -> 'a list -> int
(** [min cmp l] returns the minimum element of [l] {i w.r.t} the comparison function [cmp]
@raise Invalid_argument if the input list is empty *)
val min : ('a -> 'a -> int) -> 'a list -> 'a
(** [rem_dupl eq l] removes the multiple occurences of elements in [l], {i w.r.t} equality function [eq] *)
val rem_dupl : ('a -> 'a -> bool) -> 'a list -> 'a list
(** [intersection eq l1 l2] returns the list of elements that are both in [l1] and [l2], {i w.r.t} equality function [eq] *)
val intersection : ('a -> 'a -> bool) -> 'a list -> 'a list -> 'a list
(** [sublist l1 l2] returns [true] if [l1] is a sublist of [l2], [false] otherwise *)
val is_sublist : 'a list -> 'a list -> bool
val map2i : (int -> 'a -> 'b -> 'c) -> 'a list -> 'b list -> 'c list
* [ filter_i f l i ] returns the [ i ] first elements of [ l ] that are true through [ f ]
val filter_i: ('a -> bool) -> 'a list -> int -> 'a list * 'a list
(** [string_repeat s i] returns a string composed of [i] times [s] *)
val string_repeat : string -> int -> string
(** [add_tab i s] adds [i] tabulations after each '\n' in [s]. *)
val add_tab : int -> string -> string
val fold_left_i : (int -> 'a -> 'b -> 'a) -> 'a -> 'b list -> 'a
val fold_right_i : (int -> 'a -> 'b -> 'b) -> 'a list -> 'b -> 'b
val array_fold_left_i : (int -> 'a -> 'b -> 'a) -> 'a -> 'b array -> 'a
val string_equal : string -> string -> bool
* Builds a list from a length and an initialization function .
@param len the list length
@param f an initialization function : index [ i ] is initialized as [ f i ]
@param raise Invalid_argument if [ len ] < 1
@param len the list length
@param f an initialization function: index [i] is initialized as [f i]
@param raise Invalid_argument if [len] < 1 *)
val init_list : int -> (int -> 'a) -> 'a list
val subtract : 'a list -> 'a list -> 'a list
| null | https://raw.githubusercontent.com/VERIMAG-Polyhedra/VPL/cd78d6e7d120508fd5a694bdb01300477e5646f8/ocaml/misc/Misc.mli | ocaml | * This module defines several generic functions to handle lists or strings
* [sublist l i j] returns the sublist of [l] starting at index [i] and ending at index [j-1]
* [index s c] returns the index of the character [c] in the string [s] if it exists, -1 otherwise
* [substring s i] returns the substring of [s] starting at index [i]
* [popi l i] returns the list [l] where the [ith] element has been removed.
If i < 0 or i > len(l), it returns l
* [pop eq l x] returns the list [l] where all elements equal to [x] ({i w.r.t} equality function [eq]) are removed
* [list_eq contains l1 l2] returns true if [l1] and [l2] contain the same elements.
[contains x l] returns [true] if x belongs to [l], [false] otherwise
* [range a b] returns the list of consecutive integers from [a] to [b]-1
* [maxi cmp l] returns the index of the maximum element of [l] {i w.r.t} the comparison function [cmp]
@raise Invalid_argument if the input list is empty
* [min cmp l] returns the minimum element of [l] {i w.r.t} the comparison function [cmp]
@raise Invalid_argument if the input list is empty
* [rem_dupl eq l] removes the multiple occurences of elements in [l], {i w.r.t} equality function [eq]
* [intersection eq l1 l2] returns the list of elements that are both in [l1] and [l2], {i w.r.t} equality function [eq]
* [sublist l1 l2] returns [true] if [l1] is a sublist of [l2], [false] otherwise
* [string_repeat s i] returns a string composed of [i] times [s]
* [add_tab i s] adds [i] tabulations after each '\n' in [s]. |
val sublist : 'a list -> int -> int -> 'a list
val index : string -> char -> int
val substring : string -> int -> string
* [ findi p l ] returns the index of the first element in [ l ] that satisfies predicate [ p ] .
@raise Not_found if no such element exists
@raise Not_found if no such element exists *)
val findi : ('a -> bool) -> 'a list -> int
val array_findi : (int -> 'a -> bool) -> 'a array -> int
val find_res : ('a -> (bool * 'a)) -> 'a list -> 'a
val popi : 'a list -> int -> 'a list
* [ pop eq l x ] returns the list [ l ] where the first element equal to [ x ] ( { i w.r.t } equality function [ eq ] ) is removed
val pop : ('a -> 'a -> bool) -> 'a list -> 'a -> 'a list
val popAll : ('a -> 'a -> bool) -> 'a list -> 'a -> 'a list
* [ list_to_string to_string l sep ] returns the string [ to_string l(0 ) ^ sep ^ ... ^ sep ^ to_string l(len(l)-1 ) ]
val list_to_string : ('a -> string) -> 'a list -> string -> string
* [ to_string l sep ] returns the string [ to_string l(0 ) ^ sep ^ ... ^ sep ^ to_string l(len(l)-1 ) ]
val array_to_string : ('a -> string) -> 'a array -> string -> string
val list_eq : ('a -> 'a list -> bool) -> 'a list -> 'a list -> bool
val list_eq2 : ('a -> 'b -> bool) -> 'a list -> 'b list -> bool
val range : int -> int -> int list
* [ cmp l ] returns the maximum element of [ l ] { i w.r.t } the comparison function [ cmp ]
@raise Invalid_argument if the input list is empty
@raise Invalid_argument if the input list is empty *)
val max : ('a -> 'a -> int) -> 'a list -> 'a
val maxi : ('a -> 'a -> int) -> 'a list -> int
val min : ('a -> 'a -> int) -> 'a list -> 'a
val rem_dupl : ('a -> 'a -> bool) -> 'a list -> 'a list
val intersection : ('a -> 'a -> bool) -> 'a list -> 'a list -> 'a list
val is_sublist : 'a list -> 'a list -> bool
val map2i : (int -> 'a -> 'b -> 'c) -> 'a list -> 'b list -> 'c list
* [ filter_i f l i ] returns the [ i ] first elements of [ l ] that are true through [ f ]
val filter_i: ('a -> bool) -> 'a list -> int -> 'a list * 'a list
val string_repeat : string -> int -> string
val add_tab : int -> string -> string
val fold_left_i : (int -> 'a -> 'b -> 'a) -> 'a -> 'b list -> 'a
val fold_right_i : (int -> 'a -> 'b -> 'b) -> 'a list -> 'b -> 'b
val array_fold_left_i : (int -> 'a -> 'b -> 'a) -> 'a -> 'b array -> 'a
val string_equal : string -> string -> bool
* Builds a list from a length and an initialization function .
@param len the list length
@param f an initialization function : index [ i ] is initialized as [ f i ]
@param raise Invalid_argument if [ len ] < 1
@param len the list length
@param f an initialization function: index [i] is initialized as [f i]
@param raise Invalid_argument if [len] < 1 *)
val init_list : int -> (int -> 'a) -> 'a list
val subtract : 'a list -> 'a list -> 'a list
|
a6cf9985f23f3cb97395552c3884f05d0a0c5ef7abf9b64cb07393cea4732f16 | onedata/op-worker | trash_test_SUITE.erl | %%%-------------------------------------------------------------------
@author
( C ) 2020 ACK CYFRONET AGH
This software is released under the MIT license
cited in ' LICENSE.txt ' .
%%% @end
%%%-------------------------------------------------------------------
%%% @doc
%%% Tests of trash.
%%% @end
%%%-------------------------------------------------------------------
-module(trash_test_SUITE).
-author("Jakub Kudzia").
-include("permissions_test.hrl").
-include("modules/fslogic/fslogic_common.hrl").
-include("distribution_assert.hrl").
-include_lib("onenv_ct/include/oct_background.hrl").
-include_lib("ctool/include/errors.hrl").
-include_lib("ctool/include/http/headers.hrl").
-include_lib("ctool/include/http/codes.hrl").
-include_lib("ctool/include/test/assertions.hrl").
-include_lib("ctool/include/test/performance.hrl").
-include_lib("ctool/include/test/test_utils.hrl").
%% exported for CT
-export([all/0, init_per_suite/1, end_per_suite/1, init_per_testcase/2, end_per_testcase/2]).
%% tests
-export([
trash_dir_should_exist/1,
create_dir_with_trash_dir_name_is_forbidden/1,
create_file_with_trash_dir_name_is_forbidden/1,
remove_trash_dir_is_forbidden/1,
rename_trash_dir_is_forbidden/1,
rename_other_dir_to_trash_dir_is_forbidden/1,
chmod_on_trash_dir_is_forbidden/1,
set_xattr_on_trash_dir_is_forbidden/1,
remove_xattr_on_trash_dir_is_forbidden/1,
set_acl_on_trash_dir_is_forbidden/1,
remove_acl_on_trash_dir_is_forbidden/1,
set_metadata_on_trash_dir_is_forbidden/1,
set_cdmi_metadata_on_trash_dir_is_forbidden/1,
create_share_from_trash_dir_is_forbidden/1,
add_qos_entry_for_trash_dir_is_forbidden/1,
remove_metadata_on_trash_dir_is_forbidden/1,
schedule_replication_transfer_on_trash_dir_is_forbidden/1,
schedule_eviction_transfer_on_trash_dir_is_allowed/1,
schedule_migration_transfer_on_trash_dir_is_forbidden/1,
schedule_replication_transfer_on_space_does_not_replicate_trash/1,
schedule_eviction_transfer_on_space_evicts_trash/1,
schedule_migration_transfer_on_space_does_not_replicate_trash/1,
move_to_trash_should_work/1,
move_to_trash_should_fail_if_user_does_not_have_sufficient_perms/1,
move_to_trash_should_fail_if_required_acl_perm_is_missing/1,
move_to_trash_and_schedule_deletion_should_work/1,
qos_set_on_file_does_not_affect_file_in_trash/1,
qos_set_on_parent_directory_does_not_affect_files_in_trash/1,
qos_set_on_space_directory_does_not_affect_files_in_trash/1,
files_from_trash_are_not_reimported/1,
deletion_lasting_for_4_days_should_succeed/1,
deletion_lasting_for_40_days_should_succeed/1,
deletion_lasting_for_40_days_should_fail_if_session_is_not_refreshed_within_expected_time/1
]).
all() -> ?ALL([
trash_dir_should_exist,
create_dir_with_trash_dir_name_is_forbidden,
create_file_with_trash_dir_name_is_forbidden,
remove_trash_dir_is_forbidden,
rename_trash_dir_is_forbidden,
rename_other_dir_to_trash_dir_is_forbidden,
chmod_on_trash_dir_is_forbidden,
set_xattr_on_trash_dir_is_forbidden,
remove_xattr_on_trash_dir_is_forbidden,
set_acl_on_trash_dir_is_forbidden,
remove_acl_on_trash_dir_is_forbidden,
set_metadata_on_trash_dir_is_forbidden,
set_cdmi_metadata_on_trash_dir_is_forbidden,
create_share_from_trash_dir_is_forbidden,
add_qos_entry_for_trash_dir_is_forbidden,
remove_metadata_on_trash_dir_is_forbidden,
schedule_replication_transfer_on_trash_dir_is_forbidden,
schedule_eviction_transfer_on_trash_dir_is_allowed,
schedule_migration_transfer_on_trash_dir_is_forbidden,
schedule_replication_transfer_on_space_does_not_replicate_trash,
schedule_eviction_transfer_on_space_evicts_trash,
schedule_migration_transfer_on_space_does_not_replicate_trash,
move_to_trash_should_work,
move_to_trash_should_fail_if_user_does_not_have_sufficient_perms,
move_to_trash_should_fail_if_required_acl_perm_is_missing,
move_to_trash_and_schedule_deletion_should_work,
qos_set_on_file_does_not_affect_file_in_trash,
qos_set_on_parent_directory_does_not_affect_files_in_trash,
qos_set_on_space_directory_does_not_affect_files_in_trash,
files_from_trash_are_not_reimported,
deletion_lasting_for_4_days_should_succeed,
deletion_lasting_for_40_days_should_succeed
TODO VFS-7348 this test should pass when deletion is scheduled as user not by root
% deletion_lasting_for_10_days_should_fail_if_session_is_not_refreshed_within_expected_time
]).
-define(SPACE1_PLACEHOLDER, space1).
-define(SPACE_ID1, oct_background:get_space_id(?SPACE1_PLACEHOLDER)).
-define(SPACE_NAME, oct_background:get_space_name(?SPACE1_PLACEHOLDER)).
-define(SPACE2_PLACEHOLDER, space2).
-define(SPACE_ID2, oct_background:get_space_id(?SPACE2_PLACEHOLDER)).
-define(SPACE_NAME2, oct_background:get_space_name(?SPACE2_PLACEHOLDER)).
-define(SPACE_UUID, ?SPACE_UUID(?SPACE_ID1)).
-define(SPACE_UUID(SpaceId), fslogic_file_id:spaceid_to_space_dir_uuid(SpaceId)).
-define(SPACE_GUID, ?SPACE_GUID(?SPACE_ID1)).
-define(SPACE_GUID(SpaceId), fslogic_file_id:spaceid_to_space_dir_guid(SpaceId)).
-define(TRASH_DIR_GUID(SpaceId), fslogic_file_id:spaceid_to_trash_dir_guid(SpaceId)).
-define(ATTEMPTS, 300).
-define(RAND_NAME(Prefix), <<Prefix/binary, (integer_to_binary(rand:uniform(1000)))/binary>>).
-define(RAND_DIR_NAME, ?RAND_NAME(<<"dir_">>)).
-define(RAND_FILE_NAME, ?RAND_NAME(<<"file_">>)).
%%%===================================================================
%%% Test functions
%%%===================================================================
trash_dir_should_exist(_Config) ->
[P1Node] = oct_background:get_provider_nodes(krakow),
[P2Node] = oct_background:get_provider_nodes(paris),
UserSessIdP1 = oct_background:get_user_session_id(user1, krakow),
UserSessIdP2 = oct_background:get_user_session_id(user1, paris),
TODO VFS-7064 uncomment after introducing links to trash directory
%% % trash dir should be visible in the space on both providers
%% ?assertMatch({ok, [{_, ?TRASH_DIR_NAME}]},
lfm_proxy : , UserSessIdP1 , ? FILE_REF(?SPACE_GUID ) , 0 , 10 ) ) ,
%% ?assertMatch({ok, [{_, ?TRASH_DIR_NAME}]},
lfm_proxy : get_children(P2Node , UserSessIdP2 , ? FILE_REF(?SPACE_GUID ) , 0 , 10 ) ) ,
% trash dir should be empty
?assertMatch({ok, #file_attr{name = ?TRASH_DIR_NAME}},
lfm_proxy:stat(P1Node, UserSessIdP1, ?FILE_REF(?TRASH_DIR_GUID(?SPACE_ID1)))),
?assertMatch({ok, #file_attr{name = ?TRASH_DIR_NAME}},
lfm_proxy:stat(P2Node, UserSessIdP2, ?FILE_REF(?TRASH_DIR_GUID(?SPACE_ID1)))),
?assertMatch({ok, []}, lfm_proxy:get_children(P1Node, UserSessIdP1, ?FILE_REF(?TRASH_DIR_GUID(?SPACE_ID1)), 0, 10)),
?assertMatch({ok, []}, lfm_proxy:get_children(P2Node, UserSessIdP2, ?FILE_REF(?TRASH_DIR_GUID(?SPACE_ID1)), 0, 10)).
create_dir_with_trash_dir_name_is_forbidden(_Config) ->
[P1Node] = oct_background:get_provider_nodes(krakow),
UserSessIdP1 = oct_background:get_user_session_id(user1, krakow),
TODO VFS-7064 change this error to EEXIST after adding link from space to trash directory
?assertMatch({error, ?EPERM},
lfm_proxy:mkdir(P1Node, UserSessIdP1, ?SPACE_GUID, ?TRASH_DIR_NAME, ?DEFAULT_DIR_PERMS)).
create_file_with_trash_dir_name_is_forbidden(_Config) ->
[P1Node] = oct_background:get_provider_nodes(krakow),
UserSessIdP1 = oct_background:get_user_session_id(user1, krakow),
TODO VFS-7064 change this error to EEXIST after adding link from space to trash directory
?assertMatch({error, ?EPERM},
lfm_proxy:create(P1Node, UserSessIdP1, ?SPACE_GUID, ?TRASH_DIR_NAME, ?DEFAULT_FILE_PERMS)).
remove_trash_dir_is_forbidden(_Config) ->
[P1Node] = oct_background:get_provider_nodes(krakow),
UserSessIdP1 = oct_background:get_user_session_id(user1, krakow),
?assertMatch({error, ?EPERM},
lfm_proxy:rm_recursive(P1Node, UserSessIdP1, ?FILE_REF(?TRASH_DIR_GUID(?SPACE_ID1)))),
?assertMatch({error, ?EPERM},
lfm_proxy:unlink(P1Node, UserSessIdP1, ?FILE_REF(?TRASH_DIR_GUID(?SPACE_ID1)))),
?assertMatch({error, ?EPERM},
lfm_proxy:unlink(P1Node, UserSessIdP1, ?FILE_REF(?TRASH_DIR_GUID(?SPACE_ID1)))).
rename_trash_dir_is_forbidden(_Config) ->
[P1Node] = oct_background:get_provider_nodes(krakow),
UserSessIdP1 = oct_background:get_user_session_id(user1, krakow),
TargetPath = filename:join([?DIRECTORY_SEPARATOR, ?SPACE_NAME, <<"other_trash_name">>]),
?assertMatch({error, ?EPERM},
lfm_proxy:mv(P1Node, UserSessIdP1, ?FILE_REF(?TRASH_DIR_GUID(?SPACE_ID1)), TargetPath)).
rename_other_dir_to_trash_dir_is_forbidden(_Config) ->
[P1Node] = oct_background:get_provider_nodes(krakow),
UserSessIdP1 = oct_background:get_user_session_id(user1, krakow),
DirName = ?RAND_DIR_NAME,
{ok, DirGuid} = lfm_proxy:mkdir(P1Node, UserSessIdP1, ?SPACE_GUID, DirName, ?DEFAULT_DIR_PERMS),
?assertMatch({error, ?EPERM},
lfm_proxy:mv(P1Node, UserSessIdP1, ?FILE_REF(DirGuid), filename:join([?SPACE_NAME, ?TRASH_DIR_NAME]))).
chmod_on_trash_dir_is_forbidden(_Config) ->
[P1Node] = oct_background:get_provider_nodes(krakow),
UserSessIdP1 = oct_background:get_user_session_id(user1, krakow),
?assertMatch({error, ?EPERM},
lfm_proxy:set_perms(P1Node, UserSessIdP1, ?FILE_REF(?TRASH_DIR_GUID(?SPACE_ID1)), 8#777)).
set_xattr_on_trash_dir_is_forbidden(_Config) ->
[P1Node] = oct_background:get_provider_nodes(krakow),
UserSessIdP1 = oct_background:get_user_session_id(user1, krakow),
?assertMatch({error, ?EPERM},
lfm_proxy:set_xattr(P1Node, UserSessIdP1, ?FILE_REF(?TRASH_DIR_GUID(?SPACE_ID1)), #{<<"key">> => <<"value">>})).
remove_xattr_on_trash_dir_is_forbidden(_Config) ->
[P1Node] = oct_background:get_provider_nodes(krakow),
UserSessIdP1 = oct_background:get_user_session_id(user1, krakow),
?assertMatch({error, ?EPERM},
lfm_proxy:remove_xattr(P1Node, UserSessIdP1, ?FILE_REF(?TRASH_DIR_GUID(?SPACE_ID1)), <<"key">>)).
set_acl_on_trash_dir_is_forbidden(_Config) ->
[P1Node] = oct_background:get_provider_nodes(krakow),
UserSessIdP1 = oct_background:get_user_session_id(user1, krakow),
?assertMatch({error, ?EPERM},
lfm_proxy:set_acl(P1Node, UserSessIdP1, ?FILE_REF(?TRASH_DIR_GUID(?SPACE_ID1)), [])).
remove_acl_on_trash_dir_is_forbidden(_Config) ->
[P1Node] = oct_background:get_provider_nodes(krakow),
UserSessIdP1 = oct_background:get_user_session_id(user1, krakow),
?assertMatch({error, ?EPERM},
lfm_proxy:remove_acl(P1Node, UserSessIdP1, ?FILE_REF(?TRASH_DIR_GUID(?SPACE_ID1)))).
set_metadata_on_trash_dir_is_forbidden(_Config) ->
[P1Node] = oct_background:get_provider_nodes(krakow),
UserSessIdP1 = oct_background:get_user_session_id(user1, krakow),
JSON = #{<<"key">> => <<"value">>},
?assertMatch(?ERROR_POSIX(?EPERM),
opt_file_metadata:set_custom_metadata(P1Node, UserSessIdP1, ?FILE_REF(?TRASH_DIR_GUID(?SPACE_ID1)), json, JSON, [])).
set_cdmi_metadata_on_trash_dir_is_forbidden(_Config) ->
[P1Node] = oct_background:get_provider_nodes(krakow),
UserSessIdP1 = oct_background:get_user_session_id(user1, krakow),
?assertMatch(?ERROR_POSIX(?EPERM),
opt_cdmi:set_mimetype(P1Node, UserSessIdP1, ?FILE_REF(?TRASH_DIR_GUID(?SPACE_ID1)), <<"mimetype">>)),
?assertMatch(?ERROR_POSIX(?EPERM),
opt_cdmi:set_cdmi_completion_status(P1Node, UserSessIdP1, ?FILE_REF(?TRASH_DIR_GUID(?SPACE_ID1)), <<"COMPLETED">>)),
?assertMatch(?ERROR_POSIX(?EPERM),
opt_cdmi:set_transfer_encoding(P1Node, UserSessIdP1, ?FILE_REF(?TRASH_DIR_GUID(?SPACE_ID1)), <<"base64">>)).
create_share_from_trash_dir_is_forbidden(_Config) ->
[P1Node] = oct_background:get_provider_nodes(krakow),
UserSessIdP1 = oct_background:get_user_session_id(user1, krakow),
?assertMatch(?ERROR_POSIX(?EPERM),
opt_shares:create(P1Node, UserSessIdP1, ?FILE_REF(?TRASH_DIR_GUID(?SPACE_ID1)), <<"MY SHARE">>)).
add_qos_entry_for_trash_dir_is_forbidden(_Config) ->
[P1Node] = oct_background:get_provider_nodes(krakow),
UserSessIdP1 = oct_background:get_user_session_id(user1, krakow),
?assertMatch(?ERROR_POSIX(?EPERM),
opt_qos:add_qos_entry(P1Node, UserSessIdP1, ?FILE_REF(?TRASH_DIR_GUID(?SPACE_ID1)), <<"key=value">>, 1)).
remove_metadata_on_trash_dir_is_forbidden(_Config) ->
[P1Node] = oct_background:get_provider_nodes(krakow),
UserSessIdP1 = oct_background:get_user_session_id(user1, krakow),
?assertMatch(?ERROR_POSIX(?EPERM),
opt_file_metadata:remove_custom_metadata(P1Node, UserSessIdP1, ?FILE_REF(?TRASH_DIR_GUID(?SPACE_ID1)), json)).
schedule_replication_transfer_on_trash_dir_is_forbidden(_Config) ->
[P1Node] = oct_background:get_provider_nodes(krakow),
UserSessIdP1 = oct_background:get_user_session_id(user1, krakow),
P2Id = oct_background:get_provider_id(paris),
?assertMatch(?ERROR_POSIX(?EPERM),
opt_transfers:schedule_file_replication(P1Node, UserSessIdP1, ?FILE_REF(?TRASH_DIR_GUID(?SPACE_ID1)), P2Id)).
schedule_eviction_transfer_on_trash_dir_is_allowed(_Config) ->
[P1Node] = oct_background:get_provider_nodes(krakow),
UserSessIdP1 = oct_background:get_user_session_id(user1, krakow),
P1Id = oct_background:get_provider_id(krakow),
{ok, TransferId} = ?assertMatch({ok, _},
opt_transfers:schedule_file_replica_eviction(P1Node, UserSessIdP1, ?FILE_REF(?TRASH_DIR_GUID(?SPACE_ID1)), P1Id, undefined)),
?assertMatch({ok, #document{value = #transfer{eviction_status = completed}}},
rpc:call(P1Node, transfer, get, [TransferId]), ?ATTEMPTS).
schedule_migration_transfer_on_trash_dir_is_forbidden(_Config) ->
[P1Node] = oct_background:get_provider_nodes(krakow),
UserSessIdP1 = oct_background:get_user_session_id(user1, krakow),
P1Id = oct_background:get_provider_id(krakow),
P2Id = oct_background:get_provider_id(paris),
?assertMatch(?ERROR_POSIX(?EPERM),
opt_transfers:schedule_file_replica_eviction(P1Node, UserSessIdP1, ?FILE_REF(?TRASH_DIR_GUID(?SPACE_ID1)), P1Id, P2Id)).
schedule_replication_transfer_on_space_does_not_replicate_trash(_Config) ->
[P1Node] = oct_background:get_provider_nodes(krakow),
DirName = ?RAND_DIR_NAME,
UserSessIdP1 = oct_background:get_user_session_id(user1, krakow),
% create file and directory
{ok, DirGuid} = lfm_proxy:mkdir(P1Node, UserSessIdP1, ?SPACE_GUID, DirName, ?DEFAULT_DIR_PERMS),
lfm_test_utils:create_files_tree(P1Node, UserSessIdP1, [{10, 10}], DirGuid),
% move subtree to trash
ok = lfm_proxy:rm_recursive(P1Node, UserSessIdP1, ?FILE_REF(DirGuid)),
% wait till moving directory to trash is synchronized
?assertMatch({ok, [{DirGuid, _}]},
lfm_proxy:get_children(P1Node, UserSessIdP1, ?FILE_REF(?TRASH_DIR_GUID(?SPACE_ID1)), 0, 10), ?ATTEMPTS),
P2Id = oct_background:get_provider_id(paris),
{ok, TransferId} = ?assertMatch({ok, _},
opt_transfers:schedule_file_replication(P1Node, UserSessIdP1, ?FILE_REF(?SPACE_GUID), P2Id)),
?assertMatch({ok, #document{value = #transfer{
replication_status = completed,
files_replicated = 0
}}}, rpc:call(P1Node, transfer, get, [TransferId]), ?ATTEMPTS).
%% @doc Verifies that an eviction transfer scheduled on the whole space
%% evicts replicas of files in the space (eviction, unlike replication,
%% is expected to process files regardless of trash).
schedule_eviction_transfer_on_space_evicts_trash(_Config) ->
    [P1Node] = oct_background:get_provider_nodes(krakow),
    [P2Node] = oct_background:get_provider_nodes(paris),
    DirName = ?RAND_DIR_NAME,
    FileName = ?RAND_FILE_NAME,
    UserSessIdP1 = oct_background:get_user_session_id(user1, krakow),
    UserSessIdP2 = oct_background:get_user_session_id(user1, paris),
    % create file and directory
    TestData = <<"test data">>,
    Size = byte_size(TestData),
    {ok, DirGuid} = lfm_proxy:mkdir(P1Node, UserSessIdP1, ?SPACE_GUID, DirName, ?DEFAULT_DIR_PERMS),
    {ok, {FileGuid, H}} =
        ?assertMatch({ok, _}, lfm_proxy:create_and_open(P1Node, UserSessIdP1, DirGuid, FileName, ?DEFAULT_FILE_PERMS), ?ATTEMPTS),
    ?assertMatch({ok, _}, lfm_proxy:write(P1Node, H, 0, TestData), ?ATTEMPTS),
    lfm_proxy:close(P1Node, H),
    % read file on P2 to replicate it
    {ok, H2} =
        ?assertMatch({ok, _}, lfm_proxy:open(P2Node, UserSessIdP2, ?FILE_REF(FileGuid), read), ?ATTEMPTS),
    ?assertEqual(Size, try
        {ok, Bytes} = lfm_proxy:read(P2Node, H2, 0, Size),
        byte_size(Bytes)
    catch
        _:_ ->
            error
    end, ?ATTEMPTS),
    lfm_proxy:close(P2Node, H2),
    P1Id = oct_background:get_provider_id(krakow),
    P2Id = oct_background:get_provider_id(paris),
    ?assertDistribution(P1Node, UserSessIdP1, ?DISTS([P1Id, P2Id], [Size, Size]), FileGuid, ?ATTEMPTS),
    % Ensure that evicting provider has knowledge of remote provider blocks (through dbsync),
    % as otherwise it will skip eviction.
    % @TODO VFS-9498 not needed after replica_deletion uses fetched file location instead of dbsynced
    ?assertEqual({ok, [[0, Size]]},
        opt_file_metadata:get_local_knowledge_of_remote_provider_blocks(P1Node, FileGuid, P2Id), ?ATTEMPTS),
    % evict whole space
    {ok, TransferId} = ?assertMatch({ok, _},
        opt_transfers:schedule_file_replica_eviction(P1Node, UserSessIdP1, ?FILE_REF(?SPACE_GUID), P1Id, undefined)),
    ?assertMatch({ok, #document{value = #transfer{
        eviction_status = completed,
        files_evicted = 1
    }}}, rpc:call(P1Node, transfer, get, [TransferId]), ?ATTEMPTS),
    % the only replica left should be on P2
    ?assertDistribution(P1Node, UserSessIdP1, ?DISTS([P1Id, P2Id], [0, Size]), FileGuid, ?ATTEMPTS).
%% @doc Verifies that a migration transfer (replication + eviction) scheduled
%% on the whole space completes without replicating or evicting files that
%% were moved to trash.
schedule_migration_transfer_on_space_does_not_replicate_trash(_Config) ->
    [P1Node] = oct_background:get_provider_nodes(krakow),
    [P2Node] = oct_background:get_provider_nodes(paris),
    DirName = ?RAND_DIR_NAME,
    UserSessIdP1 = oct_background:get_user_session_id(user1, krakow),
    UserSessIdP2 = oct_background:get_user_session_id(user1, paris),
    % create file and directory
    {ok, DirGuid} = lfm_proxy:mkdir(P1Node, UserSessIdP1, ?SPACE_GUID, DirName, ?DEFAULT_DIR_PERMS),
    lfm_test_utils:create_files_tree(P1Node, UserSessIdP1, [{0, 10}], DirGuid),
    % move subtree to trash
    DirCtx = file_ctx:new_by_guid(DirGuid),
    move_to_trash(P1Node, DirCtx, UserSessIdP1),
    % wait till moving directory to trash is synchronized
    ?assertMatch({ok, [{DirGuid, _}]},
        lfm_proxy:get_children(P2Node, UserSessIdP2, ?FILE_REF(?TRASH_DIR_GUID(?SPACE_ID1)), 0, 10), ?ATTEMPTS),
    P1Id = oct_background:get_provider_id(krakow),
    P2Id = oct_background:get_provider_id(paris),
    {ok, TransferId} = ?assertMatch({ok, _},
        opt_transfers:schedule_file_replica_eviction(P1Node, UserSessIdP1, ?FILE_REF(?SPACE_GUID), P1Id, P2Id)),
    % migration completes, but nothing in trash was replicated nor evicted
    ?assertMatch({ok, #document{value = #transfer{
        replication_status = completed,
        eviction_status = completed,
        files_replicated = 0,
        files_evicted = 0
    }}}, rpc:call(P1Node, transfer, get, [TransferId]), ?ATTEMPTS).
%% @doc Verifies that moving a directory to trash hides it from the space
%% directory on both providers, makes it visible inside the trash directory,
%% and that a deletion marker then blocks registration of a file with the
%% same storage path.
move_to_trash_should_work(_Config) ->
    [P1Node] = oct_background:get_provider_nodes(krakow),
    [P2Node] = oct_background:get_provider_nodes(paris),
    UserSessIdP1 = oct_background:get_user_session_id(user1, krakow),
    UserSessIdP2 = oct_background:get_user_session_id(user1, paris),
    DirName = ?RAND_DIR_NAME,
    {ok, DirGuid} = lfm_proxy:mkdir(P1Node, UserSessIdP1, ?SPACE_GUID, DirName, ?DEFAULT_DIR_PERMS),
    DirCtx = file_ctx:new_by_guid(DirGuid),
    lfm_test_utils:create_files_tree(P1Node, UserSessIdP1, [{10, 10}, {10, 10}, {10, 10}], DirGuid),
    move_to_trash(P1Node, DirCtx, UserSessIdP1),
    % the space directory should appear empty on both providers
    lfm_test_utils:assert_space_dir_empty(P1Node, ?SPACE_ID1, ?ATTEMPTS),
    lfm_test_utils:assert_space_dir_empty(P2Node, ?SPACE_ID1, ?ATTEMPTS),
    % the directory should now be listed in the trash directory on both providers
    ?assertMatch({ok, [{DirGuid, _}]}, lfm_proxy:get_children(P1Node, UserSessIdP1, ?FILE_REF(?TRASH_DIR_GUID(?SPACE_ID1)), 0, 10)),
    ?assertMatch({ok, [{DirGuid, _}]}, lfm_proxy:get_children(P2Node, UserSessIdP2, ?FILE_REF(?TRASH_DIR_GUID(?SPACE_ID1)), 0, 10), ?ATTEMPTS),
    StorageFileId = filename:join(["/", DirName]),
    [StorageId] = opw_test_rpc:get_space_local_storages(P1Node, ?SPACE_ID1),
    % file registration should fail because there is a deletion marker added for the file
    % which prevents file to be imported
    ?assertMatch({ok, ?HTTP_400_BAD_REQUEST, _, _}, register_file(P1Node, user1, #{
        <<"spaceId">> => ?SPACE_ID1,
        <<"destinationPath">> => DirName,
        <<"storageFileId">> => StorageFileId,
        <<"storageId">> => StorageId,
        <<"mtime">> => global_clock:timestamp_seconds(),
        <<"size">> => 10,
        <<"mode">> => <<"664">>,
        <<"autoDetectAttributes">> => false
    })).
%% @doc Verifies that moving a directory to trash and scheduling its deletion
%% eventually removes the whole subtree on both providers, empties the trash
%% directory, and allows re-registration of a file with the same path.
move_to_trash_and_schedule_deletion_should_work(_Config) ->
    [P1Node] = oct_background:get_provider_nodes(krakow),
    [P2Node] = oct_background:get_provider_nodes(paris),
    DirName = ?RAND_DIR_NAME,
    UserSessIdP1 = oct_background:get_user_session_id(user1, krakow),
    UserSessIdP2 = oct_background:get_user_session_id(user1, paris),
    {ok, DirGuid} = lfm_proxy:mkdir(P1Node, UserSessIdP1, ?SPACE_GUID, DirName, ?DEFAULT_DIR_PERMS),
    {DirGuids, FileGuids} = lfm_test_utils:create_files_tree(P1Node, UserSessIdP1, [{10, 10}, {10, 10}, {10, 10}], DirGuid),
    DirCtx = file_ctx:new_by_guid(DirGuid),
    move_to_trash(P1Node, DirCtx, UserSessIdP1),
    schedule_deletion_from_trash(P1Node, DirCtx, UserSessIdP1, ?SPACE_UUID, DirName),
    % both the space and the trash directories should eventually become empty
    lfm_test_utils:assert_space_and_trash_are_empty(P1Node, ?SPACE_ID1, ?ATTEMPTS),
    lfm_test_utils:assert_space_and_trash_are_empty(P2Node, ?SPACE_ID1, ?ATTEMPTS),
    ?assertMatch({ok, []}, lfm_proxy:get_children(P1Node, UserSessIdP1, ?FILE_REF(?TRASH_DIR_GUID(?SPACE_ID1)), 0, 10), ?ATTEMPTS),
    ?assertMatch({ok, []}, lfm_proxy:get_children(P2Node, UserSessIdP2, ?FILE_REF(?TRASH_DIR_GUID(?SPACE_ID1)), 0, 10), ?ATTEMPTS),
    % every file of the deleted subtree should be gone on both providers
    lists:foreach(fun(G) ->
        ?assertMatch({error, ?ENOENT}, lfm_proxy:stat(P1Node, UserSessIdP1, ?FILE_REF(G)), ?ATTEMPTS),
        ?assertMatch({error, ?ENOENT}, lfm_proxy:stat(P2Node, UserSessIdP2, ?FILE_REF(G)), ?ATTEMPTS)
    end, DirGuids ++ FileGuids ++ [DirGuid]),
    StorageFileId = filename:join([?DIRECTORY_SEPARATOR, DirName]),
    [StorageId] = opw_test_rpc:get_space_local_storages(P1Node, ?SPACE_ID1),
    Size = 10,
    % file registration should succeed because the file has already been deleted
    ?assertMatch({ok, ?HTTP_201_CREATED, _, _}, register_file(P1Node, user1, #{
        <<"spaceId">> => ?SPACE_ID1,
        <<"destinationPath">> => DirName,
        <<"storageFileId">> => StorageFileId,
        <<"storageId">> => StorageId,
        <<"mtime">> => global_clock:timestamp_seconds(),
        <<"size">> => Size,
        <<"mode">> => <<"664">>,
        <<"autoDetectAttributes">> => false
    })).
%% @doc Moving a directory to trash (via rm_recursive) should fail with EACCES
%% when the user lacks sufficient POSIX permissions on the directory.
move_to_trash_should_fail_if_user_does_not_have_sufficient_perms(_Config) ->
    [P1Node] = oct_background:get_provider_nodes(krakow),
    % perform test as user2 as he's not a space owner
    UserSessIdP1 = oct_background:get_user_session_id(user2, krakow),
    % each mode lacks at least one of the write/execute bits required for removal
    InsufficientPerms = [8#600, 8#500, 8#400],
    lists:foreach(fun(Perms) ->
        DirName = ?RAND_DIR_NAME,
        {ok, DirGuid} = lfm_proxy:mkdir(P1Node, UserSessIdP1, ?SPACE_GUID, DirName, Perms),
        ?assertMatch({error, ?EACCES}, lfm_proxy:rm_recursive(P1Node, UserSessIdP1, ?FILE_REF(DirGuid)))
    end, InsufficientPerms).
%% @doc Moving a directory to trash should fail with EACCES when any of the
%% ACL permissions required for removal is missing from the directory's ACL.
move_to_trash_should_fail_if_required_acl_perm_is_missing(_Config) ->
    [P1Node] = oct_background:get_provider_nodes(krakow),
    % perform test as user2 as he's not a space owner
    UserSessIdP1 = oct_background:get_user_session_id(user2, krakow),
    % ?delete_subcontainer and ?delete_object are mapped to the same bitmask so we have to remove both of them
    % to test whether operation will fail without these perms
    RequiredPerms = [?delete, ?list_container, ?traverse_container, [?delete_subcontainer, ?delete_object]],
    lists:foreach(fun(RequiredPerm) ->
        DirName = ?RAND_DIR_NAME,
        {ok, DirGuid} = lfm_proxy:mkdir(P1Node, UserSessIdP1, ?SPACE_GUID, DirName, ?DEFAULT_DIR_PERMS),
        % grant every directory permission except the one(s) under test
        Perms = ?ALL_DIR_PERMS -- utils:ensure_list(RequiredPerm),
        ok = lfm_proxy:set_acl(P1Node, UserSessIdP1, ?FILE_REF(DirGuid), [perms_to_allow_ace(Perms)]),
        ?assertMatch({error, ?EACCES}, lfm_proxy:rm_recursive(P1Node, UserSessIdP1, ?FILE_REF(DirGuid)))
    end, RequiredPerms).
%% @doc QoS entry added directly on a file should not affect it once in trash.
qos_set_on_file_does_not_affect_file_in_trash(Config) ->
    qos_does_not_affect_files_in_trash_test_base(Config, file).
%% @doc QoS entry added on the parent directory should not affect files in trash.
qos_set_on_parent_directory_does_not_affect_files_in_trash(Config) ->
    qos_does_not_affect_files_in_trash_test_base(Config, parent_dir).
%% @doc QoS entry added on the space directory should not affect files in trash.
qos_set_on_space_directory_does_not_affect_files_in_trash(Config) ->
    qos_does_not_affect_files_in_trash_test_base(Config, space_dir).
%% @doc Verifies that files which are currently in trash are not reimported
%% by the storage import (auto scan) mechanism.
files_from_trash_are_not_reimported(_Config) ->
    % this test is performed in ?SPACE2 which is supported by ImportedNullStorage2
    % on which legacy dataset is simulated with
    % structure 1-0:10-10 (1 root directory with 10 subdirectories and 10 files)
    [P1Node] = oct_background:get_provider_nodes(krakow),
    UserSessIdP1 = oct_background:get_user_session_id(user1, krakow),
    % ensure that 1st scan has been finished
    ?assertEqual(true, rpc:call(P1Node, storage_import_monitoring, is_initial_scan_finished, [?SPACE_ID2]), ?ATTEMPTS),
    {ok, [{DirGuid, _}]} = lfm_proxy:get_children(P1Node, UserSessIdP1, ?FILE_REF(?SPACE_GUID(?SPACE_ID2)), 0, 1000),
    DirCtx = file_ctx:new_by_guid(DirGuid),
    % move imported directory to trash
    move_to_trash(P1Node, DirCtx, UserSessIdP1),
    % start scan and wait till it's finished
    ok = rpc:call(P1Node, storage_import, start_auto_scan, [?SPACE_ID2]),
    ?assertEqual(true, rpc:call(P1Node, storage_import_monitoring, is_scan_finished, [?SPACE_ID2, 2]), ?ATTEMPTS),
    % files which are currently in trash shouldn't have been reimported
    ?assertMatch({ok, []}, lfm_proxy:get_children(P1Node, UserSessIdP1, ?FILE_REF(?SPACE_GUID(?SPACE_ID2)), 0, 1000)).
%% @doc Deletion from trash spanning a single simulated 4-day time warp
%% should succeed.
%% NOTE(review): the TimeWarp binding was missing (unbound variable) —
%% reconstructed as 4 days expressed in seconds; confirm against VCS history.
deletion_lasting_for_4_days_should_succeed(Config) ->
    TimeWarp = 4 * 24 * 3600, % 4 days
    long_lasting_deletion_test_base(Config, 1, TimeWarp, 0, success).
%% @doc Deletion from trash lasting 40 simulated days (20 x 2-day warps)
%% should succeed, as offline sessions are refreshed between the warps.
%% NOTE(review): the TimeWarp binding was missing (unbound variable) —
%% reconstructed as 2 days expressed in seconds; confirm against VCS history.
deletion_lasting_for_40_days_should_succeed(Config) ->
    % This test simulates 20 time warps, each of them warps 2 days forward.
    % Interval between simulating time warps is 30 seconds.
    TimeWarpsCount = 20,
    TimeWarp = 2 * 24 * 3600, % 2 days
    Interval = 30,
    % deletion from trash will last for (simulated) 40 days
    long_lasting_deletion_test_base(Config, TimeWarpsCount, TimeWarp, Interval, success).
%% @doc A single 40-day time warp should make the deletion fail, because the
%% offline session cannot be refreshed in time.
%% NOTE(review): the TimeWarp binding was missing (unbound variable) —
%% reconstructed as 40 days expressed in seconds; confirm against VCS history.
deletion_lasting_for_40_days_should_fail_if_session_is_not_refreshed_within_expected_time(Config) ->
    % This test simulates a 40 day time warp which will result in failed refresh of offline session
    % (offline sessions are valid for a month)
    TimeWarp = 40 * 24 * 3600, % 40 days
    long_lasting_deletion_test_base(Config, 1, TimeWarp, 1, failure).
%===================================================================
% Test base functions
%===================================================================
%% @doc Test base for the QoS suite. Creates the following structure in the
%% space directory:
%%   /space_dir/parent_dir/file
%% It adds a QoS entry for the file determined by the SetQosOn parameter
%% and checks whether a file which is in trash is not replicated by QoS.
%% Parameter SetQosOn can have the following values:
%%  - space_dir
%%  - parent_dir
%%  - file
qos_does_not_affect_files_in_trash_test_base(_Config, SetQosOn) ->
    [P1Node] = oct_background:get_provider_nodes(krakow),
    [P2Node] = oct_background:get_provider_nodes(paris),
    [StorageId] = opw_test_rpc:get_space_local_storages(P1Node, ?SPACE_ID1),
    ok = rpc:call(P1Node, storage_logic, set_qos_parameters, [StorageId, #{<<"key">> => <<"value">>}]),
    UserSessIdP1 = oct_background:get_user_session_id(user1, krakow),
    UserSessIdP2 = oct_background:get_user_session_id(user1, paris),
    P1Id = oct_background:get_provider_id(krakow),
    P2Id = oct_background:get_provider_id(paris),
    DirName = ?RAND_DIR_NAME,
    FileName = ?RAND_FILE_NAME,
    {ok, DirGuid} = lfm_proxy:mkdir(P2Node, UserSessIdP2, ?SPACE_GUID, DirName, ?DEFAULT_DIR_PERMS),
    DirCtx = file_ctx:new_by_guid(DirGuid),
    {ok, {FileGuid, H1}} = lfm_proxy:create_and_open(P2Node, UserSessIdP2, DirGuid, FileName, ?DEFAULT_FILE_PERMS),
    TestData1 = <<"first part ">>,
    TestData2 = <<"seconds part">>,
    Size1 = byte_size(TestData1),
    Size2 = Size1 + byte_size(TestData2),
    {ok, _} = lfm_proxy:write(P2Node, H1, 0, TestData1),
    lfm_proxy:fsync(P2Node, H1),
    GuidWithQos = case SetQosOn of
        space_dir -> ?SPACE_GUID;
        parent_dir -> DirGuid;
        file -> FileGuid
    end,
    {ok, QosEntryId} = ?assertMatch(
        {ok, _},
        opt_qos:add_qos_entry(P1Node, UserSessIdP1, ?FILE_REF(GuidWithQos), <<"key=value">>, 1),
        ?ATTEMPTS
    ),
    % check whether QoS synchronized the file
    ?assertMatch({ok, {#{QosEntryId := fulfilled}, _}},
        opt_qos:get_effective_file_qos(P1Node, UserSessIdP1, ?FILE_REF(GuidWithQos)), ?ATTEMPTS),
    ?assertDistribution(P1Node, UserSessIdP1, ?DISTS([P1Id, P2Id], [Size1, Size1]), FileGuid, ?ATTEMPTS),
    % move the file to trash
    move_to_trash(P1Node, DirCtx, UserSessIdP1),
    % write new blocks to file which is in trash
    {ok, _} = lfm_proxy:write(P2Node, H1, Size1, TestData2),
    lfm_proxy:close(P2Node, H1),
    % file shouldn't have been synchronized because it's in trash
    ?assertDistribution(P1Node, UserSessIdP1, ?DISTS([P1Id, P2Id], [Size1, Size2]), FileGuid, ?ATTEMPTS).
%% @doc Test base for the long-lasting deletion tests. Moves a directory tree
%% to trash, schedules its deletion and simulates that as many as
%% TimeWarpsCount time warps occurred, each of them warping TimeWarpPeriod
%% seconds forward. The real-life interval between simulating time warps is
%% equal to TimeWarpInterval seconds. ExpectedResult (success | failure)
%% selects the final assertions.
long_lasting_deletion_test_base(_Config, TimeWarpsCount,
    TimeWarpPeriod, TimeWarpInterval, ExpectedResult
) ->
    [P1Node] = oct_background:get_provider_nodes(krakow),
    [P2Node] = oct_background:get_provider_nodes(paris),
    DirName = ?RAND_DIR_NAME,
    UserSessIdP1 = oct_background:get_user_session_id(user1, krakow),
    {ok, DirGuid} = lfm_proxy:mkdir(P1Node, UserSessIdP1, ?SPACE_GUID, DirName, ?DEFAULT_DIR_PERMS),
    {DirGuids, FileGuids} = lfm_test_utils:create_files_tree(P1Node, UserSessIdP1, [{10, 10}, {10, 10}, {10, 10}], DirGuid),
    DirCtx = file_ctx:new_by_guid(DirGuid),
    mock_traverse_finished(P1Node, self()),
    move_to_trash(P1Node, DirCtx, UserSessIdP1),
    {ok, TaskId} = schedule_deletion_from_trash(P1Node, DirCtx, UserSessIdP1, ?SPACE_UUID, DirName),
    lists:foreach(fun(_) ->
        % simulate that a TimeWarpPeriod time warp occurred during deletion from trash
        time_test_utils:simulate_seconds_passing(TimeWarpPeriod),
        timer:sleep(timer:seconds(TimeWarpInterval))
    end, lists:seq(1, TimeWarpsCount)),
    await_traverse_finished(TaskId, 600),
    case ExpectedResult of
        success ->
            % use ?ROOT_SESS_ID in below asserts as normal sessions may have expired
            lfm_test_utils:assert_space_and_trash_are_empty(P1Node, ?SPACE_ID1, ?ATTEMPTS),
            lfm_test_utils:assert_space_and_trash_are_empty(P2Node, ?SPACE_ID1, ?ATTEMPTS),
            ?assertMatch({ok, []}, lfm_proxy:get_children(P1Node, ?ROOT_SESS_ID, ?FILE_REF(?TRASH_DIR_GUID(?SPACE_ID1)), 0, 10), ?ATTEMPTS),
            ?assertMatch({ok, []}, lfm_proxy:get_children(P2Node, ?ROOT_SESS_ID, ?FILE_REF(?TRASH_DIR_GUID(?SPACE_ID1)), 0, 10), ?ATTEMPTS),
            lists:foreach(fun(G) ->
                ?assertMatch({error, ?ENOENT}, lfm_proxy:stat(P1Node, ?ROOT_SESS_ID, ?FILE_REF(G)), ?ATTEMPTS),
                ?assertMatch({error, ?ENOENT}, lfm_proxy:stat(P2Node, ?ROOT_SESS_ID, ?FILE_REF(G)), ?ATTEMPTS)
            end, DirGuids ++ FileGuids ++ [DirGuid]);
        failure ->
            % failure was expected so there should be files which weren't deleted
            AllFilesNum = length([DirGuid | DirGuids] ++ FileGuids),
            DeletedFilesNum = lists:foldl(fun(Guid, Acc) ->
                case lfm_proxy:stat(P1Node, ?ROOT_SESS_ID, ?FILE_REF(Guid)) of
                    {ok, _} -> Acc;
                    {error, ?ENOENT} -> Acc + 1
                end
            end, 0, [DirGuid | DirGuids] ++ FileGuids),
            ?assertNotEqual(AllFilesNum, DeletedFilesNum)
    end.
%===================================================================
% SetUp and TearDown functions
%===================================================================
%% @doc Starts the two-provider onenv environment ("2op-manual-import") and
%% disables directory statistics counting for the whole suite via posthook.
init_per_suite(Config) ->
    oct_background:init_per_suite([{?LOAD_MODULES, [dir_stats_test_utils]} | Config],
        #onenv_test_config{
            onenv_scenario = "2op-manual-import",
            posthook = fun dir_stats_test_utils:disable_stats_counting_ct_posthook/1
        }).
%% @doc Tears down the onenv environment and re-enables directory statistics
%% counting disabled in init_per_suite/1.
end_per_suite(Config) ->
    oct_background:end_per_suite(),
    dir_stats_test_utils:enable_stats_counting(Config).
%% @doc Freezes time for the time-warp testcases, then falls through to the
%% default clause which refreshes the background config and sets up lfm_proxy.
init_per_testcase(Case, Config) when
    Case =:= deletion_lasting_for_4_days_should_succeed orelse
    Case =:= deletion_lasting_for_40_days_should_succeed orelse
    Case =:= deletion_lasting_for_40_days_should_fail_if_session_is_not_refreshed_within_expected_time
->
    time_test_utils:freeze_time(Config),
    init_per_testcase(default, Config);
init_per_testcase(_Case, Config) ->
    % update background config to update sessions
    Config2 = oct_background:update_background_config(Config),
    lfm_proxy:init(Config2).
%% @doc Cleans the space on both providers and tears down lfm_proxy.
%% NOTE(review): time_test_utils:unfreeze_time/1 runs for every testcase,
%% not only the frozen ones — confirm it tolerates non-frozen time.
end_per_testcase(_Case, Config) ->
    [P1Node] = oct_background:get_provider_nodes(krakow),
    [P2Node] = oct_background:get_provider_nodes(paris),
    AllNodes = [P1Node, P2Node],
    time_test_utils:unfreeze_time(Config),
    lfm_test_utils:clean_space(P1Node, AllNodes, ?SPACE_ID1, ?ATTEMPTS),
    lfm_proxy:teardown(Config).
%%%===================================================================
%%% Internal functions
%%%===================================================================
%% Moves the file represented by FileCtx to trash on the given node,
%% acting as the user that owns SessId.
move_to_trash(Node, FileCtx, SessId) ->
    Ctx = rpc:call(Node, user_ctx, new, [SessId]),
    rpc:call(Node, trash, move_to_trash, [FileCtx, Ctx]).
%% Schedules asynchronous deletion of FileCtx from trash on the given node,
%% acting as the user that owns SessId. Returns {ok, TaskId} of the traverse.
schedule_deletion_from_trash(Node, FileCtx, SessId, RootOriginalParentUuid, DirName) ->
    Ctx = rpc:call(Node, user_ctx, new, [SessId]),
    rpc:call(Node, trash, schedule_deletion_from_trash,
        [FileCtx, Ctx, false, RootOriginalParentUuid, DirName]).
%% Registers a file via the REST endpoint on the given node, authenticating
%% with the access token of the given user. Body is JSON-encoded.
register_file(Node, User, Body) ->
    Token = oct_background:get_user_access_token(User),
    Headers = #{
        ?HDR_X_AUTH_TOKEN => Token,
        ?HDR_CONTENT_TYPE => <<"application/json">>
    },
    Payload = json_utils:encode(Body),
    rest_test_utils:request(Node, <<"data/register">>, post, Headers, Payload).
%% Builds an ALLOW ACE for the owner that grants exactly the given permissions.
perms_to_allow_ace(Perms) ->
    Bitmask = permissions_test_utils:perms_to_bitmask(Perms),
    ?ALLOW_ACE(?owner, ?no_flags_mask, Bitmask).
%% @doc Mocks tree_deletion_traverse on Worker so that TestProcess receives a
%% {traverse_finished, TaskId} message whenever a deletion traverse task
%% finishes or is canceled. The original callbacks still run via
%% meck:passthrough, so traverse bookkeeping is unaffected.
mock_traverse_finished(Worker, TestProcess) ->
    ok = test_utils:mock_new(Worker, tree_deletion_traverse),
    % both callbacks share the same notification wrapper (was duplicated inline)
    NotifyFun = fun(TaskId, Pool) ->
        Result = meck:passthrough([TaskId, Pool]),
        TestProcess ! {traverse_finished, TaskId},
        Result
    end,
    ok = test_utils:mock_expect(Worker, tree_deletion_traverse, task_finished, NotifyFun),
    ok = test_utils:mock_expect(Worker, tree_deletion_traverse, task_canceled, NotifyFun).
%% Blocks until a {traverse_finished, TaskId} message arrives; fails the
%% testcase if it does not arrive within the given number of seconds.
await_traverse_finished(TaskId, TimeoutSeconds) ->
    receive
        {traverse_finished, TaskId} ->
            ok
    after timer:seconds(TimeoutSeconds) ->
        ct:fail("Traverse ~s not finished in expected time", [TaskId])
    end.
@end
-------------------------------------------------------------------
@doc
Tests of trash.
@end
-------------------------------------------------------------------
exported for CT
tests
deletion_lasting_for_10_days_should_fail_if_session_is_not_refreshed_within_expected_time
===================================================================
Test functions
===================================================================
% trash dir should be visible in the space on both providers
?assertMatch({ok, [{_, ?TRASH_DIR_NAME}]},
?assertMatch({ok, [{_, ?TRASH_DIR_NAME}]},
trash dir should be empty
create file and directory
move subtree to trash
wait till moving directory to trash is synchronized
create file and directory
Ensure that evicting provider has knowledge of remote provider blocks (through dbsync),
as otherwise it will skip eviction.
evict whole space
create file and directory
move subtree to trash
wait till moving directory to trash is synchronized
file registration should fail because there is a deletion marker added for the file
which prevents file to be imported
file registration should succeed because the file has already been deleted
to test whether operatio will fail without these perms
on which legacy dataset is simulated with
move imported directory to trash
start scan and wait till it's finished
files which are currently in trash shouldn't have been reimported
===================================================================
Test base functions
===================================================================
this test creates the following structure in the space directory:
/space_dir/parent_dir/file
and checks whether file which is in trash is not replicated by QoS.
Parameter SetQosOn can have the following values:
- space_dir
- parent_dir
- file
move the file to trash
write new blocks to file which is in trash
file shouldn't have been synchronized because it's in trash
this test moves directory to trash, schedules its deletion and simulates that
as many as TimeWarpsCount time warps occurred
each of them warps TimeWarpPeriod in time
interval (in real life) between simulating time warps is equal to TimeWarpInterval
simulate that a TimeWarpPeriod time warp occurred during deletion from trash
failure was expected so there should be files which weren't deleted
===================================================================
===================================================================
update background config to update sessions
===================================================================
=================================================================== | @author
( C ) 2020 ACK CYFRONET AGH
This software is released under the MIT license
cited in ' LICENSE.txt ' .
-module(trash_test_SUITE).
-author("Jakub Kudzia").
-include("permissions_test.hrl").
-include("modules/fslogic/fslogic_common.hrl").
-include("distribution_assert.hrl").
-include_lib("onenv_ct/include/oct_background.hrl").
-include_lib("ctool/include/errors.hrl").
-include_lib("ctool/include/http/headers.hrl").
-include_lib("ctool/include/http/codes.hrl").
-include_lib("ctool/include/test/assertions.hrl").
-include_lib("ctool/include/test/performance.hrl").
-include_lib("ctool/include/test/test_utils.hrl").
-export([all/0, init_per_suite/1, end_per_suite/1, init_per_testcase/2, end_per_testcase/2]).
-export([
trash_dir_should_exist/1,
create_dir_with_trash_dir_name_is_forbidden/1,
create_file_with_trash_dir_name_is_forbidden/1,
remove_trash_dir_is_forbidden/1,
rename_trash_dir_is_forbidden/1,
rename_other_dir_to_trash_dir_is_forbidden/1,
chmod_on_trash_dir_is_forbidden/1,
set_xattr_on_trash_dir_is_forbidden/1,
remove_xattr_on_trash_dir_is_forbidden/1,
set_acl_on_trash_dir_is_forbidden/1,
remove_acl_on_trash_dir_is_forbidden/1,
set_metadata_on_trash_dir_is_forbidden/1,
set_cdmi_metadata_on_trash_dir_is_forbidden/1,
create_share_from_trash_dir_is_forbidden/1,
add_qos_entry_for_trash_dir_is_forbidden/1,
remove_metadata_on_trash_dir_is_forbidden/1,
schedule_replication_transfer_on_trash_dir_is_forbidden/1,
schedule_eviction_transfer_on_trash_dir_is_allowed/1,
schedule_migration_transfer_on_trash_dir_is_forbidden/1,
schedule_replication_transfer_on_space_does_not_replicate_trash/1,
schedule_eviction_transfer_on_space_evicts_trash/1,
schedule_migration_transfer_on_space_does_not_replicate_trash/1,
move_to_trash_should_work/1,
move_to_trash_should_fail_if_user_does_not_have_sufficient_perms/1,
move_to_trash_should_fail_if_required_acl_perm_is_missing/1,
move_to_trash_and_schedule_deletion_should_work/1,
qos_set_on_file_does_not_affect_file_in_trash/1,
qos_set_on_parent_directory_does_not_affect_files_in_trash/1,
qos_set_on_space_directory_does_not_affect_files_in_trash/1,
files_from_trash_are_not_reimported/1,
deletion_lasting_for_4_days_should_succeed/1,
deletion_lasting_for_40_days_should_succeed/1,
deletion_lasting_for_40_days_should_fail_if_session_is_not_refreshed_within_expected_time/1
]).
all() -> ?ALL([
trash_dir_should_exist,
create_dir_with_trash_dir_name_is_forbidden,
create_file_with_trash_dir_name_is_forbidden,
remove_trash_dir_is_forbidden,
rename_trash_dir_is_forbidden,
rename_other_dir_to_trash_dir_is_forbidden,
chmod_on_trash_dir_is_forbidden,
set_xattr_on_trash_dir_is_forbidden,
remove_xattr_on_trash_dir_is_forbidden,
set_acl_on_trash_dir_is_forbidden,
remove_acl_on_trash_dir_is_forbidden,
set_metadata_on_trash_dir_is_forbidden,
set_cdmi_metadata_on_trash_dir_is_forbidden,
create_share_from_trash_dir_is_forbidden,
add_qos_entry_for_trash_dir_is_forbidden,
remove_metadata_on_trash_dir_is_forbidden,
schedule_replication_transfer_on_trash_dir_is_forbidden,
schedule_eviction_transfer_on_trash_dir_is_allowed,
schedule_migration_transfer_on_trash_dir_is_forbidden,
schedule_replication_transfer_on_space_does_not_replicate_trash,
schedule_eviction_transfer_on_space_evicts_trash,
schedule_migration_transfer_on_space_does_not_replicate_trash,
move_to_trash_should_work,
move_to_trash_should_fail_if_user_does_not_have_sufficient_perms,
move_to_trash_should_fail_if_required_acl_perm_is_missing,
move_to_trash_and_schedule_deletion_should_work,
qos_set_on_file_does_not_affect_file_in_trash,
qos_set_on_parent_directory_does_not_affect_files_in_trash,
qos_set_on_space_directory_does_not_affect_files_in_trash,
files_from_trash_are_not_reimported,
deletion_lasting_for_4_days_should_succeed,
deletion_lasting_for_40_days_should_succeed
TODO VFS-7348 this test should pass when deletion is scheduled as user not by root
]).
-define(SPACE1_PLACEHOLDER, space1).
-define(SPACE_ID1, oct_background:get_space_id(?SPACE1_PLACEHOLDER)).
-define(SPACE_NAME, oct_background:get_space_name(?SPACE1_PLACEHOLDER)).
-define(SPACE2_PLACEHOLDER, space2).
-define(SPACE_ID2, oct_background:get_space_id(?SPACE2_PLACEHOLDER)).
-define(SPACE_NAME2, oct_background:get_space_name(?SPACE2_PLACEHOLDER)).
-define(SPACE_UUID, ?SPACE_UUID(?SPACE_ID1)).
-define(SPACE_UUID(SpaceId), fslogic_file_id:spaceid_to_space_dir_uuid(SpaceId)).
-define(SPACE_GUID, ?SPACE_GUID(?SPACE_ID1)).
-define(SPACE_GUID(SpaceId), fslogic_file_id:spaceid_to_space_dir_guid(SpaceId)).
-define(TRASH_DIR_GUID(SpaceId), fslogic_file_id:spaceid_to_trash_dir_guid(SpaceId)).
-define(ATTEMPTS, 300).
-define(RAND_NAME(Prefix), <<Prefix/binary, (integer_to_binary(rand:uniform(1000)))/binary>>).
-define(RAND_DIR_NAME, ?RAND_NAME(<<"dir_">>)).
-define(RAND_FILE_NAME, ?RAND_NAME(<<"file_">>)).
trash_dir_should_exist(_Config) ->
[P1Node] = oct_background:get_provider_nodes(krakow),
[P2Node] = oct_background:get_provider_nodes(paris),
UserSessIdP1 = oct_background:get_user_session_id(user1, krakow),
UserSessIdP2 = oct_background:get_user_session_id(user1, paris),
TODO VFS-7064 uncomment after introducing links to trash directory
lfm_proxy : , UserSessIdP1 , ? FILE_REF(?SPACE_GUID ) , 0 , 10 ) ) ,
lfm_proxy : get_children(P2Node , UserSessIdP2 , ? FILE_REF(?SPACE_GUID ) , 0 , 10 ) ) ,
?assertMatch({ok, #file_attr{name = ?TRASH_DIR_NAME}},
lfm_proxy:stat(P1Node, UserSessIdP1, ?FILE_REF(?TRASH_DIR_GUID(?SPACE_ID1)))),
?assertMatch({ok, #file_attr{name = ?TRASH_DIR_NAME}},
lfm_proxy:stat(P2Node, UserSessIdP2, ?FILE_REF(?TRASH_DIR_GUID(?SPACE_ID1)))),
?assertMatch({ok, []}, lfm_proxy:get_children(P1Node, UserSessIdP1, ?FILE_REF(?TRASH_DIR_GUID(?SPACE_ID1)), 0, 10)),
?assertMatch({ok, []}, lfm_proxy:get_children(P2Node, UserSessIdP2, ?FILE_REF(?TRASH_DIR_GUID(?SPACE_ID1)), 0, 10)).
create_dir_with_trash_dir_name_is_forbidden(_Config) ->
[P1Node] = oct_background:get_provider_nodes(krakow),
UserSessIdP1 = oct_background:get_user_session_id(user1, krakow),
TODO VFS-7064 change this error to EEXIST after adding link from space to trash directory
?assertMatch({error, ?EPERM},
lfm_proxy:mkdir(P1Node, UserSessIdP1, ?SPACE_GUID, ?TRASH_DIR_NAME, ?DEFAULT_DIR_PERMS)).
create_file_with_trash_dir_name_is_forbidden(_Config) ->
[P1Node] = oct_background:get_provider_nodes(krakow),
UserSessIdP1 = oct_background:get_user_session_id(user1, krakow),
TODO VFS-7064 change this error to EEXIST after adding link from space to trash directory
?assertMatch({error, ?EPERM},
lfm_proxy:create(P1Node, UserSessIdP1, ?SPACE_GUID, ?TRASH_DIR_NAME, ?DEFAULT_FILE_PERMS)).
remove_trash_dir_is_forbidden(_Config) ->
[P1Node] = oct_background:get_provider_nodes(krakow),
UserSessIdP1 = oct_background:get_user_session_id(user1, krakow),
?assertMatch({error, ?EPERM},
lfm_proxy:rm_recursive(P1Node, UserSessIdP1, ?FILE_REF(?TRASH_DIR_GUID(?SPACE_ID1)))),
?assertMatch({error, ?EPERM},
lfm_proxy:unlink(P1Node, UserSessIdP1, ?FILE_REF(?TRASH_DIR_GUID(?SPACE_ID1)))),
?assertMatch({error, ?EPERM},
lfm_proxy:unlink(P1Node, UserSessIdP1, ?FILE_REF(?TRASH_DIR_GUID(?SPACE_ID1)))).
rename_trash_dir_is_forbidden(_Config) ->
[P1Node] = oct_background:get_provider_nodes(krakow),
UserSessIdP1 = oct_background:get_user_session_id(user1, krakow),
TargetPath = filename:join([?DIRECTORY_SEPARATOR, ?SPACE_NAME, <<"other_trash_name">>]),
?assertMatch({error, ?EPERM},
lfm_proxy:mv(P1Node, UserSessIdP1, ?FILE_REF(?TRASH_DIR_GUID(?SPACE_ID1)), TargetPath)).
rename_other_dir_to_trash_dir_is_forbidden(_Config) ->
[P1Node] = oct_background:get_provider_nodes(krakow),
UserSessIdP1 = oct_background:get_user_session_id(user1, krakow),
DirName = ?RAND_DIR_NAME,
{ok, DirGuid} = lfm_proxy:mkdir(P1Node, UserSessIdP1, ?SPACE_GUID, DirName, ?DEFAULT_DIR_PERMS),
?assertMatch({error, ?EPERM},
lfm_proxy:mv(P1Node, UserSessIdP1, ?FILE_REF(DirGuid), filename:join([?SPACE_NAME, ?TRASH_DIR_NAME]))).
chmod_on_trash_dir_is_forbidden(_Config) ->
[P1Node] = oct_background:get_provider_nodes(krakow),
UserSessIdP1 = oct_background:get_user_session_id(user1, krakow),
?assertMatch({error, ?EPERM},
lfm_proxy:set_perms(P1Node, UserSessIdP1, ?FILE_REF(?TRASH_DIR_GUID(?SPACE_ID1)), 8#777)).
set_xattr_on_trash_dir_is_forbidden(_Config) ->
[P1Node] = oct_background:get_provider_nodes(krakow),
UserSessIdP1 = oct_background:get_user_session_id(user1, krakow),
?assertMatch({error, ?EPERM},
lfm_proxy:set_xattr(P1Node, UserSessIdP1, ?FILE_REF(?TRASH_DIR_GUID(?SPACE_ID1)), #{<<"key">> => <<"value">>})).
remove_xattr_on_trash_dir_is_forbidden(_Config) ->
[P1Node] = oct_background:get_provider_nodes(krakow),
UserSessIdP1 = oct_background:get_user_session_id(user1, krakow),
?assertMatch({error, ?EPERM},
lfm_proxy:remove_xattr(P1Node, UserSessIdP1, ?FILE_REF(?TRASH_DIR_GUID(?SPACE_ID1)), <<"key">>)).
set_acl_on_trash_dir_is_forbidden(_Config) ->
[P1Node] = oct_background:get_provider_nodes(krakow),
UserSessIdP1 = oct_background:get_user_session_id(user1, krakow),
?assertMatch({error, ?EPERM},
lfm_proxy:set_acl(P1Node, UserSessIdP1, ?FILE_REF(?TRASH_DIR_GUID(?SPACE_ID1)), [])).
remove_acl_on_trash_dir_is_forbidden(_Config) ->
[P1Node] = oct_background:get_provider_nodes(krakow),
UserSessIdP1 = oct_background:get_user_session_id(user1, krakow),
?assertMatch({error, ?EPERM},
lfm_proxy:remove_acl(P1Node, UserSessIdP1, ?FILE_REF(?TRASH_DIR_GUID(?SPACE_ID1)))).
set_metadata_on_trash_dir_is_forbidden(_Config) ->
[P1Node] = oct_background:get_provider_nodes(krakow),
UserSessIdP1 = oct_background:get_user_session_id(user1, krakow),
JSON = #{<<"key">> => <<"value">>},
?assertMatch(?ERROR_POSIX(?EPERM),
opt_file_metadata:set_custom_metadata(P1Node, UserSessIdP1, ?FILE_REF(?TRASH_DIR_GUID(?SPACE_ID1)), json, JSON, [])).
set_cdmi_metadata_on_trash_dir_is_forbidden(_Config) ->
[P1Node] = oct_background:get_provider_nodes(krakow),
UserSessIdP1 = oct_background:get_user_session_id(user1, krakow),
?assertMatch(?ERROR_POSIX(?EPERM),
opt_cdmi:set_mimetype(P1Node, UserSessIdP1, ?FILE_REF(?TRASH_DIR_GUID(?SPACE_ID1)), <<"mimetype">>)),
?assertMatch(?ERROR_POSIX(?EPERM),
opt_cdmi:set_cdmi_completion_status(P1Node, UserSessIdP1, ?FILE_REF(?TRASH_DIR_GUID(?SPACE_ID1)), <<"COMPLETED">>)),
?assertMatch(?ERROR_POSIX(?EPERM),
opt_cdmi:set_transfer_encoding(P1Node, UserSessIdP1, ?FILE_REF(?TRASH_DIR_GUID(?SPACE_ID1)), <<"base64">>)).
create_share_from_trash_dir_is_forbidden(_Config) ->
[P1Node] = oct_background:get_provider_nodes(krakow),
UserSessIdP1 = oct_background:get_user_session_id(user1, krakow),
?assertMatch(?ERROR_POSIX(?EPERM),
opt_shares:create(P1Node, UserSessIdP1, ?FILE_REF(?TRASH_DIR_GUID(?SPACE_ID1)), <<"MY SHARE">>)).
add_qos_entry_for_trash_dir_is_forbidden(_Config) ->
[P1Node] = oct_background:get_provider_nodes(krakow),
UserSessIdP1 = oct_background:get_user_session_id(user1, krakow),
?assertMatch(?ERROR_POSIX(?EPERM),
opt_qos:add_qos_entry(P1Node, UserSessIdP1, ?FILE_REF(?TRASH_DIR_GUID(?SPACE_ID1)), <<"key=value">>, 1)).
remove_metadata_on_trash_dir_is_forbidden(_Config) ->
[P1Node] = oct_background:get_provider_nodes(krakow),
UserSessIdP1 = oct_background:get_user_session_id(user1, krakow),
?assertMatch(?ERROR_POSIX(?EPERM),
opt_file_metadata:remove_custom_metadata(P1Node, UserSessIdP1, ?FILE_REF(?TRASH_DIR_GUID(?SPACE_ID1)), json)).
schedule_replication_transfer_on_trash_dir_is_forbidden(_Config) ->
[P1Node] = oct_background:get_provider_nodes(krakow),
UserSessIdP1 = oct_background:get_user_session_id(user1, krakow),
P2Id = oct_background:get_provider_id(paris),
?assertMatch(?ERROR_POSIX(?EPERM),
opt_transfers:schedule_file_replication(P1Node, UserSessIdP1, ?FILE_REF(?TRASH_DIR_GUID(?SPACE_ID1)), P2Id)).
schedule_eviction_transfer_on_trash_dir_is_allowed(_Config) ->
[P1Node] = oct_background:get_provider_nodes(krakow),
UserSessIdP1 = oct_background:get_user_session_id(user1, krakow),
P1Id = oct_background:get_provider_id(krakow),
{ok, TransferId} = ?assertMatch({ok, _},
opt_transfers:schedule_file_replica_eviction(P1Node, UserSessIdP1, ?FILE_REF(?TRASH_DIR_GUID(?SPACE_ID1)), P1Id, undefined)),
?assertMatch({ok, #document{value = #transfer{eviction_status = completed}}},
rpc:call(P1Node, transfer, get, [TransferId]), ?ATTEMPTS).
schedule_migration_transfer_on_trash_dir_is_forbidden(_Config) ->
[P1Node] = oct_background:get_provider_nodes(krakow),
UserSessIdP1 = oct_background:get_user_session_id(user1, krakow),
P1Id = oct_background:get_provider_id(krakow),
P2Id = oct_background:get_provider_id(paris),
?assertMatch(?ERROR_POSIX(?EPERM),
opt_transfers:schedule_file_replica_eviction(P1Node, UserSessIdP1, ?FILE_REF(?TRASH_DIR_GUID(?SPACE_ID1)), P1Id, P2Id)).
schedule_replication_transfer_on_space_does_not_replicate_trash(_Config) ->
[P1Node] = oct_background:get_provider_nodes(krakow),
DirName = ?RAND_DIR_NAME,
UserSessIdP1 = oct_background:get_user_session_id(user1, krakow),
{ok, DirGuid} = lfm_proxy:mkdir(P1Node, UserSessIdP1, ?SPACE_GUID, DirName, ?DEFAULT_DIR_PERMS),
lfm_test_utils:create_files_tree(P1Node, UserSessIdP1, [{10, 10}], DirGuid),
ok = lfm_proxy:rm_recursive(P1Node, UserSessIdP1, ?FILE_REF(DirGuid)),
?assertMatch({ok, [{DirGuid, _}]},
lfm_proxy:get_children(P1Node, UserSessIdP1, ?FILE_REF(?TRASH_DIR_GUID(?SPACE_ID1)), 0, 10), ?ATTEMPTS),
P2Id = oct_background:get_provider_id(paris),
{ok, TransferId} = ?assertMatch({ok, _},
opt_transfers:schedule_file_replication(P1Node, UserSessIdP1, ?FILE_REF(?SPACE_GUID), P2Id)),
?assertMatch({ok, #document{value = #transfer{
replication_status = completed,
files_replicated = 0
}}}, rpc:call(P1Node, transfer, get, [TransferId]), ?ATTEMPTS).
%% Replicates a file to the paris provider (P2) by reading it there, then
%% schedules an eviction transfer on the whole space directory and verifies
%% that exactly one replica is evicted from krakow (P1), leaving P1 with 0
%% bytes and P2 with the full file.
%% Fix: two in-function comment lines had lost their leading '%' markers
%% (which made the module un-compilable); markers restored.
schedule_eviction_transfer_on_space_evicts_trash(_Config) ->
    [P1Node] = oct_background:get_provider_nodes(krakow),
    [P2Node] = oct_background:get_provider_nodes(paris),
    DirName = ?RAND_DIR_NAME,
    FileName = ?RAND_FILE_NAME,
    UserSessIdP1 = oct_background:get_user_session_id(user1, krakow),
    UserSessIdP2 = oct_background:get_user_session_id(user1, paris),
    TestData = <<"test data">>,
    Size = byte_size(TestData),
    {ok, DirGuid} = lfm_proxy:mkdir(P1Node, UserSessIdP1, ?SPACE_GUID, DirName, ?DEFAULT_DIR_PERMS),
    {ok, {FileGuid, H}} =
        ?assertMatch({ok, _}, lfm_proxy:create_and_open(P1Node, UserSessIdP1, DirGuid, FileName, ?DEFAULT_FILE_PERMS), ?ATTEMPTS),
    ?assertMatch({ok, _}, lfm_proxy:write(P1Node, H, 0, TestData), ?ATTEMPTS),
    lfm_proxy:close(P1Node, H),
    % read file on P2 to replicate it
    {ok, H2} =
        ?assertMatch({ok, _}, lfm_proxy:open(P2Node, UserSessIdP2, ?FILE_REF(FileGuid), read), ?ATTEMPTS),
    ?assertEqual(Size, try
        {ok, Bytes} = lfm_proxy:read(P2Node, H2, 0, Size),
        byte_size(Bytes)
    catch
        _:_ ->
            error
    end, ?ATTEMPTS),
    lfm_proxy:close(P2Node, H2),
    P1Id = oct_background:get_provider_id(krakow),
    P2Id = oct_background:get_provider_id(paris),
    % both providers should now hold a full replica
    ?assertDistribution(P1Node, UserSessIdP1, ?DISTS([P1Id, P2Id], [Size, Size]), FileGuid, ?ATTEMPTS),
    % @TODO VFS-9498 not needed after replica_deletion uses fetched file location instead of dbsynced
    ?assertEqual({ok, [[0, Size]]},
        opt_file_metadata:get_local_knowledge_of_remote_provider_blocks(P1Node, FileGuid, P2Id), ?ATTEMPTS),
    {ok, TransferId} = ?assertMatch({ok, _},
        opt_transfers:schedule_file_replica_eviction(P1Node, UserSessIdP1, ?FILE_REF(?SPACE_GUID), P1Id, undefined)),
    ?assertMatch({ok, #document{value = #transfer{
        eviction_status = completed,
        files_evicted = 1
    }}}, rpc:call(P1Node, transfer, get, [TransferId]), ?ATTEMPTS),
    % only P1's replica is gone; P2 still holds the whole file
    ?assertDistribution(P1Node, UserSessIdP1, ?DISTS([P1Id, P2Id], [0, Size]), FileGuid, ?ATTEMPTS).
%% Moves a directory tree to trash, then schedules a migration transfer
%% (replication to P2 + eviction from P1) on the whole space directory and
%% verifies that the transfer completes while neither replicating nor
%% evicting any trashed file (files_replicated = 0, files_evicted = 0).
schedule_migration_transfer_on_space_does_not_replicate_trash(_Config) ->
    [P1Node] = oct_background:get_provider_nodes(krakow),
    [P2Node] = oct_background:get_provider_nodes(paris),
    DirName = ?RAND_DIR_NAME,
    UserSessIdP1 = oct_background:get_user_session_id(user1, krakow),
    UserSessIdP2 = oct_background:get_user_session_id(user1, paris),
    {ok, DirGuid} = lfm_proxy:mkdir(P1Node, UserSessIdP1, ?SPACE_GUID, DirName, ?DEFAULT_DIR_PERMS),
    lfm_test_utils:create_files_tree(P1Node, UserSessIdP1, [{0, 10}], DirGuid),
    DirCtx = file_ctx:new_by_guid(DirGuid),
    move_to_trash(P1Node, DirCtx, UserSessIdP1),
    % wait until the trashed directory is visible in the trash dir on P2
    ?assertMatch({ok, [{DirGuid, _}]},
        lfm_proxy:get_children(P2Node, UserSessIdP2, ?FILE_REF(?TRASH_DIR_GUID(?SPACE_ID1)), 0, 10), ?ATTEMPTS),
    P1Id = oct_background:get_provider_id(krakow),
    P2Id = oct_background:get_provider_id(paris),
    {ok, TransferId} = ?assertMatch({ok, _},
        opt_transfers:schedule_file_replica_eviction(P1Node, UserSessIdP1, ?FILE_REF(?SPACE_GUID), P1Id, P2Id)),
    ?assertMatch({ok, #document{value = #transfer{
        replication_status = completed,
        eviction_status = completed,
        files_replicated = 0,
        files_evicted = 0
    }}}, rpc:call(P1Node, transfer, get, [TransferId]), ?ATTEMPTS).
%% Moves a directory tree to trash (via rm_recursive in the preceding tests;
%% here via the move_to_trash/3 helper) and verifies that: the space dir
%% becomes empty on both providers, the directory shows up in the trash dir,
%% and registering a file under the directory's original path is rejected
%% with HTTP 400 (presumably because the path is still occupied by the
%% trashed entry on storage -- TODO confirm).
move_to_trash_should_work(_Config) ->
    [P1Node] = oct_background:get_provider_nodes(krakow),
    [P2Node] = oct_background:get_provider_nodes(paris),
    UserSessIdP1 = oct_background:get_user_session_id(user1, krakow),
    UserSessIdP2 = oct_background:get_user_session_id(user1, paris),
    DirName = ?RAND_DIR_NAME,
    {ok, DirGuid} = lfm_proxy:mkdir(P1Node, UserSessIdP1, ?SPACE_GUID, DirName, ?DEFAULT_DIR_PERMS),
    DirCtx = file_ctx:new_by_guid(DirGuid),
    lfm_test_utils:create_files_tree(P1Node, UserSessIdP1, [{10, 10}, {10, 10}, {10, 10}], DirGuid),
    move_to_trash(P1Node, DirCtx, UserSessIdP1),
    lfm_test_utils:assert_space_dir_empty(P1Node, ?SPACE_ID1, ?ATTEMPTS),
    lfm_test_utils:assert_space_dir_empty(P2Node, ?SPACE_ID1, ?ATTEMPTS),
    % the directory is now listed in the trash directory on both providers
    ?assertMatch({ok, [{DirGuid, _}]}, lfm_proxy:get_children(P1Node, UserSessIdP1, ?FILE_REF(?TRASH_DIR_GUID(?SPACE_ID1)), 0, 10)),
    ?assertMatch({ok, [{DirGuid, _}]}, lfm_proxy:get_children(P2Node, UserSessIdP2, ?FILE_REF(?TRASH_DIR_GUID(?SPACE_ID1)), 0, 10), ?ATTEMPTS),
    StorageFileId = filename:join(["/", DirName]),
    [StorageId] = opw_test_rpc:get_space_local_storages(P1Node, ?SPACE_ID1),
    % re-registering the same path must fail while the entry sits in trash
    ?assertMatch({ok, ?HTTP_400_BAD_REQUEST, _, _}, register_file(P1Node, user1, #{
        <<"spaceId">> => ?SPACE_ID1,
        <<"destinationPath">> => DirName,
        <<"storageFileId">> => StorageFileId,
        <<"storageId">> => StorageId,
        <<"mtime">> => global_clock:timestamp_seconds(),
        <<"size">> => 10,
        <<"mode">> => <<"664">>,
        <<"autoDetectAttributes">> => false
    })).
%% Moves a directory tree to trash, schedules its deletion and verifies that:
%% both space and trash become empty on both providers, every created file
%% eventually answers ENOENT, and the freed path can be registered again
%% (HTTP 201).
move_to_trash_and_schedule_deletion_should_work(_Config) ->
    [P1Node] = oct_background:get_provider_nodes(krakow),
    [P2Node] = oct_background:get_provider_nodes(paris),
    DirName = ?RAND_DIR_NAME,
    UserSessIdP1 = oct_background:get_user_session_id(user1, krakow),
    UserSessIdP2 = oct_background:get_user_session_id(user1, paris),
    {ok, DirGuid} = lfm_proxy:mkdir(P1Node, UserSessIdP1, ?SPACE_GUID, DirName, ?DEFAULT_DIR_PERMS),
    {DirGuids, FileGuids} = lfm_test_utils:create_files_tree(P1Node, UserSessIdP1, [{10, 10}, {10, 10}, {10, 10}], DirGuid),
    DirCtx = file_ctx:new_by_guid(DirGuid),
    move_to_trash(P1Node, DirCtx, UserSessIdP1),
    schedule_deletion_from_trash(P1Node, DirCtx, UserSessIdP1, ?SPACE_UUID, DirName),
    lfm_test_utils:assert_space_and_trash_are_empty(P1Node, ?SPACE_ID1, ?ATTEMPTS),
    lfm_test_utils:assert_space_and_trash_are_empty(P2Node, ?SPACE_ID1, ?ATTEMPTS),
    ?assertMatch({ok, []}, lfm_proxy:get_children(P1Node, UserSessIdP1, ?FILE_REF(?TRASH_DIR_GUID(?SPACE_ID1)), 0, 10), ?ATTEMPTS),
    ?assertMatch({ok, []}, lfm_proxy:get_children(P2Node, UserSessIdP2, ?FILE_REF(?TRASH_DIR_GUID(?SPACE_ID1)), 0, 10), ?ATTEMPTS),
    % every single file and directory of the tree must be gone on both sides
    lists:foreach(fun(G) ->
        ?assertMatch({error, ?ENOENT}, lfm_proxy:stat(P1Node, UserSessIdP1, ?FILE_REF(G)), ?ATTEMPTS),
        ?assertMatch({error, ?ENOENT}, lfm_proxy:stat(P2Node, UserSessIdP2, ?FILE_REF(G)), ?ATTEMPTS)
    end, DirGuids ++ FileGuids ++ [DirGuid]),
    StorageFileId = filename:join([?DIRECTORY_SEPARATOR, DirName]),
    [StorageId] = opw_test_rpc:get_space_local_storages(P1Node, ?SPACE_ID1),
    Size = 10,
    % after real deletion the path is free again, so registration succeeds
    ?assertMatch({ok, ?HTTP_201_CREATED, _, _}, register_file(P1Node, user1, #{
        <<"spaceId">> => ?SPACE_ID1,
        <<"destinationPath">> => DirName,
        <<"storageFileId">> => StorageFileId,
        <<"storageId">> => StorageId,
        <<"mtime">> => global_clock:timestamp_seconds(),
        <<"size">> => Size,
        <<"mode">> => <<"664">>,
        <<"autoDetectAttributes">> => false
    })).
%% rm_recursive (move to trash) must fail with EACCES for every POSIX mode
%% that lacks sufficient permissions on the directory.
%% Fix: the in-function comment line had lost its leading '%' marker
%% (making the module un-compilable); marker restored.
move_to_trash_should_fail_if_user_does_not_have_sufficient_perms(_Config) ->
    [P1Node] = oct_background:get_provider_nodes(krakow),
    % perform test as user2 as he's not a space owner
    UserSessIdP1 = oct_background:get_user_session_id(user2, krakow),
    InsufficientPerms = [8#600, 8#500, 8#400],
    lists:foreach(fun(Perms) ->
        DirName = ?RAND_DIR_NAME,
        {ok, DirGuid} = lfm_proxy:mkdir(P1Node, UserSessIdP1, ?SPACE_GUID, DirName, Perms),
        ?assertMatch({error, ?EACCES}, lfm_proxy:rm_recursive(P1Node, UserSessIdP1, ?FILE_REF(DirGuid)))
    end, InsufficientPerms).
%% rm_recursive (move to trash) must fail with EACCES when any single
%% required ACL permission is removed from an otherwise-complete ACL.
%% Fix: two in-function comment lines had lost their leading '%' markers
%% (making the module un-compilable); markers restored and the mangled
%% second comment reconstructed from the RequiredPerms contents.
move_to_trash_should_fail_if_required_acl_perm_is_missing(_Config) ->
    [P1Node] = oct_background:get_provider_nodes(krakow),
    % perform test as user2 as he's not a space owner
    UserSessIdP1 = oct_background:get_user_session_id(user2, krakow),
    % ?delete_subcontainer and ?delete_object are mapped to the same bitmask
    % so we have to remove both of them
    RequiredPerms = [?delete, ?list_container, ?traverse_container, [?delete_subcontainer, ?delete_object]],
    lists:foreach(fun(RequiredPerm) ->
        DirName = ?RAND_DIR_NAME,
        {ok, DirGuid} = lfm_proxy:mkdir(P1Node, UserSessIdP1, ?SPACE_GUID, DirName, ?DEFAULT_DIR_PERMS),
        Perms = ?ALL_DIR_PERMS -- utils:ensure_list(RequiredPerm),
        ok = lfm_proxy:set_acl(P1Node, UserSessIdP1, ?FILE_REF(DirGuid), [perms_to_allow_ace(Perms)]),
        ?assertMatch({error, ?EACCES}, lfm_proxy:rm_recursive(P1Node, UserSessIdP1, ?FILE_REF(DirGuid)))
    end, RequiredPerms).
%% A QoS entry attached directly to the file must stop applying in trash.
qos_set_on_file_does_not_affect_file_in_trash(Config) ->
    qos_does_not_affect_files_in_trash_test_base(Config, file).
%% A QoS entry on the parent directory must stop applying once it is trashed.
qos_set_on_parent_directory_does_not_affect_files_in_trash(Config) ->
    qos_does_not_affect_files_in_trash_test_base(Config, parent_dir).
%% A QoS entry on the space directory must not apply to files in trash.
qos_set_on_space_directory_does_not_affect_files_in_trash(Config) ->
    qos_does_not_affect_files_in_trash_test_base(Config, space_dir).
%% Verifies that storage import does not re-import files that were moved to
%% trash: after trashing the imported directory, a forced auto scan must
%% leave the space directory empty.
%% Fix: three in-function comment lines had lost their leading '%' markers
%% (making the module un-compilable); markers restored.
files_from_trash_are_not_reimported(_Config) ->
    % this test is performed in ?SPACE2 which is supported by ImportedNullStorage2
    % with structure 1-0:10-10
    % (1 root directory with 10 subdirectories and 10 files)
    [P1Node] = oct_background:get_provider_nodes(krakow),
    UserSessIdP1 = oct_background:get_user_session_id(user1, krakow),
    % ensure that 1st scan has been finished
    ?assertEqual(true, rpc:call(P1Node, storage_import_monitoring, is_initial_scan_finished, [?SPACE_ID2]), ?ATTEMPTS),
    {ok, [{DirGuid, _}]} = lfm_proxy:get_children(P1Node, UserSessIdP1, ?FILE_REF(?SPACE_GUID(?SPACE_ID2)), 0, 1000),
    DirCtx = file_ctx:new_by_guid(DirGuid),
    move_to_trash(P1Node, DirCtx, UserSessIdP1),
    ok = rpc:call(P1Node, storage_import, start_auto_scan, [?SPACE_ID2]),
    ?assertEqual(true, rpc:call(P1Node, storage_import_monitoring, is_scan_finished, [?SPACE_ID2, 2]), ?ATTEMPTS),
    % scan no. 2 must not have re-imported the trashed tree
    ?assertMatch({ok, []}, lfm_proxy:get_children(P1Node, UserSessIdP1, ?FILE_REF(?SPACE_GUID(?SPACE_ID2)), 0, 1000)).
%% Deletion from trash spanning a single simulated 4-day time warp must
%% still complete successfully.
%% Fix: TimeWarp was unbound in this revision (its binding line was lost,
%% leaving only the "4 days" comment). NOTE(review): value reconstructed
%% as 4 days in seconds from the testcase name -- confirm against VCS history.
deletion_lasting_for_4_days_should_succeed(Config) ->
    % 4 days
    TimeWarp = 4 * 24 * 3600,
    long_lasting_deletion_test_base(Config, 1, TimeWarp, 0, success).
%% Deletion from trash spanning 20 simulated 2-day time warps (40 days in
%% total) must still succeed when sessions keep being refreshed in between.
%% Fix: TimeWarp was unbound in this revision (its binding line was lost,
%% leaving only the "2 days" comment). NOTE(review): value reconstructed
%% as 2 days in seconds -- confirm against VCS history.
deletion_lasting_for_40_days_should_succeed(Config) ->
    % This test simulates 20 time warps, each of them warps 2 days forward.
    % Interval between simulating time warps is 30 seconds.
    TimeWarpsCount = 20,
    % 2 days
    TimeWarp = 2 * 24 * 3600,
    Interval = 30,
    % deletion from trash will last for (simulated) 40 days
    long_lasting_deletion_test_base(Config, TimeWarpsCount, TimeWarp, Interval, success).
%% A single simulated 40-day time warp must make the deletion fail, because
%% the offline session cannot be refreshed in time.
%% Fix: TimeWarp was unbound in this revision (its binding line was lost,
%% leaving only the "40 days" comment). NOTE(review): value reconstructed
%% as 40 days in seconds -- confirm against VCS history.
deletion_lasting_for_40_days_should_fail_if_session_is_not_refreshed_within_expected_time(Config) ->
    % This test simulates a 40 day time warp which will result in failed
    % refresh of the offline session (offline sessions are valid for a month)
    % 40 days
    TimeWarp = 40 * 24 * 3600,
    long_lasting_deletion_test_base(Config, 1, TimeWarp, 1, failure).
%% Common base for the qos_set_on_* testcases. It adds a QoS entry for the
%% file determined by the SetQosOn parameter (the file itself, its parent
%% directory or the space directory), waits until QoS replicates the file
%% to P1, then moves the parent directory to trash and checks that data
%% written afterwards is NOT synchronized anymore (P1 stays at Size1 while
%% P2 grows to Size2).
%% Fix: two in-function comment lines had lost their leading '%' markers
%% (making the module un-compilable); markers restored.
qos_does_not_affect_files_in_trash_test_base(_Config, SetQosOn) ->
    [P1Node] = oct_background:get_provider_nodes(krakow),
    [P2Node] = oct_background:get_provider_nodes(paris),
    [StorageId] = opw_test_rpc:get_space_local_storages(P1Node, ?SPACE_ID1),
    % make P1's storage match the key=value QoS expression used below
    ok = rpc:call(P1Node, storage_logic, set_qos_parameters, [StorageId, #{<<"key">> => <<"value">>}]),
    UserSessIdP1 = oct_background:get_user_session_id(user1, krakow),
    UserSessIdP2 = oct_background:get_user_session_id(user1, paris),
    P1Id = oct_background:get_provider_id(krakow),
    P2Id = oct_background:get_provider_id(paris),
    DirName = ?RAND_DIR_NAME,
    FileName = ?RAND_FILE_NAME,
    {ok, DirGuid} = lfm_proxy:mkdir(P2Node, UserSessIdP2, ?SPACE_GUID, DirName, ?DEFAULT_DIR_PERMS),
    DirCtx = file_ctx:new_by_guid(DirGuid),
    {ok, {FileGuid, H1}} = lfm_proxy:create_and_open(P2Node, UserSessIdP2, DirGuid, FileName, ?DEFAULT_FILE_PERMS),
    TestData1 = <<"first part ">>,
    TestData2 = <<"seconds part">>,
    Size1 = byte_size(TestData1),
    Size2 = Size1 + byte_size(TestData2),
    {ok, _} = lfm_proxy:write(P2Node, H1, 0, TestData1),
    lfm_proxy:fsync(P2Node, H1),
    GuidWithQos = case SetQosOn of
        space_dir -> ?SPACE_GUID;
        parent_dir -> DirGuid;
        file -> FileGuid
    end,
    {ok, QosEntryId} = ?assertMatch(
        {ok, _},
        opt_qos:add_qos_entry(P1Node, UserSessIdP1, ?FILE_REF(GuidWithQos), <<"key=value">>, 1),
        ?ATTEMPTS
    ),
    % check whether QoS synchronized the file
    ?assertMatch({ok, {#{QosEntryId := fulfilled}, _}},
        opt_qos:get_effective_file_qos(P1Node, UserSessIdP1, ?FILE_REF(GuidWithQos)), ?ATTEMPTS),
    ?assertDistribution(P1Node, UserSessIdP1, ?DISTS([P1Id, P2Id], [Size1, Size1]), FileGuid, ?ATTEMPTS),
    move_to_trash(P1Node, DirCtx, UserSessIdP1),
    % data written after the move must no longer be replicated by QoS
    {ok, _} = lfm_proxy:write(P2Node, H1, Size1, TestData2),
    lfm_proxy:close(P2Node, H1),
    ?assertDistribution(P1Node, UserSessIdP1, ?DISTS([P1Id, P2Id], [Size1, Size2]), FileGuid, ?ATTEMPTS).
%% Base for the deletion_lasting_* testcases. Moves a file tree to trash,
%% schedules its deletion and, while the deletion traverse runs, simulates
%% TimeWarpsCount time warps of TimeWarpPeriod seconds each, sleeping
%% TimeWarpInterval seconds between warps. Depending on ExpectedResult it
%% verifies either that everything got deleted (success) or that at least
%% one file survived (failure).
%% Fix: an in-function comment line had lost its leading '%' marker
%% (making the module un-compilable); marker restored.
long_lasting_deletion_test_base(_Config, TimeWarpsCount,
    TimeWarpPeriod, TimeWarpInterval, ExpectedResult
) ->
    [P1Node] = oct_background:get_provider_nodes(krakow),
    [P2Node] = oct_background:get_provider_nodes(paris),
    DirName = ?RAND_DIR_NAME,
    UserSessIdP1 = oct_background:get_user_session_id(user1, krakow),
    {ok, DirGuid} = lfm_proxy:mkdir(P1Node, UserSessIdP1, ?SPACE_GUID, DirName, ?DEFAULT_DIR_PERMS),
    {DirGuids, FileGuids} = lfm_test_utils:create_files_tree(P1Node, UserSessIdP1, [{10, 10}, {10, 10}, {10, 10}], DirGuid),
    DirCtx = file_ctx:new_by_guid(DirGuid),
    % the mock makes this process receive {traverse_finished, TaskId}
    mock_traverse_finished(P1Node, self()),
    move_to_trash(P1Node, DirCtx, UserSessIdP1),
    {ok, TaskId} = schedule_deletion_from_trash(P1Node, DirCtx, UserSessIdP1, ?SPACE_UUID, DirName),
    lists:foreach(fun(_) ->
        time_test_utils:simulate_seconds_passing(TimeWarpPeriod),
        timer:sleep(timer:seconds(TimeWarpInterval))
    end, lists:seq(1, TimeWarpsCount)),
    await_traverse_finished(TaskId, 600),
    case ExpectedResult of
        success ->
            % use ?ROOT_SESS_ID in below asserts as normal sessions may have expired
            lfm_test_utils:assert_space_and_trash_are_empty(P1Node, ?SPACE_ID1, ?ATTEMPTS),
            lfm_test_utils:assert_space_and_trash_are_empty(P2Node, ?SPACE_ID1, ?ATTEMPTS),
            ?assertMatch({ok, []}, lfm_proxy:get_children(P1Node, ?ROOT_SESS_ID, ?FILE_REF(?TRASH_DIR_GUID(?SPACE_ID1)), 0, 10), ?ATTEMPTS),
            ?assertMatch({ok, []}, lfm_proxy:get_children(P2Node, ?ROOT_SESS_ID, ?FILE_REF(?TRASH_DIR_GUID(?SPACE_ID1)), 0, 10), ?ATTEMPTS),
            lists:foreach(fun(G) ->
                ?assertMatch({error, ?ENOENT}, lfm_proxy:stat(P1Node, ?ROOT_SESS_ID, ?FILE_REF(G)), ?ATTEMPTS),
                ?assertMatch({error, ?ENOENT}, lfm_proxy:stat(P2Node, ?ROOT_SESS_ID, ?FILE_REF(G)), ?ATTEMPTS)
            end, DirGuids ++ FileGuids ++ [DirGuid]);
        failure ->
            % the traverse must have stopped before deleting the whole tree
            AllFilesNum = length([DirGuid | DirGuids] ++ FileGuids),
            DeletedFilesNum = lists:foldl(fun(Guid, Acc) ->
                case lfm_proxy:stat(P1Node, ?ROOT_SESS_ID, ?FILE_REF(Guid)) of
                    {ok, _} -> Acc;
                    {error, ?ENOENT} -> Acc + 1
                end
            end, 0, [DirGuid | DirGuids] ++ FileGuids),
            ?assertNotEqual(AllFilesNum, DeletedFilesNum)
    end.
%%% SetUp and TearDown functions
%% Starts the 2-provider onenv environment ("2op-manual-import") with
%% directory-stats counting disabled for the whole suite.
init_per_suite(Config) ->
    oct_background:init_per_suite([{?LOAD_MODULES, [dir_stats_test_utils]} | Config],
        #onenv_test_config{
            onenv_scenario = "2op-manual-import",
            posthook = fun dir_stats_test_utils:disable_stats_counting_ct_posthook/1
        }).
%% Tears the environment down and re-enables directory-stats counting.
end_per_suite(Config) ->
    oct_background:end_per_suite(),
    dir_stats_test_utils:enable_stats_counting(Config).
%% The deletion_lasting_* testcases freeze the global clock before the
%% common initialization; every testcase initializes lfm_proxy on an
%% updated background config.
init_per_testcase(Case, Config) when
    Case =:= deletion_lasting_for_4_days_should_succeed orelse
    Case =:= deletion_lasting_for_40_days_should_succeed orelse
    Case =:= deletion_lasting_for_40_days_should_fail_if_session_is_not_refreshed_within_expected_time
->
    time_test_utils:freeze_time(Config),
    init_per_testcase(default, Config);
init_per_testcase(_Case, Config) ->
    Config2 = oct_background:update_background_config(Config),
    lfm_proxy:init(Config2).
%% Unfreezes simulated time and cleans the space on both providers after
%% every testcase, then tears down lfm_proxy.
end_per_testcase(_Case, Config) ->
    [P1Node] = oct_background:get_provider_nodes(krakow),
    [P2Node] = oct_background:get_provider_nodes(paris),
    AllNodes = [P1Node, P2Node],
    time_test_utils:unfreeze_time(Config),
    lfm_test_utils:clean_space(P1Node, AllNodes, ?SPACE_ID1, ?ATTEMPTS),
    lfm_proxy:teardown(Config).
%%% Internal functions
%% Moves the file behind FileCtx to trash on Worker, acting as SessId.
move_to_trash(Worker, FileCtx, SessId) ->
    rpc:call(Worker, trash, move_to_trash,
        [FileCtx, rpc:call(Worker, user_ctx, new, [SessId])]).
%% Schedules asynchronous deletion of a trashed file on Worker, acting as
%% SessId; callers match the result against {ok, TaskId}.
schedule_deletion_from_trash(Worker, FileCtx, SessId, RootOriginalParentUuid, DirName) ->
    Args = [FileCtx, rpc:call(Worker, user_ctx, new, [SessId]), false, RootOriginalParentUuid, DirName],
    rpc:call(Worker, trash, schedule_deletion_from_trash, Args).
%% POSTs Body (JSON-encoded) to the data/register REST endpoint as User and
%% returns the raw rest_test_utils:request/5 result.
register_file(Worker, User, Body) ->
    Payload = json_utils:encode(Body),
    AuthHeaders = #{
        ?HDR_X_AUTH_TOKEN => oct_background:get_user_access_token(User),
        ?HDR_CONTENT_TYPE => <<"application/json">>
    },
    rest_test_utils:request(Worker, <<"data/register">>, post, AuthHeaders, Payload).
%% Builds an ALLOW ACE for the file owner granting the given permissions.
perms_to_allow_ace(Perms) ->
    ?ALLOW_ACE(?owner, ?no_flags_mask, permissions_test_utils:perms_to_bitmask(Perms)).
%% Mocks tree_deletion_traverse so that TestProcess receives a
%% {traverse_finished, TaskId} message whenever a traverse task finishes or
%% is canceled; the original callback is still executed via passthrough.
%% Improvement: the two identical anonymous callbacks were deduplicated
%% into a single shared fun (behavior unchanged).
mock_traverse_finished(Worker, TestProcess) ->
    ok = test_utils:mock_new(Worker, tree_deletion_traverse),
    NotifyAndPassthrough = fun(TaskId, Pool) ->
        Result = meck:passthrough([TaskId, Pool]),
        TestProcess ! {traverse_finished, TaskId},
        Result
    end,
    ok = test_utils:mock_expect(Worker, tree_deletion_traverse, task_finished, NotifyAndPassthrough),
    ok = test_utils:mock_expect(Worker, tree_deletion_traverse, task_canceled, NotifyAndPassthrough).
%% Blocks until the mocked traverse callbacks (see mock_traverse_finished/2)
%% report completion of TaskId; fails the testcase after Attempts seconds.
%% Fix: removed a stray trailing '|' character that had been fused onto the
%% final 'end.' (syntax error).
await_traverse_finished(TaskId, Attempts) ->
    receive {traverse_finished, TaskId} -> ok
    after
        timer:seconds(Attempts) ->
            ct:fail("Traverse ~s not finished in expected time", [TaskId])
    end.
; SICP exercise 3.30
;
; Figure 3.27 shows a ripple-carry adder formed by stringing together n
; full-adders. This is the simplest form of parallel adder for adding two
; n-bit binary numbers. The inputs A₁, A₂, A₃, ..., Aᵢ and B₁, B₂, B₃, ...,
; Bᵢ are the two binary numbers to be added (each Aᵣ and Bᵣ is a 0 or a 1). The
; circuit generates S₁, S₂, S₃, ..., Sᵢ, the i bits of the sum, and C, the
; carry from the addition. Write a procedure ripple-carry-adder that generates
; that circuit. The procedure should take as arguments three lists of i wires
; each - the Aᵣ, the Bᵣ and the Sᵣ - and also another wire C. The major
; drawback of the ripple-carry adder is the need to wait for the signals to
; propagate. What is the delay needed to obtain the complete output from an
; i-bit ripple-carry adder, expressed in terms of the delays of and-gates,
; or-gates, and inverters?
; The ripple-carry-adder procedure is defined below.
;
; As for the delay:
;
; The half-adder has the following delays for each output:
;
; s: and + not + and | or + and (whichever is slower)
; c: and
;
; The full-adder has the following delays:
;
; sum: and + not + and | or + and
; c-out: (and + not + and | or + and) + and + or
;
; Finally, the delay of C in an n-bit ripple adder is:
;
; n-ripple: n((and + not + and | or + and) + and + or)
;
; With the delays we have defined, this is either:
;
; n(3and + or + not)
;
; or
;
; n(2or + 2and)
;
; whichever is slower. However, we need to wait for the last SUM too. We can
; subtract (and + or) from the time (since this is the time that takes the
; signal to travel through the carry from the second half-adder in the last
; adder and then through the and gate) and then we can add the s time for
; another half-adder, which makes the total time:
;
; n(3and + or + not) + and + not - or
;
; or, again:
;
; n(2or + 2and)
(require r5rs/init)
; Ripple-carry adder
;; Wires up an n-bit ripple-carry adder from n full-adders. a, b and s are
;; equally long lists of wires, c-in is the external carry-in and c-out the
;; external carry-out. The lists are reversed before wiring, so the stage
;; built from the LAST elements of a/b/s drives c-out, while the stage built
;; from the FIRST elements receives c-in; a fresh internal wire connects each
;; stage's carry-in to the following stage's carry-out.
(define (ripple-carry-adder a b c-in s c-out)
  (define (ripple a b s c)
    (cond ((null? (cdr a))
           ;; final stage: hook up the external carry-in
           (full-adder (car a) (car b) c-in (car s) c))
          (else
            (let ((w (make-wire)))
              (full-adder (car a) (car b) w (car s) c)
              (ripple (cdr a) (cdr b) (cdr s) w)))))
  (ripple (reverse a) (reverse b) (reverse s) c-out))
; Half-adder & adder
;; Half-adder: c = a AND b (the carry), s = a XOR b, computed as
;; (a OR b) AND (NOT c), i.e. s is high only when exactly one input is high.
(define (half-adder a b s c)
  (let ((a-or-b (make-wire))
        (not-carry (make-wire)))
    (or-gate a b a-or-b)
    (and-gate a b c)
    (inverter c not-carry)
    (and-gate a-or-b not-carry s)
    'ok))
;; Full-adder built from two half-adders and an or-gate: sum is the low bit
;; of a + b + c-in, c-out goes high when either half-adder produced a carry.
(define (full-adder a b c-in sum c-out)
  (let ((partial-sum (make-wire))
        (carry-1 (make-wire))
        (carry-2 (make-wire)))
    (half-adder b c-in partial-sum carry-1)
    (half-adder a partial-sum sum carry-2)
    (or-gate carry-1 carry-2 c-out)
    'ok))
; Primitive function boxes
;; Inverter function box: whenever INPUT changes, schedule OUTPUT to take
;; the logical negation of INPUT after inverter-delay time units.
(define (inverter input output)
  (add-action!
   input
   (lambda ()
     (let ((inverted (logical-not (get-signal input))))
       (after-delay inverter-delay
                    (lambda () (set-signal! output inverted))))))
  'ok)
;; And-gate function box: recomputes OUTPUT = A1 AND A2 (after
;; and-gate-delay) whenever either input changes.
(define (and-gate a1 a2 output)
  (define (react)
    (let ((result (logical-and (get-signal a1) (get-signal a2))))
      (after-delay and-gate-delay
                   (lambda () (set-signal! output result)))))
  (add-action! a1 react)
  (add-action! a2 react)
  'ok)
;; Or-gate function box: recomputes OUTPUT = O1 OR O2 (after or-gate-delay)
;; whenever either input changes.
(define (or-gate o1 o2 output)
  (define (react)
    (let ((result (logical-or (get-signal o1) (get-signal o2))))
      (after-delay or-gate-delay
                   (lambda () (set-signal! output result)))))
  (add-action! o1 react)
  (add-action! o2 react)
  'ok)
; Logical functions
;; Negate a binary signal: 0 -> 1, 1 -> 0; any other value is an error.
(define (logical-not s)
  (case s
    ((0) 1)
    ((1) 0)
    (else (error "Invalid signal" s))))
;; Binary AND on signals; both arguments must be 0 or 1, otherwise error.
(define (logical-and a b)
  (cond ((and (= a 1) (= b 1)) 1)
        ((and (or (= a 0) (= a 1))
              (or (= b 0) (= b 1))) 0)
        (else (error "Invalid signals" a b))))
;; Binary OR on signals; both arguments must be 0 or 1, otherwise error.
(define (logical-or a b)
  (cond ((and (= a 0) (= b 0)) 0)
        ((and (or (= a 0) (= a 1))
              (or (= b 0) (= b 1))) 1)
        (else (error "Invalid signals" a b))))
; Wires
;; Create a wire: a closure holding a binary signal (initially 0) and the
;; list of action procedures to re-run whenever the signal changes.
(define (make-wire)
  (let ((signal-value 0) (action-procedures '()))
    ;; update the signal; notify actions only on an actual change
    (define (set-my-signal! new-value)
      (if (not (= signal-value new-value))
          (begin (set! signal-value new-value)
                 (call-each action-procedures))
          'done))
    ;; register a new action and run it once immediately, so the attached
    ;; gate computes its initial output
    (define (accept-action-procedure! proc)
      (set! action-procedures (cons proc action-procedures))
      (proc))
    ;; message dispatcher exposed as the wire object
    (define (dispatch m)
      (cond ((eq? m 'get-signal) signal-value)
            ((eq? m 'set-signal!) set-my-signal!)
            ((eq? m 'add-action!) accept-action-procedure!)
            (else (error "Unknown operation -- WIRE" m))))
    dispatch))
;; Invoke each thunk in PROCEDURES, in list order; returns 'done.
(define (call-each procedures)
  (for-each (lambda (proc) (proc)) procedures)
  'done)
;; Procedural interface over the wire dispatch object.
(define (get-signal wire) (wire 'get-signal))
(define (set-signal! wire new-value) ((wire 'set-signal!) new-value))
(define (add-action! wire action-procedure) ((wire 'add-action!) action-procedure))
; Queues
;; A queue is a pair of pointers into one mutable list:
;; car = front pair, cdr = rear pair. Emptiness is judged by the front
;; pointer alone.
(define (front-ptr queue) (car queue))
(define (rear-ptr queue) (cdr queue))
(define (set-front-ptr! queue item) (set-car! queue item))
(define (set-rear-ptr! queue item) (set-cdr! queue item))
(define (make-queue) (cons '() '()))
(define (empty-queue? queue) (null? (front-ptr queue)))
;; Return the item at the front of QUEUE; error on an empty queue.
(define (front-queue queue)
  (if (empty-queue? queue)
      (error "FRONT called with an empty queue" queue)
      (car (front-ptr queue))))
;; Append ITEM at the rear of QUEUE (destructively); returns the queue.
(define (insert-queue! queue item)
  (let ((new-pair (cons item '())))
    (cond ((empty-queue? queue)
           ;; empty queue: both pointers refer to the single new pair
           (set-front-ptr! queue new-pair)
           (set-rear-ptr! queue new-pair))
          (else
           ;; splice after the current rear, then advance the rear pointer
           (set-cdr! (rear-ptr queue) new-pair)
           (set-rear-ptr! queue new-pair)))
    queue))
;; Drop the front item of QUEUE (destructively); returns the queue.
;; The rear pointer is left untouched when the queue becomes empty --
;; emptiness is only ever judged by the front pointer (see empty-queue?).
(define (delete-queue! queue)
  (cond ((empty-queue? queue)
         (error "DELETE! called with an empty queue" queue))
        (else
         (set-front-ptr! queue (cdr (front-ptr queue)))
         queue)))
; The agenda
;; A time segment pairs a timestamp with the queue of actions due then.
(define (make-time-segment time queue) (cons time queue))
(define (segment-time s) (car s))
(define (segment-queue s) (cdr s))
;; An agenda is a headed list: (current-time . segments); add-to-agenda!
;; keeps the segments sorted by increasing time.
(define (make-agenda) (list 0))
(define (current-time agenda) (car agenda))
(define (set-current-time! agenda time) (set-car! agenda time))
(define (segments agenda) (cdr agenda))
(define (set-segments! agenda segments) (set-cdr! agenda segments))
(define (first-segment agenda) (car (segments agenda)))
(define (rest-segments agenda) (cdr (segments agenda)))
(define (empty-agenda? agenda) (null? (segments agenda)))
;; Schedule ACTION at TIME on AGENDA, keeping segments sorted by time.
;; If a segment for TIME already exists the action is queued onto it;
;; otherwise a new segment is destructively spliced into place.
(define (add-to-agenda! time action agenda)
  ;; does TIME precede the first of these segments (or is the list empty)?
  (define (belongs-before? segments)
    (or (null? segments)
        (< time (segment-time (car segments)))))
  (define (make-new-time-segment time action)
    (let ((q (make-queue)))
      (insert-queue! q action)
      (make-time-segment time q)))
  ;; walk the (non-empty) segment list, enqueueing or splicing as needed
  (define (add-to-segments! segments)
    (if (= (segment-time (car segments)) time)
        (insert-queue! (segment-queue (car segments))
                       action)
        (let ((rest (cdr segments)))
          (if (belongs-before? rest)
              (set-cdr! segments (cons (make-new-time-segment time action) rest))
              (add-to-segments! rest)))))
  (let ((segments (segments agenda)))
    (if (belongs-before? segments)
        (set-segments! agenda (cons (make-new-time-segment time action) segments))
        (add-to-segments! segments))))
;; Pop the action at the head of the agenda, discarding the whole first
;; segment once its queue becomes empty.
(define (remove-first-agenda-item! agenda)
  (let ((q (segment-queue (first-segment agenda))))
    (delete-queue! q)
    (if (empty-queue? q)
        (set-segments! agenda (rest-segments agenda))
        'done)))
;; Return the earliest pending action; as a side effect, advance the
;; agenda's current time to that action's segment time.
(define (first-agenda-item agenda)
  (if (empty-agenda? agenda)
      (error "Agenda is empty -- FIRST-AGENDA-ITEM")
      (let ((first-seg (first-segment agenda)))
        (set-current-time! agenda (segment-time first-seg))
        (front-queue (segment-queue first-seg)))))
; Delays
;; Gate delays, in simulated time units.
(define inverter-delay 2)
(define and-gate-delay 3)
(define or-gate-delay 5)
; Simulation infrastructure
;; Global agenda driving the whole simulation.
(define the-agenda (make-agenda))
;; Schedule ACTION to run DELAY time units after the current simulated time.
(define (after-delay delay action)
  (add-to-agenda! (+ delay (current-time the-agenda))
                  action
                  the-agenda))
;; Run the simulation: execute agenda items in time order until none remain.
;; Executing an item may schedule further items (via after-delay).
(define (propagate)
  (if (empty-agenda? the-agenda)
      'done
      (let ((first-item (first-agenda-item the-agenda)))
        (first-item)
        (remove-first-agenda-item! the-agenda)
        (propagate))))
;; Attach a tracer to WIRE: on every signal change (and once on attachment),
;; print NAME, the current simulated time and the new signal value.
(define (probe name wire)
  (add-action! wire
               (lambda ()
                 (display name)
                 (display " ")
                 (display (current-time the-agenda))
                 (display " New-value = ")
                 (display (get-signal wire))
                 (newline))))
| null | https://raw.githubusercontent.com/skanev/playground/d88e53a7f277b35041c2f709771a0b96f993b310/scheme/sicp/03/30.scm | scheme |
carry from the addition. Write a procedure ripple-carry-adder that generates
drawback of the ripple-carry adder is the need to wait for the signals to
propagate. What is the delay needed to obtain the complete output from an
i-bit ripple-carry adder, expressed in terms of the delays of and-gates,
or-gates, and inverters?
The ripple-carry-adder procedure is defined below.
As for the delay:
s: and + not + and | or + and (whichever is slower)
c: and
The full-adder has the following delays:
sum: and + not + and | or + and
c-out: (and + not + and | or + and) + and + or
Finally, the delay of C an n-bit ripple adder is:
n-ripple: n((and + not + and | or + and) + and + or)
With the delays we have defined, this is either:
n(3and + or + not)
or
n(2or + 2and)
whichever is slower. However, we need wait for the last SUM too. We can
subtract (and + or) from the time (since this is the time that takes the
adder and then through the and gate) and then we can add the s time for
n(3and + or + not) + and + not - or
or, again:
n(2or + 2and)
Ripple-carry adder
Half-adder & adder
Primitive function boxes
Logical functions
Wires
The agenda
Delays
Simulation infrastructure | SICP exercise 3.30
Figure 3.27 shows a ripple - carry adder formed by stringing together n
full - adders . This is the simplest form of parallel adder for adding two
n - bit binary numbers . The inputs A₁ , A₂ , A₃ , ... , Aᵢ and B₁ , B₂ , B₃ , ... ,
are the two binary numbers to be added ( each Aᵣ and Bᵣ is a 0 or a 1 ) . The
circuit generates , S₂ , S₃ , ... , Sᵢ , the i bits of the sum , and C , the
that circuit . The procedure should take as arguments three lists of i wires
each - the Aᵣ , the Bᵣ and the Sᵣ - and also another wire C. The major
The half - adder has the following delays for each output :
signal to travel through the carry from the second half - adder in the last
another half - adder , which makes the total time :
(require r5rs/init)
(define (ripple-carry-adder a b c-in s c-out)
(define (ripple a b s c)
(cond ((null? (cdr a))
(full-adder (car a) (car b) c-in (car s) c))
(else
(let ((w (make-wire)))
(full-adder (car a) (car b) w (car s) c)
(ripple (cdr a) (cdr b) (cdr s) w)))))
(ripple (reverse a) (reverse b) (reverse s) c-out))
(define (half-adder a b s c)
(let ((d (make-wire)) (e (make-wire)))
(or-gate a b d)
(and-gate a b c)
(inverter c e)
(and-gate d e s)
'ok))
(define (full-adder a b c-in sum c-out)
(let ((s (make-wire))
(c1 (make-wire))
(c2 (make-wire)))
(half-adder b c-in s c1)
(half-adder a s sum c2)
(or-gate c1 c2 c-out)
'ok))
(define (inverter input output)
(define (invert-input)
(let ((new-value (logical-not (get-signal input))))
(after-delay inverter-delay
(lambda () (set-signal! output new-value)))))
(add-action! input invert-input)
'ok)
(define (and-gate a1 a2 output)
(define (and-action-procedure)
(let ((new-value (logical-and (get-signal a1) (get-signal a2))))
(after-delay and-gate-delay (lambda () (set-signal! output new-value)))))
(add-action! a1 and-action-procedure)
(add-action! a2 and-action-procedure)
'ok)
(define (or-gate o1 o2 output)
(define (or-action-procedure)
(let ((new-value (logical-or (get-signal o1) (get-signal o2))))
(after-delay or-gate-delay (lambda () (set-signal! output new-value)))))
(add-action! o1 or-action-procedure)
(add-action! o2 or-action-procedure)
'ok)
(define (logical-not s)
(cond ((= s 0) 1)
((= s 1) 0)
(else (error "Invalid signal" s))))
(define (logical-and a b)
(cond ((and (= a 0) (= b 0)) 0)
((and (= a 0) (= b 1)) 0)
((and (= a 1) (= b 0)) 0)
((and (= a 1) (= b 1)) 1)
(else (error "Invalid signals" a b))))
(define (logical-or a b)
(cond ((and (= a 0) (= b 0)) 0)
((and (= a 0) (= b 1)) 1)
((and (= a 1) (= b 0)) 1)
((and (= a 1) (= b 1)) 1)
(else (error "Invalid signals" a b))))
(define (make-wire)
(let ((signal-value 0) (action-procedures '()))
(define (set-my-signal! new-value)
(if (not (= signal-value new-value))
(begin (set! signal-value new-value)
(call-each action-procedures))
'done))
(define (accept-action-procedure! proc)
(set! action-procedures (cons proc action-procedures))
(proc))
(define (dispatch m)
(cond ((eq? m 'get-signal) signal-value)
((eq? m 'set-signal!) set-my-signal!)
((eq? m 'add-action!) accept-action-procedure!)
(else (error "Unknown operation -- WIRE" m))))
dispatch))
(define (call-each procedures)
(if (null? procedures)
'done
(begin
((car procedures))
(call-each (cdr procedures)))))
(define (get-signal wire) (wire 'get-signal))
(define (set-signal! wire new-value) ((wire 'set-signal!) new-value))
(define (add-action! wire action-procedure) ((wire 'add-action!) action-procedure))
Queues
(define (front-ptr queue) (car queue))
(define (rear-ptr queue) (cdr queue))
(define (set-front-ptr! queue item) (set-car! queue item))
(define (set-rear-ptr! queue item) (set-cdr! queue item))
(define (make-queue) (cons '() '()))
(define (empty-queue? queue) (null? (front-ptr queue)))
(define (front-queue queue)
(if (empty-queue? queue)
(error "FRONT called with an empty queue" queue)
(car (front-ptr queue))))
(define (insert-queue! queue item)
(let ((new-pair (cons item '())))
(cond ((empty-queue? queue)
(set-front-ptr! queue new-pair)
(set-rear-ptr! queue new-pair))
(else
(set-cdr! (rear-ptr queue) new-pair)
(set-rear-ptr! queue new-pair)))
queue))
(define (delete-queue! queue)
(cond ((empty-queue? queue)
(error "DELETE! called with an empty queue" queue))
(else
(set-front-ptr! queue (cdr (front-ptr queue)))
queue)))
(define (make-time-segment time queue) (cons time queue))
(define (segment-time s) (car s))
(define (segment-queue s) (cdr s))
(define (make-agenda) (list 0))
(define (current-time agenda) (car agenda))
(define (set-current-time! agenda time) (set-car! agenda time))
(define (segments agenda) (cdr agenda))
(define (set-segments! agenda segments) (set-cdr! agenda segments))
(define (first-segment agenda) (car (segments agenda)))
(define (rest-segments agenda) (cdr (segments agenda)))
(define (empty-agenda? agenda) (null? (segments agenda)))
(define (add-to-agenda! time action agenda)
(define (belongs-before? segments)
(or (null? segments)
(< time (segment-time (car segments)))))
(define (make-new-time-segment time action)
(let ((q (make-queue)))
(insert-queue! q action)
(make-time-segment time q)))
(define (add-to-segments! segments)
(if (= (segment-time (car segments)) time)
(insert-queue! (segment-queue (car segments))
action)
(let ((rest (cdr segments)))
(if (belongs-before? rest)
(set-cdr! segments (cons (make-new-time-segment time action) rest))
(add-to-segments! rest)))))
(let ((segments (segments agenda)))
(if (belongs-before? segments)
(set-segments! agenda (cons (make-new-time-segment time action) segments))
(add-to-segments! segments))))
(define (remove-first-agenda-item! agenda)
(let ((q (segment-queue (first-segment agenda))))
(delete-queue! q)
(if (empty-queue? q)
(set-segments! agenda (rest-segments agenda))
'done)))
(define (first-agenda-item agenda)
(if (empty-agenda? agenda)
(error "Agenda is empty -- FIRST-AGENDA-ITEM")
(let ((first-seg (first-segment agenda)))
(set-current-time! agenda (segment-time first-seg))
(front-queue (segment-queue first-seg)))))
(define inverter-delay 2)
(define and-gate-delay 3)
(define or-gate-delay 5)
(define the-agenda (make-agenda))
(define (after-delay delay action)
(add-to-agenda! (+ delay (current-time the-agenda))
action
the-agenda))
(define (propagate)
(if (empty-agenda? the-agenda)
'done
(let ((first-item (first-agenda-item the-agenda)))
(first-item)
(remove-first-agenda-item! the-agenda)
(propagate))))
(define (probe name wire)
(add-action! wire
(lambda ()
(display name)
(display " ")
(display (current-time the-agenda))
(display " New-value = ")
(display (get-signal wire))
(newline))))
|
;; Leiningen project definition for the stylefy RumDOM adapter module.
;; Fix: dataset-join metadata had been fused onto the opening (defproject ...)
;; line, breaking the file; the form is restored here.
(defproject stylefy/rum "3.0.0"
  :description "RumDOM for stylefy"
  :dependencies [[org.clojure/clojure "1.10.1"]
                 [org.clojure/clojurescript "1.10.520"]
                 [prismatic/dommy "1.1.0"]
                 [rum "0.12.3"]
                 [org.clojure/core.async "0.3.443"]]
  :source-paths ["src/cljs"]
  ;; Two ClojureScript builds emitting the same stylefy-rum.js artifact:
  ;; "dev" (no optimizations, source maps) and "prod" (advanced optimizations).
  :cljsbuild {:builds [{:id "dev"
                        :source-paths ["src/cljs"]
                        :compiler {:output-to "resources/public/js/stylefy-rum.js"
                                   :output-dir "resources/public/js/out"
                                   :optimizations :none
                                   :source-map true}}
                       {:id "prod"
                        :source-paths ["src/cljs"]
                        :compiler {:output-to "resources/public/js/stylefy-rum.js"
                                   :output-dir "resources/public/js/out"
                                   :optimizations :advanced}}]})
| null | https://raw.githubusercontent.com/Jarzka/stylefy/c4c3a9b1d230605e2d08a769d83d632e696d84a0/modules/stylefy-rum/project.clj | clojure | (defproject stylefy/rum "3.0.0"
:description "RumDOM for stylefy"
:dependencies [[org.clojure/clojure "1.10.1"]
[org.clojure/clojurescript "1.10.520"]
[prismatic/dommy "1.1.0"]
[rum "0.12.3"]
[org.clojure/core.async "0.3.443"]]
:source-paths ["src/cljs"]
:cljsbuild {:builds [{:id "dev"
:source-paths ["src/cljs"]
:compiler {:output-to "resources/public/js/stylefy-rum.js"
:output-dir "resources/public/js/out"
:optimizations :none
:source-map true}}
{:id "prod"
:source-paths ["src/cljs"]
:compiler {:output-to "resources/public/js/stylefy-rum.js"
:output-dir "resources/public/js/out"
:optimizations :advanced}}]})
|
|
655db515179d2b7292e5a007d51d9a7b79446f5c4a2743de49d1a2239fd68e67 | cl-unix-cybernetics/cl-unix-cybernetics | include.lisp | ;; cl-unix-cybernetics
Copyright 2013 - 2022 < >
;;
;; Permission is hereby granted to use this software granted
;; the above copyright notice and this permission paragraph
;; are included in all copies and substantial portions of this
;; software.
;;
;; THIS SOFTWARE IS PROVIDED "AS-IS" WITHOUT ANY GUARANTEE OF
;; PURPOSE AND PERFORMANCE. IN NO EVENT WHATSOEVER SHALL THE
;; AUTHOR BE CONSIDERED LIABLE FOR THE USE AND PERFORMANCE OF
;; THIS SOFTWARE.
(in-package :cl-unix-cybernetics)
;; Resolve SPEC to the path of an existing file: first try "SPEC.lisp",
;; then SPEC verbatim.  Returns the first path PROBE-FILE accepts, or NIL.
;; NOTE(review): STR appears to be a project helper that concatenates its
;; arguments into one string -- confirm against the project's utilities.
(defun include/resolve-filename (spec)
  (flet ((try (&rest parts)
           (let ((path (str parts)))
             (when (probe-file path)
               (return-from include/resolve-filename path)))))
    (try spec ".lisp")
    (try spec)))
;; Like INCLUDE/RESOLVE-FILENAME, but signals an ERROR (mentioning the
;; current directory) when SPEC cannot be resolved to an existing file.
(defun include/resolve-filename! (spec)
  (or (include/resolve-filename spec)
      (error "(include ~S) => file not found.~%
Current directory : ~S" spec *default-pathname-defaults*)))
;; Read every top-level form from each file in SOURCES (resolved via
;; INCLUDE/RESOLVE-FILENAME!) and splice them all, in order, into a
;; single (LIST ...) form which is returned.
(defun include (&rest sources)
  (let* ((head (cons 'list nil))   ; result form: (LIST form1 form2 ...)
         (tail head)               ; last cons of result, for O(1) append
         (eof (gensym "EOF")))     ; unique EOF sentinel for READ
    (dolist (source sources)
      (let ((path (include/resolve-filename! source)))
        (with-open-file (in path
                            :element-type 'character
                            :external-format :utf-8)
          (loop
            (let ((form (read in nil eof)))
              (when (eq form eof)
                (return))
              ;; Destructively append FORM at the tail of the result list.
              (setf (rest tail) (cons form nil)
                    tail (rest tail)))))))
    head))
| null | https://raw.githubusercontent.com/cl-unix-cybernetics/cl-unix-cybernetics/63e4862274e170bda7540b4caec6c94ed850a31c/core/include.lisp | lisp | cl-unix-cybernetics
Permission is hereby granted to use this software granted
the above copyright notice and this permission paragraph
are included in all copies and substantial portions of this
software.
THIS SOFTWARE IS PROVIDED "AS-IS" WITHOUT ANY GUARANTEE OF
PURPOSE AND PERFORMANCE. IN NO EVENT WHATSOEVER SHALL THE
AUTHOR BE CONSIDERED LIABLE FOR THE USE AND PERFORMANCE OF
THIS SOFTWARE. | Copyright 2013 - 2022 < >
(in-package :cl-unix-cybernetics)
(defun include/resolve-filename (spec)
(flet ((try (&rest parts)
(let ((path (str parts)))
(when (probe-file path)
(return-from include/resolve-filename path)))))
(try spec ".lisp")
(try spec)))
(defun include/resolve-filename! (spec)
(or (include/resolve-filename spec)
(error "(include ~S) => file not found.~%
Current directory : ~S" spec *default-pathname-defaults*)))
(defun include (&rest sources)
(let* ((head (cons 'list nil))
(tail head)
(eof (gensym "EOF")))
(dolist (source sources)
(let ((path (include/resolve-filename! source)))
(with-open-file (in path
:element-type 'character
:external-format :utf-8)
(loop
(let ((form (read in nil eof)))
(when (eq form eof)
(return))
(setf (rest tail) (cons form nil)
tail (rest tail)))))))
head))
|
fb1eed336134dc096c6463333e95cf6571ade7689ed69fcfe0eecf2cbef0fb3e | karlhof26/gimp-scheme | AutoColorize_FlavorD_4_02.scm |
; Auto colorize image into random number of colors of random hues
author :
date : 2015
(define (script-fu-auto-colorize-d image layer
hatches
)
(let* (
(color-map 0)
(colors 0)
(image-width)
(image-height)
( R 0.2126 ) ; constants for calculating luminance
( G 0.7152 )
;(B 0.0722)
;(0.299*R + 0.587*G + 0.114*B)
( R 0.299 )
;(G 0.587)
( B 0.114 )
sqrt ( 0.299*R^2 + 0.587*G^2 + 0.114*B^2 )
(B (/ 18.0 255))
(R (/ 54.0 255))
(G (/ 182.0 255))
(r) ;randomly generated r g b values
(g)
(b)
(l-original) ;luminance original
(l-new)
(red 0)
(green 0)
(blue 0)
(y 0)
(hue)
(floating)
(difference)
(counter 0)
(bigcounter 0)
)
;(gimp-image-undo-disable image); DN = NO UNDO
undo - group in one step
;convert to indexed
(set! image-width (car (gimp-image-width image)))
(set! image-height (car (gimp-image-height image)))
(gimp-image-convert-indexed image CONVERT-DITHER-NONE CONVERT-PALETTE-GENERATE hatches TRUE FALSE "unused palette name")
;grabs color map
(set! colors (vector->list (cadr (gimp-image-get-colormap image))))
(gimp-image-convert-rgb image) ;converts it to rgb before we call hatch loop
(set! y hatches) ;loop hatches number of times
(srand (car (gettimeofday)))
(gimp-context-set-sample-threshold 0)
(while (> y 0)
;do work here
(set! red (car colors))
(set! green (cadr colors))
(set! blue (caddr colors))
;select each color
(gimp-image-set-active-layer image layer)
(gimp-image-select-color image CHANNEL-OP-REPLACE layer (list red green blue))
( set ! hue ( rand 360 ) )
( gimp - colorize layer hue 100 0 )
;(gimp-edit-copy layer)
;(set! floating (car(gimp-edit-paste layer TRUE)))
;(gimp-floating-sel-to-layer floating)
;(gimp-image-set-active-layer image floating)
(set! floating (car (gimp-layer-new image image-width image-height
RGBA-IMAGE "Colorize" 100 LAYER-MODE-NORMAL))) ;creates layer
;insert above current layer
;(gimp-image-insert-layer image new-layer 0 (car (gimp-image-get-item-position image layer)))
(gimp-image-insert-layer image floating 0 0)
;set that layer to be active layer
(gimp-image-set-active-layer image floating)
( set ! hue ( rand 360 ) )
( gimp - colorize floating hue 100 0 )
sqrt ( 0.299*R^2 + 0.587*G^2 + 0.114*B^2 )
( set ! l - original ( sqrt(+ ( pow ( * red R ) 2 ) ( pow ( * green G ) 2 ) ( pow ( * blue B ) 2 ) ) ) )
(set! l-original (+ (* red R) (* green G) (* blue B)))
(set! difference 10)
(set! bigcounter 1000)
;just randomly pick a color until we find a color of similar luminance
;absolutely not the ideal way of getting a color
(while (and (> difference 1) (> bigcounter 1))
(set! r (- (rand 256) 1))
(set! g (- (rand 256) 1))
(set! b (- (rand 256) 1))
( set ! l - new ( sqrt(+ ( pow ( * r R ) 2 ) ( pow ( * g G ) 2 ) ( pow ( * b B ) 2 ) ) ) )
(set! l-new (+ (* r R) (* g G) (* b B)))
(set! difference (abs (- l-new l-original)))
(set! bigcounter (- bigcounter 1))
(gimp-progress-update (/ 1 (/ bigcounter 1000)))
)
( script - fu - colorize image floating ( list b ) 100 )
(gimp-context-set-foreground (list r g b))
(gimp-edit-fill floating FILL-FOREGROUND)
(if (> y 1) ;if y is still valid we set colors to the next colors
(begin
(set! colors (cdddr colors))
)
(begin ;else
)
)
;loop control
(set! y (- y 1))
);end of while
(gimp-selection-none image)
;(gimp-image-undo-enable image) ;DN = NO UNDO
undo group in one step
(gimp-displays-flush)
)
) ;end of define
(script-fu-register
"script-fu-auto-colorize-d" ;function name
"<Image>/Script-Fu2/Create from Image/Auto Colorize Flavor D" ;menu register
"Randomly colorize image with specified number of colors. \nfile: AutoColorize_FlavorD_4_02.scm" ;description
"Tin Tran" ;author name
"copyright info and description" ;copyright info or description
"2015" ;date
"RGB*, GRAY*" ;mode
SF-IMAGE "Image" 0
SF-DRAWABLE "Layer" 0
SF-ADJUSTMENT "Number of colors" '(5 2 255 1 10 0 0)
) | null | https://raw.githubusercontent.com/karlhof26/gimp-scheme/799bb65845f8d282dd1ea971db7c27594b7c41c4/AutoColorize_FlavorD_4_02.scm | scheme | Auto colorize image into random number of colors of random hues
constants for calculating luminance
(B 0.0722)
(0.299*R + 0.587*G + 0.114*B)
(G 0.587)
randomly generated r g b values
luminance original
(gimp-image-undo-disable image); DN = NO UNDO
convert to indexed
grabs color map
converts it to rgb before we call hatch loop
loop hatches number of times
do work here
select each color
(gimp-edit-copy layer)
(set! floating (car(gimp-edit-paste layer TRUE)))
(gimp-floating-sel-to-layer floating)
(gimp-image-set-active-layer image floating)
creates layer
insert above current layer
(gimp-image-insert-layer image new-layer 0 (car (gimp-image-get-item-position image layer)))
set that layer to be active layer
just randomly pick a color until we find a color of similar luminance
absolutely not the ideal way of getting a color
if y is still valid we set colors to the next colors
else
loop control
end of while
(gimp-image-undo-enable image) ;DN = NO UNDO
end of define
function name
menu register
description
author name
copyright info or description
date
mode
|
author :
date : 2015
(define (script-fu-auto-colorize-d image layer
hatches
)
(let* (
(color-map 0)
(colors 0)
(image-width)
(image-height)
( G 0.7152 )
( R 0.299 )
( B 0.114 )
sqrt ( 0.299*R^2 + 0.587*G^2 + 0.114*B^2 )
(B (/ 18.0 255))
(R (/ 54.0 255))
(G (/ 182.0 255))
(g)
(b)
(l-new)
(red 0)
(green 0)
(blue 0)
(y 0)
(hue)
(floating)
(difference)
(counter 0)
(bigcounter 0)
)
undo - group in one step
(set! image-width (car (gimp-image-width image)))
(set! image-height (car (gimp-image-height image)))
(gimp-image-convert-indexed image CONVERT-DITHER-NONE CONVERT-PALETTE-GENERATE hatches TRUE FALSE "unused palette name")
(set! colors (vector->list (cadr (gimp-image-get-colormap image))))
(srand (car (gettimeofday)))
(gimp-context-set-sample-threshold 0)
(while (> y 0)
(set! red (car colors))
(set! green (cadr colors))
(set! blue (caddr colors))
(gimp-image-set-active-layer image layer)
(gimp-image-select-color image CHANNEL-OP-REPLACE layer (list red green blue))
( set ! hue ( rand 360 ) )
( gimp - colorize layer hue 100 0 )
(set! floating (car (gimp-layer-new image image-width image-height
(gimp-image-insert-layer image floating 0 0)
(gimp-image-set-active-layer image floating)
( set ! hue ( rand 360 ) )
( gimp - colorize floating hue 100 0 )
sqrt ( 0.299*R^2 + 0.587*G^2 + 0.114*B^2 )
( set ! l - original ( sqrt(+ ( pow ( * red R ) 2 ) ( pow ( * green G ) 2 ) ( pow ( * blue B ) 2 ) ) ) )
(set! l-original (+ (* red R) (* green G) (* blue B)))
(set! difference 10)
(set! bigcounter 1000)
(while (and (> difference 1) (> bigcounter 1))
(set! r (- (rand 256) 1))
(set! g (- (rand 256) 1))
(set! b (- (rand 256) 1))
( set ! l - new ( sqrt(+ ( pow ( * r R ) 2 ) ( pow ( * g G ) 2 ) ( pow ( * b B ) 2 ) ) ) )
(set! l-new (+ (* r R) (* g G) (* b B)))
(set! difference (abs (- l-new l-original)))
(set! bigcounter (- bigcounter 1))
(gimp-progress-update (/ 1 (/ bigcounter 1000)))
)
( script - fu - colorize image floating ( list b ) 100 )
(gimp-context-set-foreground (list r g b))
(gimp-edit-fill floating FILL-FOREGROUND)
(begin
(set! colors (cdddr colors))
)
)
)
(set! y (- y 1))
(gimp-selection-none image)
undo group in one step
(gimp-displays-flush)
)
(script-fu-register
SF-IMAGE "Image" 0
SF-DRAWABLE "Layer" 0
SF-ADJUSTMENT "Number of colors" '(5 2 255 1 10 0 0)
) |
f0eab4509e7c64ee16d84cf215b2b753da6847f136a20f54cb056e69421ca636 | kapilreddy/clojure-north-2020-concurrency | core.clj | (ns concurrency-workshop.core)
(defn foo
  "Prints its argument followed by a greeting; returns nil."
  [greeting-target]
  (println greeting-target "Hello, World!"))
| null | https://raw.githubusercontent.com/kapilreddy/clojure-north-2020-concurrency/17e0a25f63598bf45b805f64b21701f888268ec9/concurrency-workshop/src/concurrency_workshop/core.clj | clojure | (ns concurrency-workshop.core)
(defn foo
"I don't do a whole lot."
[x]
(println x "Hello, World!"))
|
|
4ce1af3a2ba5d9568a8d9a163fbe26924d71ede39be3109cb3555e9f3661a31e | spurious/sagittarius-scheme-mirror | %3a0.scm | -*- mode : scheme ; coding : utf-8 ; -*-
;; SRFI-0 wrapper library: re-export COND-EXPAND from the
;; implementation-specific (srfi :0 cond-expand) sub-library.
(library (srfi :0)
    (export cond-expand)
    (import (srfi :0 cond-expand))
  )
| null | https://raw.githubusercontent.com/spurious/sagittarius-scheme-mirror/53f104188934109227c01b1e9a9af5312f9ce997/sitelib/srfi/%253a0.scm | scheme | coding : utf-8 ; -*- | (library (srfi :0)
(export cond-expand)
(import (srfi :0 cond-expand))
)
|
ba5cfe304a14dd87c3a0cfd13b7b9dc226fc0c07196b3881dbaea767ea7ed392 | nmaehlmann/mallRL | Renderer.hs | # LANGUAGE LambdaCase #
{-# LANGUAGE OverloadedStrings #-}
module Renderer where
import Control.Monad
import Foreign.C.Types
import SDL.Vect
import SDL (($=))
import qualified SDL
import Data.Array ((!))
import qualified Data.Array as Array
import Position
import Apecs hiding (($=))
import Control.Concurrent
import TileImage
import TileMap
import Colors
-- | Window dimensions in pixels: the tile-map size scaled by the tile size.
screenWidth, screenHeight :: CInt
(screenWidth, screenHeight) = (mapWidth * tileSize, mapHeight * tileSize)
-- | An SDL texture paired with its size in pixels.
data Texture = Texture SDL.Texture (V2 CInt)
-- | Create an empty RGBA texture of the given size and access mode.
createBlank :: SDL.Renderer -> V2 CInt -> SDL.TextureAccess -> IO Texture
createBlank r size access = Texture <$> SDL.createTexture r SDL.RGBA8888 access size <*> pure size
-- | Redirect rendering to the given texture, or back to the window ('Nothing').
setAsRenderTarget :: SDL.Renderer -> Maybe Texture -> IO ()
setAsRenderTarget r Nothing = SDL.rendererRenderTarget r $= Nothing
setAsRenderTarget r (Just (Texture t _)) = SDL.rendererRenderTarget r $= Just t
-- | Load a BMP file into a GPU texture; the intermediate CPU surface is freed.
loadTexture :: SDL.Renderer -> FilePath -> IO Texture
loadTexture r filePath = do
    surface <- SDL.loadBMP filePath
    size <- SDL.surfaceDimensions surface
    t <- SDL.createTextureFromSurface r surface
    SDL.freeSurface surface
    return (Texture t size)
-- | Redraw only the tiles that differ between the previous and the new
-- frame (dirty-tile optimization); unchanged cells are skipped entirely.
renderTileMap :: SDL.Renderer -> Texture -> TileImage -> TileImage -> IO ()
renderTileMap r t (TileImage previous) (TileImage arr) = mapM st (Array.range (Array.bounds arr)) >> return ()
    where st pos = do
            let tileOld = previous ! pos
            let tileNew = arr ! pos
            unless (tileOld == tileNew) $ do
                renderTile r t pos tileNew
-- | Set the color modulation applied when the texture is next copied.
setTextureColor :: Texture -> Color -> IO ()
setTextureColor (Texture t _) color = SDL.textureColorMod t $= color
-- | Size of one glyph cell in the sprite sheet, in pixels.
spriteSize :: V2 CInt
spriteSize = V2 (fromIntegral tileSize) (fromIntegral tileSize)
-- | Draw one tile at a map position: first the solid background glyph
-- tinted with the background color, then the foreground glyph on top.
renderTile :: SDL.Renderer -> Texture -> Position -> Tile -> IO ()
renderTile r t pos (Tile glyph fgColor bgColor) = do
    let point = P $ fmap (*tileSize) $ fmap fromIntegral pos
    -- glyph coordinates index the sprite sheet in tile-sized cells
    let renderGlyph g = renderTexture r t point $ Just $ SDL.Rectangle (P (fmap (*tileSize) g)) spriteSize
    setTextureColor t bgColor
    renderGlyph filledGlyph
    setTextureColor t fgColor
    renderGlyph glyph
-- | Edge length of one tile in pixels, and the resulting size of the
-- offscreen render target covering the whole tile map.
tileSize, textureWidth, textureHeight :: CInt
tileSize = 12
textureWidth = mapWidth * tileSize
textureHeight = mapHeight * tileSize
-- | Blit a texture at a point; 'clip' optionally selects a source
-- rectangle, whose size is then also used as the destination size.
renderTexture :: SDL.Renderer -> Texture -> Point V2 CInt -> Maybe (SDL.Rectangle CInt) -> IO ()
renderTexture r (Texture t size) xy clip =
    let dstSize = maybe size (\(SDL.Rectangle _ size') -> size') clip
    in SDL.copy r t clip (Just (SDL.Rectangle xy dstSize))
-- | Set the blend mode applied when the texture is next copied.
setTextureBlendMode :: Texture -> SDL.BlendMode -> IO ()
setTextureBlendMode (Texture t _) bm = SDL.textureBlendMode t $= bm
-- | Run the game loop: initialize SDL and the window, then repeatedly
-- pump events, step the world, redraw (only when the step function
-- reports a change), and present — until an 'SDL.QuitEvent' arrives,
-- after which the renderer and window are destroyed and SDL shut down.
play
    :: w                                 -- ^ Initial world state
    -> System w TileImage                -- ^ Drawing function
    -> (SDL.EventPayload -> System w ()) -- ^ Event handling function
    -> (Float -> System w Bool)          -- ^ Stepping function, with a time delta argument;
                                         --   returns True when a re-render is necessary
    -> IO ()
play initialWorld draw handle step = do
    -- init and show window
    SDL.initialize [SDL.InitVideo]
    SDL.HintRenderScaleQuality $= SDL.ScaleLinear
    renderQuality <- SDL.get SDL.HintRenderScaleQuality
    when (renderQuality /= SDL.ScaleLinear) $ putStrLn "Warning: Linear texture filtering not enabled!"
    window <- SDL.createWindow "mallRL" SDL.defaultWindow {SDL.windowInitialSize = V2 screenWidth screenHeight}
    SDL.showWindow window
    -- init and show renderer
    let rendererConfig = SDL.RendererConfig { SDL.rendererType = SDL.AcceleratedVSyncRenderer, SDL.rendererTargetTexture = False}
    renderer <- SDL.createRenderer window (-1) rendererConfig
    targetTexture <- createBlank renderer (V2 textureWidth textureHeight) SDL.TextureAccessTarget
    spriteSheetTexture <- loadTexture renderer "resources/font_custom.bmp"
    let loop world previousImage = do
            -- handle events
            events <- SDL.pollEvents
            let eventPayloads = map SDL.eventPayload events
            let quit = elem SDL.QuitEvent $ eventPayloads
            let handle' w evt = runWith w $ handle evt >> ask
            worldAfterEvents <- foldM handle' world eventPayloads
            -- step world with a fixed time delta
            let t = 0.1
            (worldAfterStepping, rerenderNecessary) <- runWith worldAfterEvents $ do
                rerenderNecessary <- step t
                worldAfterStepping <- ask
                return (worldAfterStepping, rerenderNecessary)
            nextImage <- if rerenderNecessary
                then do
                    -- render map to the offscreen target texture
                    tileImage <- runWith worldAfterStepping draw
                    setAsRenderTarget renderer (Just targetTexture)
                    renderTileMap renderer spriteSheetTexture previousImage tileImage
                    return tileImage
                else return previousImage
            -- render texture to screen
            setAsRenderTarget renderer Nothing
            SDL.rendererDrawColor renderer $= V4 maxBound maxBound maxBound maxBound
            SDL.clear renderer
            let renderPosition = P (V2 0 0)
            renderTexture renderer targetTexture renderPosition Nothing
            SDL.present renderer
            -- sleep ~33 ms (about 30 FPS). The bare literal `33333` here was
            -- not a valid IO statement; restored the threadDelay call that
            -- Control.Concurrent is imported for.
            threadDelay 33333
            -- repeat
            unless quit $ loop worldAfterStepping nextImage
    loop initialWorld emptyMap
    SDL.destroyRenderer renderer
    SDL.destroyWindow window
    SDL.quit
-- | A map-sized image consisting entirely of empty tiles; used as the
-- "previous frame" for the first render so that every tile gets drawn.
emptyMap :: TileImage
emptyMap = TileImage $ Array.listArray arrayBounds $ cycle [tileEmpty]
^ Drawing function
^ Event handling function
^ Stepping function, with a time delta argument.
init and show window
init and show renderer
handle events
step world
render map to texture
render texture to screen
sleep
repeat | # LANGUAGE LambdaCase #
module Renderer where
import Control.Monad
import Foreign.C.Types
import SDL.Vect
import SDL (($=))
import qualified SDL
import Data.Array ((!))
import qualified Data.Array as Array
import Position
import Apecs hiding (($=))
import Control.Concurrent
import TileImage
import TileMap
import Colors
screenWidth, screenHeight :: CInt
(screenWidth, screenHeight) = (mapWidth * tileSize, mapHeight * tileSize)
data Texture = Texture SDL.Texture (V2 CInt)
createBlank :: SDL.Renderer -> V2 CInt -> SDL.TextureAccess -> IO Texture
createBlank r size access = Texture <$> SDL.createTexture r SDL.RGBA8888 access size <*> pure size
setAsRenderTarget :: SDL.Renderer -> Maybe Texture -> IO ()
setAsRenderTarget r Nothing = SDL.rendererRenderTarget r $= Nothing
setAsRenderTarget r (Just (Texture t _)) = SDL.rendererRenderTarget r $= Just t
loadTexture :: SDL.Renderer -> FilePath -> IO Texture
loadTexture r filePath = do
surface <- SDL.loadBMP filePath
size <- SDL.surfaceDimensions surface
t <- SDL.createTextureFromSurface r surface
SDL.freeSurface surface
return (Texture t size)
renderTileMap :: SDL.Renderer -> Texture -> TileImage -> TileImage -> IO ()
renderTileMap r t (TileImage previous) (TileImage arr) = mapM st (Array.range (Array.bounds arr)) >> return ()
where st pos = do
let tileOld = previous ! pos
let tileNew = arr ! pos
unless (tileOld == tileNew) $ do
renderTile r t pos tileNew
setTextureColor :: Texture -> Color -> IO ()
setTextureColor (Texture t _) color = SDL.textureColorMod t $= color
spriteSize :: V2 CInt
spriteSize = V2 (fromIntegral tileSize) (fromIntegral tileSize)
renderTile :: SDL.Renderer -> Texture -> Position -> Tile -> IO ()
renderTile r t pos (Tile glyph fgColor bgColor) = do
let point = P $ fmap (*tileSize) $ fmap fromIntegral pos
let renderGlyph g = renderTexture r t point $ Just $ SDL.Rectangle (P (fmap (*tileSize) g)) spriteSize
setTextureColor t bgColor
renderGlyph filledGlyph
setTextureColor t fgColor
renderGlyph glyph
tileSize, textureWidth, textureHeight :: CInt
tileSize = 12
textureWidth = mapWidth * tileSize
textureHeight = mapHeight * tileSize
renderTexture :: SDL.Renderer -> Texture -> Point V2 CInt -> Maybe (SDL.Rectangle CInt) -> IO ()
renderTexture r (Texture t size) xy clip =
let dstSize = maybe size (\(SDL.Rectangle _ size') -> size') clip
in SDL.copy r t clip (Just (SDL.Rectangle xy dstSize))
setTextureBlendMode :: Texture -> SDL.BlendMode -> IO ()
setTextureBlendMode (Texture t _) bm = SDL.textureBlendMode t $= bm
play
:: w
-> IO ()
play initialWorld draw handle step = do
SDL.initialize [SDL.InitVideo]
SDL.HintRenderScaleQuality $= SDL.ScaleLinear
renderQuality <- SDL.get SDL.HintRenderScaleQuality
when (renderQuality /= SDL.ScaleLinear) $ putStrLn "Warning: Linear texture filtering not enabled!"
window <- SDL.createWindow "mallRL" SDL.defaultWindow {SDL.windowInitialSize = V2 screenWidth screenHeight}
SDL.showWindow window
let rendererConfig = SDL.RendererConfig { SDL.rendererType = SDL.AcceleratedVSyncRenderer, SDL.rendererTargetTexture = False}
renderer <- SDL.createRenderer window (-1) rendererConfig
targetTexture <- createBlank renderer (V2 textureWidth textureHeight) SDL.TextureAccessTarget
spriteSheetTexture <- loadTexture renderer "resources/font_custom.bmp"
let loop world previousImage = do
events <- SDL.pollEvents
let eventPayloads = map SDL.eventPayload events
let quit = elem SDL.QuitEvent $ eventPayloads
let handle' w evt = runWith w $ handle evt >> ask
worldAfterEvents <- foldM handle' world eventPayloads
let t = 0.1
(worldAfterStepping, rerenderNecessary) <- runWith worldAfterEvents $ do
rerenderNecessary <- step t
worldAfterStepping <- ask
return (worldAfterStepping, rerenderNecessary)
nextImage <- if rerenderNecessary
then do
tileImage <- runWith worldAfterStepping draw
setAsRenderTarget renderer (Just targetTexture)
renderTileMap renderer spriteSheetTexture previousImage tileImage
return tileImage
else return previousImage
setAsRenderTarget renderer Nothing
SDL.rendererDrawColor renderer $= V4 maxBound maxBound maxBound maxBound
SDL.clear renderer
let renderPosition = P (V2 0 0)
renderTexture renderer targetTexture renderPosition Nothing
SDL.present renderer
33333
unless quit $ loop worldAfterStepping nextImage
loop initialWorld emptyMap
SDL.destroyRenderer renderer
SDL.destroyWindow window
SDL.quit
emptyMap :: TileImage
emptyMap = TileImage $ Array.listArray arrayBounds $ cycle [tileEmpty] |
feefd731c04af6498f0acaf6426cf018370101ac46a80fe6775c47cc601db69f | gigasquid/libpython-clj-examples | supervised_learning.clj | (ns gigasquid.sk-learn.supervised-learning
(:require [libpython-clj.require :refer [require-python]]
[libpython-clj.python :as py :refer [py. py.. py.-]]
[gigasquid.plot :as plot]))
;;; From -learn.org/stable/tutorial/statistical_inference/supervised_learning.html
;; Clasifying irises
(require-python '[numpy :as np])
(require-python '[numpy.random :as np-random])
(require-python '[sklearn.datasets :as datasets])
(require-python '[matplotlib.pyplot :as pyplot])
(def iris (datasets/load_iris :return_X_y true))
(def iris-x (first iris))
(def iris-y (last iris))
= > ( [ 5.1 3.5 1.4 0.2 ] [ 4.9 3 . 1.4 0.2 ] )
(take 2 iris-y) ;=> (0 0)
= > [ 0 1 2 ]
;;; K-Nearest neighbors classifier
;;; The simplest possible classifier is the nearest neighbor: given a new observation X_test, find in the training set (i.e. the data used to train the estimator) the observation with the closest feature vector.
;;Split iris data in train and test data
;; A random permutation, to split the data randomly
;;; here instead of following the python example we are going to use
;; shuffle and take instead
= > ( 150 , 4 )
= > ( 150 , )
(def shuffled-data (->> (map (fn [x y] {:x x :y y}) iris-x iris-y)
(shuffle)))
(def train-data (take 140 shuffled-data))
(def test-data (drop 140 shuffled-data))
- > 140
- > 10
(def iris-x-train (mapv :x train-data))
(def iris-y-train (mapv :y train-data))
(def iris-x-test (mapv :x test-data))
(def iris-y-test (mapv :y test-data))
(require-python '[sklearn.neighbors :as neighbors])
(def knn (neighbors/KNeighborsClassifier))
(py. knn fit iris-x-train iris-y-train)
;;; predict
(py. knn predict iris-x-test) ;=> [0 0 1 2 2 0 2 2 0 2]
;;; actual test
iris-y-test ;=> [0 0 1 2 2 0 2 1 0 2]
Linear model - From regression to sparsity
;; Diabetes dataset
The diabetes dataset consists of 10 physiological variables ( age , sex , weight , blood pressure ) measure on 442 patients , and an indication of disease progression after one year :
(require-python '[sklearn.linear_model :as linear-model])
(def diabetes (datasets/load_diabetes :return_X_y true))
(def diabetes-x (first diabetes))
(def diabetes-y (last diabetes))
= > ( 442 , 10 )
= > 422
(def diabetes-x-train (->> diabetes-x (take 422) (into []) (np/array)))
(def diabetes-y-train (->> diabetes-y (take 422) (into []) (np/array)))
(def diabetes-x-test (->> diabetes-x (drop 422) (into []) (np/array)))
(def diabetes-y-test (->> diabetes-y (drop 422) (into []) (np/array)))
LinearRegression , in its simplest form , fits a linear model to the data set by adjusting a set of parameters in order to make the sum of the squared residuals of the model as small as possible .
(py/python-type diabetes-x-train);=> :ndarray
= > ( 442 , 10 )
= > ( 422 , 10 )
(def regr (linear-model/LinearRegression))
(py. regr fit diabetes-x-train diabetes-y-train)
(py.- regr coef_)
[ -2.37639315e+02 5.10530605e+02 3.27736980e+02
-8.14131709e+02 4.92814588e+02 1.02848452e+02 1.84606489e+02
7.43519617e+02 7.60951722e+01 ]
;;; The mean square error
(np/mean
(np/square
= > 13.41173112391975
= > 0.5175336599402476
;;; shrinkage
;;If there are few data points per dimension, noise in the observations induces high variance:
(def X [[0.5] [1]])
(def Y [0.5 1])
(def test [[0] [2]])
(def regr (linear-model/LinearRegression))
(np-random/seed 0)
(plot/with-show
(dotimes [i 6]
(let [this-x (np/multiply 0.1
(np/add
(np-random/normal :size [2 1]) X))
_ (py. regr fit this-x Y)
prediction (py. regr predict test)]
(pyplot/plot test prediction)
(pyplot/scatter this-x Y :s 3))))
A solution in high - dimensional statistical learning is to shrink the regression coefficients to zero : any two randomly chosen set of observations are likely to be uncorrelated . This is called Ridge regression :
(def regr (linear-model/Ridge :alpha 1))
(plot/with-show
(dotimes [i 6]
(let [this-x (np/multiply 0.1
(np/add
(np-random/normal :size [2 1]) X))
_ (py. regr fit this-x Y)
prediction (py. regr predict test)]
(pyplot/plot test prediction)
(pyplot/scatter this-x Y :s 3))))
;; This is an example of bias/variance tradeoff: the larger the ridge alpha parameter, the higher the bias and the lower the variance.
;; We can choose alpha to minimize left out error, this time using the diabetes dataset rather than our synthetic data:
(def alphas (np/logspace -4 -1 6))
(mapv #(-> regr
(py. set_params :alpha %)
(py. fit diabetes-x-train diabetes-y-train)
(py. score diabetes-x-test diabetes-y-test))
alphas)
-=>[0.5851110683883531 0.5852073015444674 0.585467754069849 0.5855512036503915 0.5830717085554161 0.570589994372801 ]
;;; Sparsity
(def regr (linear-model/Lasso))
(def scores (map #(-> regr
(py. set_params :alpha %)
(py. fit diabetes-x-train diabetes-y-train)
(py. score diabetes-x-test diabetes-y-test))
alphas))
(def best-alpha (->> (map (fn [a s] {:alpha a :score s}) alphas scores)
(sort-by :score)
last))
(-> regr
(py. set_params :alpha best-alpha)
(py. fit diabetes-x-train diabetes-y-train)
(py.- coef_))
[ 0 . -212.43764548 517.19478111 313.77959962 -160.8303982
-0 . -187.19554705 69.38229038 508.66011217 71.84239008 ]
Classification
;; For classification, as in the labeling iris task, linear regression is not the right approach as it will give too much weight to data far from the decision frontier. A linear approach is to fit a sigmoid function or logistic function:
(def log (linear-model/LogisticRegression :C 1e5))
The C parameter controls the amount of regularization in the LogisticRegression object : a large value for C results in less regularization . penalty="l2 " gives ( i.e. non - sparse coefficients ) , while penalty="l1 " gives Sparsity .
(py. log fit iris-x-train iris-y-train)
= > 1.0
Support Vector Machines
(require-python '[sklearn.svm :as svm])
(def svc (svm/SVC :kernel "linear"))
(py. svc fit iris-x-train iris-y-train)
C=1.0 , break_ties = False , cache_size=200 , = None , coef0=0.0 ,
decision_function_shape='ovr ' , degree=3 , gamma='scale ' , kernel='linear ' ,
;; max_iter=-1, probability=False, random_state=None, shrinking=True,
;; tol=0.001, verbose=False)
| null | https://raw.githubusercontent.com/gigasquid/libpython-clj-examples/f151c00415c82a144a13959ff7b56f58704ac6f2/src/gigasquid/sk_learn/supervised_learning.clj | clojure | From -learn.org/stable/tutorial/statistical_inference/supervised_learning.html
Clasifying irises
=> (0 0)
K-Nearest neighbors classifier
The simplest possible classifier is the nearest neighbor: given a new observation X_test, find in the training set (i.e. the data used to train the estimator) the observation with the closest feature vector.
Split iris data in train and test data
A random permutation, to split the data randomly
here instead of following the python example we are going to use
shuffle and take instead
predict
=> [0 0 1 2 2 0 2 2 0 2]
actual test
=> [0 0 1 2 2 0 2 1 0 2]
Diabetes dataset
=> :ndarray
The mean square error
shrinkage
If there are few data points per dimension, noise in the observations induces high variance:
This is an example of bias/variance tradeoff: the larger the ridge alpha parameter, the higher the bias and the lower the variance.
We can choose alpha to minimize left out error, this time using the diabetes dataset rather than our synthetic data:
Sparsity
For classification, as in the labeling iris task, linear regression is not the right approach as it will give too much weight to data far from the decision frontier. A linear approach is to fit a sigmoid function or logistic function:
max_iter=-1, probability=False, random_state=None, shrinking=True,
tol=0.001, verbose=False) | (ns gigasquid.sk-learn.supervised-learning
(:require [libpython-clj.require :refer [require-python]]
[libpython-clj.python :as py :refer [py. py.. py.-]]
[gigasquid.plot :as plot]))
(require-python '[numpy :as np])
(require-python '[numpy.random :as np-random])
(require-python '[sklearn.datasets :as datasets])
(require-python '[matplotlib.pyplot :as pyplot])
(def iris (datasets/load_iris :return_X_y true))
(def iris-x (first iris))
(def iris-y (last iris))
= > ( [ 5.1 3.5 1.4 0.2 ] [ 4.9 3 . 1.4 0.2 ] )
= > [ 0 1 2 ]
= > ( 150 , 4 )
= > ( 150 , )
(def shuffled-data (->> (map (fn [x y] {:x x :y y}) iris-x iris-y)
(shuffle)))
(def train-data (take 140 shuffled-data))
(def test-data (drop 140 shuffled-data))
- > 140
- > 10
(def iris-x-train (mapv :x train-data))
(def iris-y-train (mapv :y train-data))
(def iris-x-test (mapv :x test-data))
(def iris-y-test (mapv :y test-data))
(require-python '[sklearn.neighbors :as neighbors])
(def knn (neighbors/KNeighborsClassifier))
(py. knn fit iris-x-train iris-y-train)
Linear model - From regression to sparsity
The diabetes dataset consists of 10 physiological variables ( age , sex , weight , blood pressure ) measure on 442 patients , and an indication of disease progression after one year :
(require-python '[sklearn.linear_model :as linear-model])
(def diabetes (datasets/load_diabetes :return_X_y true))
(def diabetes-x (first diabetes))
(def diabetes-y (last diabetes))
= > ( 442 , 10 )
= > 422
(def diabetes-x-train (->> diabetes-x (take 422) (into []) (np/array)))
(def diabetes-y-train (->> diabetes-y (take 422) (into []) (np/array)))
(def diabetes-x-test (->> diabetes-x (drop 422) (into []) (np/array)))
(def diabetes-y-test (->> diabetes-y (drop 422) (into []) (np/array)))
LinearRegression , in its simplest form , fits a linear model to the data set by adjusting a set of parameters in order to make the sum of the squared residuals of the model as small as possible .
= > ( 442 , 10 )
= > ( 422 , 10 )
(def regr (linear-model/LinearRegression))
(py. regr fit diabetes-x-train diabetes-y-train)
(py.- regr coef_)
[ -2.37639315e+02 5.10530605e+02 3.27736980e+02
-8.14131709e+02 4.92814588e+02 1.02848452e+02 1.84606489e+02
7.43519617e+02 7.60951722e+01 ]
(np/mean
(np/square
= > 13.41173112391975
= > 0.5175336599402476
(def X [[0.5] [1]])
(def Y [0.5 1])
(def test [[0] [2]])
(def regr (linear-model/LinearRegression))
(np-random/seed 0)
(plot/with-show
(dotimes [i 6]
(let [this-x (np/multiply 0.1
(np/add
(np-random/normal :size [2 1]) X))
_ (py. regr fit this-x Y)
prediction (py. regr predict test)]
(pyplot/plot test prediction)
(pyplot/scatter this-x Y :s 3))))
A solution in high - dimensional statistical learning is to shrink the regression coefficients to zero : any two randomly chosen set of observations are likely to be uncorrelated . This is called Ridge regression :
(def regr (linear-model/Ridge :alpha 1))
(plot/with-show
(dotimes [i 6]
(let [this-x (np/multiply 0.1
(np/add
(np-random/normal :size [2 1]) X))
_ (py. regr fit this-x Y)
prediction (py. regr predict test)]
(pyplot/plot test prediction)
(pyplot/scatter this-x Y :s 3))))
(def alphas (np/logspace -4 -1 6))
(mapv #(-> regr
(py. set_params :alpha %)
(py. fit diabetes-x-train diabetes-y-train)
(py. score diabetes-x-test diabetes-y-test))
alphas)
-=>[0.5851110683883531 0.5852073015444674 0.585467754069849 0.5855512036503915 0.5830717085554161 0.570589994372801 ]
(def regr (linear-model/Lasso))
(def scores (map #(-> regr
(py. set_params :alpha %)
(py. fit diabetes-x-train diabetes-y-train)
(py. score diabetes-x-test diabetes-y-test))
alphas))
(def best-alpha (->> (map (fn [a s] {:alpha a :score s}) alphas scores)
(sort-by :score)
last))
(-> regr
(py. set_params :alpha best-alpha)
(py. fit diabetes-x-train diabetes-y-train)
(py.- coef_))
[ 0 . -212.43764548 517.19478111 313.77959962 -160.8303982
-0 . -187.19554705 69.38229038 508.66011217 71.84239008 ]
Classification
(def log (linear-model/LogisticRegression :C 1e5))
The C parameter controls the amount of regularization in the LogisticRegression object : a large value for C results in less regularization . penalty="l2 " gives ( i.e. non - sparse coefficients ) , while penalty="l1 " gives Sparsity .
(py. log fit iris-x-train iris-y-train)
= > 1.0
Support Vector Machines
(require-python '[sklearn.svm :as svm])
(def svc (svm/SVC :kernel "linear"))
(py. svc fit iris-x-train iris-y-train)
C=1.0 , break_ties = False , cache_size=200 , = None , coef0=0.0 ,
decision_function_shape='ovr ' , degree=3 , gamma='scale ' , kernel='linear ' ,
|
91cbf82070bf5f6478003e52f439f449fa0c1e1148bea7aa772d9881e321bdd0 | swtwsk/vinci-lang | PrintVinci.hs | # LANGUAGE CPP #
# LANGUAGE FlexibleInstances #
# LANGUAGE LambdaCase #
#if __GLASGOW_HASKELL__ <= 708
# LANGUAGE OverlappingInstances #
#endif
# OPTIONS_GHC -fno - warn - incomplete - patterns #
| Pretty - printer for Parser .
Generated by the BNF converter .
module Parser.PrintVinci where
import Prelude
( ($), (.)
, Bool(..), (==), (<)
, Int, Integer, Double, (+), (-), (*)
, String, (++)
, ShowS, showChar, showString
, all, dropWhile, elem, foldr, id, map, null, replicate, shows, span
)
import Data.Char ( Char, isSpace )
import qualified Parser.AbsVinci
-- | The top-level printing method.
printTree :: Print a => a -> String
printTree = render . prt 0
type Doc = [ShowS] -> [ShowS]
doc :: ShowS -> Doc
doc = (:)
render :: Doc -> String
render d = rend 0 (map ($ "") $ d []) "" where
rend i = \case
"[" :ts -> showChar '[' . rend i ts
"(" :ts -> showChar '(' . rend i ts
"{" :ts -> showChar '{' . new (i+1) . rend (i+1) ts
"}" : ";":ts -> new (i-1) . space "}" . showChar ';' . new (i-1) . rend (i-1) ts
"}" :ts -> new (i-1) . showChar '}' . new (i-1) . rend (i-1) ts
[";"] -> showChar ';'
";" :ts -> showChar ';' . new i . rend i ts
t : ts@(p:_) | closingOrPunctuation p -> showString t . rend i ts
t :ts -> space t . rend i ts
_ -> id
new i = showChar '\n' . replicateS (2*i) (showChar ' ') . dropWhile isSpace
space t s =
case (all isSpace t', null spc, null rest) of
(True , _ , True ) -> [] -- remove trailing space
(False, _ , True ) -> t' -- remove trailing space
(False, True, False) -> t' ++ ' ' : s -- add space if none
_ -> t' ++ s
where
t' = showString t []
(spc, rest) = span isSpace s
closingOrPunctuation :: String -> Bool
closingOrPunctuation [c] = c `elem` closerOrPunct
closingOrPunctuation _ = False
closerOrPunct :: String
closerOrPunct = ")],;"
parenth :: Doc -> Doc
parenth ss = doc (showChar '(') . ss . doc (showChar ')')
concatS :: [ShowS] -> ShowS
concatS = foldr (.) id
concatD :: [Doc] -> Doc
concatD = foldr (.) id
replicateS :: Int -> ShowS -> ShowS
replicateS n f = concatS (replicate n f)
-- | The printer class does the job.
class Print a where
prt :: Int -> a -> Doc
prtList :: Int -> [a] -> Doc
prtList i = concatD . map (prt i)
instance {-# OVERLAPPABLE #-} Print a => Print [a] where
prt = prtList
instance Print Char where
prt _ s = doc (showChar '\'' . mkEsc '\'' s . showChar '\'')
prtList _ s = doc (showChar '"' . concatS (map (mkEsc '"') s) . showChar '"')
mkEsc :: Char -> Char -> ShowS
mkEsc q = \case
s | s == q -> showChar '\\' . showChar s
'\\' -> showString "\\\\"
'\n' -> showString "\\n"
'\t' -> showString "\\t"
s -> showChar s
prPrec :: Int -> Int -> Doc -> Doc
prPrec i j = if j < i then parenth else id
instance Print Integer where
prt _ x = doc (shows x)
instance Print Double where
prt _ x = doc (shows x)
instance Print Parser.AbsVinci.VIdent where
prt _ (Parser.AbsVinci.VIdent i) = doc $ showString i
instance Print Parser.AbsVinci.SIdent where
prt _ (Parser.AbsVinci.SIdent i) = doc $ showString i
instance Print Parser.AbsVinci.TPolyIdent where
prt _ (Parser.AbsVinci.TPolyIdent i) = doc $ showString i
prtList _ [] = concatD []
prtList _ (x:xs) = concatD [prt 0 x, prt 0 xs]
instance Print Parser.AbsVinci.AttrString where
prt _ (Parser.AbsVinci.AttrString i) = doc $ showString i
instance Print (Parser.AbsVinci.Program a) where
prt i = \case
Parser.AbsVinci.Prog _ phrases -> prPrec i 0 (concatD [prt 0 phrases])
instance Print (Parser.AbsVinci.Phrase a) where
prt i = \case
Parser.AbsVinci.Value _ letdef -> prPrec i 0 (concatD [prt 0 letdef])
Parser.AbsVinci.StructDecl _ structdef -> prPrec i 0 (concatD [prt 0 structdef])
Parser.AbsVinci.TypeSynon _ sident type_ -> prPrec i 0 (concatD [doc (showString "type"), prt 0 sident, doc (showString "="), prt 0 type_])
Parser.AbsVinci.Expression _ expr -> prPrec i 0 (concatD [prt 0 expr])
prtList _ [] = concatD []
prtList _ (x:xs) = concatD [prt 0 x, doc (showString ";;"), prt 0 xs]
instance Print [Parser.AbsVinci.Phrase a] where
prt = prtList
instance Print (Parser.AbsVinci.LetDef a) where
prt i = \case
Parser.AbsVinci.Let _ letbinds -> prPrec i 0 (concatD [doc (showString "let"), prt 0 letbinds])
instance Print (Parser.AbsVinci.LetBind a) where
prt i = \case
Parser.AbsVinci.ConstBind _ letlvi expr -> prPrec i 0 (concatD [prt 0 letlvi, doc (showString "="), prt 0 expr])
Parser.AbsVinci.ProcBind _ procname letlvis rtype expr -> prPrec i 0 (concatD [prt 0 procname, prt 0 letlvis, prt 0 rtype, doc (showString "="), prt 0 expr])
prtList _ [x] = concatD [prt 0 x]
prtList _ (x:xs) = concatD [prt 0 x, doc (showString "and"), prt 0 xs]
instance Print (Parser.AbsVinci.LetLVI a) where
prt i = \case
Parser.AbsVinci.LetLVI _ lambdavi -> prPrec i 0 (concatD [prt 0 lambdavi])
prtList _ [x] = concatD [prt 0 x]
prtList _ (x:xs) = concatD [prt 0 x, prt 0 xs]
instance Print [Parser.AbsVinci.LetLVI a] where
prt = prtList
instance Print [Parser.AbsVinci.LetBind a] where
prt = prtList
instance Print (Parser.AbsVinci.Expr a) where
prt i = \case
Parser.AbsVinci.EId _ vident -> prPrec i 8 (concatD [prt 0 vident])
Parser.AbsVinci.EInt _ n -> prPrec i 8 (concatD [prt 0 n])
Parser.AbsVinci.EFloat _ d -> prPrec i 8 (concatD [prt 0 d])
Parser.AbsVinci.ETrue _ -> prPrec i 8 (concatD [doc (showString "True")])
Parser.AbsVinci.EFalse _ -> prPrec i 8 (concatD [doc (showString "False")])
Parser.AbsVinci.EFieldGet _ expr vident -> prPrec i 8 (concatD [prt 8 expr, doc (showString "."), prt 0 vident])
Parser.AbsVinci.ETuple _ expr exprs -> prPrec i 8 (concatD [doc (showString "("), prt 0 expr, doc (showString ","), prt 0 exprs, doc (showString ")")])
Parser.AbsVinci.EApp _ expr1 expr2 -> prPrec i 7 (concatD [prt 7 expr1, prt 8 expr2])
Parser.AbsVinci.ETyped _ expr type_ -> prPrec i 6 (concatD [doc (showString "("), prt 0 expr, doc (showString ":"), prt 0 type_, doc (showString ")")])
Parser.AbsVinci.ENeg _ expr -> prPrec i 5 (concatD [doc (showString "-"), prt 6 expr])
Parser.AbsVinci.ENot _ expr -> prPrec i 5 (concatD [doc (showString "not"), prt 6 expr])
Parser.AbsVinci.EVecMatMul _ expr1 expr2 -> prPrec i 4 (concatD [prt 4 expr1, doc (showString ".*"), prt 5 expr2])
Parser.AbsVinci.EMatVecMul _ expr1 expr2 -> prPrec i 4 (concatD [prt 4 expr1, doc (showString "*."), prt 5 expr2])
Parser.AbsVinci.EMatMatMul _ expr1 expr2 -> prPrec i 4 (concatD [prt 4 expr1, doc (showString "@"), prt 5 expr2])
Parser.AbsVinci.EMul _ expr1 expr2 -> prPrec i 4 (concatD [prt 4 expr1, doc (showString "*"), prt 5 expr2])
Parser.AbsVinci.EDiv _ expr1 expr2 -> prPrec i 4 (concatD [prt 4 expr1, doc (showString "/"), prt 5 expr2])
Parser.AbsVinci.EMod _ expr1 expr2 -> prPrec i 4 (concatD [prt 4 expr1, doc (showString "%"), prt 5 expr2])
Parser.AbsVinci.EAdd _ expr1 expr2 -> prPrec i 3 (concatD [prt 3 expr1, doc (showString "+"), prt 4 expr2])
Parser.AbsVinci.ESub _ expr1 expr2 -> prPrec i 3 (concatD [prt 3 expr1, doc (showString "-"), prt 4 expr2])
Parser.AbsVinci.ELTH _ expr1 expr2 -> prPrec i 2 (concatD [prt 2 expr1, doc (showString "<"), prt 3 expr2])
Parser.AbsVinci.ELE _ expr1 expr2 -> prPrec i 2 (concatD [prt 2 expr1, doc (showString "<="), prt 3 expr2])
Parser.AbsVinci.EGTH _ expr1 expr2 -> prPrec i 2 (concatD [prt 2 expr1, doc (showString ">"), prt 3 expr2])
Parser.AbsVinci.EGE _ expr1 expr2 -> prPrec i 2 (concatD [prt 2 expr1, doc (showString ">="), prt 3 expr2])
Parser.AbsVinci.EEQU _ expr1 expr2 -> prPrec i 2 (concatD [prt 2 expr1, doc (showString "=="), prt 3 expr2])
Parser.AbsVinci.ENE _ expr1 expr2 -> prPrec i 2 (concatD [prt 2 expr1, doc (showString "!="), prt 3 expr2])
Parser.AbsVinci.EAnd _ expr1 expr2 -> prPrec i 1 (concatD [prt 2 expr1, doc (showString "&&"), prt 1 expr2])
Parser.AbsVinci.EOr _ expr1 expr2 -> prPrec i 0 (concatD [prt 1 expr1, doc (showString "||"), prt 0 expr2])
Parser.AbsVinci.ECond _ expr1 expr2 expr3 -> prPrec i 0 (concatD [doc (showString "if"), prt 0 expr1, doc (showString "then"), prt 0 expr2, doc (showString "else"), prt 0 expr3])
Parser.AbsVinci.ELetIn _ letdef expr -> prPrec i 0 (concatD [prt 0 letdef, doc (showString "in"), prt 0 expr])
Parser.AbsVinci.ELambda _ lambdavis expr -> prPrec i 0 (concatD [doc (showString "\\"), prt 0 lambdavis, doc (showString "->"), prt 0 expr])
Parser.AbsVinci.ECons _ sident fielddefs -> prPrec i 0 (concatD [prt 0 sident, doc (showString "{"), prt 0 fielddefs, doc (showString "}")])
prtList _ [x] = concatD [prt 0 x]
prtList _ (x:xs) = concatD [prt 0 x, doc (showString ","), prt 0 xs]
instance Print (Parser.AbsVinci.LambdaVI a) where
prt i = \case
Parser.AbsVinci.TypedVId _ lambdavi type_ -> prPrec i 0 (concatD [doc (showString "("), prt 0 lambdavi, doc (showString ":"), prt 0 type_, doc (showString ")")])
Parser.AbsVinci.LambdaVId _ vident -> prPrec i 0 (concatD [prt 0 vident])
Parser.AbsVinci.WildVId _ -> prPrec i 0 (concatD [doc (showString "_")])
Parser.AbsVinci.TupleVId _ lambdavis -> prPrec i 0 (concatD [doc (showString "("), prt 0 lambdavis, doc (showString ")")])
prtList _ [x] = concatD [prt 0 x]
prtList _ (x:xs) = concatD [prt 0 x, doc (showString ","), prt 0 xs]
instance Print (Parser.AbsVinci.FieldDef a) where
prt i = \case
Parser.AbsVinci.FieldDef _ vident expr -> prPrec i 0 (concatD [prt 0 vident, doc (showString "="), prt 0 expr])
prtList _ [] = concatD []
prtList _ [x] = concatD [prt 0 x]
prtList _ (x:xs) = concatD [prt 0 x, doc (showString ","), prt 0 xs]
instance Print [Parser.AbsVinci.Expr a] where
prt = prtList
instance Print [Parser.AbsVinci.LambdaVI a] where
prt = prtList
instance Print [Parser.AbsVinci.FieldDef a] where
prt = prtList
instance Print (Parser.AbsVinci.ProcName a) where
prt i = \case
Parser.AbsVinci.ProcNameId _ vident -> prPrec i 0 (concatD [prt 0 vident])
instance Print (Parser.AbsVinci.StructDef a) where
prt i = \case
Parser.AbsVinci.SDef _ sident tpolyidents fielddecls -> prPrec i 0 (concatD [doc (showString "struct"), prt 0 sident, prt 0 tpolyidents, doc (showString "{"), prt 0 fielddecls, doc (showString "}")])
instance Print (Parser.AbsVinci.FieldDecl a) where
prt i = \case
Parser.AbsVinci.FieldDecl _ attr vident type_ -> prPrec i 0 (concatD [prt 0 attr, prt 0 vident, doc (showString ":"), prt 0 type_])
prtList _ [] = concatD []
prtList _ [x] = concatD [prt 0 x]
prtList _ (x:xs) = concatD [prt 0 x, doc (showString ","), prt 0 xs]
instance Print [Parser.AbsVinci.FieldDecl a] where
prt = prtList
instance Print [Parser.AbsVinci.TPolyIdent] where
prt = prtList
instance Print (Parser.AbsVinci.Type a) where
prt i = \case
Parser.AbsVinci.TInt _ -> prPrec i 1 (concatD [doc (showString "Int")])
Parser.AbsVinci.TFloat _ -> prPrec i 1 (concatD [doc (showString "Float")])
Parser.AbsVinci.TBool _ -> prPrec i 1 (concatD [doc (showString "Bool")])
Parser.AbsVinci.TStruct _ sident -> prPrec i 1 (concatD [prt 0 sident])
Parser.AbsVinci.TPoly _ tpolyident -> prPrec i 1 (concatD [prt 0 tpolyident])
Parser.AbsVinci.TFun _ type_1 type_2 -> prPrec i 0 (concatD [prt 1 type_1, doc (showString "->"), prt 0 type_2])
instance Print (Parser.AbsVinci.RType a) where
prt i = \case
Parser.AbsVinci.NoRetType _ -> prPrec i 0 (concatD [])
Parser.AbsVinci.RetType _ type_ -> prPrec i 0 (concatD [doc (showString "->"), prt 0 type_])
instance Print (Parser.AbsVinci.Attr a) where
prt i = \case
Parser.AbsVinci.NoAttr _ -> prPrec i 0 (concatD [])
Parser.AbsVinci.Attr _ attrstring -> prPrec i 0 (concatD [doc (showString "@"), prt 0 attrstring])
| null | https://raw.githubusercontent.com/swtwsk/vinci-lang/9c7e01953e0b1cf135af7188e0c71fe6195bdfa1/src/Parser/PrintVinci.hs | haskell | | The top-level printing method.
remove trailing space
remove trailing space
add space if none
| The printer class does the job.
# OVERLAPPABLE # | # LANGUAGE CPP #
# LANGUAGE FlexibleInstances #
# LANGUAGE LambdaCase #
#if __GLASGOW_HASKELL__ <= 708
# LANGUAGE OverlappingInstances #
#endif
# OPTIONS_GHC -fno - warn - incomplete - patterns #
| Pretty - printer for Parser .
Generated by the BNF converter .
module Parser.PrintVinci where
import Prelude
( ($), (.)
, Bool(..), (==), (<)
, Int, Integer, Double, (+), (-), (*)
, String, (++)
, ShowS, showChar, showString
, all, dropWhile, elem, foldr, id, map, null, replicate, shows, span
)
import Data.Char ( Char, isSpace )
import qualified Parser.AbsVinci
printTree :: Print a => a -> String
printTree = render . prt 0
type Doc = [ShowS] -> [ShowS]
doc :: ShowS -> Doc
doc = (:)
render :: Doc -> String
render d = rend 0 (map ($ "") $ d []) "" where
rend i = \case
"[" :ts -> showChar '[' . rend i ts
"(" :ts -> showChar '(' . rend i ts
"{" :ts -> showChar '{' . new (i+1) . rend (i+1) ts
"}" : ";":ts -> new (i-1) . space "}" . showChar ';' . new (i-1) . rend (i-1) ts
"}" :ts -> new (i-1) . showChar '}' . new (i-1) . rend (i-1) ts
[";"] -> showChar ';'
";" :ts -> showChar ';' . new i . rend i ts
t : ts@(p:_) | closingOrPunctuation p -> showString t . rend i ts
t :ts -> space t . rend i ts
_ -> id
new i = showChar '\n' . replicateS (2*i) (showChar ' ') . dropWhile isSpace
space t s =
case (all isSpace t', null spc, null rest) of
_ -> t' ++ s
where
t' = showString t []
(spc, rest) = span isSpace s
closingOrPunctuation :: String -> Bool
closingOrPunctuation [c] = c `elem` closerOrPunct
closingOrPunctuation _ = False
closerOrPunct :: String
closerOrPunct = ")],;"
parenth :: Doc -> Doc
parenth ss = doc (showChar '(') . ss . doc (showChar ')')
concatS :: [ShowS] -> ShowS
concatS = foldr (.) id
concatD :: [Doc] -> Doc
concatD = foldr (.) id
replicateS :: Int -> ShowS -> ShowS
replicateS n f = concatS (replicate n f)
class Print a where
prt :: Int -> a -> Doc
prtList :: Int -> [a] -> Doc
prtList i = concatD . map (prt i)
prt = prtList
instance Print Char where
prt _ s = doc (showChar '\'' . mkEsc '\'' s . showChar '\'')
prtList _ s = doc (showChar '"' . concatS (map (mkEsc '"') s) . showChar '"')
mkEsc :: Char -> Char -> ShowS
mkEsc q = \case
s | s == q -> showChar '\\' . showChar s
'\\' -> showString "\\\\"
'\n' -> showString "\\n"
'\t' -> showString "\\t"
s -> showChar s
prPrec :: Int -> Int -> Doc -> Doc
prPrec i j = if j < i then parenth else id
instance Print Integer where
prt _ x = doc (shows x)
instance Print Double where
prt _ x = doc (shows x)
instance Print Parser.AbsVinci.VIdent where
prt _ (Parser.AbsVinci.VIdent i) = doc $ showString i
instance Print Parser.AbsVinci.SIdent where
prt _ (Parser.AbsVinci.SIdent i) = doc $ showString i
instance Print Parser.AbsVinci.TPolyIdent where
prt _ (Parser.AbsVinci.TPolyIdent i) = doc $ showString i
prtList _ [] = concatD []
prtList _ (x:xs) = concatD [prt 0 x, prt 0 xs]
instance Print Parser.AbsVinci.AttrString where
prt _ (Parser.AbsVinci.AttrString i) = doc $ showString i
instance Print (Parser.AbsVinci.Program a) where
prt i = \case
Parser.AbsVinci.Prog _ phrases -> prPrec i 0 (concatD [prt 0 phrases])
instance Print (Parser.AbsVinci.Phrase a) where
prt i = \case
Parser.AbsVinci.Value _ letdef -> prPrec i 0 (concatD [prt 0 letdef])
Parser.AbsVinci.StructDecl _ structdef -> prPrec i 0 (concatD [prt 0 structdef])
Parser.AbsVinci.TypeSynon _ sident type_ -> prPrec i 0 (concatD [doc (showString "type"), prt 0 sident, doc (showString "="), prt 0 type_])
Parser.AbsVinci.Expression _ expr -> prPrec i 0 (concatD [prt 0 expr])
prtList _ [] = concatD []
prtList _ (x:xs) = concatD [prt 0 x, doc (showString ";;"), prt 0 xs]
instance Print [Parser.AbsVinci.Phrase a] where
prt = prtList
instance Print (Parser.AbsVinci.LetDef a) where
prt i = \case
Parser.AbsVinci.Let _ letbinds -> prPrec i 0 (concatD [doc (showString "let"), prt 0 letbinds])
instance Print (Parser.AbsVinci.LetBind a) where
prt i = \case
Parser.AbsVinci.ConstBind _ letlvi expr -> prPrec i 0 (concatD [prt 0 letlvi, doc (showString "="), prt 0 expr])
Parser.AbsVinci.ProcBind _ procname letlvis rtype expr -> prPrec i 0 (concatD [prt 0 procname, prt 0 letlvis, prt 0 rtype, doc (showString "="), prt 0 expr])
prtList _ [x] = concatD [prt 0 x]
prtList _ (x:xs) = concatD [prt 0 x, doc (showString "and"), prt 0 xs]
instance Print (Parser.AbsVinci.LetLVI a) where
prt i = \case
Parser.AbsVinci.LetLVI _ lambdavi -> prPrec i 0 (concatD [prt 0 lambdavi])
prtList _ [x] = concatD [prt 0 x]
prtList _ (x:xs) = concatD [prt 0 x, prt 0 xs]
instance Print [Parser.AbsVinci.LetLVI a] where
prt = prtList
instance Print [Parser.AbsVinci.LetBind a] where
prt = prtList
instance Print (Parser.AbsVinci.Expr a) where
prt i = \case
Parser.AbsVinci.EId _ vident -> prPrec i 8 (concatD [prt 0 vident])
Parser.AbsVinci.EInt _ n -> prPrec i 8 (concatD [prt 0 n])
Parser.AbsVinci.EFloat _ d -> prPrec i 8 (concatD [prt 0 d])
Parser.AbsVinci.ETrue _ -> prPrec i 8 (concatD [doc (showString "True")])
Parser.AbsVinci.EFalse _ -> prPrec i 8 (concatD [doc (showString "False")])
Parser.AbsVinci.EFieldGet _ expr vident -> prPrec i 8 (concatD [prt 8 expr, doc (showString "."), prt 0 vident])
Parser.AbsVinci.ETuple _ expr exprs -> prPrec i 8 (concatD [doc (showString "("), prt 0 expr, doc (showString ","), prt 0 exprs, doc (showString ")")])
Parser.AbsVinci.EApp _ expr1 expr2 -> prPrec i 7 (concatD [prt 7 expr1, prt 8 expr2])
Parser.AbsVinci.ETyped _ expr type_ -> prPrec i 6 (concatD [doc (showString "("), prt 0 expr, doc (showString ":"), prt 0 type_, doc (showString ")")])
Parser.AbsVinci.ENeg _ expr -> prPrec i 5 (concatD [doc (showString "-"), prt 6 expr])
Parser.AbsVinci.ENot _ expr -> prPrec i 5 (concatD [doc (showString "not"), prt 6 expr])
Parser.AbsVinci.EVecMatMul _ expr1 expr2 -> prPrec i 4 (concatD [prt 4 expr1, doc (showString ".*"), prt 5 expr2])
Parser.AbsVinci.EMatVecMul _ expr1 expr2 -> prPrec i 4 (concatD [prt 4 expr1, doc (showString "*."), prt 5 expr2])
Parser.AbsVinci.EMatMatMul _ expr1 expr2 -> prPrec i 4 (concatD [prt 4 expr1, doc (showString "@"), prt 5 expr2])
Parser.AbsVinci.EMul _ expr1 expr2 -> prPrec i 4 (concatD [prt 4 expr1, doc (showString "*"), prt 5 expr2])
Parser.AbsVinci.EDiv _ expr1 expr2 -> prPrec i 4 (concatD [prt 4 expr1, doc (showString "/"), prt 5 expr2])
Parser.AbsVinci.EMod _ expr1 expr2 -> prPrec i 4 (concatD [prt 4 expr1, doc (showString "%"), prt 5 expr2])
Parser.AbsVinci.EAdd _ expr1 expr2 -> prPrec i 3 (concatD [prt 3 expr1, doc (showString "+"), prt 4 expr2])
Parser.AbsVinci.ESub _ expr1 expr2 -> prPrec i 3 (concatD [prt 3 expr1, doc (showString "-"), prt 4 expr2])
Parser.AbsVinci.ELTH _ expr1 expr2 -> prPrec i 2 (concatD [prt 2 expr1, doc (showString "<"), prt 3 expr2])
Parser.AbsVinci.ELE _ expr1 expr2 -> prPrec i 2 (concatD [prt 2 expr1, doc (showString "<="), prt 3 expr2])
Parser.AbsVinci.EGTH _ expr1 expr2 -> prPrec i 2 (concatD [prt 2 expr1, doc (showString ">"), prt 3 expr2])
Parser.AbsVinci.EGE _ expr1 expr2 -> prPrec i 2 (concatD [prt 2 expr1, doc (showString ">="), prt 3 expr2])
Parser.AbsVinci.EEQU _ expr1 expr2 -> prPrec i 2 (concatD [prt 2 expr1, doc (showString "=="), prt 3 expr2])
Parser.AbsVinci.ENE _ expr1 expr2 -> prPrec i 2 (concatD [prt 2 expr1, doc (showString "!="), prt 3 expr2])
Parser.AbsVinci.EAnd _ expr1 expr2 -> prPrec i 1 (concatD [prt 2 expr1, doc (showString "&&"), prt 1 expr2])
Parser.AbsVinci.EOr _ expr1 expr2 -> prPrec i 0 (concatD [prt 1 expr1, doc (showString "||"), prt 0 expr2])
Parser.AbsVinci.ECond _ expr1 expr2 expr3 -> prPrec i 0 (concatD [doc (showString "if"), prt 0 expr1, doc (showString "then"), prt 0 expr2, doc (showString "else"), prt 0 expr3])
Parser.AbsVinci.ELetIn _ letdef expr -> prPrec i 0 (concatD [prt 0 letdef, doc (showString "in"), prt 0 expr])
Parser.AbsVinci.ELambda _ lambdavis expr -> prPrec i 0 (concatD [doc (showString "\\"), prt 0 lambdavis, doc (showString "->"), prt 0 expr])
Parser.AbsVinci.ECons _ sident fielddefs -> prPrec i 0 (concatD [prt 0 sident, doc (showString "{"), prt 0 fielddefs, doc (showString "}")])
prtList _ [x] = concatD [prt 0 x]
prtList _ (x:xs) = concatD [prt 0 x, doc (showString ","), prt 0 xs]
instance Print (Parser.AbsVinci.LambdaVI a) where
prt i = \case
Parser.AbsVinci.TypedVId _ lambdavi type_ -> prPrec i 0 (concatD [doc (showString "("), prt 0 lambdavi, doc (showString ":"), prt 0 type_, doc (showString ")")])
Parser.AbsVinci.LambdaVId _ vident -> prPrec i 0 (concatD [prt 0 vident])
Parser.AbsVinci.WildVId _ -> prPrec i 0 (concatD [doc (showString "_")])
Parser.AbsVinci.TupleVId _ lambdavis -> prPrec i 0 (concatD [doc (showString "("), prt 0 lambdavis, doc (showString ")")])
prtList _ [x] = concatD [prt 0 x]
prtList _ (x:xs) = concatD [prt 0 x, doc (showString ","), prt 0 xs]
instance Print (Parser.AbsVinci.FieldDef a) where
prt i = \case
Parser.AbsVinci.FieldDef _ vident expr -> prPrec i 0 (concatD [prt 0 vident, doc (showString "="), prt 0 expr])
prtList _ [] = concatD []
prtList _ [x] = concatD [prt 0 x]
prtList _ (x:xs) = concatD [prt 0 x, doc (showString ","), prt 0 xs]
instance Print [Parser.AbsVinci.Expr a] where
prt = prtList
instance Print [Parser.AbsVinci.LambdaVI a] where
prt = prtList
instance Print [Parser.AbsVinci.FieldDef a] where
prt = prtList
instance Print (Parser.AbsVinci.ProcName a) where
prt i = \case
Parser.AbsVinci.ProcNameId _ vident -> prPrec i 0 (concatD [prt 0 vident])
instance Print (Parser.AbsVinci.StructDef a) where
prt i = \case
Parser.AbsVinci.SDef _ sident tpolyidents fielddecls -> prPrec i 0 (concatD [doc (showString "struct"), prt 0 sident, prt 0 tpolyidents, doc (showString "{"), prt 0 fielddecls, doc (showString "}")])
instance Print (Parser.AbsVinci.FieldDecl a) where
prt i = \case
Parser.AbsVinci.FieldDecl _ attr vident type_ -> prPrec i 0 (concatD [prt 0 attr, prt 0 vident, doc (showString ":"), prt 0 type_])
prtList _ [] = concatD []
prtList _ [x] = concatD [prt 0 x]
prtList _ (x:xs) = concatD [prt 0 x, doc (showString ","), prt 0 xs]
instance Print [Parser.AbsVinci.FieldDecl a] where
prt = prtList
instance Print [Parser.AbsVinci.TPolyIdent] where
prt = prtList
instance Print (Parser.AbsVinci.Type a) where
prt i = \case
Parser.AbsVinci.TInt _ -> prPrec i 1 (concatD [doc (showString "Int")])
Parser.AbsVinci.TFloat _ -> prPrec i 1 (concatD [doc (showString "Float")])
Parser.AbsVinci.TBool _ -> prPrec i 1 (concatD [doc (showString "Bool")])
Parser.AbsVinci.TStruct _ sident -> prPrec i 1 (concatD [prt 0 sident])
Parser.AbsVinci.TPoly _ tpolyident -> prPrec i 1 (concatD [prt 0 tpolyident])
Parser.AbsVinci.TFun _ type_1 type_2 -> prPrec i 0 (concatD [prt 1 type_1, doc (showString "->"), prt 0 type_2])
instance Print (Parser.AbsVinci.RType a) where
prt i = \case
Parser.AbsVinci.NoRetType _ -> prPrec i 0 (concatD [])
Parser.AbsVinci.RetType _ type_ -> prPrec i 0 (concatD [doc (showString "->"), prt 0 type_])
instance Print (Parser.AbsVinci.Attr a) where
prt i = \case
Parser.AbsVinci.NoAttr _ -> prPrec i 0 (concatD [])
Parser.AbsVinci.Attr _ attrstring -> prPrec i 0 (concatD [doc (showString "@"), prt 0 attrstring])
|
70ca8d859e494b5a1016e41b4686f8a4e79a76649cddfee70874b2953ac46a63 | vimus/libmpd-haskell | ApplicativeSpec.hs | {-# LANGUAGE OverloadedStrings #-}
module Network.MPD.ApplicativeSpec (main, spec) where
import TestUtil
import Data.List (intercalate)
import Network.MPD.Applicative
import Network.MPD.Commands.Types
import Control.Applicative
import qualified Data.Map as M
main :: IO ()
main = hspec spec
spec :: Spec
spec = do
describe "Command as an Applicative" $ do
describe "currentSong" $ do
it "returns the currently played song" $ do
let response = songResponse ++ "list_OK\nOK\n"
testMPD [("currentsong", Right response)] (runCommand currentSong) `shouldBe` Right (Just songValue)
it "can be composed" $ do
let command = intercalate "\n" [
"command_list_ok_begin"
, "currentsong"
, "stats"
, "command_list_end"
]
response = songResponse ++ "list_OK\n" ++ statsResponse ++ "list_OK\nOK\n"
let action = runCommand $ (,) <$> currentSong <*> stats
testMPD [(command, Right response)] action `shouldBe` Right (Just songValue, statsValue)
songResponse :: String
songResponse = unlines [
"file: Trip-Hop/Morcheeba/Morcheeba - 2010 - Blood Like Lemonade/03 - Blood Like Lemonade.mp3"
, "Last-Modified: 2010-08-01T11:37:50Z"
, "Time: 291"
, "Artist: Morcheeba"
, "Title: Blood Like Lemonade"
, "Album: Blood Like Lemonade"
, "Track: 3"
, "Date: 2010"
, "Pos: 16"
, "Id: 80"
]
songValue :: Song
songValue = Song {
sgFilePath = "Trip-Hop/Morcheeba/Morcheeba - 2010 - Blood Like Lemonade/03 - Blood Like Lemonade.mp3"
, sgTags = M.fromList [
(Artist,[Value "Morcheeba"])
, (Album,[Value "Blood Like Lemonade"])
, (Title,[Value "Blood Like Lemonade"])
, (Track,[Value "3"])
, (Date,[Value "2010"])
]
, sgLastModified = Just (read "2010-08-01 11:37:50 UTC")
, sgLength = 291
, sgId = Just (Id 80)
, sgIndex = Just 16
}
statsResponse :: String
statsResponse = unlines [
"artists: 23"
, "albums: 42"
, "songs: 65"
, "uptime: 120"
, "playtime: 240"
, "db_playtime: 560"
, "db_update: 1024"
]
statsValue :: Stats
statsValue = Stats {
stsArtists = 23
, stsAlbums = 42
, stsSongs = 65
, stsUptime = 120
, stsPlaytime = 240
, stsDbPlaytime = 560
, stsDbUpdate = 1024
}
| null | https://raw.githubusercontent.com/vimus/libmpd-haskell/1ec02deba33ce2a16012d8f0954e648eb4b5c485/tests/Network/MPD/ApplicativeSpec.hs | haskell | # LANGUAGE OverloadedStrings # |
module Network.MPD.ApplicativeSpec (main, spec) where
import TestUtil
import Data.List (intercalate)
import Network.MPD.Applicative
import Network.MPD.Commands.Types
import Control.Applicative
import qualified Data.Map as M
main :: IO ()
main = hspec spec
spec :: Spec
spec = do
describe "Command as an Applicative" $ do
describe "currentSong" $ do
it "returns the currently played song" $ do
let response = songResponse ++ "list_OK\nOK\n"
testMPD [("currentsong", Right response)] (runCommand currentSong) `shouldBe` Right (Just songValue)
it "can be composed" $ do
let command = intercalate "\n" [
"command_list_ok_begin"
, "currentsong"
, "stats"
, "command_list_end"
]
response = songResponse ++ "list_OK\n" ++ statsResponse ++ "list_OK\nOK\n"
let action = runCommand $ (,) <$> currentSong <*> stats
testMPD [(command, Right response)] action `shouldBe` Right (Just songValue, statsValue)
songResponse :: String
songResponse = unlines [
"file: Trip-Hop/Morcheeba/Morcheeba - 2010 - Blood Like Lemonade/03 - Blood Like Lemonade.mp3"
, "Last-Modified: 2010-08-01T11:37:50Z"
, "Time: 291"
, "Artist: Morcheeba"
, "Title: Blood Like Lemonade"
, "Album: Blood Like Lemonade"
, "Track: 3"
, "Date: 2010"
, "Pos: 16"
, "Id: 80"
]
songValue :: Song
songValue = Song {
sgFilePath = "Trip-Hop/Morcheeba/Morcheeba - 2010 - Blood Like Lemonade/03 - Blood Like Lemonade.mp3"
, sgTags = M.fromList [
(Artist,[Value "Morcheeba"])
, (Album,[Value "Blood Like Lemonade"])
, (Title,[Value "Blood Like Lemonade"])
, (Track,[Value "3"])
, (Date,[Value "2010"])
]
, sgLastModified = Just (read "2010-08-01 11:37:50 UTC")
, sgLength = 291
, sgId = Just (Id 80)
, sgIndex = Just 16
}
statsResponse :: String
statsResponse = unlines [
"artists: 23"
, "albums: 42"
, "songs: 65"
, "uptime: 120"
, "playtime: 240"
, "db_playtime: 560"
, "db_update: 1024"
]
statsValue :: Stats
statsValue = Stats {
stsArtists = 23
, stsAlbums = 42
, stsSongs = 65
, stsUptime = 120
, stsPlaytime = 240
, stsDbPlaytime = 560
, stsDbUpdate = 1024
}
|
8b230e7704932125a839fdb51eb6f3aa6f494f8d9cae97d0a97ca8ec44208741 | Kappa-Dev/KappaTools | map_test.ml | module IntS = Mods.IntSetMap
module LIntS = Map_wrapper.Make (IntS)
module CharS = Mods.CharSetMap
module LCharS = Map_wrapper.Make (CharS)
let p i j = i=j
let proj i =
if i mod 2 = 0 then 'a'
else 'b'
let monaproj _ b i = b,proj i
module P = SetMap.Proj(IntS)(CharS)
module LP = Map_wrapper.Proj(LIntS)(LCharS)
let map_test remanent parameters =
let error0 = remanent.Sanity_test_sig.error in
let f1 = IntS.Map.add 2 4 IntS.Map.empty in
let f2 = IntS.Map.add 3 5 f1 in
let f3 = IntS.Map.add 2 4 f2 in
let f4 = IntS.Map.add 6 8 f3 in
let f5 = IntS.Map.add 10 12 f4 in
let error1,f1' = LIntS.Map.add parameters error0 2 4 IntS.Map.empty in
let f2' = IntS.Map.add 3 5 f1' in
let error2,f3' = LIntS.Map.overwrite parameters error1 2 4 f2' in
let error3,f4' = LIntS.Map.add_or_overwrite parameters error2 6 8 f3' in
let error4,_ = LIntS.Map.add parameters error3 2 9 f4' in
let error5,f5' = LIntS.Map.overwrite parameters error3 10 12 f4' in
let f = List.fold_left
(fun map (a,b) -> IntS.Map.add a b map)
IntS.Map.empty
[1,[2;3];2,[3;4];5,[6;7];8,[12;13]]
in
let g = P.proj_map proj [] (List.append) f in
let g' = CharS.Map.map List.rev (P.proj_map proj [] (fun x y -> List.append (List.rev y) x) f) in
let error6,h = LP.monadic_proj_map
monaproj
parameters
error5 []
(fun _ a l l' -> a,List.append l l')
f
in
let error7,h' = LP.monadic_proj_map monaproj parameters error6 [] (fun _ a x y -> a,List.append (List.rev y) x) f in
let h' = LCharS.Map.map List.rev h' in
let error8,i = LP.proj_map
proj
parameters
error7 []
(fun l l' -> List.append l l')
f in
let error9,i' = LP.proj_map proj parameters error8 [] (fun x y -> List.append (List.rev y) x) f in
let i' = LCharS.Map.map List.rev i' in
["map1",(fun remanent -> remanent, IntS.Map.equal p f1 f1',None);
"map2",(fun remanent -> remanent, LIntS.Map.equal p f2 f2',None);
"map3",(fun remanent -> remanent, IntS.Map.equal p f3 f3',None);
"map4",(fun remanent -> remanent, IntS.Map.equal p f4 f4',None);
"map5",(fun remanent -> remanent, IntS.Map.equal p f5 f5',None);
"nowarn_add",(fun remanent -> remanent, error1==error0,None);
"nowarn_overwrite",(fun remanent -> remanent, error2==error1,None);
"nowarn_overwrite_or_add",(fun remanent -> remanent, error3== error2,None);
"warn_add",(fun remanent -> remanent, not (error4 == error3),None);
"warn_overwrite",(fun remanent -> remanent, not (error5 == error4),None);
"nowarn_proj1",(fun remanent -> remanent, error6==error5,None);
"nowarn_proj2",(fun remanent -> remanent, error7==error6,None);
"nowarn_proj3",(fun remanent -> remanent, error8==error7,None);
"nowarn_proj4",(fun remanent -> remanent, error8==error9,None);
"proj1",(fun remanent -> remanent, CharS.Map.equal p g g',None);
"proj2",(fun remanent -> remanent, CharS.Map.equal p g h,None);
"proj3",(fun remanent -> remanent, CharS.Map.equal p g h',None);
"proj4",(fun remanent -> remanent, CharS.Map.find_default [] 'a' g = [3;4;12;13],None);
"proj5",(fun remanent -> remanent, CharS.Map.equal p i i',None);
"proj6",(fun remanent -> remanent, CharS.Map.equal p i g,None);
]
| null | https://raw.githubusercontent.com/Kappa-Dev/KappaTools/eef2337e8688018eda47ccc838aea809cae68de7/core/KaSa_rep/sanity_test/map_test.ml | ocaml | module IntS = Mods.IntSetMap
module LIntS = Map_wrapper.Make (IntS)
module CharS = Mods.CharSetMap
module LCharS = Map_wrapper.Make (CharS)
let p i j = i=j
let proj i =
if i mod 2 = 0 then 'a'
else 'b'
let monaproj _ b i = b,proj i
module P = SetMap.Proj(IntS)(CharS)
module LP = Map_wrapper.Proj(LIntS)(LCharS)
let map_test remanent parameters =
let error0 = remanent.Sanity_test_sig.error in
let f1 = IntS.Map.add 2 4 IntS.Map.empty in
let f2 = IntS.Map.add 3 5 f1 in
let f3 = IntS.Map.add 2 4 f2 in
let f4 = IntS.Map.add 6 8 f3 in
let f5 = IntS.Map.add 10 12 f4 in
let error1,f1' = LIntS.Map.add parameters error0 2 4 IntS.Map.empty in
let f2' = IntS.Map.add 3 5 f1' in
let error2,f3' = LIntS.Map.overwrite parameters error1 2 4 f2' in
let error3,f4' = LIntS.Map.add_or_overwrite parameters error2 6 8 f3' in
let error4,_ = LIntS.Map.add parameters error3 2 9 f4' in
let error5,f5' = LIntS.Map.overwrite parameters error3 10 12 f4' in
let f = List.fold_left
(fun map (a,b) -> IntS.Map.add a b map)
IntS.Map.empty
[1,[2;3];2,[3;4];5,[6;7];8,[12;13]]
in
let g = P.proj_map proj [] (List.append) f in
let g' = CharS.Map.map List.rev (P.proj_map proj [] (fun x y -> List.append (List.rev y) x) f) in
let error6,h = LP.monadic_proj_map
monaproj
parameters
error5 []
(fun _ a l l' -> a,List.append l l')
f
in
let error7,h' = LP.monadic_proj_map monaproj parameters error6 [] (fun _ a x y -> a,List.append (List.rev y) x) f in
let h' = LCharS.Map.map List.rev h' in
let error8,i = LP.proj_map
proj
parameters
error7 []
(fun l l' -> List.append l l')
f in
let error9,i' = LP.proj_map proj parameters error8 [] (fun x y -> List.append (List.rev y) x) f in
let i' = LCharS.Map.map List.rev i' in
["map1",(fun remanent -> remanent, IntS.Map.equal p f1 f1',None);
"map2",(fun remanent -> remanent, LIntS.Map.equal p f2 f2',None);
"map3",(fun remanent -> remanent, IntS.Map.equal p f3 f3',None);
"map4",(fun remanent -> remanent, IntS.Map.equal p f4 f4',None);
"map5",(fun remanent -> remanent, IntS.Map.equal p f5 f5',None);
"nowarn_add",(fun remanent -> remanent, error1==error0,None);
"nowarn_overwrite",(fun remanent -> remanent, error2==error1,None);
"nowarn_overwrite_or_add",(fun remanent -> remanent, error3== error2,None);
"warn_add",(fun remanent -> remanent, not (error4 == error3),None);
"warn_overwrite",(fun remanent -> remanent, not (error5 == error4),None);
"nowarn_proj1",(fun remanent -> remanent, error6==error5,None);
"nowarn_proj2",(fun remanent -> remanent, error7==error6,None);
"nowarn_proj3",(fun remanent -> remanent, error8==error7,None);
"nowarn_proj4",(fun remanent -> remanent, error8==error9,None);
"proj1",(fun remanent -> remanent, CharS.Map.equal p g g',None);
"proj2",(fun remanent -> remanent, CharS.Map.equal p g h,None);
"proj3",(fun remanent -> remanent, CharS.Map.equal p g h',None);
"proj4",(fun remanent -> remanent, CharS.Map.find_default [] 'a' g = [3;4;12;13],None);
"proj5",(fun remanent -> remanent, CharS.Map.equal p i i',None);
"proj6",(fun remanent -> remanent, CharS.Map.equal p i g,None);
]
|
|
2112cd0a42e815eb5ab345e0c9d2fb0c28c99d37f7b8125d9002e900512b7a9c | ruricolist/serapeum | files.lisp | (in-package :serapeum.tests)
(def-suite files :in serapeum)
(in-suite files)
;; Checks that RESOLVE-EXECUTABLE locates a real executable on each
;; platform; skipped where no portable test binary is known.
(test resolve-executable
  (cond ((uiop:os-macosx-p)
         ;; TODO: Are there any universal Mac executables?
         (skip "Mac."))
        ((uiop:os-windows-p)
         (is-true (resolve-executable "clip"))
         (is-true (resolve-executable "notepad"))
         ;; Same result whether or not the .exe extension is given.
         (is (equal (resolve-executable "clip")
                    (resolve-executable "clip.exe"))))
        ((uiop:os-unix-p)
         (is-true (resolve-executable "sh"))
         ;; ECL needs `pathname-equal' here; the two file names have
         ;; different versions.
         ;; Compare against what the shell itself reports for `sh'.
         (is (uiop:pathname-equal
              (pathname
               (chomp
                (uiop:run-program '("sh" "-c" "command -v sh")
                                  :output :string)))
              (resolve-executable "sh"))))
        (t (skip "Not Windows or Unix."))))
;; Spot-checks FORMAT-FILE-SIZE-HUMAN-READABLE in its default, :iec and
;; :si flavors; expected strings pin the current formatting behavior
;; (binary "1k"/"1KiB" at 1024, decimal "1 k" at 1000 under :si).
(test file-size-human-readable
  (is (equal "0" (format-file-size-human-readable nil 0)))
  (is (equal "0" (format-file-size-human-readable nil 0 :flavor :iec)))
  (is (equal "0" (format-file-size-human-readable nil 0 :flavor :si)))
  (is (equal "1k" (format-file-size-human-readable nil 1024)))
  (is (equal "1KiB" (format-file-size-human-readable nil 1024 :flavor :iec)))
  (is (equal "1000" (format-file-size-human-readable nil 1000)))
  (is (equal "1 k" (format-file-size-human-readable nil 1000 :flavor :si)))
  (is (equal "500 k" (format-file-size-human-readable nil 500000 :flavor :si))))
;; FILE= must report equality exactly when two files have identical
;; contents.  Fixtures: an exact copy of the README, an empty file, and
;; a "junk" file holding the same characters in shuffled order (same
;; length, different bytes).  :keep t keeps the temporaries alive past
;; the macro body so they can be compared; they are deleted manually at
;; the end.
(test file=
  (let* ((file1 (asdf:system-relative-pathname "serapeum" "README.md"))
         ;; Byte-for-byte copy of file1.
         (file2
          (uiop:with-temporary-file (:pathname p
                                     :stream out
                                     :element-type 'character
                                     :direction :output
                                     :keep t)
            (write-string (read-file-into-string file1 :external-format :utf-8)
                          out)
            p))
         (empty-file
          (uiop:with-temporary-file (:pathname p
                                     :keep t)
            p))
         ;; Same characters as file1, shuffled.
         (junk-file
          (uiop:with-temporary-file (:pathname p
                                     :stream out
                                     :element-type 'character
                                     :direction :output
                                     :keep t)
            (write-string
             (shuffle
              (read-file-into-string file1 :external-format :utf-8))
             out)
            p)))
    (is (file= file1 file2))
    (is (not (file= file1 empty-file)))
    (is (not (file= file2 empty-file)))
    (is (not (file= junk-file empty-file)))
    (is (not (file= junk-file file1)))
    (is (not (file= junk-file file2)))
    ;; Manual cleanup (temporaries were created with :keep t).
    (uiop:delete-file-if-exists file2)
    (uiop:delete-file-if-exists empty-file)
    (uiop:delete-file-if-exists junk-file)))
;; BASE-PATH-JOIN semantics pinned by these expectations: the base's
;; last component is concatenated with the first addend ("foo"+"bar" =>
;; #p"foobar"), while an addend containing a separator, or a base ending
;; in one, introduces real subdirectories.
(test join
  (is (uiop:pathname-equal
       (base-path-join "foo")
       #p"foo"))
  (is (uiop:pathname-equal
       (base-path-join #p"foo" "bar")
       #p"foobar"))
  (is (uiop:pathname-equal
       (base-path-join #p"foo" "bar" #p"baz")
       #p"foobarbaz"))
  (is (uiop:pathname-equal (base-path-join #p"foo" "bar/baz")
                           #p"foo/bar/baz"))
  (is (uiop:pathname-equal (base-path-join #p"foo" "/bar/baz")
                           #p"foo/bar/baz"))
  (is (uiop:pathname-equal (base-path-join #p"foo/bar" "baz")
                           #p"foo/barbaz"))
  (is (uiop:pathname-equal (base-path-join #p"foo/bar.x" "bar.y")
                           #p"foo/bar.xbar.y"))
  (is (uiop:pathname-equal (base-path-join #p"foo/bar" "/baz")
                           #p"foo/bar/baz"))
  (is (uiop:pathname-equal (base-path-join #p"foo/bar/" "baz")
                           #p"foo/bar/baz"))
  (is (uiop:pathname-equal (base-path-join #p"foo/" "bar/" "baz" "qux")
                           #p"foo/bar/bazqux"))
  (is (uiop:pathname-equal (base-path-join #p"foo.txt" "bar/baz")
                           #p"foo.txt/bar/baz"))
  (is (uiop:pathname-equal (base-path-join #p"foo.txt" "bar.ext")
                           #p"foo.txtbar.ext")))
;; PATH-BASENAME returns the last path component (extension included),
;; ignores a trailing slash, and returns NIL for empty input -- for both
;; strings and pathnames.
(test basename
  (is (null (path-basename "")))
  (is (equal (path-basename "foo/bar") "bar"))
  (is (null (path-basename #p"")))
  (is (equal (path-basename #p"/foo/bar/baz") "baz"))
  (is (equal (path-basename #p"/foo/bar/baz/") "baz"))
  (is (equal (path-basename #p"/foo/bar/baz.ext") "baz.ext"))
  (is (equal (path-basename #p"foo/bar/baz.ext") "baz.ext")))
| null | https://raw.githubusercontent.com/ruricolist/serapeum/00a41b2cdca81a2eeb0e22ea977f4471ca44e98e/tests/files.lisp | lisp | the two file names have
different versions. | (in-package :serapeum.tests)
(def-suite files :in serapeum)
(in-suite files)
(test resolve-executable
(cond ((uiop:os-macosx-p)
TODO Are there any universal Mac executables ?
(skip "Mac."))
((uiop:os-windows-p)
(is-true (resolve-executable "clip"))
(is-true (resolve-executable "notepad"))
(is (equal (resolve-executable "clip")
(resolve-executable "clip.exe"))))
((uiop:os-unix-p)
(is-true (resolve-executable "sh"))
(is (uiop:pathname-equal
(pathname
(chomp
(uiop:run-program '("sh" "-c" "command -v sh")
:output :string)))
(resolve-executable "sh"))))
(t (skip "Not Windows or Unix."))))
(test file-size-human-readable
(is (equal "0" (format-file-size-human-readable nil 0)))
(is (equal "0" (format-file-size-human-readable nil 0 :flavor :iec)))
(is (equal "0" (format-file-size-human-readable nil 0 :flavor :si)))
(is (equal "1k" (format-file-size-human-readable nil 1024)))
(is (equal "1KiB" (format-file-size-human-readable nil 1024 :flavor :iec)))
(is (equal "1000" (format-file-size-human-readable nil 1000)))
(is (equal "1 k" (format-file-size-human-readable nil 1000 :flavor :si)))
(is (equal "500 k" (format-file-size-human-readable nil 500000 :flavor :si))))
(test file=
(let* ((file1 (asdf:system-relative-pathname "serapeum" "README.md"))
(file2
(uiop:with-temporary-file (:pathname p
:stream out
:element-type 'character
:direction :output
:keep t)
(write-string (read-file-into-string file1 :external-format :utf-8)
out)
p))
(empty-file
(uiop:with-temporary-file (:pathname p
:keep t)
p))
(junk-file
(uiop:with-temporary-file (:pathname p
:stream out
:element-type 'character
:direction :output
:keep t)
(write-string
(shuffle
(read-file-into-string file1 :external-format :utf-8))
out)
p)))
(is (file= file1 file2))
(is (not (file= file1 empty-file)))
(is (not (file= file2 empty-file)))
(is (not (file= junk-file empty-file)))
(is (not (file= junk-file file1)))
(is (not (file= junk-file file2)))
(uiop:delete-file-if-exists file2)
(uiop:delete-file-if-exists empty-file)
(uiop:delete-file-if-exists junk-file)))
(test join
(is (uiop:pathname-equal
(base-path-join "foo")
#p"foo"))
(is (uiop:pathname-equal
(base-path-join #p"foo" "bar")
#p"foobar"))
(is (uiop:pathname-equal
(base-path-join #p"foo" "bar" #p"baz")
#p"foobarbaz"))
(is (uiop:pathname-equal (base-path-join #p"foo" "bar/baz")
#p"foo/bar/baz"))
(is (uiop:pathname-equal (base-path-join #p"foo" "/bar/baz")
#p"foo/bar/baz"))
(is (uiop:pathname-equal (base-path-join #p"foo/bar" "baz")
#p"foo/barbaz"))
(is (uiop:pathname-equal (base-path-join #p"foo/bar.x" "bar.y")
#p"foo/bar.xbar.y"))
(is (uiop:pathname-equal (base-path-join #p"foo/bar" "/baz")
#p"foo/bar/baz"))
(is (uiop:pathname-equal (base-path-join #p"foo/bar/" "baz")
#p"foo/bar/baz"))
(is (uiop:pathname-equal (base-path-join #p"foo/" "bar/" "baz" "qux")
#p"foo/bar/bazqux"))
(is (uiop:pathname-equal (base-path-join #p"foo.txt" "bar/baz")
#p"foo.txt/bar/baz"))
(is (uiop:pathname-equal (base-path-join #p"foo.txt" "bar.ext")
#p"foo.txtbar.ext")))
(test basename
(is (null (path-basename "")))
(is (equal (path-basename "foo/bar") "bar"))
(is (null (path-basename #p"")))
(is (equal (path-basename #p"/foo/bar/baz") "baz"))
(is (equal (path-basename #p"/foo/bar/baz/") "baz"))
(is (equal (path-basename #p"/foo/bar/baz.ext") "baz.ext"))
(is (equal (path-basename #p"foo/bar/baz.ext") "baz.ext")))
|
83c9bad5e6700423f54429be503be9dd89cfa5335c4e7f1e0a3da966bb6874b8 | racket/typed-racket | no-check.rkt | #lang typed-racket/minimal
(require racket/require typed/private/no-check-helper
(subtract-in typed-scheme typed/private/no-check-helper))
(provide (all-from-out typed-scheme typed/private/no-check-helper))
| null | https://raw.githubusercontent.com/racket/typed-racket/9974c9d07d4faf09617607f9e46bc6ec32934a17/typed-racket-compatibility/typed-scheme/no-check.rkt | racket | #lang typed-racket/minimal
(require racket/require typed/private/no-check-helper
(subtract-in typed-scheme typed/private/no-check-helper))
(provide (all-from-out typed-scheme typed/private/no-check-helper))
|
|
f8cbd9649786813e3b9028805f5992dc62e5b9114aaec3c294dd8261ace55935 | cyverse-archive/DiscoveryEnvironmentBackend | coge.clj | (ns donkey.clients.coge
(:use [clojure-commons.core :only [remove-nil-values]]
[donkey.auth.user-attributes :only [current-user]]
[slingshot.slingshot :only [throw+ try+]])
(:require [cemerick.url :as curl]
[cheshire.core :as cheshire]
[clj-http.client :as http]
[clojure.tools.logging :as log]
[clojure-commons.error-codes :as ce]
[donkey.util.config :as config]
[donkey.util.jwt :as jwt]))
(defn- coge-url
  "Joins the configured CoGe base URL with the given path components
  and renders the result as a string."
  [& components]
  (-> (apply curl/url (config/coge-base-url) components)
      str))
(defn- default-error-handler
  "Default failure handler for CoGe calls: logs the full failed response,
  then throws a slingshot map {:error_code error-code :reason <body>}.
  The response body may be a string or a stream (slurped here)."
  [error-code {:keys [body] :as response}]
  (log/warn "CoGe request failed:" response)
  (throw+ {:error_code error-code
           :reason (if (string? body) body (slurp body))}))
(defmacro ^:private with-trap
  "Runs body, routing HTTP failures to handle-error with a standard
  error code: 400 -> ERR_BAD_REQUEST, 404 -> ERR_NOT_FOUND, and any
  other numeric :status -> ERR_REQUEST_FAILED."
  [[handle-error] & body]
  `(try+
    (do ~@body)
    (catch [:status 400] bad-request#
      (~handle-error ce/ERR_BAD_REQUEST bad-request#))
    (catch [:status 404] not-found#
      (~handle-error ce/ERR_NOT_FOUND not-found#))
    ;; Catch-all for every other HTTP status (selector matches any
    ;; thrown map whose :status is a number).
    (catch (comp number? :status) server-error#
      (~handle-error ce/ERR_REQUEST_FAILED server-error#))))
(defn search-genomes
  "Searches for genomes in CoGe by search term, returning the parsed
  JSON response body.  The current user's JWT is attached for auth."
  [search-term]
  (with-trap [default-error-handler]
    (:body (http/get (coge-url "genomes" "search" search-term)
                     {:headers (jwt/add-auth-header current-user)
                      :as :json}))))
;; CoGe job-type and destination constants for FastA export jobs.
(def export-fasta-job-type "export_fasta")
(def export-fasta-dest-type "irods")
(defn- export-fasta-request
  "Builds the request to export the FastA file for a genome into iRODS."
  ;; JSON-encodes the CoGe job payload.  :email is only included when
  ;; notification was requested; nil entries are stripped before encoding.
  ;; NOTE(review): :destination is destructured from the opts map but
  ;; never used here -- confirm whether it belongs in the payload.
  [user genome-id {:keys [notify overwrite destination]}]
  (cheshire/encode
   {:type export-fasta-job-type
    :parameters (remove-nil-values
                 {:genome_id genome-id
                  :dest_type export-fasta-dest-type
                  :overwrite (if overwrite 1 0)
                  :email (when notify (:email user))})}))
(defn export-fasta
  "Submits a job to CoGe to export the FastA file for a genome into iRODS."
  ;; PUTs the encoded job to <base>/jobs with the current user's JWT and
  ;; returns the parsed JSON response body.
  [genome-id opts]
  (with-trap [default-error-handler]
    (:body (http/put (coge-url "jobs")
                     {:headers (jwt/add-auth-header current-user)
                      :body (export-fasta-request current-user genome-id opts)
                      :content-type :json
                      :as :json}))))
;; NOTE(review): hard-coded organism id used for every import request --
;; reads like a leftover test placeholder; confirm before relying on it.
(def test-organism-id 38378)
(defn- genome-viewer-url-request
  "Builds the request to import a list of genomes into CoGe."
  ;; Wraps each path as an irods source; the metadata block is fixed
  ;; placeholder data (name/source \"test\", version \"1\", restricted).
  [paths]
  (cheshire/encode
   {:organism_id test-organism-id
    :metadata {:name "test"
               :version "1"
               :type_id 1
               :source_name "test"
               :restricted true}
    :source_data (map (partial hash-map :type "irods" :path) paths)}))
(defn get-genome-viewer-url
  "Sends a request for a genome viewer URL to the COGE service."
  ;; PUTs the import payload to <base>/genomes with the current user's
  ;; JWT and returns the parsed JSON response body.
  [paths]
  (with-trap [default-error-handler]
    (:body (http/put (coge-url "genomes")
                     {:body (genome-viewer-url-request paths)
                      :headers (jwt/add-auth-header current-user)
                      :content-type :json
                      :as :json}))))
| null | https://raw.githubusercontent.com/cyverse-archive/DiscoveryEnvironmentBackend/7f6177078c1a1cb6d11e62f12cfe2e22d669635b/services/Donkey/src/donkey/clients/coge.clj | clojure | (ns donkey.clients.coge
(:use [clojure-commons.core :only [remove-nil-values]]
[donkey.auth.user-attributes :only [current-user]]
[slingshot.slingshot :only [throw+ try+]])
(:require [cemerick.url :as curl]
[cheshire.core :as cheshire]
[clj-http.client :as http]
[clojure.tools.logging :as log]
[clojure-commons.error-codes :as ce]
[donkey.util.config :as config]
[donkey.util.jwt :as jwt]))
(defn- coge-url
[& components]
(str (apply curl/url (config/coge-base-url) components)))
(defn- default-error-handler
[error-code {:keys [body] :as response}]
(log/warn "CoGe request failed:" response)
(throw+ {:error_code error-code
:reason (if (string? body) body (slurp body))}))
(defmacro ^:private with-trap
[[handle-error] & body]
`(try+
(do ~@body)
(catch [:status 400] bad-request#
(~handle-error ce/ERR_BAD_REQUEST bad-request#))
(catch [:status 404] not-found#
(~handle-error ce/ERR_NOT_FOUND not-found#))
(catch (comp number? :status) server-error#
(~handle-error ce/ERR_REQUEST_FAILED server-error#))))
(defn search-genomes
"Searches for genomes in CoGe."
[search-term]
(with-trap [default-error-handler]
(:body (http/get (coge-url "genomes" "search" search-term)
{:headers (jwt/add-auth-header current-user)
:as :json}))))
(def export-fasta-job-type "export_fasta")
(def export-fasta-dest-type "irods")
(defn- export-fasta-request
"Builds the request to export the FastA file for a genome into iRODS."
[user genome-id {:keys [notify overwrite destination]}]
(cheshire/encode
{:type export-fasta-job-type
:parameters (remove-nil-values
{:genome_id genome-id
:dest_type export-fasta-dest-type
:overwrite (if overwrite 1 0)
:email (when notify (:email user))})}))
(defn export-fasta
"Submits a job to CoGe to export the FastA file for a genome into iRODS."
[genome-id opts]
(with-trap [default-error-handler]
(:body (http/put (coge-url "jobs")
{:headers (jwt/add-auth-header current-user)
:body (export-fasta-request current-user genome-id opts)
:content-type :json
:as :json}))))
(def test-organism-id 38378)
(defn- genome-viewer-url-request
"Builds the request to import a list of genomes into CoGe."
[paths]
(cheshire/encode
{:organism_id test-organism-id
:metadata {:name "test"
:version "1"
:type_id 1
:source_name "test"
:restricted true}
:source_data (map (partial hash-map :type "irods" :path) paths)}))
(defn get-genome-viewer-url
"Sends a request for a genome viewer URL to the COGE service."
[paths]
(with-trap [default-error-handler]
(:body (http/put (coge-url "genomes")
{:body (genome-viewer-url-request paths)
:headers (jwt/add-auth-header current-user)
:content-type :json
:as :json}))))
|
|
7b6293a2a1a4cac2236bc6973a815689c7cf22d04dceefe432333b8d47e87811 | erlang-ls/erlang_ls | diagnostics_autoimport.erl | -module(diagnostics_autoimport).
-export([main/1]).
main(_Args) ->
fun atom_to_list/1.
| null | https://raw.githubusercontent.com/erlang-ls/erlang_ls/36bf0815e35db5d25a76e80f98f306f25fff7d8c/apps/els_lsp/priv/code_navigation/src/diagnostics_autoimport.erl | erlang | -module(diagnostics_autoimport).
-export([main/1]).
main(_Args) ->
fun atom_to_list/1.
|
|
f521d3c02f3a6fbee7065ac1855ea3f717970c3d5724da7596f467c30d010b3e | karlhof26/gimp-scheme | FU_sharpness-sharper_smart_sharpen.scm | FU_sharpness-sharper_smart-sharpen.scm
version 2.8 [ gimphelp.org ]
last modified / tested by
02/15/2014 on GIMP-2.8.10
;
12/2/2007 - Modified by
; deprecated cons-array updated to newer "make-vector"
;
1.01 - 10/31/2007 - upgrade for Gimp 2.4 by
;
updated for GIMP-2.4.x
by 10/24/2007 , later moved menu location
;
; 02/15/2014 - accommodate indexed images
;==============================================================
;
; Installation:
; This script should be placed in the user or system-wide script folder.
;
; Windows Vista/7/8)
C:\Program Files\GIMP 2\share\gimp\2.0\scripts
; or
C:\Users\YOUR - NAME\.gimp-2.8\scripts
;
Windows XP
C:\Program Files\GIMP 2\share\gimp\2.0\scripts
; or
; C:\Documents and Settings\yourname\.gimp-2.8\scripts
;
; Linux
/home / yourname/.gimp-2.8 / scripts
; or
; Linux system-wide
; /usr/share/gimp/2.0/scripts
;
;==============================================================
;
; LICENSE
;
; This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation , either version 3 of the License , or
; (at your option) any later version.
;
; This program is distributed in the hope that it will be useful,
; but WITHOUT ANY WARRANTY; without even the implied warranty of
; MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
; GNU General Public License for more details.
;
You should have received a copy of the GNU General Public License
; along with this program. If not, see </>.
;
;==============================================================
; Original information
;
Smart sharpening script for GIMP 2.4
Original author : < >
;
; Author statement:
;
; script-fu-smart-sharpening - Smart sharpening of image. This script finds
; the edges of images and only sharpens those.
;
; You can find more about smart sharpening at
; /
;
; - Changelog -
; Changelog:
1.00 - Jan 07 , 2004 initial release
;==============================================================
(define (FU-smart-sharpening
inImg
inDrw
inAmount
inRadius
inEdge
)
(gimp-image-undo-group-start inImg)
(if (not (= RGB (car (gimp-image-base-type inImg))))
(gimp-image-convert-rgb inImg))
(let* (
(original inImg)
(template (car (gimp-image-duplicate original)))
(original-layers (cadr (gimp-image-get-layers inImg)))
(template-layers (cadr (gimp-image-get-layers template)))
(template-bg-copy (car (gimp-layer-copy (aref template-layers 0) TRUE)))
(width (car (gimp-image-width original)))
(height (car (gimp-image-height original)))
(sharpen-mask)
(lab-image)
(lab-layers)
(final-mask)
(result-image)
(result-layers)
)
(define (spline)
(let* (
(a (make-vector 8 'byte))
)
(set-pt a 0 0.0 0.0)
166 0
246 255
255 255
a
)
)
(define (set-pt a index x y)
(prog1
(aset a (* index 2) x)
(aset a (+ (* index 2) 1) y)
)
)
(gimp-image-insert-layer template template-bg-copy 0 -1)
(gimp-image-set-active-layer template template-bg-copy)
(gimp-selection-all template)
(gimp-edit-copy template-bg-copy)
(set! sharpen-mask (car (gimp-channel-new template width height "SharpenMask" 50 '(255 0 0))))
(gimp-image-insert-channel template sharpen-mask -1 0)
(gimp-floating-sel-anchor (car (gimp-edit-paste sharpen-mask FALSE)))
(plug-in-edge TRUE template sharpen-mask inEdge 1 0)
(gimp-drawable-invert sharpen-mask TRUE)
(gimp-drawable-curves-spline sharpen-mask HISTOGRAM-VALUE 8 (spline))
(plug-in-gauss-iir TRUE template sharpen-mask 1 TRUE TRUE)
(gimp-edit-copy sharpen-mask)
; split to L*a*b* and sharpen only L-channel
(set! lab-image (car (plug-in-decompose TRUE original (aref original-layers 0) "LAB" TRUE)))
(set! lab-layers (cadr (gimp-image-get-layers lab-image)))
(set! final-mask (car (gimp-channel-new lab-image width height "FinalMask" 50 '(255 0 0))))
(gimp-image-insert-channel lab-image final-mask -1 0)
(gimp-floating-sel-anchor (car (gimp-edit-paste final-mask FALSE)))
(gimp-image-delete template)
(gimp-image-select-item lab-image CHANNEL-OP-REPLACE final-mask)
(gimp-selection-invert lab-image)
(gimp-selection-shrink lab-image 1)
(gimp-image-remove-channel lab-image final-mask)
(plug-in-unsharp-mask TRUE lab-image (aref lab-layers 0) inRadius inAmount 0)
(gimp-selection-none lab-image)
; compose image from Lab-channels
(set! result-image (car (plug-in-drawable-compose TRUE 0 (aref lab-layers 0)
(aref lab-layers 1) (aref lab-layers 2) 0 "LAB")))
(set! result-layers (cadr (gimp-image-get-layers result-image)))
(gimp-edit-copy (aref result-layers 0))
(gimp-image-delete lab-image)
(gimp-image-delete result-image)
(gimp-floating-sel-anchor (car (gimp-edit-paste (aref original-layers 0) FALSE)))
(gimp-image-undo-group-end inImg)
(gimp-displays-flush)
)
)
;; Register FU-smart-sharpening with the GIMP PDB and place it in the
;; Script-Fu > Photo > Sharpness > Sharper menu.
(script-fu-register "FU-smart-sharpening"
    "<Image>/Script-Fu/Photo/Sharpness/Sharper/Smart Sharpening"
    "Sharpen images intelligently. Smart sharpen only sharpens images on the edges, where sharpening counts. Even areas are not sharpened, so noise levels are kept down when compared to normal unsharp mask. You may need to tweak the parameters for best result. \nfile:FU_sharpness-sharper_smart-sharpen.scm"
    "Olli Salonen <>" ; author
    "Olli Salonen"    ; copyright holder
    "Jan 07, 2004"    ; date
    "*"               ; accepts any image type
    SF-IMAGE "Image" 0
    SF-DRAWABLE "Drawable" 0
    ;; adjustment lists: presumably '(default lower upper step page
    ;; digits widget-type) -- TODO confirm against the Script-Fu docs.
    SF-ADJUSTMENT "Amount of USM" '(0.5 0 10 0.01 0.01 2 0)
    SF-ADJUSTMENT "Radius of USM" '(0.5 0 10 0.01 0.01 2 0)
    SF-ADJUSTMENT "FindEdge amount" '(2.0 0 10 0.01 0.01 2 0)
)
;end of script | null | https://raw.githubusercontent.com/karlhof26/gimp-scheme/8e00194f318281ef9e7808b96d44ecff7cffd413/FU_sharpness-sharper_smart_sharpen.scm | scheme |
deprecated cons-array updated to newer "make-vector"
02/15/2014 - accommodate indexed images
==============================================================
Installation:
This script should be placed in the user or system-wide script folder.
Windows Vista/7/8)
or
or
C:\Documents and Settings\yourname\.gimp-2.8\scripts
Linux
or
Linux system-wide
/usr/share/gimp/2.0/scripts
==============================================================
LICENSE
This program is free software: you can redistribute it and/or modify
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
along with this program. If not, see </>.
==============================================================
Original information
Author statement:
script-fu-smart-sharpening - Smart sharpening of image. This script finds
the edges of images and only sharpens those.
You can find more about smart sharpening at
/
- Changelog -
Changelog:
==============================================================
split to L*a*b* and sharpen only L-channel
compose image from Lab-channels
end of script | FU_sharpness-sharper_smart-sharpen.scm
version 2.8 [ gimphelp.org ]
last modified / tested by
02/15/2014 on GIMP-2.8.10
12/2/2007 - Modified by
1.01 - 10/31/2007 - upgrade for Gimp 2.4 by
updated for GIMP-2.4.x
by 10/24/2007 , later moved menu location
C:\Program Files\GIMP 2\share\gimp\2.0\scripts
C:\Users\YOUR - NAME\.gimp-2.8\scripts
Windows XP
C:\Program Files\GIMP 2\share\gimp\2.0\scripts
/home / yourname/.gimp-2.8 / scripts
it under the terms of the GNU General Public License as published by
the Free Software Foundation , either version 3 of the License , or
You should have received a copy of the GNU General Public License
Smart sharpening script for GIMP 2.4
Original author : < >
1.00 - Jan 07 , 2004 initial release
(define (FU-smart-sharpening
inImg
inDrw
inAmount
inRadius
inEdge
)
(gimp-image-undo-group-start inImg)
(if (not (= RGB (car (gimp-image-base-type inImg))))
(gimp-image-convert-rgb inImg))
(let* (
(original inImg)
(template (car (gimp-image-duplicate original)))
(original-layers (cadr (gimp-image-get-layers inImg)))
(template-layers (cadr (gimp-image-get-layers template)))
(template-bg-copy (car (gimp-layer-copy (aref template-layers 0) TRUE)))
(width (car (gimp-image-width original)))
(height (car (gimp-image-height original)))
(sharpen-mask)
(lab-image)
(lab-layers)
(final-mask)
(result-image)
(result-layers)
)
(define (spline)
(let* (
(a (make-vector 8 'byte))
)
(set-pt a 0 0.0 0.0)
166 0
246 255
255 255
a
)
)
(define (set-pt a index x y)
(prog1
(aset a (* index 2) x)
(aset a (+ (* index 2) 1) y)
)
)
(gimp-image-insert-layer template template-bg-copy 0 -1)
(gimp-image-set-active-layer template template-bg-copy)
(gimp-selection-all template)
(gimp-edit-copy template-bg-copy)
(set! sharpen-mask (car (gimp-channel-new template width height "SharpenMask" 50 '(255 0 0))))
(gimp-image-insert-channel template sharpen-mask -1 0)
(gimp-floating-sel-anchor (car (gimp-edit-paste sharpen-mask FALSE)))
(plug-in-edge TRUE template sharpen-mask inEdge 1 0)
(gimp-drawable-invert sharpen-mask TRUE)
(gimp-drawable-curves-spline sharpen-mask HISTOGRAM-VALUE 8 (spline))
(plug-in-gauss-iir TRUE template sharpen-mask 1 TRUE TRUE)
(gimp-edit-copy sharpen-mask)
(set! lab-image (car (plug-in-decompose TRUE original (aref original-layers 0) "LAB" TRUE)))
(set! lab-layers (cadr (gimp-image-get-layers lab-image)))
(set! final-mask (car (gimp-channel-new lab-image width height "FinalMask" 50 '(255 0 0))))
(gimp-image-insert-channel lab-image final-mask -1 0)
(gimp-floating-sel-anchor (car (gimp-edit-paste final-mask FALSE)))
(gimp-image-delete template)
(gimp-image-select-item lab-image CHANNEL-OP-REPLACE final-mask)
(gimp-selection-invert lab-image)
(gimp-selection-shrink lab-image 1)
(gimp-image-remove-channel lab-image final-mask)
(plug-in-unsharp-mask TRUE lab-image (aref lab-layers 0) inRadius inAmount 0)
(gimp-selection-none lab-image)
(set! result-image (car (plug-in-drawable-compose TRUE 0 (aref lab-layers 0)
(aref lab-layers 1) (aref lab-layers 2) 0 "LAB")))
(set! result-layers (cadr (gimp-image-get-layers result-image)))
(gimp-edit-copy (aref result-layers 0))
(gimp-image-delete lab-image)
(gimp-image-delete result-image)
(gimp-floating-sel-anchor (car (gimp-edit-paste (aref original-layers 0) FALSE)))
(gimp-image-undo-group-end inImg)
(gimp-displays-flush)
)
)
(script-fu-register "FU-smart-sharpening"
"<Image>/Script-Fu/Photo/Sharpness/Sharper/Smart Sharpening"
"Sharpen images intelligently. Smart sharpen only sharpens images on the edges, where sharpening counts. Even areas are not sharpened, so noise levels are kept down when compared to normal unsharp mask. You may need to tweak the parameters for best result. \nfile:FU_sharpness-sharper_smart-sharpen.scm"
"Olli Salonen <>"
"Olli Salonen"
"Jan 07, 2004"
"*"
SF-IMAGE "Image" 0
SF-DRAWABLE "Drawable" 0
SF-ADJUSTMENT "Amount of USM" '(0.5 0 10 0.01 0.01 2 0)
SF-ADJUSTMENT "Radius of USM" '(0.5 0 10 0.01 0.01 2 0)
SF-ADJUSTMENT "FindEdge amount" '(2.0 0 10 0.01 0.01 2 0)
)
|
aa300cc754daf5015fb2a1e37c5a5b0062525fe41856c94b11893c8cca067b07 | art-w/ocaml-webdriver | webdriver_cohttp_async.mli | include Webdriver.S with type 'a io = 'a Async.Deferred.t
| null | https://raw.githubusercontent.com/art-w/ocaml-webdriver/449e56427b59e46a3684fa66bda143aed687c085/cohttp/webdriver_cohttp_async.mli | ocaml | include Webdriver.S with type 'a io = 'a Async.Deferred.t
|
|
3efa872de9c5a8738baa24a6cd453e4a341442fcbead7550eb647873ef8ace6d | boundedvariation/quantfin | Time.hs |
-- | A thin strict wrapper around 'Double' representing a point in time,
-- measured as a raw offset from an arbitrary time zero (units are
-- whatever callers use -- presumably year fractions; TODO confirm).
module Quant.Time (
    Time(..)
  , timeDiff
  , timeOffset
  , timeFromZero
  ) where

-- | A point in time.  The unpacked strict 'Double' keeps the wrapper
-- allocation-free in numeric code.
data Time = Time {-# UNPACK #-} !Double deriving (Eq,Show,Ord)

-- | @timeDiff t1 t2@ is the signed distance from @t1@ to @t2@
-- (positive when @t2@ is later).
timeDiff :: Time -> Time -> Double
timeDiff (Time x) (Time y) = y - x
-- Fixed: pragma braces were garbled ("# INLINE timeDiff #"), which is
-- not valid top-level Haskell.
{-# INLINE timeDiff #-}

-- | Shift a time by an offset (negative offsets shift backward).
timeOffset :: Time -> Double -> Time
timeOffset (Time x) y = Time (x+y)
{-# INLINE timeOffset #-}

-- | Distance of a time from time zero.
timeFromZero :: Time -> Double
timeFromZero (Time x) = x
-- Fixed: same garbled-pragma repair as timeDiff.
{-# INLINE timeFromZero #-}
| null | https://raw.githubusercontent.com/boundedvariation/quantfin/d077e00d234cbc6a88396ad5644ba76f18cc8ae7/src/Quant/Time.hs | haskell | # UNPACK #
# INLINE timeOffset # |
module Quant.Time (
Time(..)
, timeDiff
, timeOffset
, timeFromZero
) where
timeDiff :: Time -> Time -> Double
timeDiff (Time x) (Time y) = y - x
# INLINE timeDiff #
timeOffset :: Time -> Double -> Time
timeOffset (Time x) y = Time (x+y)
timeFromZero :: Time -> Double
timeFromZero (Time x) = x
# INLINE timeFromZero #
|
315764c21ca1eeaf677e75aec6740606f2fc3fdb56ecfbdf8d43f9758ccf4cda | jeromesimeon/Galax | args.ml | (***********************************************************************)
(* *)
(* GALAX *)
(* XQuery Engine *)
(* *)
Copyright 2001 - 2007 .
(* Distributed only by permission. *)
(* *)
(***********************************************************************)
$ I d : args.ml , v 1.10 2007/02/01 22:08:45 simeon Exp $
(* Module: Args
Description:
This module implements extraction of function arguments for
various arities.
*)
open Error
(************************)
(* Arguments extraction *)
(************************)
(* Destructure an argument list of the expected arity into a tuple,
   raising [Query (Parameter_Mismatch _)] on any other shape. *)

let get_param0 args =
  match args with
  | [] -> ()
  | _ -> raise (Query (Parameter_Mismatch "Expected zero arguments"))

let get_param1 args =
  match args with
  | [x1] -> x1
  | _ -> raise (Query (Parameter_Mismatch "Expected one argument"))

let get_param2 args =
  match args with
  | [x1; x2] -> (x1, x2)
  | _ -> raise (Query (Parameter_Mismatch "Expected two arguments"))

let get_param3 args =
  match args with
  | [x1; x2; x3] -> (x1, x2, x3)
  | _ -> raise (Query (Parameter_Mismatch "Expected three arguments"))

let get_param4 args =
  match args with
  | [x1; x2; x3; x4] -> (x1, x2, x3, x4)
  | _ -> raise (Query (Parameter_Mismatch "Expected four arguments"))
(* Array counterparts: project an array of the expected length into a
   tuple, raising [Query (Parameter_Mismatch _)] on any other length. *)

let get_array_param0 x =
  match Array.length x with
  | 0 -> ()
  | _ -> raise (Query (Parameter_Mismatch "Expected array argument of length zero"))

let get_array_param1 x =
  match Array.length x with
  | 1 -> x.(0)
  | _ -> raise (Query (Parameter_Mismatch "Expected array argument of length one"))

let get_array_param2 x =
  match Array.length x with
  | 2 -> (x.(0), x.(1))
  | _ -> raise (Query (Parameter_Mismatch "Expected array argument of length two"))

let get_array_param3 x =
  match Array.length x with
  | 3 -> (x.(0), x.(1), x.(2))
  | _ -> raise (Query (Parameter_Mismatch "Expected array argument of length three"))

let get_array_param4 x =
  match Array.length x with
  | 4 -> (x.(0), x.(1), x.(2), x.(3))
  | _ -> raise (Query (Parameter_Mismatch "Expected array argument of length four"))
| null | https://raw.githubusercontent.com/jeromesimeon/Galax/bc565acf782c140291911d08c1c784c9ac09b432/base/args.ml | ocaml | *********************************************************************
GALAX
XQuery Engine
Distributed only by permission.
*********************************************************************
Module: Args
Description:
This module implements extraction of function arguments for
various arities.
**********************
Arguments extraction
********************** | Copyright 2001 - 2007 .
$ I d : args.ml , v 1.10 2007/02/01 22:08:45 simeon Exp $
open Error
let get_param0 =
function
| [] -> ()
| _ -> raise (Query (Parameter_Mismatch("Expected zero arguments")))
let get_param1 =
function
| [x1] -> x1
| _ -> raise (Query (Parameter_Mismatch("Expected one argument")))
let get_param2 =
function
| [x1;x2] -> (x1,x2)
| _ -> raise (Query (Parameter_Mismatch("Expected two arguments")))
let get_param3 =
function
| [x1;x2;x3] -> (x1,x2,x3)
| _ -> raise (Query (Parameter_Mismatch("Expected three arguments")))
let get_param4 =
function
| [x1;x2;x3;x4] -> (x1,x2,x3,x4)
| _ -> raise (Query (Parameter_Mismatch("Expected four arguments")))
let get_array_param0 x =
if (Array.length x) != 0 then
raise (Query (Parameter_Mismatch("Expected array argument of length zero")))
let get_array_param1 x =
if (Array.length x != 1) then
raise (Query (Parameter_Mismatch("Expected array argument of length one")))
else
x.(0)
let get_array_param2 x =
if (Array.length x != 2) then
raise (Query (Parameter_Mismatch("Expected array argument of length two")))
else
x.(0), x.(1)
let get_array_param3 x =
if (Array.length x != 3) then
raise (Query (Parameter_Mismatch("Expected array argument of length three")))
else
x.(0), x.(1), x.(2)
let get_array_param4 x =
if (Array.length x != 4) then
raise (Query (Parameter_Mismatch("Expected array argument of length four")))
else
x.(0), x.(1), x.(2), x.(3)
|
fb2d75ed9d6a66342281cd97727a1e10319df12b90ad10632c9003c453f647d8 | helpshift/hydrox | references_test.clj | (ns hydrox.doc.link.references-test
(:use midje.sweet)
(:require [hydrox.doc.link.references :refer :all]
[rewrite-clj.zip :as z]))
^{:refer hydrox.doc.link.references/process-doc-nodes :added "0.1"}
(fact "treat test nodes specially when rendering code"
(->> (z/of-string "(+ 1 1) => (+ 2 2)")
(iterate z/right*)
(take-while identity)
(map z/node)
(process-doc-nodes))
=> "(+ 1 1) => (+ 2 2)"
)
^{:refer hydrox.doc.link.references/link-references :added "0.1"}
(fact "link code for elements to references"
(link-references {:articles {"example" {:elements [{:type :reference :refer 'example.core/hello}]}}
:references '{example.core {hello {:docs []
:source "(defn hello [] 1)"}}}}
"example")
=> {:articles
{"example"
{:elements
'[{:type :code,
:refer example.core/hello,
:origin :reference,
:indentation 0,
:code "(defn hello [] 1)",
:mode :source,
:title "source of <i>example.core/hello</i>"}]}},
:references '{example.core {hello {:docs [], :source "(defn hello [] 1)"}}}})
| null | https://raw.githubusercontent.com/helpshift/hydrox/2beb3c56fad43bbf16f07db7ee72c5862978350c/test/hydrox/doc/link/references_test.clj | clojure | (ns hydrox.doc.link.references-test
(:use midje.sweet)
(:require [hydrox.doc.link.references :refer :all]
[rewrite-clj.zip :as z]))
^{:refer hydrox.doc.link.references/process-doc-nodes :added "0.1"}
(fact "treat test nodes specially when rendering code"
(->> (z/of-string "(+ 1 1) => (+ 2 2)")
(iterate z/right*)
(take-while identity)
(map z/node)
(process-doc-nodes))
=> "(+ 1 1) => (+ 2 2)"
)
^{:refer hydrox.doc.link.references/link-references :added "0.1"}
(fact "link code for elements to references"
(link-references {:articles {"example" {:elements [{:type :reference :refer 'example.core/hello}]}}
:references '{example.core {hello {:docs []
:source "(defn hello [] 1)"}}}}
"example")
=> {:articles
{"example"
{:elements
'[{:type :code,
:refer example.core/hello,
:origin :reference,
:indentation 0,
:code "(defn hello [] 1)",
:mode :source,
:title "source of <i>example.core/hello</i>"}]}},
:references '{example.core {hello {:docs [], :source "(defn hello [] 1)"}}}})
|
|
6a7f948f889e29df66586fff1766172fedd6ac98aa10b72f3bbbb6282d4deb06 | zehaochen19/vanilla-lang | Eval.hs | module Vanilla.Dynamic.Eval where
import Vanilla.Dynamic.Step
import Vanilla.Syntax.Expr
import Vanilla.Syntax.Program
eval :: Expr -> Expr
eval e = let e' = step e in if e' == e then e else eval e'
eval' :: Program -> Expr
eval' = eval . mainExpr
| null | https://raw.githubusercontent.com/zehaochen19/vanilla-lang/d1e2bbd3125151ce2c0ddc20f735d3a55aeb6bc8/src/Vanilla/Dynamic/Eval.hs | haskell | module Vanilla.Dynamic.Eval where
import Vanilla.Dynamic.Step
import Vanilla.Syntax.Expr
import Vanilla.Syntax.Program
eval :: Expr -> Expr
eval e = let e' = step e in if e' == e then e else eval e'
eval' :: Program -> Expr
eval' = eval . mainExpr
|
|
fba83f3540f9bd754a867a6d4648d01b80b1b6848ad5d96a5547f714a39f39dc | alekcz/pcp | info.clj | (ns api.info
(:require [cheshire.core :as json]
[pcp :as pcp]))
(def names ["pcp" "sci" "clojure"])
(def repo "")
(pcp/response
200
(json/encode {:engine (first names)
:interpreter (second names)
:core (last names)
:repo repo})
"application/json") | null | https://raw.githubusercontent.com/alekcz/pcp/b9309929a15eafcee9265d7b9683f70ef0d7e726/resources/pcp-templates/api/info.clj | clojure | (ns api.info
(:require [cheshire.core :as json]
[pcp :as pcp]))
(def names ["pcp" "sci" "clojure"])
(def repo "")
(pcp/response
200
(json/encode {:engine (first names)
:interpreter (second names)
:core (last names)
:repo repo})
"application/json") |
|
fddc5cef4408be3537693be3f09e823db7a8329183ba00097c51bd6d08e321f3 | channable/alfred-margaret | Searcher.hs | -- Alfred-Margaret: Fast Aho-Corasick string searching
Copyright 2019 Channable
--
Licensed under the 3 - clause BSD license , see the LICENSE file in the
-- repository root.
{-# LANGUAGE BangPatterns #-}
# LANGUAGE DeriveGeneric #
# LANGUAGE FlexibleInstances #
module Data.Text.BoyerMooreCI.Searcher
( Searcher
, automata
, build
, buildNeedleIdSearcher
, buildWithValues
, containsAll
, containsAny
, needles
, numNeedles
) where
import Control.DeepSeq (NFData)
import Data.Bifunctor (first)
import Data.Hashable (Hashable (hashWithSalt), Hashed, hashed, unhashed)
import GHC.Generics (Generic)
import Data.Text.Utf8 (Text)
import Data.Text.BoyerMooreCI.Automaton (Automaton)
import qualified Data.Text.BoyerMooreCI.Automaton as BoyerMoore
| A set of needles with associated values , and Boyer - Moore automata to
-- efficiently find those needles.
--
INVARIANT : searcherAutomaton = BoyerMoore.buildAutomaton .
-- To enforce this invariant, the fields are not exposed from this module.
-- There is a separate constructor function.
--
The purpose of this wrapper is to have a type that is Hashable and , so we
can derive those for the types that embed the searcher ,
requiring the automaton itself to be Hashable or Eq , which would be both
-- wasteful and tedious. Because the automaton is fully determined by the
needles and associated values , it is sufficient to implement and Hashable
-- in terms of the needles only.
--
-- We also use Hashed to cache the hash of the needles.
data Searcher v = Searcher
{ searcherNeedles :: Hashed [(Text, v)]
, searcherNumNeedles :: Int
, searcherAutomata :: [(Automaton, v)]
} deriving (Generic)
instance Show (Searcher v) where
show _ = "Searcher _ _ _"
instance Hashable v => Hashable (Searcher v) where
hashWithSalt salt searcher = hashWithSalt salt $ searcherNeedles searcher
# INLINE hashWithSalt #
instance Eq v => Eq (Searcher v) where
Searcher xs nx _ == Searcher ys ny _ = nx == ny && xs == ys
# INLINE (= =) #
instance NFData v => NFData (Searcher v)
-- | Builds the Searcher for a list of needles without values.
-- This is useful for just checking whether the haystack contains the needles.
build :: [Text] -> Searcher ()
# INLINABLE build #
build = buildWithValues . flip zip (repeat ())
-- | Builds the Searcher for a list of needles.
buildWithValues :: Hashable v => [(Text, v)] -> Searcher v
# INLINABLE buildWithValues #
buildWithValues ns =
Searcher (hashed ns) (length ns) $ map (first BoyerMoore.buildAutomaton) ns
needles :: Searcher v -> [(Text, v)]
needles = unhashed . searcherNeedles
automata :: Searcher v -> [(Automaton, v)]
automata = searcherAutomata
numNeedles :: Searcher v -> Int
numNeedles = searcherNumNeedles
-- | Return whether the haystack contains any of the needles.
This function is marked noinline as an inlining boundary . BoyerMoore.runText is
-- marked inline, so this function will be optimized to report only whether
-- there is a match, and not construct a list of matches. We don't want this
-- function be inline, to make sure that the conditions of the caller don't
-- affect how this function is optimized. There is little to gain from
-- additional inlining. The pragma is not an optimization in itself, rather it
-- is a defence against fragile optimizer decisions.
# NOINLINE containsAny #
containsAny :: Searcher () -> Text -> Bool
containsAny !searcher !text =
let
On the first match , return True immediately .
f _acc _matchStart _matchEnd = BoyerMoore.Done True
in
any (\(automaton, ()) -> BoyerMoore.runText False f automaton text) (automata searcher)
-- | Build a 'Searcher' that returns the needle's index in the needle list when it matches.
buildNeedleIdSearcher :: [Text] -> Searcher Int
buildNeedleIdSearcher !ns =
buildWithValues $ zip ns [0..]
-- | Like 'containsAny', but checks whether all needles match instead.
-- Use 'buildNeedleIdSearcher' to get an appropriate 'Searcher'.
# NOINLINE containsAll #
containsAll :: Searcher Int -> Text -> Bool
containsAll !searcher !text =
let
On the first match , return True immediately .
f _acc _matchStart _matchEnd = BoyerMoore.Done True
in
all (\(automaton, _) -> BoyerMoore.runText False f automaton text) (automata searcher)
| null | https://raw.githubusercontent.com/channable/alfred-margaret/be96f07bb74c5aa65ce01d664de5d98badbee307/src/Data/Text/BoyerMooreCI/Searcher.hs | haskell | Alfred-Margaret: Fast Aho-Corasick string searching
repository root.
# LANGUAGE BangPatterns #
efficiently find those needles.
To enforce this invariant, the fields are not exposed from this module.
There is a separate constructor function.
wasteful and tedious. Because the automaton is fully determined by the
in terms of the needles only.
We also use Hashed to cache the hash of the needles.
| Builds the Searcher for a list of needles without values.
This is useful for just checking whether the haystack contains the needles.
| Builds the Searcher for a list of needles.
| Return whether the haystack contains any of the needles.
marked inline, so this function will be optimized to report only whether
there is a match, and not construct a list of matches. We don't want this
function be inline, to make sure that the conditions of the caller don't
affect how this function is optimized. There is little to gain from
additional inlining. The pragma is not an optimization in itself, rather it
is a defence against fragile optimizer decisions.
| Build a 'Searcher' that returns the needle's index in the needle list when it matches.
| Like 'containsAny', but checks whether all needles match instead.
Use 'buildNeedleIdSearcher' to get an appropriate 'Searcher'. | Copyright 2019 Channable
Licensed under the 3 - clause BSD license , see the LICENSE file in the
# LANGUAGE DeriveGeneric #
# LANGUAGE FlexibleInstances #
module Data.Text.BoyerMooreCI.Searcher
( Searcher
, automata
, build
, buildNeedleIdSearcher
, buildWithValues
, containsAll
, containsAny
, needles
, numNeedles
) where
import Control.DeepSeq (NFData)
import Data.Bifunctor (first)
import Data.Hashable (Hashable (hashWithSalt), Hashed, hashed, unhashed)
import GHC.Generics (Generic)
import Data.Text.Utf8 (Text)
import Data.Text.BoyerMooreCI.Automaton (Automaton)
import qualified Data.Text.BoyerMooreCI.Automaton as BoyerMoore
| A set of needles with associated values , and Boyer - Moore automata to
INVARIANT : searcherAutomaton = BoyerMoore.buildAutomaton .
The purpose of this wrapper is to have a type that is Hashable and , so we
can derive those for the types that embed the searcher ,
requiring the automaton itself to be Hashable or Eq , which would be both
needles and associated values , it is sufficient to implement and Hashable
data Searcher v = Searcher
{ searcherNeedles :: Hashed [(Text, v)]
, searcherNumNeedles :: Int
, searcherAutomata :: [(Automaton, v)]
} deriving (Generic)
instance Show (Searcher v) where
show _ = "Searcher _ _ _"
instance Hashable v => Hashable (Searcher v) where
hashWithSalt salt searcher = hashWithSalt salt $ searcherNeedles searcher
# INLINE hashWithSalt #
instance Eq v => Eq (Searcher v) where
Searcher xs nx _ == Searcher ys ny _ = nx == ny && xs == ys
# INLINE (= =) #
instance NFData v => NFData (Searcher v)
build :: [Text] -> Searcher ()
# INLINABLE build #
build = buildWithValues . flip zip (repeat ())
buildWithValues :: Hashable v => [(Text, v)] -> Searcher v
# INLINABLE buildWithValues #
buildWithValues ns =
Searcher (hashed ns) (length ns) $ map (first BoyerMoore.buildAutomaton) ns
needles :: Searcher v -> [(Text, v)]
needles = unhashed . searcherNeedles
automata :: Searcher v -> [(Automaton, v)]
automata = searcherAutomata
numNeedles :: Searcher v -> Int
numNeedles = searcherNumNeedles
This function is marked noinline as an inlining boundary . BoyerMoore.runText is
# NOINLINE containsAny #
containsAny :: Searcher () -> Text -> Bool
containsAny !searcher !text =
let
On the first match , return True immediately .
f _acc _matchStart _matchEnd = BoyerMoore.Done True
in
any (\(automaton, ()) -> BoyerMoore.runText False f automaton text) (automata searcher)
buildNeedleIdSearcher :: [Text] -> Searcher Int
buildNeedleIdSearcher !ns =
buildWithValues $ zip ns [0..]
# NOINLINE containsAll #
containsAll :: Searcher Int -> Text -> Bool
containsAll !searcher !text =
let
On the first match , return True immediately .
f _acc _matchStart _matchEnd = BoyerMoore.Done True
in
all (\(automaton, _) -> BoyerMoore.runText False f automaton text) (automata searcher)
|
16782fafc0e689f38b728a728729f51cfb4d87e16c332fd73f89f33f10a94bcd | robert-strandh/SICL | elt.lisp | (cl:in-package #:sicl-sequence)
(declaim (notinline %invalid-sequence-index))
(defun %invalid-sequence-index (index sequence end)
(error 'invalid-sequence-index
:datum index
:in-sequence sequence
:expected-type `(integer 0 (,end))))
;;; List
;;; Returns the cons cell at INDEX, or signals an appropriate error.
(declaim (inline elt-aux))
(defun elt-aux (list index)
(declare (list list) (list-index index))
(loop for rest = list then (cdr rest)
for position of-type list-length from 0
when (endp rest) do
(%invalid-sequence-index index list position)
when (= position index) do (loop-finish)
finally (return rest)))
(defmethod elt ((list list) index)
(declare (method-properties inlineable))
(check-type index list-index)
(car (elt-aux list index)))
(seal-domain #'elt '(list t))
(defmethod (setf elt) (value (list list) index)
(declare (method-properties inlineable))
(check-type index list-index)
(setf (car (elt-aux list index)) value))
(seal-domain #'(setf elt) '(t list t))
;;; Vector
(defmethod elt ((vector vector) index)
(declare (method-properties inlineable))
(let ((end (if (array-has-fill-pointer-p vector)
(fill-pointer vector)
(array-dimension vector 0))))
(unless (< -1 index end)
(%invalid-sequence-index index vector end)))
(aref vector index))
(seal-domain #'elt '(vector t))
(defmethod (setf elt) (value (vector vector) index)
(declare (method-properties inlineable))
(let ((end (if (array-has-fill-pointer-p vector)
(fill-pointer vector)
(array-dimension vector 0))))
(unless (< -1 index end)
(%invalid-sequence-index index vector end)))
(setf (aref vector index) value))
(seal-domain #'(setf elt) '(t vector t))
| null | https://raw.githubusercontent.com/robert-strandh/SICL/837dc6b421174b6a307f26a1cea1caedfa1d5ebf/Code/Sequence/elt.lisp | lisp | List
Returns the cons cell at INDEX, or signals an appropriate error.
Vector | (cl:in-package #:sicl-sequence)
(declaim (notinline %invalid-sequence-index))
(defun %invalid-sequence-index (index sequence end)
(error 'invalid-sequence-index
:datum index
:in-sequence sequence
:expected-type `(integer 0 (,end))))
(declaim (inline elt-aux))
(defun elt-aux (list index)
(declare (list list) (list-index index))
(loop for rest = list then (cdr rest)
for position of-type list-length from 0
when (endp rest) do
(%invalid-sequence-index index list position)
when (= position index) do (loop-finish)
finally (return rest)))
(defmethod elt ((list list) index)
(declare (method-properties inlineable))
(check-type index list-index)
(car (elt-aux list index)))
(seal-domain #'elt '(list t))
(defmethod (setf elt) (value (list list) index)
(declare (method-properties inlineable))
(check-type index list-index)
(setf (car (elt-aux list index)) value))
(seal-domain #'(setf elt) '(t list t))
(defmethod elt ((vector vector) index)
(declare (method-properties inlineable))
(let ((end (if (array-has-fill-pointer-p vector)
(fill-pointer vector)
(array-dimension vector 0))))
(unless (< -1 index end)
(%invalid-sequence-index index vector end)))
(aref vector index))
(seal-domain #'elt '(vector t))
(defmethod (setf elt) (value (vector vector) index)
(declare (method-properties inlineable))
(let ((end (if (array-has-fill-pointer-p vector)
(fill-pointer vector)
(array-dimension vector 0))))
(unless (< -1 index end)
(%invalid-sequence-index index vector end)))
(setf (aref vector index) value))
(seal-domain #'(setf elt) '(t vector t))
|
7723f41876cf032c5ccc748ebf4442cef7414843259e6d731f7741a561c17055 | plexus/chestnut | styles.clj | (ns {{project-ns}}.styles
(:require [garden-watcher.def :refer [defstyles]]))
(defstyles style
[:h1 {:text-decoration "underline"}])
| null | https://raw.githubusercontent.com/plexus/chestnut/684b668141586ed5ef4389f94a4dc7f4fde13112/src/leiningen/new/chestnut/src/clj/chestnut/styles.clj | clojure | (ns {{project-ns}}.styles
(:require [garden-watcher.def :refer [defstyles]]))
(defstyles style
[:h1 {:text-decoration "underline"}])
|
|
a32f1ec5a08119abfa90a2fa4a3d9fc1bf2136c36665b47e41b11b488edb4eed | softwarelanguageslab/maf | R5RS_rosetta_easter-1.scm | ; Changes:
* removed : 0
* added : 0
* swaps : 0
* negated predicates : 1
* swapped branches : 1
; * calls to id fun: 0
(letrec ((easter (lambda (year)
(let* ((a (remainder year 19))
(b (quotient year 100))
(c (remainder year 100))
(d (quotient b 4))
(e (remainder b 4))
(f (quotient (+ b 8) 25))
(g (quotient (+ 1 (- b f)) 3))
(h (remainder (+ (* 19 a) (- b d g) 15) 30))
(i (quotient c 4))
(k (remainder c 4))
(l (remainder (+ e e i i (- 32 h k)) 7))
(m (quotient (+ a (* 11 h) (* 22 l)) 451))
(n (+ h l (- 114 (* 7 m)))))
(list (quotient n 31) (+ 1 (remainder n 31)))))))
(if (<change> (equal? (easter 2017) (__toplevel_cons 4 (__toplevel_cons 16 ()))) (not (equal? (easter 2017) (__toplevel_cons 4 (__toplevel_cons 16 ())))))
(if (equal? (easter 1027) (__toplevel_cons 4 (__toplevel_cons 1 ())))
(if (equal? (easter 2016) (__toplevel_cons 3 (__toplevel_cons 27 ())))
(<change>
(equal? (easter 172) (__toplevel_cons 3 (__toplevel_cons 29 ())))
#f)
(<change>
#f
(equal? (easter 172) (__toplevel_cons 3 (__toplevel_cons 29 ())))))
#f)
#f)) | null | https://raw.githubusercontent.com/softwarelanguageslab/maf/11acedf56b9bf0c8e55ddb6aea754b6766d8bb40/test/changes/scheme/generated/R5RS_rosetta_easter-1.scm | scheme | Changes:
* calls to id fun: 0 | * removed : 0
* added : 0
* swaps : 0
* negated predicates : 1
* swapped branches : 1
(letrec ((easter (lambda (year)
(let* ((a (remainder year 19))
(b (quotient year 100))
(c (remainder year 100))
(d (quotient b 4))
(e (remainder b 4))
(f (quotient (+ b 8) 25))
(g (quotient (+ 1 (- b f)) 3))
(h (remainder (+ (* 19 a) (- b d g) 15) 30))
(i (quotient c 4))
(k (remainder c 4))
(l (remainder (+ e e i i (- 32 h k)) 7))
(m (quotient (+ a (* 11 h) (* 22 l)) 451))
(n (+ h l (- 114 (* 7 m)))))
(list (quotient n 31) (+ 1 (remainder n 31)))))))
(if (<change> (equal? (easter 2017) (__toplevel_cons 4 (__toplevel_cons 16 ()))) (not (equal? (easter 2017) (__toplevel_cons 4 (__toplevel_cons 16 ())))))
(if (equal? (easter 1027) (__toplevel_cons 4 (__toplevel_cons 1 ())))
(if (equal? (easter 2016) (__toplevel_cons 3 (__toplevel_cons 27 ())))
(<change>
(equal? (easter 172) (__toplevel_cons 3 (__toplevel_cons 29 ())))
#f)
(<change>
#f
(equal? (easter 172) (__toplevel_cons 3 (__toplevel_cons 29 ())))))
#f)
#f)) |
731e3253bf39e3b80d65e5dc8d620154c5b5ff3859ac6918a72fbaed8c5042ec | marmelab/ocaml-invader | score.ml | let render score =
GlDraw.color (0.5, 1., 1.);
Utils.drawString 20. 550. (Printf.sprintf "Score: %d" score);
| null | https://raw.githubusercontent.com/marmelab/ocaml-invader/037280642cdd1b7800df4b9199aa0cdb802efe1c/src/score.ml | ocaml | let render score =
GlDraw.color (0.5, 1., 1.);
Utils.drawString 20. 550. (Printf.sprintf "Score: %d" score);
|
|
1d4753d6fc9fe3d11a77b6a98260aeaf96bbf745f1f5a09040eeb376349e4c0b | asivitz/Hickory | ParseJson.hs | # LANGUAGE DeriveGeneric #
{ - # LANGUAGE DerivingStrategies # - }
# LANGUAGE DerivingVia #
# LANGUAGE DataKinds #
# LANGUAGE TypeOperators #
# LANGUAGE DuplicateRecordFields #
module Hickory.Text.ParseJson where
import Hickory.Math (Scalar)
import Data.Text (Text)
import GHC.Generics (Generic)
import Data.Aeson.Deriving (GenericEncoded(..), FieldLabelModifier, type (:=), ConstructorTagModifier)
import Data.Aeson.Deriving.Generic (DropSuffix, Lowercase)
import Data.Aeson (FromJSON, ToJSON, eitherDecode)
import Data.ByteString.Lazy (ByteString)
import Linear (V2 (..))
import qualified Data.HashMap.Strict as HashMap
import Data.Functor ((<&>))
Format from :
{
" atlas " : {
" type " : " msdf " ,
" distanceRange " : 2 ,
" size " : 32.34375 ,
" width " : 204 ,
" height " : 204 ,
" yOrigin " : " bottom "
} ,
" metrics " : {
" emSize " : 1 ,
" " : 1.212 ,
" ascender " : 0.95400000000000007 ,
" descender " : -0.25800000000000001 ,
" underlineY " : -0.17500000000000002 ,
" underlineThickness " : 0.089999999999999997
} ,
" glyphs " : [
{ " unicode " : 32 , " advance " : 0.22 } ,
{
" unicode " : 33 ,
" advance " : 0.31 ,
" planeBounds " : {
" left " : 0.046787439613526591 ,
" bottom " : -0.049973429951690851 ,
" right " : 0.26321256038647345 ,
" top " : 0.72297342995169089
} ,
" atlasBounds " : {
" left " : 41.5 ,
" bottom " : 143.5 ,
" right " : 48.5 ,
" top " : 168.5
}
} ,
...
" kerning " : [
{ " unicode1 " : 32 , " " : 84 , " advance " : -0.02 } ,
...
]
}
{
"atlas": {
"type": "msdf",
"distanceRange": 2,
"size": 32.34375,
"width": 204,
"height": 204,
"yOrigin": "bottom"
},
"metrics": {
"emSize": 1,
"lineHeight": 1.212,
"ascender": 0.95400000000000007,
"descender": -0.25800000000000001,
"underlineY": -0.17500000000000002,
"underlineThickness": 0.089999999999999997
},
"glyphs": [
{ "unicode": 32, "advance": 0.22 },
{
"unicode": 33,
"advance": 0.31,
"planeBounds": {
"left": 0.046787439613526591,
"bottom": -0.049973429951690851,
"right": 0.26321256038647345,
"top": 0.72297342995169089
},
"atlasBounds": {
"left": 41.5,
"bottom": 143.5,
"right": 48.5,
"top": 168.5
}
},
...
"kerning": [
{ "unicode1": 32, "unicode2": 84, "advance": -0.02 },
...
]
}
-}
type Encoding = GenericEncoded
'[ FieldLabelModifier := DropSuffix "_"
, ConstructorTagModifier := Lowercase
]
data MSDFFont = MSDFFont
{ atlas :: Atlas
, metrics :: Metrics
, glyphs :: [Glyph]
, kerning :: [KerningPair]
} deriving stock Generic
deriving (FromJSON, ToJSON) via Encoding MSDFFont
data Atlas = Atlas
{ type_ :: Text
, distanceRange :: Scalar
, size :: Scalar
, width :: Int
, height :: Int
, yOrigin :: YOrigin
} deriving stock (Generic)
deriving (FromJSON, ToJSON) via Encoding Atlas
data YOrigin = Bottom | Top
deriving stock (Generic)
deriving (FromJSON, ToJSON) via Encoding YOrigin
data Metrics = Metrics
{ emSize :: Scalar
, lineHeight :: Scalar
, ascender :: Scalar
, descender :: Scalar
, underlineY :: Scalar
, underlineThickness :: Scalar
} deriving stock Generic
deriving (FromJSON, ToJSON) via Encoding Metrics
data Glyph = Glyph
{ unicode :: Int
Cursor advancement in world X
, planeBounds :: Maybe Bounds -- World coordinates relative to cursor
, atlasBounds :: Maybe Bounds -- Position within the texture atlas
} deriving stock Generic
deriving (FromJSON, ToJSON) via Encoding Glyph
data KerningPair = KerningPair
{ unicode1 :: Int
, unicode2 :: Int
, advance :: Scalar
} deriving stock Generic
deriving (FromJSON, ToJSON) via Encoding KerningPair
data Bounds = Bounds
{ left :: Scalar
, bottom :: Scalar
, right :: Scalar
, top :: Scalar
} deriving stock Generic
deriving (FromJSON, ToJSON) via Encoding Bounds
data Font = Font
{ atlas :: Atlas
, metrics :: Metrics
, glyphMap :: HashMap.HashMap Int (Glyph, Maybe GlyphVerts)
, kerningMap :: HashMap.HashMap (Int,Int) Scalar
} deriving stock Generic
data GlyphVerts = GlyphVerts
{ verts :: [V2 Scalar]
, texCoords :: [V2 Scalar]
} deriving Show
makeFont :: ByteString -> Either String Font
makeFont text = case eitherDecode text of
Right MSDFFont {..} ->
let -- Metrics {..} = metrics
glyphMap = HashMap.fromList $ glyphs <&> \g@Glyph {..} -> (unicode, (g, makeGlyphVerts atlas metrics g))
kerningMap = HashMap.fromList $ kerning <&> \KerningPair {..} -> ((unicode1, unicode2), advance)
in Right Font {..}
Left s -> Left s
where
makeGlyphVerts Atlas {..} Metrics {..} Glyph {..} = case (planeBounds, atlasBounds) of
(Just (Bounds pl pb pr pt), Just (Bounds al ab ar at)) ->
let
texCoords =
[ V2 (al / realToFrac width) (at / realToFrac height)
, V2 (ar / realToFrac width) (at / realToFrac height)
, V2 (al / realToFrac width) (ab / realToFrac height)
, V2 (ar / realToFrac width) (ab / realToFrac height)
]
verts =
[ V2 pl (-pt)
, V2 pr (-pt)
, V2 pl (-pb)
, V2 pr (-pb)
]
in Just GlyphVerts {..}
_ -> Nothing
| null | https://raw.githubusercontent.com/asivitz/Hickory/359eeff77dd31bb7b12826eef521c871a656ca3e/core/Hickory/Text/ParseJson.hs | haskell | World coordinates relative to cursor
Position within the texture atlas
Metrics {..} = metrics | # LANGUAGE DeriveGeneric #
{ - # LANGUAGE DerivingStrategies # - }
# LANGUAGE DerivingVia #
# LANGUAGE DataKinds #
# LANGUAGE TypeOperators #
# LANGUAGE DuplicateRecordFields #
module Hickory.Text.ParseJson where
import Hickory.Math (Scalar)
import Data.Text (Text)
import GHC.Generics (Generic)
import Data.Aeson.Deriving (GenericEncoded(..), FieldLabelModifier, type (:=), ConstructorTagModifier)
import Data.Aeson.Deriving.Generic (DropSuffix, Lowercase)
import Data.Aeson (FromJSON, ToJSON, eitherDecode)
import Data.ByteString.Lazy (ByteString)
import Linear (V2 (..))
import qualified Data.HashMap.Strict as HashMap
import Data.Functor ((<&>))
Format from :
{
" atlas " : {
" type " : " msdf " ,
" distanceRange " : 2 ,
" size " : 32.34375 ,
" width " : 204 ,
" height " : 204 ,
" yOrigin " : " bottom "
} ,
" metrics " : {
" emSize " : 1 ,
" " : 1.212 ,
" ascender " : 0.95400000000000007 ,
" descender " : -0.25800000000000001 ,
" underlineY " : -0.17500000000000002 ,
" underlineThickness " : 0.089999999999999997
} ,
" glyphs " : [
{ " unicode " : 32 , " advance " : 0.22 } ,
{
" unicode " : 33 ,
" advance " : 0.31 ,
" planeBounds " : {
" left " : 0.046787439613526591 ,
" bottom " : -0.049973429951690851 ,
" right " : 0.26321256038647345 ,
" top " : 0.72297342995169089
} ,
" atlasBounds " : {
" left " : 41.5 ,
" bottom " : 143.5 ,
" right " : 48.5 ,
" top " : 168.5
}
} ,
...
" kerning " : [
{ " unicode1 " : 32 , " " : 84 , " advance " : -0.02 } ,
...
]
}
{
"atlas": {
"type": "msdf",
"distanceRange": 2,
"size": 32.34375,
"width": 204,
"height": 204,
"yOrigin": "bottom"
},
"metrics": {
"emSize": 1,
"lineHeight": 1.212,
"ascender": 0.95400000000000007,
"descender": -0.25800000000000001,
"underlineY": -0.17500000000000002,
"underlineThickness": 0.089999999999999997
},
"glyphs": [
{ "unicode": 32, "advance": 0.22 },
{
"unicode": 33,
"advance": 0.31,
"planeBounds": {
"left": 0.046787439613526591,
"bottom": -0.049973429951690851,
"right": 0.26321256038647345,
"top": 0.72297342995169089
},
"atlasBounds": {
"left": 41.5,
"bottom": 143.5,
"right": 48.5,
"top": 168.5
}
},
...
"kerning": [
{ "unicode1": 32, "unicode2": 84, "advance": -0.02 },
...
]
}
-}
type Encoding = GenericEncoded
'[ FieldLabelModifier := DropSuffix "_"
, ConstructorTagModifier := Lowercase
]
data MSDFFont = MSDFFont
{ atlas :: Atlas
, metrics :: Metrics
, glyphs :: [Glyph]
, kerning :: [KerningPair]
} deriving stock Generic
deriving (FromJSON, ToJSON) via Encoding MSDFFont
data Atlas = Atlas
{ type_ :: Text
, distanceRange :: Scalar
, size :: Scalar
, width :: Int
, height :: Int
, yOrigin :: YOrigin
} deriving stock (Generic)
deriving (FromJSON, ToJSON) via Encoding Atlas
data YOrigin = Bottom | Top
deriving stock (Generic)
deriving (FromJSON, ToJSON) via Encoding YOrigin
data Metrics = Metrics
{ emSize :: Scalar
, lineHeight :: Scalar
, ascender :: Scalar
, descender :: Scalar
, underlineY :: Scalar
, underlineThickness :: Scalar
} deriving stock Generic
deriving (FromJSON, ToJSON) via Encoding Metrics
data Glyph = Glyph
{ unicode :: Int
Cursor advancement in world X
} deriving stock Generic
deriving (FromJSON, ToJSON) via Encoding Glyph
data KerningPair = KerningPair
{ unicode1 :: Int
, unicode2 :: Int
, advance :: Scalar
} deriving stock Generic
deriving (FromJSON, ToJSON) via Encoding KerningPair
data Bounds = Bounds
{ left :: Scalar
, bottom :: Scalar
, right :: Scalar
, top :: Scalar
} deriving stock Generic
deriving (FromJSON, ToJSON) via Encoding Bounds
data Font = Font
{ atlas :: Atlas
, metrics :: Metrics
, glyphMap :: HashMap.HashMap Int (Glyph, Maybe GlyphVerts)
, kerningMap :: HashMap.HashMap (Int,Int) Scalar
} deriving stock Generic
data GlyphVerts = GlyphVerts
{ verts :: [V2 Scalar]
, texCoords :: [V2 Scalar]
} deriving Show
makeFont :: ByteString -> Either String Font
makeFont text = case eitherDecode text of
Right MSDFFont {..} ->
glyphMap = HashMap.fromList $ glyphs <&> \g@Glyph {..} -> (unicode, (g, makeGlyphVerts atlas metrics g))
kerningMap = HashMap.fromList $ kerning <&> \KerningPair {..} -> ((unicode1, unicode2), advance)
in Right Font {..}
Left s -> Left s
where
makeGlyphVerts Atlas {..} Metrics {..} Glyph {..} = case (planeBounds, atlasBounds) of
(Just (Bounds pl pb pr pt), Just (Bounds al ab ar at)) ->
let
texCoords =
[ V2 (al / realToFrac width) (at / realToFrac height)
, V2 (ar / realToFrac width) (at / realToFrac height)
, V2 (al / realToFrac width) (ab / realToFrac height)
, V2 (ar / realToFrac width) (ab / realToFrac height)
]
verts =
[ V2 pl (-pt)
, V2 pr (-pt)
, V2 pl (-pb)
, V2 pr (-pb)
]
in Just GlyphVerts {..}
_ -> Nothing
|
201f52cec460be506c6e4634b69e17a3ea0a3e239e15f6b9e3604b007e6dfb54 | edsko/ChinesePodAPI | V2.hs | module Servant.ChinesePod.Analysis.State.V2 (
-- * Types
V1.Simpl
, AnalysisStatic(..)
, V1.AnalysisDynamic(..)
, AnalysisState
, V1.RelevantLesson(..)
, V1.HSKLevel
-- * Migration
, migrate
) where
import Prelude hiding (Word, words)
import Data.Binary (Binary)
import Data.Map (Map)
import GHC.Generics (Generic)
import Text.Show.Pretty (PrettyVal)
import Servant.ChinesePod.Vocab.V2
import Servant.ChinesePod.Util.Migrate
import qualified Servant.ChinesePod.Analysis.State.V1 as V1
------------------------------------------------------------------------------
State
------------------------------------------------------------------------------
State
-------------------------------------------------------------------------------}
-- | The static part of the analysis (that doesn't change over time)
data AnalysisStatic = AnalysisStatic {
-- | All lessons available
analysisAllLessons :: Map V3Id Lesson
-- | All words we're studying
, analysisAllWords :: [Word]
-- | "Inverse" index: which lessons cover a certain word?
--
-- We do not distinguish between key and supplemental vocabulary here,
-- but only consider supplemental vocabulary that appears in the dialog.
See comments for ` RelevantLesson ` .
, analysisInverse :: Map V1.Simpl [V3Id]
}
deriving (Generic, Show)
type AnalysisState = (AnalysisStatic, V1.AnalysisDynamic)
instance PrettyVal AnalysisStatic
instance Binary AnalysisStatic
{-------------------------------------------------------------------------------
Migration
-------------------------------------------------------------------------------}
instance Migrate AnalysisState where
type MigrateFrom AnalysisState = V1.AnalysisState
migrate (static, dynamic) = (migrate static, dynamic)
instance Migrate AnalysisStatic where
type MigrateFrom AnalysisStatic = V1.AnalysisStatic
migrate V1.AnalysisStatic{..} = AnalysisStatic{
analysisAllLessons = fmap migrate analysisAllLessons
, analysisAllWords = analysisAllWords
, analysisInverse = analysisInverse
}
| null | https://raw.githubusercontent.com/edsko/ChinesePodAPI/f77ebfd55286316c4a54c42c195d5a51b4a0e4cd/src/Servant/ChinesePod/Analysis/State/V2.hs | haskell | * Types
* Migration
----------------------------------------------------------------------------
----------------------------------------------------------------------------
-----------------------------------------------------------------------------}
| The static part of the analysis (that doesn't change over time)
| All lessons available
| All words we're studying
| "Inverse" index: which lessons cover a certain word?
We do not distinguish between key and supplemental vocabulary here,
but only consider supplemental vocabulary that appears in the dialog.
------------------------------------------------------------------------------
Migration
------------------------------------------------------------------------------ | module Servant.ChinesePod.Analysis.State.V2 (
V1.Simpl
, AnalysisStatic(..)
, V1.AnalysisDynamic(..)
, AnalysisState
, V1.RelevantLesson(..)
, V1.HSKLevel
, migrate
) where
import Prelude hiding (Word, words)
import Data.Binary (Binary)
import Data.Map (Map)
import GHC.Generics (Generic)
import Text.Show.Pretty (PrettyVal)
import Servant.ChinesePod.Vocab.V2
import Servant.ChinesePod.Util.Migrate
import qualified Servant.ChinesePod.Analysis.State.V1 as V1
State
State
data AnalysisStatic = AnalysisStatic {
analysisAllLessons :: Map V3Id Lesson
, analysisAllWords :: [Word]
See comments for ` RelevantLesson ` .
, analysisInverse :: Map V1.Simpl [V3Id]
}
deriving (Generic, Show)
type AnalysisState = (AnalysisStatic, V1.AnalysisDynamic)
instance PrettyVal AnalysisStatic
instance Binary AnalysisStatic
instance Migrate AnalysisState where
type MigrateFrom AnalysisState = V1.AnalysisState
migrate (static, dynamic) = (migrate static, dynamic)
instance Migrate AnalysisStatic where
type MigrateFrom AnalysisStatic = V1.AnalysisStatic
migrate V1.AnalysisStatic{..} = AnalysisStatic{
analysisAllLessons = fmap migrate analysisAllLessons
, analysisAllWords = analysisAllWords
, analysisInverse = analysisInverse
}
|
494520ba00e4fecf936f5c5a24f8cd9767377c7c8b8a6898146a15e237777b1e | mejgun/haskell-tdlib | JsonObjectMember.hs | {-# LANGUAGE OverloadedStrings #-}
-- |
module TD.Data.JsonObjectMember where
import qualified Data.Aeson as A
import qualified Data.Aeson.Types as T
import {-# SOURCE #-} qualified TD.Data.JsonValue as JsonValue
import qualified Utils as U
-- |
| Represents one member of a JSON object @key Member 's key @value Member 's value
JsonObjectMember
{ -- |
value :: Maybe JsonValue.JsonValue,
-- |
key :: Maybe String
}
deriving (Eq)
instance Show JsonObjectMember where
show
JsonObjectMember
{ value = value_,
key = key_
} =
"JsonObjectMember"
++ U.cc
[ U.p "value" value_,
U.p "key" key_
]
instance T.FromJSON JsonObjectMember where
parseJSON v@(T.Object obj) = do
t <- obj A..: "@type" :: T.Parser String
case t of
"jsonObjectMember" -> parseJsonObjectMember v
_ -> mempty
where
parseJsonObjectMember :: A.Value -> T.Parser JsonObjectMember
parseJsonObjectMember = A.withObject "JsonObjectMember" $ \o -> do
value_ <- o A..:? "value"
key_ <- o A..:? "key"
return $ JsonObjectMember {value = value_, key = key_}
parseJSON _ = mempty
instance T.ToJSON JsonObjectMember where
toJSON
JsonObjectMember
{ value = value_,
key = key_
} =
A.object
[ "@type" A..= T.String "jsonObjectMember",
"value" A..= value_,
"key" A..= key_
]
| null | https://raw.githubusercontent.com/mejgun/haskell-tdlib/beb6635177d7626b70fd909b1d89f2156a992cd2/src/TD/Data/JsonObjectMember.hs | haskell | # LANGUAGE OverloadedStrings #
|
# SOURCE #
|
|
| |
module TD.Data.JsonObjectMember where
import qualified Data.Aeson as A
import qualified Data.Aeson.Types as T
import qualified Utils as U
| Represents one member of a JSON object @key Member 's key @value Member 's value
JsonObjectMember
value :: Maybe JsonValue.JsonValue,
key :: Maybe String
}
deriving (Eq)
instance Show JsonObjectMember where
show
JsonObjectMember
{ value = value_,
key = key_
} =
"JsonObjectMember"
++ U.cc
[ U.p "value" value_,
U.p "key" key_
]
instance T.FromJSON JsonObjectMember where
parseJSON v@(T.Object obj) = do
t <- obj A..: "@type" :: T.Parser String
case t of
"jsonObjectMember" -> parseJsonObjectMember v
_ -> mempty
where
parseJsonObjectMember :: A.Value -> T.Parser JsonObjectMember
parseJsonObjectMember = A.withObject "JsonObjectMember" $ \o -> do
value_ <- o A..:? "value"
key_ <- o A..:? "key"
return $ JsonObjectMember {value = value_, key = key_}
parseJSON _ = mempty
instance T.ToJSON JsonObjectMember where
toJSON
JsonObjectMember
{ value = value_,
key = key_
} =
A.object
[ "@type" A..= T.String "jsonObjectMember",
"value" A..= value_,
"key" A..= key_
]
|
bcda4466fd80a0efbd9f77612c503e4c11e60808dcaa1003efc30b4bb11c79d0 | CDSoft/abp | Environment.hs |
This file is part of ABP .
ABP is free software : you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation , either version 3 of the License , or
( at your option ) any later version .
ABP is distributed in the hope that it will be useful ,
but WITHOUT ANY WARRANTY ; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE . See the
GNU General Public License for more details .
You should have received a copy of the GNU General Public License
along with ABP . If not , see < / > .
For further information about ABP you can visit
This file is part of ABP.
ABP is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
ABP is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with ABP. If not, see </>.
For further information about ABP you can visit
-}
{-# LANGUAGE OverloadedStrings #-}
module Environment
( Env(..)
, EnvMVar
, newEnv
, readEnv
, setVar
, getVar, getVarStr
)
where
import Config
import Tools
import Control.Concurrent.MVar
import Data.Bifunctor
import Data.Maybe
import qualified Data.Text as T
import System.Environment
import Text.Pandoc.JSON
data Env = Env { format :: Maybe Format
, vars :: [(T.Text, Inline)]
, quiet :: Bool
, deps :: [FilePath]
}
type EnvMVar = MVar Env
newEnv :: Maybe Format -> IO EnvMVar
newEnv maybeFormat = do
envVars <- map (bimap T.pack (Str . T.pack)) <$> getEnvironment
abpPath <- T.pack <$> getExecutablePath
let vs = [ ("format", Str fmt) | Format fmt <- maybeToList maybeFormat ]
++ [ (kAbpPath, Str abpPath)
]
++ envVars
let q = isJust (lookup kAbpQuiet vs)
newMVar $ Env { format = maybeFormat
, vars = vs
, quiet = q
, deps = []
}
readEnv :: EnvMVar -> IO Env
readEnv = readMVar
setVar :: EnvMVar -> T.Text -> Inline -> IO ()
setVar mvar var val = modifyMVar_ mvar (\e -> return e { vars = (var, val) : vars e })
getVar :: EnvMVar -> T.Text -> IO (Maybe Inline)
getVar e var = lookup var . vars <$> readMVar e
getVarStr :: EnvMVar -> T.Text -> IO (Maybe T.Text)
getVarStr e var = mapM inlineToPlainText =<< getVar e var
| null | https://raw.githubusercontent.com/CDSoft/abp/98e042066869aa5b898096c31b51d2b6dc316456/src/Environment.hs | haskell | # LANGUAGE OverloadedStrings # |
This file is part of ABP .
ABP is free software : you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation , either version 3 of the License , or
( at your option ) any later version .
ABP is distributed in the hope that it will be useful ,
but WITHOUT ANY WARRANTY ; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE . See the
GNU General Public License for more details .
You should have received a copy of the GNU General Public License
along with ABP . If not , see < / > .
For further information about ABP you can visit
This file is part of ABP.
ABP is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
ABP is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with ABP. If not, see </>.
For further information about ABP you can visit
-}
module Environment
( Env(..)
, EnvMVar
, newEnv
, readEnv
, setVar
, getVar, getVarStr
)
where
import Config
import Tools
import Control.Concurrent.MVar
import Data.Bifunctor
import Data.Maybe
import qualified Data.Text as T
import System.Environment
import Text.Pandoc.JSON
data Env = Env { format :: Maybe Format
, vars :: [(T.Text, Inline)]
, quiet :: Bool
, deps :: [FilePath]
}
type EnvMVar = MVar Env
newEnv :: Maybe Format -> IO EnvMVar
newEnv maybeFormat = do
envVars <- map (bimap T.pack (Str . T.pack)) <$> getEnvironment
abpPath <- T.pack <$> getExecutablePath
let vs = [ ("format", Str fmt) | Format fmt <- maybeToList maybeFormat ]
++ [ (kAbpPath, Str abpPath)
]
++ envVars
let q = isJust (lookup kAbpQuiet vs)
newMVar $ Env { format = maybeFormat
, vars = vs
, quiet = q
, deps = []
}
readEnv :: EnvMVar -> IO Env
readEnv = readMVar
setVar :: EnvMVar -> T.Text -> Inline -> IO ()
setVar mvar var val = modifyMVar_ mvar (\e -> return e { vars = (var, val) : vars e })
getVar :: EnvMVar -> T.Text -> IO (Maybe Inline)
getVar e var = lookup var . vars <$> readMVar e
getVarStr :: EnvMVar -> T.Text -> IO (Maybe T.Text)
getVarStr e var = mapM inlineToPlainText =<< getVar e var
|
912b4b8af4d2b55ba55fbe0eae1dc7a85280a72687b5abd2474b5f4ca63409eb | kawasima/darzana | user.cljs | (ns cljs.user
(:require [devtools.core :as devtools]
[figwheel.client :as figwheel]))
(js/console.info "Starting in development mode")
(enable-console-print!)
(devtools/install!)
(figwheel/start {:websocket-url "ws:3449/figwheel-ws"})
(defn log [& args]
(.apply js/console.log js/console (apply array args)))
| null | https://raw.githubusercontent.com/kawasima/darzana/4b37c8556f74219b707d23cb2d6dce70509a0c1b/dev/src/cljs/user.cljs | clojure | (ns cljs.user
(:require [devtools.core :as devtools]
[figwheel.client :as figwheel]))
(js/console.info "Starting in development mode")
(enable-console-print!)
(devtools/install!)
(figwheel/start {:websocket-url "ws:3449/figwheel-ws"})
(defn log [& args]
(.apply js/console.log js/console (apply array args)))
|
|
b80698636486775c5f784e3b63acd464b2aa0ae8667470c0b7ad63a417370d17 | juspay/atlas | PerExtraKmRate.hs | |
Copyright 2022 Juspay Technologies Pvt Ltd
Licensed under the Apache License , Version 2.0 ( the " License " ) ;
you may not use this file except in compliance with the License .
You may obtain a copy of the License at
-2.0
Unless required by applicable law or agreed to in writing , software
distributed under the License is distributed on an " AS IS " BASIS ,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND , either express or implied .
See the License for the specific language governing permissions and
limitations under the License .
Module : Storage . Queries . FarePolicy . PerExtraKmRate
Copyright : ( C ) Juspay Technologies Pvt Ltd 2019 - 2022
License : Apache 2.0 ( see the file LICENSE )
Maintainer :
Stability : experimental
Portability : non - portable
Copyright 2022 Juspay Technologies Pvt Ltd
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
Module : Storage.Queries.FarePolicy.PerExtraKmRate
Copyright : (C) Juspay Technologies Pvt Ltd 2019-2022
License : Apache 2.0 (see the file LICENSE)
Maintainer :
Stability : experimental
Portability : non-portable
-}
module Storage.Queries.FarePolicy.PerExtraKmRate
( Storage.Queries.FarePolicy.PerExtraKmRate.findAll,
deleteAll,
)
where
import Beckn.Prelude
import Beckn.Storage.Esqueleto as Esq
import Beckn.Types.Id
import Domain.Types.FarePolicy.PerExtraKmRate
import Domain.Types.Organization (Organization)
import Storage.Tabular.FarePolicy.PerExtraKmRate
import Types.Error (FarePolicyError (NoPerExtraKmRate))
import Utils.Common
findAll ::
( Transactionable m,
Monad m,
MonadThrow m,
Log m
) =>
Id Organization ->
m (NonEmpty PerExtraKmRate)
findAll orgId = do
rez <- Esq.findAll $ do
perExtraKmRate <- from $ table @PerExtraKmRateT
where_ $
perExtraKmRate ^. PerExtraKmRateOrganizationId ==. val (toKey orgId)
orderBy [asc $ perExtraKmRate ^. PerExtraKmRateDistanceRangeStart]
return perExtraKmRate
noneEmptyRez <- case rez of
e : es -> pure $ e :| es
[] -> throwError NoPerExtraKmRate
return (getDomainPart <$> noneEmptyRez)
deleteAll :: Id Organization -> SqlDB ()
deleteAll orgId =
delete' $ do
perExtraKmRate <- from $ table @PerExtraKmRateT
where_ $
perExtraKmRate ^. PerExtraKmRateOrganizationId ==. val (toKey orgId)
getDomainPart :: FullPerExtraKmRate -> PerExtraKmRate
getDomainPart (_, _, domain) = domain
| null | https://raw.githubusercontent.com/juspay/atlas/e64b227dc17887fb01c2554db21c08284d18a806/app/driver-offer-bpp/src/Storage/Queries/FarePolicy/PerExtraKmRate.hs | haskell | |
Copyright 2022 Juspay Technologies Pvt Ltd
Licensed under the Apache License , Version 2.0 ( the " License " ) ;
you may not use this file except in compliance with the License .
You may obtain a copy of the License at
-2.0
Unless required by applicable law or agreed to in writing , software
distributed under the License is distributed on an " AS IS " BASIS ,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND , either express or implied .
See the License for the specific language governing permissions and
limitations under the License .
Module : Storage . Queries . FarePolicy . PerExtraKmRate
Copyright : ( C ) Juspay Technologies Pvt Ltd 2019 - 2022
License : Apache 2.0 ( see the file LICENSE )
Maintainer :
Stability : experimental
Portability : non - portable
Copyright 2022 Juspay Technologies Pvt Ltd
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
Module : Storage.Queries.FarePolicy.PerExtraKmRate
Copyright : (C) Juspay Technologies Pvt Ltd 2019-2022
License : Apache 2.0 (see the file LICENSE)
Maintainer :
Stability : experimental
Portability : non-portable
-}
module Storage.Queries.FarePolicy.PerExtraKmRate
( Storage.Queries.FarePolicy.PerExtraKmRate.findAll,
deleteAll,
)
where
import Beckn.Prelude
import Beckn.Storage.Esqueleto as Esq
import Beckn.Types.Id
import Domain.Types.FarePolicy.PerExtraKmRate
import Domain.Types.Organization (Organization)
import Storage.Tabular.FarePolicy.PerExtraKmRate
import Types.Error (FarePolicyError (NoPerExtraKmRate))
import Utils.Common
findAll ::
( Transactionable m,
Monad m,
MonadThrow m,
Log m
) =>
Id Organization ->
m (NonEmpty PerExtraKmRate)
findAll orgId = do
rez <- Esq.findAll $ do
perExtraKmRate <- from $ table @PerExtraKmRateT
where_ $
perExtraKmRate ^. PerExtraKmRateOrganizationId ==. val (toKey orgId)
orderBy [asc $ perExtraKmRate ^. PerExtraKmRateDistanceRangeStart]
return perExtraKmRate
noneEmptyRez <- case rez of
e : es -> pure $ e :| es
[] -> throwError NoPerExtraKmRate
return (getDomainPart <$> noneEmptyRez)
deleteAll :: Id Organization -> SqlDB ()
deleteAll orgId =
delete' $ do
perExtraKmRate <- from $ table @PerExtraKmRateT
where_ $
perExtraKmRate ^. PerExtraKmRateOrganizationId ==. val (toKey orgId)
getDomainPart :: FullPerExtraKmRate -> PerExtraKmRate
getDomainPart (_, _, domain) = domain
|
|
dc10c0f2e593240d84e81f4969367130c93023b35a6ec4b3b764b6d45a3c6830 | technomancy/leiningen | core.clj | (ns sample2.core
(:require sample2.alt)
(:gen-class))
| null | https://raw.githubusercontent.com/technomancy/leiningen/24fb93936133bd7fc30c393c127e9e69bb5f2392/test_projects/sample/checkouts/sample2/src/sample2/core.clj | clojure | (ns sample2.core
(:require sample2.alt)
(:gen-class))
|
|
78a2e36d780ce96ec031c697dbc1afef017da417cd00c721d5201385a13b3f0c | tfausak/rattletrap | Keyframe.hs | module Rattletrap.Type.Keyframe where
import qualified Rattletrap.ByteGet as ByteGet
import qualified Rattletrap.BytePut as BytePut
import qualified Rattletrap.Schema as Schema
import qualified Rattletrap.Type.F32 as F32
import qualified Rattletrap.Type.U32 as U32
import qualified Rattletrap.Utility.Json as Json
data Keyframe = Keyframe
| When this key frame occurs , in seconds .
time :: F32.F32,
-- | The frame number of this key frame, starting from 0.
frame :: U32.U32,
-- | The bit position of this key frame in the stream.
position :: U32.U32
}
deriving (Eq, Show)
instance Json.FromJSON Keyframe where
parseJSON = Json.withObject "Keyframe" $ \object -> do
time <- Json.required object "time"
frame <- Json.required object "frame"
position <- Json.required object "position"
pure Keyframe {time, frame, position}
instance Json.ToJSON Keyframe where
toJSON x =
Json.object
[ Json.pair "time" $ time x,
Json.pair "frame" $ frame x,
Json.pair "position" $ position x
]
schema :: Schema.Schema
schema =
Schema.named "keyframe" $
Schema.object
[ (Json.pair "time" $ Schema.ref F32.schema, True),
(Json.pair "frame" $ Schema.ref U32.schema, True),
(Json.pair "position" $ Schema.ref U32.schema, True)
]
bytePut :: Keyframe -> BytePut.BytePut
bytePut x =
F32.bytePut (time x) <> U32.bytePut (frame x) <> U32.bytePut (position x)
byteGet :: ByteGet.ByteGet Keyframe
byteGet = ByteGet.label "Keyframe" $ do
time <- ByteGet.label "time" F32.byteGet
frame <- ByteGet.label "frame" U32.byteGet
position <- ByteGet.label "position" U32.byteGet
pure Keyframe {time, frame, position}
| null | https://raw.githubusercontent.com/tfausak/rattletrap/cc6d6aba923d840f23de7673cab9a043096d3099/src/lib/Rattletrap/Type/Keyframe.hs | haskell | | The frame number of this key frame, starting from 0.
| The bit position of this key frame in the stream. | module Rattletrap.Type.Keyframe where
import qualified Rattletrap.ByteGet as ByteGet
import qualified Rattletrap.BytePut as BytePut
import qualified Rattletrap.Schema as Schema
import qualified Rattletrap.Type.F32 as F32
import qualified Rattletrap.Type.U32 as U32
import qualified Rattletrap.Utility.Json as Json
data Keyframe = Keyframe
| When this key frame occurs , in seconds .
time :: F32.F32,
frame :: U32.U32,
position :: U32.U32
}
deriving (Eq, Show)
instance Json.FromJSON Keyframe where
parseJSON = Json.withObject "Keyframe" $ \object -> do
time <- Json.required object "time"
frame <- Json.required object "frame"
position <- Json.required object "position"
pure Keyframe {time, frame, position}
instance Json.ToJSON Keyframe where
toJSON x =
Json.object
[ Json.pair "time" $ time x,
Json.pair "frame" $ frame x,
Json.pair "position" $ position x
]
schema :: Schema.Schema
schema =
Schema.named "keyframe" $
Schema.object
[ (Json.pair "time" $ Schema.ref F32.schema, True),
(Json.pair "frame" $ Schema.ref U32.schema, True),
(Json.pair "position" $ Schema.ref U32.schema, True)
]
bytePut :: Keyframe -> BytePut.BytePut
bytePut x =
F32.bytePut (time x) <> U32.bytePut (frame x) <> U32.bytePut (position x)
byteGet :: ByteGet.ByteGet Keyframe
byteGet = ByteGet.label "Keyframe" $ do
time <- ByteGet.label "time" F32.byteGet
frame <- ByteGet.label "frame" U32.byteGet
position <- ByteGet.label "position" U32.byteGet
pure Keyframe {time, frame, position}
|
24eba7a64ac249a99740d59da7edce5bbd1779457306003b4e426c98554097e0 | panda-planner-dev/ipc2020-domains | d-10.lisp | (defdomain domain (
(:operator (!obtain_permit ?op_h)
;; preconditions
(
(type_Hazardous ?op_h)
(not (Have_Permit ?op_h))
)
;; delete effects
()
;; add effects
((Have_Permit ?op_h))
)
(:operator (!collect_fees ?cf_p)
;; preconditions
(
(type_Package ?cf_p)
(not (Fees_Collected ?cf_p))
)
;; delete effects
()
;; add effects
((Fees_Collected ?cf_p))
)
(:operator (!collect_insurance ?ci_v)
;; preconditions
(
(type_Valuable ?ci_v)
(not (Insured ?ci_v))
)
;; delete effects
()
;; add effects
((Insured ?ci_v))
)
(:operator (!go_through_tcenter_cc ?gttc_lo ?gttc_ld ?gttc_co ?gttc_cd ?gttc_tc)
;; preconditions
(
(type_Not_TCenter ?gttc_lo) (type_Not_TCenter ?gttc_ld) (type_City ?gttc_co) (type_City ?gttc_cd) (type_TCenter ?gttc_tc)
(In_City ?gttc_lo ?gttc_co) (In_City ?gttc_ld ?gttc_cd) (Serves ?gttc_tc ?gttc_co) (Serves ?gttc_tc ?gttc_cd) (Available ?gttc_tc)
)
;; delete effects
()
;; add effects
()
)
(:operator (!go_through_two_tcenters_cities_ottd ?gtttcc_lo ?gtttcc_ld ?gtttcc_co ?gtttcc_cd ?gtttcc_t1 ?gtttcc_t2)
;; preconditions
(
(type_Not_TCenter ?gtttcc_lo) (type_Not_TCenter ?gtttcc_ld) (type_City ?gtttcc_co) (type_City ?gtttcc_cd) (type_TCenter ?gtttcc_t1) (type_TCenter ?gtttcc_t2)
(In_City ?gtttcc_lo ?gtttcc_co) (In_City ?gtttcc_ld ?gtttcc_cd) (Serves ?gtttcc_t1 ?gtttcc_co) (Serves ?gtttcc_t2 ?gtttcc_cd)
)
;; delete effects
()
;; add effects
()
)
(:operator (!go_through_two_tcenters_cities_otd ?gtttccotd_ld ?gtttccotd_co ?gtttccotd_cd ?gtttccotd_to ?gtttccotd_t1)
;; preconditions
(
(type_Not_TCenter ?gtttccotd_ld) (type_City ?gtttccotd_co) (type_City ?gtttccotd_cd) (type_TCenter ?gtttccotd_to) (type_TCenter ?gtttccotd_t1)
(In_City ?gtttccotd_to ?gtttccotd_co) (In_City ?gtttccotd_ld ?gtttccotd_cd) (Serves ?gtttccotd_t1 ?gtttccotd_cd)
)
;; delete effects
()
;; add effects
()
)
(:operator (!go_through_two_tcenters_cities_ott ?gtttccott_ld ?gtttccott_co ?gtttccott_cd ?gtttccott_to ?gtttccott_td)
;; preconditions
(
(type_City_Location ?gtttccott_ld) (type_City ?gtttccott_co) (type_City ?gtttccott_cd) (type_TCenter ?gtttccott_to) (type_TCenter ?gtttccott_td)
(In_City ?gtttccott_ld ?gtttccott_co) (In_City ?gtttccott_td ?gtttccott_cd) (Serves ?gtttccott_to ?gtttccott_co)
)
;; delete effects
()
;; add effects
()
)
(:operator (!go_through_two_tcenters ?gtttc_to ?gtttc_td)
;; preconditions
(
(type_TCenter ?gtttc_to) (type_TCenter ?gtttc_td)
(Available ?gtttc_to) (Available ?gtttc_td)
)
;; delete effects
()
;; add effects
()
)
(:operator (!go_through_two_tcenters_tt ?gtttctt_to ?gtttctt_td ?gtttctt_co ?gtttctt_cd)
;; preconditions
(
(type_TCenter ?gtttctt_to) (type_TCenter ?gtttctt_td) (type_City ?gtttctt_co) (type_City ?gtttctt_cd)
(In_City ?gtttctt_to ?gtttctt_co) (In_City ?gtttctt_td ?gtttctt_cd)
)
;; delete effects
()
;; add effects
()
)
(:operator (!go_through_two_tcenters_via_hub_hazardous ?gtttcvhh_to ?gtttcvhh_td ?gtttcvhh_h ?gtttcvhh_co ?gtttcvhh_ch ?gtttcvhh_cd ?gtttcvhh_ro ?gtttcvhh_rd)
;; preconditions
(
(type_TCenter ?gtttcvhh_to) (type_TCenter ?gtttcvhh_td) (type_Hub ?gtttcvhh_h) (type_City ?gtttcvhh_co) (type_City ?gtttcvhh_ch) (type_City ?gtttcvhh_cd) (type_Region ?gtttcvhh_ro) (type_Region ?gtttcvhh_rd)
(Available ?gtttcvhh_to) (Available ?gtttcvhh_td) (In_City ?gtttcvhh_h ?gtttcvhh_ch) (City_Hazardous_Compatible ?gtttcvhh_ch) (In_City ?gtttcvhh_to ?gtttcvhh_co) (In_City ?gtttcvhh_td ?gtttcvhh_cd) (In_Region ?gtttcvhh_co ?gtttcvhh_ro) (In_Region ?gtttcvhh_cd ?gtttcvhh_rd) (Serves ?gtttcvhh_h ?gtttcvhh_ro) (Serves ?gtttcvhh_h ?gtttcvhh_rd) (Available ?gtttcvhh_h)
)
;; delete effects
()
;; add effects
()
)
(:operator (!go_through_two_tcenters_via_hub_not_hazardous ?gtttcvhnh_to ?gtttcvhnh_td ?gtttcvhnh_co ?gtttcvhnh_cd ?gtttcvhnh_ro ?gtttcvhnh_rd ?gtttcvhnh_h)
;; preconditions
(
(type_TCenter ?gtttcvhnh_to) (type_TCenter ?gtttcvhnh_td) (type_City ?gtttcvhnh_co) (type_City ?gtttcvhnh_cd) (type_Region ?gtttcvhnh_ro) (type_Region ?gtttcvhnh_rd) (type_Hub ?gtttcvhnh_h)
(Available ?gtttcvhnh_to) (Available ?gtttcvhnh_td) (In_City ?gtttcvhnh_to ?gtttcvhnh_co) (In_City ?gtttcvhnh_td ?gtttcvhnh_cd) (In_Region ?gtttcvhnh_co ?gtttcvhnh_ro) (In_Region ?gtttcvhnh_cd ?gtttcvhnh_rd) (Serves ?gtttcvhnh_h ?gtttcvhnh_ro) (Serves ?gtttcvhnh_h ?gtttcvhnh_rd) (Available ?gtttcvhnh_h)
)
;; delete effects
()
;; add effects
()
)
(:operator (!deliver_p ?dp_p)
;; preconditions
(
(type_Package ?dp_p)
(Fees_Collected ?dp_p)
)
;; delete effects
((Fees_Collected ?dp_p))
;; add effects
((Delivered ?dp_p))
)
(:operator (!deliver_h ?dh_h)
;; preconditions
(
(type_Hazardous ?dh_h)
(Fees_Collected ?dh_h) (Have_Permit ?dh_h)
)
;; delete effects
((Have_Permit ?dh_h) (Fees_Collected ?dh_h))
;; add effects
((Delivered ?dh_h))
)
(:operator (!deliver_v ?dv_v)
;; preconditions
(
(type_Valuable ?dv_v)
(Fees_Collected ?dv_v) (Insured ?dv_v)
)
;; delete effects
((Fees_Collected ?dv_v) (Insured ?dv_v))
;; add effects
((Delivered ?dv_v))
)
(:operator (!post_guard_outside ?pco_a)
;; preconditions
(
(type_Armored ?pco_a)
)
;; delete effects
((Guard_Inside ?pco_a))
;; add effects
((Guard_Outside ?pco_a))
)
(:operator (!post_guard_inside ?pci_a)
;; preconditions
(
(type_Armored ?pci_a)
)
;; delete effects
((Guard_Outside ?pci_a))
;; add effects
((Guard_Inside ?pci_a))
)
(:operator (!remove_guard ?mc_a)
;; preconditions
(
(type_Armored ?mc_a)
)
;; delete effects
((Guard_Outside ?mc_a) (Guard_Inside ?mc_a))
;; add effects
()
)
(:operator (!decontaminate_interior ?di_v)
;; preconditions
(
(type_Vehicle ?di_v)
)
;; delete effects
()
;; add effects
((Decontaminated_Interior ?di_v))
)
(:operator (!affix_warning_signs ?fws_v)
;; preconditions
(
(type_Vehicle ?fws_v)
(not (Warning_Signs_Affixed ?fws_v))
)
;; delete effects
()
;; add effects
((Warning_Signs_Affixed ?fws_v))
)
(:operator (!remove_warning_signs ?mws_v)
;; preconditions
(
(type_Vehicle ?mws_v)
(Warning_Signs_Affixed ?mws_v)
)
;; delete effects
((Warning_Signs_Affixed ?mws_v))
;; add effects
()
)
(:operator (!attach_train_car ?atc_t ?atc_tc ?atc_l)
;; preconditions
(
(type_Train ?atc_t) (type_Traincar ?atc_tc) (type_Location ?atc_l)
(At_Vehicle ?atc_tc ?atc_l) (At_Vehicle ?atc_t ?atc_l) (not (Connected_To ?atc_tc ?atc_t))
)
;; delete effects
((At_Vehicle ?atc_tc ?atc_l))
;; add effects
((Connected_To ?atc_tc ?atc_t))
)
(:operator (!detach_train_car ?dtc_t ?dtc_tc ?dtc_l)
;; preconditions
(
(type_Train ?dtc_t) (type_Traincar ?dtc_tc) (type_Location ?dtc_l)
(At_Vehicle ?dtc_t ?dtc_l) (Connected_To ?dtc_tc ?dtc_t)
)
;; delete effects
((Connected_To ?dtc_tc ?dtc_t))
;; add effects
((At_Vehicle ?dtc_tc ?dtc_l))
)
(:operator (!connect_hose ?ch_tv ?ch_l)
;; preconditions
(
(type_Tanker_Vehicle ?ch_tv) (type_Liquid ?ch_l)
(not (Hose_Connected ?ch_tv ?ch_l))
)
;; delete effects
()
;; add effects
((Hose_Connected ?ch_tv ?ch_l))
)
(:operator (!disconnect_hose ?dch_tv ?dch_l)
;; preconditions
(
(type_Tanker_Vehicle ?dch_tv) (type_Liquid ?dch_l)
(Hose_Connected ?dch_tv ?dch_l)
)
;; delete effects
((Hose_Connected ?dch_tv ?dch_l))
;; add effects
()
)
(:operator (!open_valve ?ov_tv)
;; preconditions
(
(type_Tanker_Vehicle ?ov_tv)
(not (Valve_Open ?ov_tv))
)
;; delete effects
()
;; add effects
((Valve_Open ?ov_tv))
)
(:operator (!close_valve ?cv_tv)
;; preconditions
(
(type_Tanker_Vehicle ?cv_tv)
(Valve_Open ?cv_tv)
)
;; delete effects
((Valve_Open ?cv_tv))
;; add effects
()
)
(:operator (!fill_tank ?ft_tv ?ft_li ?ft_lo)
;; preconditions
(
(type_Tanker_Vehicle ?ft_tv) (type_Liquid ?ft_li) (type_Location ?ft_lo)
(Hose_Connected ?ft_tv ?ft_li) (Valve_Open ?ft_tv) (At_Package ?ft_li ?ft_lo) (At_Vehicle ?ft_tv ?ft_lo) (PV_Compatible ?ft_li ?ft_tv)
)
;; delete effects
((At_Package ?ft_li ?ft_lo))
;; add effects
((At_Package ?ft_li ?ft_tv))
)
(:operator (!empty_tank ?et_tv ?et_li ?et_lo)
;; preconditions
(
(type_Tanker_Vehicle ?et_tv) (type_Liquid ?et_li) (type_Location ?et_lo)
(Hose_Connected ?et_tv ?et_li) (Valve_Open ?et_tv) (At_Package ?et_li ?et_tv) (At_Vehicle ?et_tv ?et_lo)
)
;; delete effects
((At_Package ?et_li ?et_tv))
;; add effects
((At_Package ?et_li ?et_lo))
)
(:operator (!load_cars ?lc_c ?lc_v ?lc_l)
;; preconditions
(
(type_Cars ?lc_c) (type_Auto_Vehicle ?lc_v) (type_Location ?lc_l)
(At_Package ?lc_c ?lc_l) (At_Vehicle ?lc_v ?lc_l) (Ramp_Down ?lc_v) (PV_Compatible ?lc_c ?lc_v)
)
;; delete effects
((At_Package ?lc_c ?lc_l))
;; add effects
((At_Package ?lc_c ?lc_v))
)
(:operator (!unload_cars ?uc_c ?uc_v ?uc_l)
;; preconditions
(
(type_Cars ?uc_c) (type_Auto_Vehicle ?uc_v) (type_Location ?uc_l)
(At_Package ?uc_c ?uc_v) (At_Vehicle ?uc_v ?uc_l) (Ramp_Down ?uc_v)
)
;; delete effects
((At_Package ?uc_c ?uc_v))
;; add effects
((At_Package ?uc_c ?uc_l))
)
(:operator (!raise_ramp ?rr_v)
;; preconditions
(
(type_Vehicle ?rr_v)
(Ramp_Down ?rr_v)
)
;; delete effects
((Ramp_Down ?rr_v))
;; add effects
()
)
(:operator (!lower_ramp ?lr_v)
;; preconditions
(
(type_Vehicle ?lr_v)
(not (Ramp_Down ?lr_v))
)
;; delete effects
()
;; add effects
((Ramp_Down ?lr_v))
)
(:operator (!load_livestock ?ll_p ?ll_v ?ll_l)
;; preconditions
(
(type_Livestock_Package ?ll_p) (type_Livestock_Vehicle ?ll_v) (type_Location ?ll_l)
(At_Package ?ll_p ?ll_l) (At_Vehicle ?ll_v ?ll_l) (Ramp_Down ?ll_v) (PV_Compatible ?ll_p ?ll_v)
)
;; delete effects
((At_Package ?ll_p ?ll_l) (Clean_Interior ?ll_v))
;; add effects
((At_Package ?ll_p ?ll_v))
)
(:operator (!unload_livestock ?ull_p ?ull_v ?ull_l)
;; preconditions
(
(type_Livestock_Package ?ull_p) (type_Livestock_Vehicle ?ull_v) (type_Location ?ull_l)
(At_Package ?ull_p ?ull_v) (At_Vehicle ?ull_v ?ull_l) (Ramp_Down ?ull_v)
)
;; delete effects
((At_Package ?ull_p ?ull_v) (Trough_Full ?ull_v))
;; add effects
((At_Package ?ull_p ?ull_l))
)
(:operator (!fill_trough ?ftr_v)
;; preconditions
(
(type_Livestock_Vehicle ?ftr_v)
)
;; delete effects
()
;; add effects
((Trough_Full ?ftr_v))
)
(:operator (!do_clean_interior ?cli_v)
;; preconditions
(
(type_Vehicle ?cli_v)
)
;; delete effects
()
;; add effects
((Clean_Interior ?cli_v))
)
(:operator (!attach_conveyor_ramp ?acr_ap ?acr_pr ?acr_l)
;; preconditions
(
(type_Airplane ?acr_ap) (type_Plane_Ramp ?acr_pr) (type_Location ?acr_l)
(Available ?acr_pr) (At_Equipment ?acr_pr ?acr_l) (At_Vehicle ?acr_ap ?acr_l)
)
;; delete effects
((Available ?acr_pr))
;; add effects
((Ramp_Connected ?acr_pr ?acr_ap))
)
(:operator (!detach_conveyor_ramp ?dcr_ap ?dcr_pr ?dcr_l)
;; preconditions
(
(type_Airplane ?dcr_ap) (type_Plane_Ramp ?dcr_pr) (type_Location ?dcr_l)
(Ramp_Connected ?dcr_pr ?dcr_ap) (At_Equipment ?dcr_pr ?dcr_l) (At_Vehicle ?dcr_ap ?dcr_l)
)
;; delete effects
((Ramp_Connected ?dcr_pr ?dcr_ap))
;; add effects
((Available ?dcr_pr))
)
(:operator (!connect_chute ?cc_h)
;; preconditions
(
(type_Hopper_Vehicle ?cc_h)
(not (Chute_Connected ?cc_h))
)
;; delete effects
()
;; add effects
((Chute_Connected ?cc_h))
)
(:operator (!disconnect_chute ?dc_h)
;; preconditions
(
(type_Hopper_Vehicle ?dc_h)
(Chute_Connected ?dc_h)
)
;; delete effects
((Chute_Connected ?dc_h))
;; add effects
()
)
(:operator (!fill_hopper ?fh_p ?fh_hv ?fh_l)
;; preconditions
(
(type_Package ?fh_p) (type_Hopper_Vehicle ?fh_hv) (type_Location ?fh_l)
(Chute_Connected ?fh_hv) (At_Vehicle ?fh_hv ?fh_l) (At_Package ?fh_p ?fh_l) (PV_Compatible ?fh_p ?fh_hv)
)
;; delete effects
((At_Package ?fh_p ?fh_l))
;; add effects
((At_Package ?fh_p ?fh_hv))
)
(:operator (!empty_hopper ?eh_p ?eh_hv ?eh_l)
;; preconditions
(
(type_Package ?eh_p) (type_Hopper_Vehicle ?eh_hv) (type_Location ?eh_l)
(Chute_Connected ?eh_hv) (At_Vehicle ?eh_hv ?eh_l) (At_Package ?eh_p ?eh_hv)
)
;; delete effects
((At_Package ?eh_p ?eh_hv))
;; add effects
((At_Package ?eh_p ?eh_l))
)
(:operator (!pick_up_package_ground ?pupg_p ?pupg_c ?pupg_l)
;; preconditions
(
(type_Package ?pupg_p) (type_Crane ?pupg_c) (type_Location ?pupg_l)
(Empty ?pupg_c) (Available ?pupg_c) (At_Equipment ?pupg_c ?pupg_l) (At_Package ?pupg_p ?pupg_l)
)
;; delete effects
((Empty ?pupg_c) (At_Package ?pupg_p ?pupg_l))
;; add effects
((At_Package ?pupg_p ?pupg_c))
)
(:operator (!put_down_package_ground ?pdpg_p ?pdpg_c ?pdpg_l)
;; preconditions
(
(type_Package ?pdpg_p) (type_Crane ?pdpg_c) (type_Location ?pdpg_l)
(Available ?pdpg_c) (At_Equipment ?pdpg_c ?pdpg_l) (At_Package ?pdpg_p ?pdpg_c)
)
;; delete effects
((At_Package ?pdpg_p ?pdpg_c))
;; add effects
((At_Package ?pdpg_p ?pdpg_l) (Empty ?pdpg_c))
)
;; Primitive: crane ?pupv_c lifts package ?pupv_p off flatbed vehicle
;; ?pupv_fv; crane, vehicle, and package must all be at location ?pupv_l.
(:operator (!pick_up_package_vehicle ?pupv_p ?pupv_c ?pupv_fv ?pupv_l)
;; preconditions
(
(type_Package ?pupv_p) (type_Crane ?pupv_c) (type_Flatbed_Vehicle ?pupv_fv) (type_Location ?pupv_l)
(Empty ?pupv_c) (Available ?pupv_c) (At_Equipment ?pupv_c ?pupv_l) (At_Package ?pupv_p ?pupv_fv) (At_Vehicle ?pupv_fv ?pupv_l)
)
;; delete effects
((Empty ?pupv_c) (At_Package ?pupv_p ?pupv_fv))
;; add effects
((At_Package ?pupv_p ?pupv_c))
)
;; Primitive: crane ?pdpv_c places held package ?pdpv_p onto flatbed vehicle
;; ?pdpv_fv; requires package/vehicle compatibility (PV_Compatible).
(:operator (!put_down_package_vehicle ?pdpv_p ?pdpv_c ?pdpv_fv ?pdpv_l)
;; preconditions
(
(type_Package ?pdpv_p) (type_Crane ?pdpv_c) (type_Flatbed_Vehicle ?pdpv_fv) (type_Location ?pdpv_l)
(Available ?pdpv_c) (At_Package ?pdpv_p ?pdpv_c) (At_Equipment ?pdpv_c ?pdpv_l) (At_Vehicle ?pdpv_fv ?pdpv_l) (PV_Compatible ?pdpv_p ?pdpv_fv)
)
;; delete effects
((At_Package ?pdpv_p ?pdpv_c))
;; add effects
((Empty ?pdpv_c) (At_Package ?pdpv_p ?pdpv_fv))
)
;; Primitive: open the door of regular vehicle ?od_rv (only if not already
;; open). Door state gates !load_package/!unload_package in the regular
;; load/unload methods below.
(:operator (!open_door ?od_rv)
;; preconditions
(
(type_Regular_Vehicle ?od_rv)
(not (Door_Open ?od_rv))
)
;; delete effects
()
;; add effects
((Door_Open ?od_rv))
)
;; Primitive: close the (currently open) door of regular vehicle ?cd_rv.
(:operator (!close_door ?cd_rv)
;; preconditions
(
(type_Regular_Vehicle ?cd_rv)
(Door_Open ?cd_rv)
)
;; delete effects
((Door_Open ?cd_rv))
;; add effects
()
)
;; Primitive: move package ?lp_p from location ?lp_l into co-located vehicle
;; ?lp_v. Generic transfer used by the regular-vehicle and airplane methods;
;; requires PV_Compatible but does not itself check door state.
(:operator (!load_package ?lp_p ?lp_v ?lp_l)
;; preconditions
(
(type_Package ?lp_p) (type_Vehicle ?lp_v) (type_Location ?lp_l)
(At_Package ?lp_p ?lp_l) (At_Vehicle ?lp_v ?lp_l) (PV_Compatible ?lp_p ?lp_v)
)
;; delete effects
((At_Package ?lp_p ?lp_l))
;; add effects
((At_Package ?lp_p ?lp_v))
)
;; Primitive: move package ?up_p out of vehicle ?up_v onto the vehicle's
;; current location ?up_l. Inverse of !load_package.
(:operator (!unload_package ?up_p ?up_v ?up_l)
;; preconditions
(
(type_Package ?up_p) (type_Vehicle ?up_v) (type_Location ?up_l)
(At_Package ?up_p ?up_v) (At_Vehicle ?up_v ?up_l)
)
;; delete effects
((At_Package ?up_p ?up_v))
;; add effects
((At_Package ?up_p ?up_l))
)
;; Primitive: drive vehicle ?hmnt_v from ?hmnt_o to ?hmnt_d along route
;; ?hmnt_r. The route must connect the two locations, be available, and be
;; compatible with the vehicle (RV_Compatible). Traincars cannot use this;
;; they move via helper_move_traincar instead.
(:operator (!move_vehicle_no_traincar ?hmnt_v ?hmnt_o ?hmnt_r ?hmnt_d)
;; preconditions
(
(type_Vehicle ?hmnt_v) (type_Location ?hmnt_o) (type_Route ?hmnt_r) (type_Location ?hmnt_d)
(Connects ?hmnt_r ?hmnt_o ?hmnt_d) (Available ?hmnt_v) (Available ?hmnt_r) (RV_Compatible ?hmnt_r ?hmnt_v) (At_Vehicle ?hmnt_v ?hmnt_o)
)
;; delete effects
((At_Vehicle ?hmnt_v ?hmnt_o))
;; add effects
((At_Vehicle ?hmnt_v ?hmnt_d))
)
;; Top-level task: the problem goal is a single transport of the package
;; bound by type_sort_for_Essen from the HauptbahnhofUlm location to the
;; HauptbahnhofMuenchen location (variables bound via the generated
;; problem-specific sort predicates).
(:method (__top)
__top_method
(
(type_sort_for_Essen ?var_for_Essen_1) (type_sort_for_HauptbahnhofMuenchen ?var_for_HauptbahnhofMuenchen_2) (type_sort_for_HauptbahnhofUlm ?var_for_HauptbahnhofUlm_3)
)
((transport ?var_for_Essen_1 ?var_for_HauptbahnhofUlm_3 ?var_for_HauptbahnhofMuenchen_2))
)
;; Task (carry ?p ?origin ?dest): six alternative decompositions, selected by
;; whether origin/destination are transport centers (TCenter) or ordinary
;; city locations. NOTE(review): each method's precondition repeats the type
;; atoms of the task parameters — apparently generator output.
;; Case 1: direct carry between any two locations.
(:method (carry ?mccd_cd_p ?mccd_cd_lo ?mccd_cd_ld)
method_carry_cd
(
(type_Package ?mccd_cd_p) (type_Location ?mccd_cd_lo) (type_Location ?mccd_cd_ld)
(type_Location ?mccd_cd_ld) (type_Location ?mccd_cd_lo) (type_Package ?mccd_cd_p)
)
((carry_direct ?mccd_cd_p ?mccd_cd_lo ?mccd_cd_ld))
)
;; Case 2: both endpoints are TCenters — carry between them (possibly via a
;; hub) through helper_carry_tt.
(:method (carry ?mch_hctt_p ?mch_hctt_o ?mch_hctt_d)
method_carry_cvh
(
(type_Package ?mch_hctt_p) (type_Location ?mch_hctt_o) (type_Location ?mch_hctt_d)
(type_City ?mch_hctt_cd) (type_City ?mch_hctt_co) (type_TCenter ?mch_hctt_d) (type_TCenter ?mch_hctt_o) (type_Package ?mch_hctt_p)
)
((helper_carry_tt ?mch_hctt_p ?mch_hctt_o ?mch_hctt_co ?mch_hctt_d ?mch_hctt_cd))
)
;; Case 3: city-location origin, TCenter destination — first leg to an
;; intermediate TCenter ?t1, then between TCenters.
(:method (carry ?mccct_hcott_p ?mccct_hcott_o ?mccct_hcott_d)
method_carry_cd_cbtc
(
(type_Package ?mccct_hcott_p) (type_Location ?mccct_hcott_o) (type_Location ?mccct_hcott_d)
(type_City ?mccct_hcott_cd) (type_City ?mccct_hcott_co) (type_TCenter ?mccct_hcott_d) (type_City_Location ?mccct_hcott_o) (type_Package ?mccct_hcott_p) (type_TCenter ?mccct_hcott_t1)
)
((helper_carry_ott ?mccct_hcott_p ?mccct_hcott_o ?mccct_hcott_co ?mccct_hcott_t1 ?mccct_hcott_d ?mccct_hcott_cd))
)
;; Case 4: TCenter origin, non-TCenter destination — between TCenters first,
;; then a final direct leg.
(:method (carry ?mcctc_hcotd_p ?mcctc_hcotd_o ?mcctc_hcotd_d)
method_carry_cbtc_cd
(
(type_Package ?mcctc_hcotd_p) (type_Location ?mcctc_hcotd_o) (type_Location ?mcctc_hcotd_d)
(type_City ?mcctc_hcotd_cd) (type_City ?mcctc_hcotd_co) (type_Not_TCenter ?mcctc_hcotd_d) (type_TCenter ?mcctc_hcotd_o) (type_Package ?mcctc_hcotd_p) (type_TCenter ?mcctc_hcotd_t1)
)
((helper_carry_otd ?mcctc_hcotd_p ?mcctc_hcotd_o ?mcctc_hcotd_co ?mcctc_hcotd_t1 ?mcctc_hcotd_d ?mcctc_hcotd_cd))
)
;; Case 5: neither endpoint is a TCenter — direct leg, inter-TCenter leg
;; between ?t1 and ?t2, then direct leg to the destination.
(:method (carry ?mcccc_hcottd_p ?mcccc_hcottd_o ?mcccc_hcottd_d)
method_carry_cd_cbtc_cd
(
(type_Package ?mcccc_hcottd_p) (type_Location ?mcccc_hcottd_o) (type_Location ?mcccc_hcottd_d)
(type_City ?mcccc_hcottd_cd) (type_City ?mcccc_hcottd_co) (type_Not_TCenter ?mcccc_hcottd_d) (type_Not_TCenter ?mcccc_hcottd_o) (type_Package ?mcccc_hcottd_p) (type_TCenter ?mcccc_hcottd_t1) (type_TCenter ?mcccc_hcottd_t2)
)
((helper_carry_ottd ?mcccc_hcottd_p ?mcccc_hcottd_o ?mcccc_hcottd_co ?mcccc_hcottd_t1 ?mcccc_hcottd_t2 ?mcccc_hcottd_d ?mcccc_hcottd_cd))
)
;; Case 6: neither endpoint is a TCenter but a single TCenter ?t serves both
;; cities — two direct legs through that one TCenter.
(:method (carry ?mccc_hccc_p ?mccc_hccc_o ?mccc_hccc_d)
method_carry_cd_cd
(
(type_Package ?mccc_hccc_p) (type_Location ?mccc_hccc_o) (type_Location ?mccc_hccc_d)
(type_City ?mccc_hccc_cd) (type_City ?mccc_hccc_co) (type_Not_TCenter ?mccc_hccc_d) (type_Not_TCenter ?mccc_hccc_o) (type_Package ?mccc_hccc_p) (type_TCenter ?mccc_hccc_t)
)
((helper_carry_cc ?mccc_hccc_p ?mccc_hccc_o ?mccc_hccc_co ?mccc_hccc_t ?mccc_hccc_d ?mccc_hccc_cd))
)
;; Task (carry_between_tcenters ?p ?to ?td): two alternatives.
;; Alternative 1: carry directly between the two TCenters; the bookkeeping
;; operator !go_through_two_tcenters (checks both centers Available) and the
;; carry_direct subtask are :unordered, i.e. may interleave in any order.
(:method (carry_between_tcenters ?mcbtc_cd_p ?mcbtc_gtttc_to ?mcbtc_gtttc_td)
method_carry_between_tcenters_cd
(
(type_Package ?mcbtc_cd_p) (type_TCenter ?mcbtc_gtttc_to) (type_TCenter ?mcbtc_gtttc_td)
(type_Package ?mcbtc_cd_p) (type_TCenter ?mcbtc_gtttc_td) (type_TCenter ?mcbtc_gtttc_to)
)
(:unordered (!go_through_two_tcenters ?mcbtc_gtttc_to ?mcbtc_gtttc_td) (carry_direct ?mcbtc_cd_p ?mcbtc_gtttc_to ?mcbtc_gtttc_td))
)
;; Alternative 2: route the package through a hub (see carry_via_hub below).
(:method (carry_between_tcenters ?mcbth_tch_p ?mcbth_tch_tco ?mcbth_tch_tcd)
method_carry_between_tcenters_cvh
(
(type_Package ?mcbth_tch_p) (type_TCenter ?mcbth_tch_tco) (type_TCenter ?mcbth_tch_tcd)
(type_Package ?mcbth_tch_p) (type_TCenter ?mcbth_tch_tcd) (type_TCenter ?mcbth_tch_tco)
)
((carry_via_hub ?mcbth_tch_p ?mcbth_tch_tco ?mcbth_tch_tcd))
)
;; Task (carry_direct ?p ?o ?d): pick some vehicle ?v (bound only by its
;; type atom here; further constraints are checked in the helper's subtasks)
;; and delegate to helper_carry_direct, which moves it, loads, drives, and
;; unloads.
(:method (carry_direct ?mcd_hmcd_p ?mcd_hmcd_o ?mcd_hmcd_d)
method_carry_direct
(
(type_Package ?mcd_hmcd_p) (type_Location ?mcd_hmcd_o) (type_Location ?mcd_hmcd_d)
(type_Location ?mcd_hmcd_d) (type_Location ?mcd_hmcd_o) (type_Package ?mcd_hmcd_p) (type_Vehicle ?mcd_hmcd_v)
)
((helper_carry_direct ?mcd_hmcd_v ?mcd_hmcd_p ?mcd_hmcd_o ?mcd_hmcd_d))
)
;; Task (carry_via_hub ?p ?tco ?tcd): two alternatives distinguished by
;; hazard handling. Both bind a hub ?h plus the cities/regions of the two
;; TCenters and delegate to the corresponding helper.
;; Alternative 1: non-hazardous routing through hub ?h.
(:method (carry_via_hub ?mcvhn_hcvhn_p ?mcvhn_hcvhn_tco ?mcvhn_hcvhn_tcd)
method_carry_via_hub_not_hazardous
(
(type_Package ?mcvhn_hcvhn_p) (type_TCenter ?mcvhn_hcvhn_tco) (type_TCenter ?mcvhn_hcvhn_tcd)
(type_City ?mcvhn_hcvhn_ctcd) (type_City ?mcvhn_hcvhn_ctco) (type_Hub ?mcvhn_hcvhn_h) (type_Package ?mcvhn_hcvhn_p) (type_Region ?mcvhn_hcvhn_rctcd) (type_Region ?mcvhn_hcvhn_rctco) (type_TCenter ?mcvhn_hcvhn_tcd) (type_TCenter ?mcvhn_hcvhn_tco)
)
((helper_carry_via_hub_not_hazardous ?mcvhn_hcvhn_p ?mcvhn_hcvhn_tco ?mcvhn_hcvhn_ctco ?mcvhn_hcvhn_rctco ?mcvhn_hcvhn_h ?mcvhn_hcvhn_tcd ?mcvhn_hcvhn_ctcd ?mcvhn_hcvhn_rctcd))
)
;; Alternative 2: hazardous routing — additionally binds the hub's city ?ch
;; so the helper can check City_Hazardous_Compatible.
(:method (carry_via_hub ?mcvhh_hcvhh_p ?mcvhh_hcvhh_tco ?mcvhh_hcvhh_tcd)
method_carry_via_hub_hazardous
(
(type_Package ?mcvhh_hcvhh_p) (type_TCenter ?mcvhh_hcvhh_tco) (type_TCenter ?mcvhh_hcvhh_tcd)
(type_City ?mcvhh_hcvhh_ch) (type_City ?mcvhh_hcvhh_ctcd) (type_City ?mcvhh_hcvhh_ctco) (type_Hub ?mcvhh_hcvhh_h) (type_Package ?mcvhh_hcvhh_p) (type_Region ?mcvhh_hcvhh_rctcd) (type_Region ?mcvhh_hcvhh_rctco) (type_TCenter ?mcvhh_hcvhh_tcd) (type_TCenter ?mcvhh_hcvhh_tco)
)
((helper_carry_via_hub_hazardous ?mcvhh_hcvhh_p ?mcvhh_hcvhh_tco ?mcvhh_hcvhh_ctco ?mcvhh_hcvhh_rctco ?mcvhh_hcvhh_h ?mcvhh_hcvhh_ch ?mcvhh_hcvhh_tcd ?mcvhh_hcvhh_ctcd ?mcvhh_hcvhh_rctcd))
)
;; Task (deliver ?p): choose the delivery primitive matching the package
;; category. !deliver_p consumes Fees_Collected; !deliver_v additionally
;; consumes Insured; !deliver_h additionally consumes Have_Permit (see the
;; operator definitions earlier in the domain).
;; Ordinary package.
(:method (deliver ?mddp_dp_p)
method_deliver_dp
(
(type_Package ?mddp_dp_p)
(type_Package ?mddp_dp_p)
)
((!deliver_p ?mddp_dp_p))
)
;; Valuable package.
(:method (deliver ?mddv_dv_v)
method_deliver_dv
(
(type_Package ?mddv_dv_v)
(type_Valuable ?mddv_dv_v)
)
((!deliver_v ?mddv_dv_v))
)
;; Hazardous package.
(:method (deliver ?mddh_dh_h)
method_deliver_dh
(
(type_Package ?mddh_dh_h)
(type_Hazardous ?mddh_dh_h)
)
((!deliver_h ?mddh_dh_h))
)
;; Helper for method_carry_cd_cd: carry ?p from ?lo to TCenter ?tc, verify
;; via !go_through_tcenter_cc that ?tc serves both cities and is available,
;; then carry on from ?tc to the destination ?ld.
(:method (helper_carry_cc ?mhccc_cdd_p ?mhccc_gttc_lo ?mhccc_gttc_co ?mhccc_gttc_tc ?mhccc_gttc_ld ?mhccc_gttc_cd)
method_helper_carry_cd_cd
(
(type_Package ?mhccc_cdd_p) (type_Not_TCenter ?mhccc_gttc_lo) (type_City ?mhccc_gttc_co) (type_TCenter ?mhccc_gttc_tc) (type_Not_TCenter ?mhccc_gttc_ld) (type_City ?mhccc_gttc_cd)
(type_Package ?mhccc_cdd_p) (type_City ?mhccc_gttc_cd) (type_City ?mhccc_gttc_co) (type_Not_TCenter ?mhccc_gttc_ld) (type_Not_TCenter ?mhccc_gttc_lo) (type_TCenter ?mhccc_gttc_tc)
)
((carry_direct ?mhccc_cdd_p ?mhccc_gttc_lo ?mhccc_gttc_tc) (!go_through_tcenter_cc ?mhccc_gttc_lo ?mhccc_gttc_ld ?mhccc_gttc_co ?mhccc_gttc_cd ?mhccc_gttc_tc) (carry_direct ?mhccc_cdd_p ?mhccc_gttc_tc ?mhccc_gttc_ld))
)
;; Helper for carry_direct: the full move-load-move-unload sequence.
;; Variant 1: vehicle starts elsewhere (?mvo_lo, bound only here) — first
;; move it to the pickup location, then load, drive, and unload.
(:method (helper_carry_direct ?mhcd_ult_v ?mhcd_ult_p ?mhcd_mvd_lo ?mhcd_ult_l)
method_helper_carry_direct
(
(type_Vehicle ?mhcd_ult_v) (type_Package ?mhcd_ult_p) (type_Location ?mhcd_mvd_lo) (type_Location ?mhcd_ult_l)
(type_Location ?mhcd_mvd_lo) (type_Location ?mhcd_mvo_lo) (type_Location ?mhcd_ult_l) (type_Package ?mhcd_ult_p) (type_Vehicle ?mhcd_ult_v)
)
((move ?mhcd_ult_v ?mhcd_mvo_lo ?mhcd_mvd_lo) (load_top ?mhcd_ult_p ?mhcd_ult_v ?mhcd_mvd_lo) (move ?mhcd_ult_v ?mhcd_mvd_lo ?mhcd_ult_l) (unload_top ?mhcd_ult_p ?mhcd_ult_v ?mhcd_ult_l))
)
;; Variant 2: vehicle is already at the pickup location — skip the initial
;; move.
(:method (helper_carry_direct ?mhcdo_ult_v ?mhcdo_ult_p ?mhcdo_m_lo ?mhcdo_ult_l)
method_helper_carry_direct_noMoveFirst
(
(type_Vehicle ?mhcdo_ult_v) (type_Package ?mhcdo_ult_p) (type_Location ?mhcdo_m_lo) (type_Location ?mhcdo_ult_l)
(type_Location ?mhcdo_m_lo) (type_Location ?mhcdo_ult_l) (type_Package ?mhcdo_ult_p) (type_Vehicle ?mhcdo_ult_v)
)
((load_top ?mhcdo_ult_p ?mhcdo_ult_v ?mhcdo_m_lo) (move ?mhcdo_ult_v ?mhcdo_m_lo ?mhcdo_ult_l) (unload_top ?mhcdo_ult_p ?mhcdo_ult_v ?mhcdo_ult_l))
)
;; Helper for method_carry_cbtc_cd (TCenter origin, plain destination):
;; inter-TCenter leg ?o -> ?t1, bookkeeping check, then direct leg to ?cl.
(:method (helper_carry_otd ?mhcctc_cd_p ?mhcctc_gtttccotd_o ?mhcctc_gtttccotd_co ?mhcctc_gtttccotd_t1 ?mhcctc_gtttccotd_cl ?mhcctc_gtttccotd_cd)
method_helper_carry_cbtc_cd
(
(type_Package ?mhcctc_cd_p) (type_TCenter ?mhcctc_gtttccotd_o) (type_City ?mhcctc_gtttccotd_co) (type_TCenter ?mhcctc_gtttccotd_t1) (type_Not_TCenter ?mhcctc_gtttccotd_cl) (type_City ?mhcctc_gtttccotd_cd)
(type_Package ?mhcctc_cd_p) (type_City ?mhcctc_gtttccotd_cd) (type_Not_TCenter ?mhcctc_gtttccotd_cl) (type_City ?mhcctc_gtttccotd_co) (type_TCenter ?mhcctc_gtttccotd_o) (type_TCenter ?mhcctc_gtttccotd_t1)
)
((carry_between_tcenters ?mhcctc_cd_p ?mhcctc_gtttccotd_o ?mhcctc_gtttccotd_t1) (!go_through_two_tcenters_cities_otd ?mhcctc_gtttccotd_cl ?mhcctc_gtttccotd_co ?mhcctc_gtttccotd_cd ?mhcctc_gtttccotd_o ?mhcctc_gtttccotd_t1) (carry_direct ?mhcctc_cd_p ?mhcctc_gtttccotd_t1 ?mhcctc_gtttccotd_cl))
)
;; Helper for method_carry_cd_cbtc (city-location origin, TCenter
;; destination): direct leg ?cl -> ?to, bookkeeping check, then
;; inter-TCenter leg ?to -> ?td.
(:method (helper_carry_ott ?mhccct_cbt_p ?mhccct_gtttccott_cl ?mhccct_gtttccott_co ?mhccct_gtttccott_to ?mhccct_gtttccott_td ?mhccct_gtttccott_cd)
method_helper_carry_cd_cbtc
(
(type_Package ?mhccct_cbt_p) (type_City_Location ?mhccct_gtttccott_cl) (type_City ?mhccct_gtttccott_co) (type_TCenter ?mhccct_gtttccott_to) (type_TCenter ?mhccct_gtttccott_td) (type_City ?mhccct_gtttccott_cd)
(type_Package ?mhccct_cbt_p) (type_City ?mhccct_gtttccott_cd) (type_City_Location ?mhccct_gtttccott_cl) (type_City ?mhccct_gtttccott_co) (type_TCenter ?mhccct_gtttccott_td) (type_TCenter ?mhccct_gtttccott_to)
)
((carry_direct ?mhccct_cbt_p ?mhccct_gtttccott_cl ?mhccct_gtttccott_to) (!go_through_two_tcenters_cities_ott ?mhccct_gtttccott_cl ?mhccct_gtttccott_co ?mhccct_gtttccott_cd ?mhccct_gtttccott_to ?mhccct_gtttccott_td) (carry_between_tcenters ?mhccct_cbt_p ?mhccct_gtttccott_to ?mhccct_gtttccott_td))
)
;; Helper for method_carry_cd_cbtc_cd (plain origin and destination):
;; direct leg to ?t1, bookkeeping check, inter-TCenter leg ?t1 -> ?t2, and
;; a final direct leg ?t2 -> ?ld.
(:method (helper_carry_ottd ?mhcccc_cdd_p ?mhcccc_gtttc_lo ?mhcccc_gtttc_co ?mhcccc_gtttc_t1 ?mhcccc_gtttc_t2 ?mhcccc_gtttc_ld ?mhcccc_gtttc_cd)
method_helper_carry_cd_cbtc_cd
(
(type_Package ?mhcccc_cdd_p) (type_Not_TCenter ?mhcccc_gtttc_lo) (type_City ?mhcccc_gtttc_co) (type_TCenter ?mhcccc_gtttc_t1) (type_TCenter ?mhcccc_gtttc_t2) (type_Not_TCenter ?mhcccc_gtttc_ld) (type_City ?mhcccc_gtttc_cd)
(type_Package ?mhcccc_cdd_p) (type_City ?mhcccc_gtttc_cd) (type_City ?mhcccc_gtttc_co) (type_Not_TCenter ?mhcccc_gtttc_ld) (type_Not_TCenter ?mhcccc_gtttc_lo) (type_TCenter ?mhcccc_gtttc_t1) (type_TCenter ?mhcccc_gtttc_t2)
)
((carry_direct ?mhcccc_cdd_p ?mhcccc_gtttc_lo ?mhcccc_gtttc_t1) (!go_through_two_tcenters_cities_ottd ?mhcccc_gtttc_lo ?mhcccc_gtttc_ld ?mhcccc_gtttc_co ?mhcccc_gtttc_cd ?mhcccc_gtttc_t1 ?mhcccc_gtttc_t2) (carry_between_tcenters ?mhcccc_cdd_p ?mhcccc_gtttc_t1 ?mhcccc_gtttc_t2) (carry_direct ?mhcccc_cdd_p ?mhcccc_gtttc_t2 ?mhcccc_gtttc_ld))
)
;; Helper for method_carry_cvh (both endpoints TCenters): carry via a hub,
;; with !go_through_two_tcenters_tt checking that each TCenter lies in its
;; stated city.
(:method (helper_carry_tt ?mhch_tch_p ?mhch_gtttctt_to ?mhch_gtttctt_co ?mhch_gtttctt_td ?mhch_gtttctt_cd)
method_helper_carry_cvh
(
(type_Package ?mhch_tch_p) (type_TCenter ?mhch_gtttctt_to) (type_City ?mhch_gtttctt_co) (type_TCenter ?mhch_gtttctt_td) (type_City ?mhch_gtttctt_cd)
(type_City ?mhch_gtttctt_cd) (type_City ?mhch_gtttctt_co) (type_TCenter ?mhch_gtttctt_td) (type_TCenter ?mhch_gtttctt_to) (type_Package ?mhch_tch_p)
)
((carry_via_hub ?mhch_tch_p ?mhch_gtttctt_to ?mhch_gtttctt_td) (!go_through_two_tcenters_tt ?mhch_gtttctt_to ?mhch_gtttctt_td ?mhch_gtttctt_co ?mhch_gtttctt_cd))
)
;; Hub-routing helpers: carry ?p from origin TCenter to hub ?h, run the
;; bookkeeping operator that validates the hub serves both regions (and, for
;; hazardous cargo, that the hub's city is City_Hazardous_Compatible), then
;; carry from the hub to the destination TCenter.
;; Hazardous variant.
(:method (helper_carry_via_hub_hazardous ?mhcvhh_cd2_p ?mhcvhh_gtttcvhh_to ?mhcvhh_gtttcvhh_co ?mhcvhh_gtttcvhh_ro ?mhcvhh_gtttcvhh_h ?mhcvhh_gtttcvhh_ch ?mhcvhh_gtttcvhh_td ?mhcvhh_gtttcvhh_cd ?mhcvhh_gtttcvhh_rd)
method_helper_carry_via_hub_hazardous
(
(type_Package ?mhcvhh_cd2_p) (type_TCenter ?mhcvhh_gtttcvhh_to) (type_City ?mhcvhh_gtttcvhh_co) (type_Region ?mhcvhh_gtttcvhh_ro) (type_Hub ?mhcvhh_gtttcvhh_h) (type_City ?mhcvhh_gtttcvhh_ch) (type_TCenter ?mhcvhh_gtttcvhh_td) (type_City ?mhcvhh_gtttcvhh_cd) (type_Region ?mhcvhh_gtttcvhh_rd)
(type_Package ?mhcvhh_cd2_p) (type_City ?mhcvhh_gtttcvhh_cd) (type_City ?mhcvhh_gtttcvhh_ch) (type_City ?mhcvhh_gtttcvhh_co) (type_Hub ?mhcvhh_gtttcvhh_h) (type_Region ?mhcvhh_gtttcvhh_rd) (type_Region ?mhcvhh_gtttcvhh_ro) (type_TCenter ?mhcvhh_gtttcvhh_td) (type_TCenter ?mhcvhh_gtttcvhh_to)
)
((carry_direct ?mhcvhh_cd2_p ?mhcvhh_gtttcvhh_to ?mhcvhh_gtttcvhh_h) (!go_through_two_tcenters_via_hub_hazardous ?mhcvhh_gtttcvhh_to ?mhcvhh_gtttcvhh_td ?mhcvhh_gtttcvhh_h ?mhcvhh_gtttcvhh_co ?mhcvhh_gtttcvhh_ch ?mhcvhh_gtttcvhh_cd ?mhcvhh_gtttcvhh_ro ?mhcvhh_gtttcvhh_rd) (carry_direct ?mhcvhh_cd2_p ?mhcvhh_gtttcvhh_h ?mhcvhh_gtttcvhh_td))
)
;; Non-hazardous variant (no hub-city compatibility check needed).
(:method (helper_carry_via_hub_not_hazardous ?mhcvhn_cd2_p ?mhcvhn_gtttcvhnh_to ?mhcvhn_gtttcvhnh_co ?mhcvhn_gtttcvhnh_ro ?mhcvhn_gtttcvhnh_h ?mhcvhn_gtttcvhnh_td ?mhcvhn_gtttcvhnh_cd ?mhcvhn_gtttcvhnh_rd)
method_helper_carry_via_hub_not_hazardous
(
(type_Package ?mhcvhn_cd2_p) (type_TCenter ?mhcvhn_gtttcvhnh_to) (type_City ?mhcvhn_gtttcvhnh_co) (type_Region ?mhcvhn_gtttcvhnh_ro) (type_Hub ?mhcvhn_gtttcvhnh_h) (type_TCenter ?mhcvhn_gtttcvhnh_td) (type_City ?mhcvhn_gtttcvhnh_cd) (type_Region ?mhcvhn_gtttcvhnh_rd)
(type_Package ?mhcvhn_cd2_p) (type_City ?mhcvhn_gtttcvhnh_cd) (type_City ?mhcvhn_gtttcvhnh_co) (type_Hub ?mhcvhn_gtttcvhnh_h) (type_Region ?mhcvhn_gtttcvhnh_rd) (type_Region ?mhcvhn_gtttcvhnh_ro) (type_TCenter ?mhcvhn_gtttcvhnh_td) (type_TCenter ?mhcvhn_gtttcvhnh_to)
)
((carry_direct ?mhcvhn_cd2_p ?mhcvhn_gtttcvhnh_to ?mhcvhn_gtttcvhnh_h) (!go_through_two_tcenters_via_hub_not_hazardous ?mhcvhn_gtttcvhnh_to ?mhcvhn_gtttcvhnh_td ?mhcvhn_gtttcvhnh_co ?mhcvhn_gtttcvhnh_cd ?mhcvhn_gtttcvhnh_ro ?mhcvhn_gtttcvhnh_rd ?mhcvhn_gtttcvhnh_h) (carry_direct ?mhcvhn_cd2_p ?mhcvhn_gtttcvhnh_h ?mhcvhn_gtttcvhnh_td))
)
;; Moving a traincar ?tc requires a train ?t: attach the car, move the
;; train, then detach the car at the destination.
;; Variant 1: train starts elsewhere (?mo_lo, bound only here) — move the
;; train to the traincar first.
(:method (helper_move_traincar ?mhmt_dtc_tc ?mhmt_dtc_t ?mhmt_md_lo ?mhmt_dtc_l)
method_helper_move_traincar
(
(type_Traincar ?mhmt_dtc_tc) (type_Train ?mhmt_dtc_t) (type_Location ?mhmt_md_lo) (type_Location ?mhmt_dtc_l)
(type_Location ?mhmt_dtc_l) (type_Train ?mhmt_dtc_t) (type_Traincar ?mhmt_dtc_tc) (type_Location ?mhmt_md_lo) (type_Location ?mhmt_mo_lo)
)
((move ?mhmt_dtc_t ?mhmt_mo_lo ?mhmt_md_lo) (!attach_train_car ?mhmt_dtc_t ?mhmt_dtc_tc ?mhmt_md_lo) (move ?mhmt_dtc_t ?mhmt_md_lo ?mhmt_dtc_l) (!detach_train_car ?mhmt_dtc_t ?mhmt_dtc_tc ?mhmt_dtc_l))
)
;; Variant 2: train is already co-located with the traincar — attach
;; immediately.
(:method (helper_move_traincar ?mhmtn_dtc_tc ?mhmtn_dtc_t ?mhmtn_md_lo ?mhmtn_dtc_l)
method_helper_move_traincar_noMoveFirst
(
(type_Traincar ?mhmtn_dtc_tc) (type_Train ?mhmtn_dtc_t) (type_Location ?mhmtn_md_lo) (type_Location ?mhmtn_dtc_l)
(type_Location ?mhmtn_dtc_l) (type_Train ?mhmtn_dtc_t) (type_Traincar ?mhmtn_dtc_tc) (type_Location ?mhmtn_md_lo)
)
((!attach_train_car ?mhmtn_dtc_t ?mhmtn_dtc_tc ?mhmtn_md_lo) (move ?mhmtn_dtc_t ?mhmtn_md_lo ?mhmtn_dtc_l) (!detach_train_car ?mhmtn_dtc_t ?mhmtn_dtc_tc ?mhmtn_dtc_l))
)
;; Task (load ?p ?v ?l): seven decompositions, one per vehicle/package
;; category. Each wraps the category-specific loading primitive(s) in the
;; required preparation/cleanup steps (doors, cranes, chutes, hoses, ramps).
;; Regular vehicle: open door, load, close door.
(:method (load ?mlr_lp_p ?mlr_cd_rv ?mlr_lp_l)
method_load_regular
(
(type_Package ?mlr_lp_p) (type_Vehicle ?mlr_cd_rv) (type_Location ?mlr_lp_l)
(type_Regular_Vehicle ?mlr_cd_rv) (type_Location ?mlr_lp_l) (type_Package ?mlr_lp_p)
)
((!open_door ?mlr_cd_rv) (!load_package ?mlr_lp_p ?mlr_cd_rv ?mlr_lp_l) (!close_door ?mlr_cd_rv))
)
;; Flatbed vehicle: a crane ?c (bound here) lifts the package from the
;; ground and places it on the flatbed.
(:method (load ?mlf_pdpv_p ?mlf_pdpv_fv ?mlf_pdpv_l)
method_load_flatbed
(
(type_Package ?mlf_pdpv_p) (type_Vehicle ?mlf_pdpv_fv) (type_Location ?mlf_pdpv_l)
(type_Crane ?mlf_pdpv_c) (type_Flatbed_Vehicle ?mlf_pdpv_fv) (type_Location ?mlf_pdpv_l) (type_Package ?mlf_pdpv_p)
)
((!pick_up_package_ground ?mlf_pdpv_p ?mlf_pdpv_c ?mlf_pdpv_l) (!put_down_package_vehicle ?mlf_pdpv_p ?mlf_pdpv_c ?mlf_pdpv_fv ?mlf_pdpv_l))
)
;; Hopper vehicle: connect chute, fill hopper, disconnect chute.
(:method (load ?mlh_fh_p ?mlh_dc_h ?mlh_fh_l)
method_load_hopper
(
(type_Package ?mlh_fh_p) (type_Vehicle ?mlh_dc_h) (type_Location ?mlh_fh_l)
(type_Hopper_Vehicle ?mlh_dc_h) (type_Location ?mlh_fh_l) (type_Package ?mlh_fh_p)
)
((!connect_chute ?mlh_dc_h) (!fill_hopper ?mlh_fh_p ?mlh_dc_h ?mlh_fh_l) (!disconnect_chute ?mlh_dc_h))
)
;; Tanker vehicle carrying a liquid: hose + valve around the tank fill.
(:method (load ?mlt_dch_l ?mlt_dch_tv ?mlt_ft_lo)
method_load_tanker
(
(type_Package ?mlt_dch_l) (type_Vehicle ?mlt_dch_tv) (type_Location ?mlt_ft_lo)
(type_Liquid ?mlt_dch_l) (type_Tanker_Vehicle ?mlt_dch_tv) (type_Location ?mlt_ft_lo)
)
((!connect_hose ?mlt_dch_tv ?mlt_dch_l) (!open_valve ?mlt_dch_tv) (!fill_tank ?mlt_dch_tv ?mlt_dch_l ?mlt_ft_lo) (!close_valve ?mlt_dch_tv) (!disconnect_hose ?mlt_dch_tv ?mlt_dch_l))
)
;; Livestock: lower ramp, fill water trough, load animals, raise ramp.
(:method (load ?mll_ll_p ?mll_rr_v ?mll_ll_l)
method_load_livestock
(
(type_Package ?mll_ll_p) (type_Vehicle ?mll_rr_v) (type_Location ?mll_ll_l)
(type_Location ?mll_ll_l) (type_Livestock_Package ?mll_ll_p) (type_Vehicle ?mll_rr_v)
)
((!lower_ramp ?mll_rr_v) (!fill_trough ?mll_rr_v) (!load_livestock ?mll_ll_p ?mll_rr_v ?mll_ll_l) (!raise_ramp ?mll_rr_v))
)
;; Cars (auto transport): lower ramp, drive cars on, raise ramp.
(:method (load ?mlc_lc_c ?mlc_rr_v ?mlc_lc_l)
method_load_cars
(
(type_Package ?mlc_lc_c) (type_Vehicle ?mlc_rr_v) (type_Location ?mlc_lc_l)
(type_Cars ?mlc_lc_c) (type_Location ?mlc_lc_l) (type_Vehicle ?mlc_rr_v)
)
((!lower_ramp ?mlc_rr_v) (!load_cars ?mlc_lc_c ?mlc_rr_v ?mlc_lc_l) (!raise_ramp ?mlc_rr_v))
)
;; Airplane: attach conveyor ramp ?pr (bound here) and open the door around
;; the generic !load_package, then undo both.
(:method (load ?mla_lp_p ?mla_dcr_ap ?mla_dcr_l)
method_load_airplane
(
(type_Package ?mla_lp_p) (type_Vehicle ?mla_dcr_ap) (type_Location ?mla_dcr_l)
(type_Airplane ?mla_dcr_ap) (type_Location ?mla_dcr_l) (type_Plane_Ramp ?mla_dcr_pr) (type_Package ?mla_lp_p)
)
((!attach_conveyor_ramp ?mla_dcr_ap ?mla_dcr_pr ?mla_dcr_l) (!open_door ?mla_dcr_ap) (!load_package ?mla_lp_p ?mla_dcr_ap ?mla_dcr_l) (!close_door ?mla_dcr_ap) (!detach_conveyor_ramp ?mla_dcr_ap ?mla_dcr_pr ?mla_dcr_l))
)
;; Task (load_top ?p ?v ?l): wraps (load ...) with package-category
;; precautions: nothing extra for normal packages, warning signs for
;; hazardous cargo, and guard posting for valuables (armored vehicle only).
;; Normal package: plain load.
(:method (load_top ?mlmn_l_p ?mlmn_l_v ?mlmn_l_l)
method_load_top_normal
(
(type_Package ?mlmn_l_p) (type_Vehicle ?mlmn_l_v) (type_Location ?mlmn_l_l)
(type_Location ?mlmn_l_l) (type_Package ?mlmn_l_p) (type_Vehicle ?mlmn_l_v)
)
((load ?mlmn_l_p ?mlmn_l_v ?mlmn_l_l))
)
;; Hazardous package: affix warning signs before loading.
(:method (load_top ?mlmh_l_p ?mlmh_l_v ?mlmh_l_l)
method_load_top_hazardous
(
(type_Package ?mlmh_l_p) (type_Vehicle ?mlmh_l_v) (type_Location ?mlmh_l_l)
(type_Location ?mlmh_l_l) (type_Package ?mlmh_l_p) (type_Vehicle ?mlmh_l_v)
)
((!affix_warning_signs ?mlmh_l_v) (load ?mlmh_l_p ?mlmh_l_v ?mlmh_l_l))
)
;; Valuable package: requires an Armored vehicle; post guard outside during
;; loading, then move the guard inside for transit.
(:method (load_top ?mlmv_l_p ?mlmv_pci_a ?mlmv_l_l)
method_load_top_valuable
(
(type_Package ?mlmv_l_p) (type_Vehicle ?mlmv_pci_a) (type_Location ?mlmv_l_l)
(type_Location ?mlmv_l_l) (type_Package ?mlmv_l_p) (type_Armored ?mlmv_pci_a)
)
((!post_guard_outside ?mlmv_pci_a) (load ?mlmv_l_p ?mlmv_pci_a ?mlmv_l_l) (!post_guard_inside ?mlmv_pci_a))
)
;; Task (move ?v ?o ?d): ordinary vehicles move themselves via a route;
;; traincars must be hauled by a train.
;; Self-propelled vehicle: bind a route ?r and apply the move primitive.
(:method (move ?mmnt_mvnt_v ?mmnt_mvnt_o ?mmnt_mvnt_d)
method_move_no_traincar
(
(type_Vehicle ?mmnt_mvnt_v) (type_Location ?mmnt_mvnt_o) (type_Location ?mmnt_mvnt_d)
(type_Location ?mmnt_mvnt_d) (type_Location ?mmnt_mvnt_o) (type_Route ?mmnt_mvnt_r) (type_Vehicle ?mmnt_mvnt_v)
)
((!move_vehicle_no_traincar ?mmnt_mvnt_v ?mmnt_mvnt_o ?mmnt_mvnt_r ?mmnt_mvnt_d))
)
;; Traincar: bind a train ?t and delegate to helper_move_traincar
;; (attach / move / detach).
(:method (move ?mmt_hmt_v ?mmt_hmt_o ?mmt_hmt_d)
method_move_traincar
(
(type_Vehicle ?mmt_hmt_v) (type_Location ?mmt_hmt_o) (type_Location ?mmt_hmt_d)
(type_Location ?mmt_hmt_d) (type_Location ?mmt_hmt_o) (type_Train ?mmt_hmt_t) (type_Traincar ?mmt_hmt_v)
)
((helper_move_traincar ?mmt_hmt_v ?mmt_hmt_t ?mmt_hmt_o ?mmt_hmt_d))
)
;; Task (pickup ?p): administrative steps before carrying. All packages pay
;; fees; hazardous ones also need a permit, valuables also need insurance.
;; Normal package.
(:method (pickup ?mpn_cf_p)
method_pickup_normal
(
(type_Package ?mpn_cf_p)
(type_Package ?mpn_cf_p)
)
((!collect_fees ?mpn_cf_p))
)
;; Hazardous package: fees + transport permit.
(:method (pickup ?mph_op_h)
method_pickup_hazardous
(
(type_Package ?mph_op_h)
(type_Hazardous ?mph_op_h)
)
((!collect_fees ?mph_op_h) (!obtain_permit ?mph_op_h))
)
;; Valuable package: fees + insurance.
(:method (pickup ?mpv_ci_v)
method_pickup_valuable
(
(type_Package ?mpv_ci_v)
(type_Valuable ?mpv_ci_v)
)
((!collect_fees ?mpv_ci_v) (!collect_insurance ?mpv_ci_v))
)
;; Task (transport ?p ?origin ?dest): the top-level delivery pipeline —
;; pickup (collect fees/permits/insurance), carry to the destination, then
;; deliver (consume the collected prerequisites).
(:method (transport ?mtpcd_de_p ?mtpcd_ca_lo ?mtpcd_ca_ld)
method_transport_pi_ca_de
(
(type_Package ?mtpcd_de_p) (type_Location ?mtpcd_ca_lo) (type_Location ?mtpcd_ca_ld)
(type_Location ?mtpcd_ca_ld) (type_Location ?mtpcd_ca_lo) (type_Package ?mtpcd_de_p)
)
((pickup ?mtpcd_de_p) (carry ?mtpcd_de_p ?mtpcd_ca_lo ?mtpcd_ca_ld) (deliver ?mtpcd_de_p))
)
;; Task (unload ?p ?v ?l): mirror of the load methods — one decomposition
;; per vehicle/package category, each wrapping the category-specific
;; unloading primitive(s) in the required preparation/cleanup steps.
;; Regular vehicle: open door, unload, close door.
(:method (unload ?mur_up_p ?mur_cd_rv ?mur_up_l)
method_unload_regular
(
(type_Package ?mur_up_p) (type_Vehicle ?mur_cd_rv) (type_Location ?mur_up_l)
(type_Regular_Vehicle ?mur_cd_rv) (type_Location ?mur_up_l) (type_Package ?mur_up_p)
)
((!open_door ?mur_cd_rv) (!unload_package ?mur_up_p ?mur_cd_rv ?mur_up_l) (!close_door ?mur_cd_rv))
)
;; Flatbed vehicle: a crane ?c (bound here) lifts the package off the
;; flatbed and sets it on the ground.
(:method (unload ?muf_pdpg_p ?muf_pupv_fv ?muf_pdpg_l)
method_unload_flatbed
(
(type_Package ?muf_pdpg_p) (type_Vehicle ?muf_pupv_fv) (type_Location ?muf_pdpg_l)
(type_Crane ?muf_pdpg_c) (type_Location ?muf_pdpg_l) (type_Package ?muf_pdpg_p) (type_Flatbed_Vehicle ?muf_pupv_fv)
)
((!pick_up_package_vehicle ?muf_pdpg_p ?muf_pdpg_c ?muf_pupv_fv ?muf_pdpg_l) (!put_down_package_ground ?muf_pdpg_p ?muf_pdpg_c ?muf_pdpg_l))
)
;; Hopper vehicle: connect chute, empty hopper, disconnect chute.
(:method (unload ?muh_eh_p ?muh_dc_h ?muh_eh_l)
method_unload_hopper
(
(type_Package ?muh_eh_p) (type_Vehicle ?muh_dc_h) (type_Location ?muh_eh_l)
(type_Hopper_Vehicle ?muh_dc_h) (type_Location ?muh_eh_l) (type_Package ?muh_eh_p)
)
((!connect_chute ?muh_dc_h) (!empty_hopper ?muh_eh_p ?muh_dc_h ?muh_eh_l) (!disconnect_chute ?muh_dc_h))
)
;; Tanker vehicle carrying a liquid: hose + valve around the tank empty.
(:method (unload ?mut_dch_l ?mut_dch_tv ?mut_et_lo)
method_unload_tanker
(
(type_Package ?mut_dch_l) (type_Vehicle ?mut_dch_tv) (type_Location ?mut_et_lo)
(type_Liquid ?mut_dch_l) (type_Tanker_Vehicle ?mut_dch_tv) (type_Location ?mut_et_lo)
)
((!connect_hose ?mut_dch_tv ?mut_dch_l) (!open_valve ?mut_dch_tv) (!empty_tank ?mut_dch_tv ?mut_dch_l ?mut_et_lo) (!close_valve ?mut_dch_tv) (!disconnect_hose ?mut_dch_tv ?mut_dch_l))
)
;; Livestock: lower ramp, unload animals, clean the interior, raise ramp.
(:method (unload ?mul_ull_p ?mul_rr_v ?mul_ull_l)
method_unload_livestock
(
(type_Package ?mul_ull_p) (type_Vehicle ?mul_rr_v) (type_Location ?mul_ull_l)
(type_Vehicle ?mul_rr_v) (type_Location ?mul_ull_l) (type_Livestock_Package ?mul_ull_p)
)
((!lower_ramp ?mul_rr_v) (!unload_livestock ?mul_ull_p ?mul_rr_v ?mul_ull_l) (!do_clean_interior ?mul_rr_v) (!raise_ramp ?mul_rr_v))
)
;; Cars (auto transport): lower ramp, drive cars off, raise ramp.
(:method (unload ?muc_uc_c ?muc_rr_v ?muc_uc_l)
method_unload_cars
(
(type_Package ?muc_uc_c) (type_Vehicle ?muc_rr_v) (type_Location ?muc_uc_l)
(type_Vehicle ?muc_rr_v) (type_Cars ?muc_uc_c) (type_Location ?muc_uc_l)
)
((!lower_ramp ?muc_rr_v) (!unload_cars ?muc_uc_c ?muc_rr_v ?muc_uc_l) (!raise_ramp ?muc_rr_v))
)
;; Airplane: attach conveyor ramp ?pr (bound here) and open the door around
;; the generic !unload_package, then undo both.
(:method (unload ?mua_up_p ?mua_dcr_ap ?mua_dcr_l)
method_unload_airplane
(
(type_Package ?mua_up_p) (type_Vehicle ?mua_dcr_ap) (type_Location ?mua_dcr_l)
(type_Airplane ?mua_dcr_ap) (type_Location ?mua_dcr_l) (type_Plane_Ramp ?mua_dcr_pr) (type_Package ?mua_up_p)
)
((!attach_conveyor_ramp ?mua_dcr_ap ?mua_dcr_pr ?mua_dcr_l) (!open_door ?mua_dcr_ap) (!unload_package ?mua_up_p ?mua_dcr_ap ?mua_dcr_l) (!close_door ?mua_dcr_ap) (!detach_conveyor_ramp ?mua_dcr_ap ?mua_dcr_pr ?mua_dcr_l))
)
;; Task (unload_top ?p ?v ?l): wraps (unload ...) with package-category
;; cleanup: nothing extra for normal packages, decontamination + sign
;; removal for hazardous cargo, guard handling for valuables.
;; Normal package: plain unload.
(:method (unload_top ?mumn_ul_p ?mumn_ul_v ?mumn_ul_l)
method_unload_top_normal
(
(type_Package ?mumn_ul_p) (type_Vehicle ?mumn_ul_v) (type_Location ?mumn_ul_l)
(type_Location ?mumn_ul_l) (type_Package ?mumn_ul_p) (type_Vehicle ?mumn_ul_v)
)
((unload ?mumn_ul_p ?mumn_ul_v ?mumn_ul_l))
)
;; Hazardous package: unload, decontaminate the interior, remove the
;; warning signs affixed by method_load_top_hazardous.
(:method (unload_top ?mumh_ul_p ?mumh_ul_v ?mumh_ul_l)
method_unload_top_hazardous
(
(type_Package ?mumh_ul_p) (type_Vehicle ?mumh_ul_v) (type_Location ?mumh_ul_l)
(type_Location ?mumh_ul_l) (type_Package ?mumh_ul_p) (type_Vehicle ?mumh_ul_v)
)
((unload ?mumh_ul_p ?mumh_ul_v ?mumh_ul_l) (!decontaminate_interior ?mumh_ul_v) (!remove_warning_signs ?mumh_ul_v))
)
;; Valuable package: post guard outside during unloading, then remove the
;; guard entirely.
(:method (unload_top ?mumv_ul_p ?mumv_ul_v ?mumv_ul_l)
method_unload_top_valuable
(
(type_Package ?mumv_ul_p) (type_Vehicle ?mumv_ul_v) (type_Location ?mumv_ul_l)
(type_Location ?mumv_ul_l) (type_Package ?mumv_ul_p) (type_Vehicle ?mumv_ul_v)
)
((!post_guard_outside ?mumv_ul_v) (unload ?mumv_ul_p ?mumv_ul_v ?mumv_ul_l) (!remove_guard ?mumv_ul_v))
)
))
| null | https://raw.githubusercontent.com/panda-planner-dev/ipc2020-domains/9adb54325d3df35907adc7115fcc65f0ce5953cc/partial-order/UM-Translog/other/SHOP2/d-10.lisp | lisp | preconditions
delete effects
add effects
preconditions
delete effects
add effects
preconditions
delete effects
add effects
preconditions
delete effects
add effects
preconditions
delete effects
add effects
preconditions
delete effects
add effects
preconditions
delete effects
add effects
preconditions
delete effects
add effects
preconditions
delete effects
add effects
preconditions
delete effects
add effects
preconditions
delete effects
add effects
preconditions
delete effects
add effects
preconditions
delete effects
add effects
preconditions
delete effects
add effects
preconditions
delete effects
add effects
preconditions
delete effects
add effects
preconditions
delete effects
add effects
preconditions
delete effects
add effects
preconditions
delete effects
add effects
preconditions
delete effects
add effects
preconditions
delete effects
add effects
preconditions
delete effects
add effects
preconditions
delete effects
add effects
preconditions
delete effects
add effects
preconditions
delete effects
add effects
preconditions
delete effects
add effects
preconditions
delete effects
add effects
preconditions
delete effects
add effects
preconditions
delete effects
add effects
preconditions
delete effects
add effects
preconditions
delete effects
add effects
preconditions
delete effects
add effects
preconditions
delete effects
add effects
preconditions
delete effects
add effects
preconditions
delete effects
add effects
preconditions
delete effects
add effects
preconditions
delete effects
add effects
preconditions
delete effects
add effects
preconditions
delete effects
add effects
preconditions
delete effects
add effects
preconditions
delete effects
add effects
preconditions
delete effects
add effects
preconditions
delete effects
add effects
preconditions
delete effects
add effects
preconditions
delete effects
add effects
preconditions
delete effects
add effects
preconditions
delete effects
add effects
preconditions
delete effects
add effects
preconditions
delete effects
add effects
preconditions
delete effects
add effects
preconditions
delete effects
add effects | (defdomain domain (
(:operator (!obtain_permit ?op_h)
(
(type_Hazardous ?op_h)
(not (Have_Permit ?op_h))
)
()
((Have_Permit ?op_h))
)
(:operator (!collect_fees ?cf_p)
(
(type_Package ?cf_p)
(not (Fees_Collected ?cf_p))
)
()
((Fees_Collected ?cf_p))
)
(:operator (!collect_insurance ?ci_v)
(
(type_Valuable ?ci_v)
(not (Insured ?ci_v))
)
()
((Insured ?ci_v))
)
(:operator (!go_through_tcenter_cc ?gttc_lo ?gttc_ld ?gttc_co ?gttc_cd ?gttc_tc)
(
(type_Not_TCenter ?gttc_lo) (type_Not_TCenter ?gttc_ld) (type_City ?gttc_co) (type_City ?gttc_cd) (type_TCenter ?gttc_tc)
(In_City ?gttc_lo ?gttc_co) (In_City ?gttc_ld ?gttc_cd) (Serves ?gttc_tc ?gttc_co) (Serves ?gttc_tc ?gttc_cd) (Available ?gttc_tc)
)
()
()
)
(:operator (!go_through_two_tcenters_cities_ottd ?gtttcc_lo ?gtttcc_ld ?gtttcc_co ?gtttcc_cd ?gtttcc_t1 ?gtttcc_t2)
(
(type_Not_TCenter ?gtttcc_lo) (type_Not_TCenter ?gtttcc_ld) (type_City ?gtttcc_co) (type_City ?gtttcc_cd) (type_TCenter ?gtttcc_t1) (type_TCenter ?gtttcc_t2)
(In_City ?gtttcc_lo ?gtttcc_co) (In_City ?gtttcc_ld ?gtttcc_cd) (Serves ?gtttcc_t1 ?gtttcc_co) (Serves ?gtttcc_t2 ?gtttcc_cd)
)
()
()
)
(:operator (!go_through_two_tcenters_cities_otd ?gtttccotd_ld ?gtttccotd_co ?gtttccotd_cd ?gtttccotd_to ?gtttccotd_t1)
(
(type_Not_TCenter ?gtttccotd_ld) (type_City ?gtttccotd_co) (type_City ?gtttccotd_cd) (type_TCenter ?gtttccotd_to) (type_TCenter ?gtttccotd_t1)
(In_City ?gtttccotd_to ?gtttccotd_co) (In_City ?gtttccotd_ld ?gtttccotd_cd) (Serves ?gtttccotd_t1 ?gtttccotd_cd)
)
()
()
)
(:operator (!go_through_two_tcenters_cities_ott ?gtttccott_ld ?gtttccott_co ?gtttccott_cd ?gtttccott_to ?gtttccott_td)
(
(type_City_Location ?gtttccott_ld) (type_City ?gtttccott_co) (type_City ?gtttccott_cd) (type_TCenter ?gtttccott_to) (type_TCenter ?gtttccott_td)
(In_City ?gtttccott_ld ?gtttccott_co) (In_City ?gtttccott_td ?gtttccott_cd) (Serves ?gtttccott_to ?gtttccott_co)
)
()
()
)
(:operator (!go_through_two_tcenters ?gtttc_to ?gtttc_td)
(
(type_TCenter ?gtttc_to) (type_TCenter ?gtttc_td)
(Available ?gtttc_to) (Available ?gtttc_td)
)
()
()
)
(:operator (!go_through_two_tcenters_tt ?gtttctt_to ?gtttctt_td ?gtttctt_co ?gtttctt_cd)
(
(type_TCenter ?gtttctt_to) (type_TCenter ?gtttctt_td) (type_City ?gtttctt_co) (type_City ?gtttctt_cd)
(In_City ?gtttctt_to ?gtttctt_co) (In_City ?gtttctt_td ?gtttctt_cd)
)
()
()
)
(:operator (!go_through_two_tcenters_via_hub_hazardous ?gtttcvhh_to ?gtttcvhh_td ?gtttcvhh_h ?gtttcvhh_co ?gtttcvhh_ch ?gtttcvhh_cd ?gtttcvhh_ro ?gtttcvhh_rd)
(
(type_TCenter ?gtttcvhh_to) (type_TCenter ?gtttcvhh_td) (type_Hub ?gtttcvhh_h) (type_City ?gtttcvhh_co) (type_City ?gtttcvhh_ch) (type_City ?gtttcvhh_cd) (type_Region ?gtttcvhh_ro) (type_Region ?gtttcvhh_rd)
(Available ?gtttcvhh_to) (Available ?gtttcvhh_td) (In_City ?gtttcvhh_h ?gtttcvhh_ch) (City_Hazardous_Compatible ?gtttcvhh_ch) (In_City ?gtttcvhh_to ?gtttcvhh_co) (In_City ?gtttcvhh_td ?gtttcvhh_cd) (In_Region ?gtttcvhh_co ?gtttcvhh_ro) (In_Region ?gtttcvhh_cd ?gtttcvhh_rd) (Serves ?gtttcvhh_h ?gtttcvhh_ro) (Serves ?gtttcvhh_h ?gtttcvhh_rd) (Available ?gtttcvhh_h)
)
()
()
)
(:operator (!go_through_two_tcenters_via_hub_not_hazardous ?gtttcvhnh_to ?gtttcvhnh_td ?gtttcvhnh_co ?gtttcvhnh_cd ?gtttcvhnh_ro ?gtttcvhnh_rd ?gtttcvhnh_h)
(
(type_TCenter ?gtttcvhnh_to) (type_TCenter ?gtttcvhnh_td) (type_City ?gtttcvhnh_co) (type_City ?gtttcvhnh_cd) (type_Region ?gtttcvhnh_ro) (type_Region ?gtttcvhnh_rd) (type_Hub ?gtttcvhnh_h)
(Available ?gtttcvhnh_to) (Available ?gtttcvhnh_td) (In_City ?gtttcvhnh_to ?gtttcvhnh_co) (In_City ?gtttcvhnh_td ?gtttcvhnh_cd) (In_Region ?gtttcvhnh_co ?gtttcvhnh_ro) (In_Region ?gtttcvhnh_cd ?gtttcvhnh_rd) (Serves ?gtttcvhnh_h ?gtttcvhnh_ro) (Serves ?gtttcvhnh_h ?gtttcvhnh_rd) (Available ?gtttcvhnh_h)
)
()
()
)
(:operator (!deliver_p ?dp_p)
(
(type_Package ?dp_p)
(Fees_Collected ?dp_p)
)
((Fees_Collected ?dp_p))
((Delivered ?dp_p))
)
(:operator (!deliver_h ?dh_h)
(
(type_Hazardous ?dh_h)
(Fees_Collected ?dh_h) (Have_Permit ?dh_h)
)
((Have_Permit ?dh_h) (Fees_Collected ?dh_h))
((Delivered ?dh_h))
)
(:operator (!deliver_v ?dv_v)
(
(type_Valuable ?dv_v)
(Fees_Collected ?dv_v) (Insured ?dv_v)
)
((Fees_Collected ?dv_v) (Insured ?dv_v))
((Delivered ?dv_v))
)
(:operator (!post_guard_outside ?pco_a)
(
(type_Armored ?pco_a)
)
((Guard_Inside ?pco_a))
((Guard_Outside ?pco_a))
)
(:operator (!post_guard_inside ?pci_a)
(
(type_Armored ?pci_a)
)
((Guard_Outside ?pci_a))
((Guard_Inside ?pci_a))
)
(:operator (!remove_guard ?mc_a)
(
(type_Armored ?mc_a)
)
((Guard_Outside ?mc_a) (Guard_Inside ?mc_a))
()
)
(:operator (!decontaminate_interior ?di_v)
(
(type_Vehicle ?di_v)
)
()
((Decontaminated_Interior ?di_v))
)
(:operator (!affix_warning_signs ?fws_v)
(
(type_Vehicle ?fws_v)
(not (Warning_Signs_Affixed ?fws_v))
)
()
((Warning_Signs_Affixed ?fws_v))
)
(:operator (!remove_warning_signs ?mws_v)
(
(type_Vehicle ?mws_v)
(Warning_Signs_Affixed ?mws_v)
)
((Warning_Signs_Affixed ?mws_v))
()
)
(:operator (!attach_train_car ?atc_t ?atc_tc ?atc_l)
(
(type_Train ?atc_t) (type_Traincar ?atc_tc) (type_Location ?atc_l)
(At_Vehicle ?atc_tc ?atc_l) (At_Vehicle ?atc_t ?atc_l) (not (Connected_To ?atc_tc ?atc_t))
)
((At_Vehicle ?atc_tc ?atc_l))
((Connected_To ?atc_tc ?atc_t))
)
(:operator (!detach_train_car ?dtc_t ?dtc_tc ?dtc_l)
(
(type_Train ?dtc_t) (type_Traincar ?dtc_tc) (type_Location ?dtc_l)
(At_Vehicle ?dtc_t ?dtc_l) (Connected_To ?dtc_tc ?dtc_t)
)
((Connected_To ?dtc_tc ?dtc_t))
((At_Vehicle ?dtc_tc ?dtc_l))
)
(:operator (!connect_hose ?ch_tv ?ch_l)
(
(type_Tanker_Vehicle ?ch_tv) (type_Liquid ?ch_l)
(not (Hose_Connected ?ch_tv ?ch_l))
)
()
((Hose_Connected ?ch_tv ?ch_l))
)
(:operator (!disconnect_hose ?dch_tv ?dch_l)
(
(type_Tanker_Vehicle ?dch_tv) (type_Liquid ?dch_l)
(Hose_Connected ?dch_tv ?dch_l)
)
((Hose_Connected ?dch_tv ?dch_l))
()
)
(:operator (!open_valve ?ov_tv)
(
(type_Tanker_Vehicle ?ov_tv)
(not (Valve_Open ?ov_tv))
)
()
((Valve_Open ?ov_tv))
)
(:operator (!close_valve ?cv_tv)
(
(type_Tanker_Vehicle ?cv_tv)
(Valve_Open ?cv_tv)
)
((Valve_Open ?cv_tv))
()
)
(:operator (!fill_tank ?ft_tv ?ft_li ?ft_lo)
(
(type_Tanker_Vehicle ?ft_tv) (type_Liquid ?ft_li) (type_Location ?ft_lo)
(Hose_Connected ?ft_tv ?ft_li) (Valve_Open ?ft_tv) (At_Package ?ft_li ?ft_lo) (At_Vehicle ?ft_tv ?ft_lo) (PV_Compatible ?ft_li ?ft_tv)
)
((At_Package ?ft_li ?ft_lo))
((At_Package ?ft_li ?ft_tv))
)
(:operator (!empty_tank ?et_tv ?et_li ?et_lo)
(
(type_Tanker_Vehicle ?et_tv) (type_Liquid ?et_li) (type_Location ?et_lo)
(Hose_Connected ?et_tv ?et_li) (Valve_Open ?et_tv) (At_Package ?et_li ?et_tv) (At_Vehicle ?et_tv ?et_lo)
)
((At_Package ?et_li ?et_tv))
((At_Package ?et_li ?et_lo))
)
(:operator (!load_cars ?lc_c ?lc_v ?lc_l)
(
(type_Cars ?lc_c) (type_Auto_Vehicle ?lc_v) (type_Location ?lc_l)
(At_Package ?lc_c ?lc_l) (At_Vehicle ?lc_v ?lc_l) (Ramp_Down ?lc_v) (PV_Compatible ?lc_c ?lc_v)
)
((At_Package ?lc_c ?lc_l))
((At_Package ?lc_c ?lc_v))
)
(:operator (!unload_cars ?uc_c ?uc_v ?uc_l)
(
(type_Cars ?uc_c) (type_Auto_Vehicle ?uc_v) (type_Location ?uc_l)
(At_Package ?uc_c ?uc_v) (At_Vehicle ?uc_v ?uc_l) (Ramp_Down ?uc_v)
)
((At_Package ?uc_c ?uc_v))
((At_Package ?uc_c ?uc_l))
)
(:operator (!raise_ramp ?rr_v)
(
(type_Vehicle ?rr_v)
(Ramp_Down ?rr_v)
)
((Ramp_Down ?rr_v))
()
)
(:operator (!lower_ramp ?lr_v)
(
(type_Vehicle ?lr_v)
(not (Ramp_Down ?lr_v))
)
()
((Ramp_Down ?lr_v))
)
(:operator (!load_livestock ?ll_p ?ll_v ?ll_l)
(
(type_Livestock_Package ?ll_p) (type_Livestock_Vehicle ?ll_v) (type_Location ?ll_l)
(At_Package ?ll_p ?ll_l) (At_Vehicle ?ll_v ?ll_l) (Ramp_Down ?ll_v) (PV_Compatible ?ll_p ?ll_v)
)
((At_Package ?ll_p ?ll_l) (Clean_Interior ?ll_v))
((At_Package ?ll_p ?ll_v))
)
(:operator (!unload_livestock ?ull_p ?ull_v ?ull_l)
(
(type_Livestock_Package ?ull_p) (type_Livestock_Vehicle ?ull_v) (type_Location ?ull_l)
(At_Package ?ull_p ?ull_v) (At_Vehicle ?ull_v ?ull_l) (Ramp_Down ?ull_v)
)
((At_Package ?ull_p ?ull_v) (Trough_Full ?ull_v))
((At_Package ?ull_p ?ull_l))
)
(:operator (!fill_trough ?ftr_v)
(
(type_Livestock_Vehicle ?ftr_v)
)
()
((Trough_Full ?ftr_v))
)
(:operator (!do_clean_interior ?cli_v)
(
(type_Vehicle ?cli_v)
)
()
((Clean_Interior ?cli_v))
)
(:operator (!attach_conveyor_ramp ?acr_ap ?acr_pr ?acr_l)
(
(type_Airplane ?acr_ap) (type_Plane_Ramp ?acr_pr) (type_Location ?acr_l)
(Available ?acr_pr) (At_Equipment ?acr_pr ?acr_l) (At_Vehicle ?acr_ap ?acr_l)
)
((Available ?acr_pr))
((Ramp_Connected ?acr_pr ?acr_ap))
)
(:operator (!detach_conveyor_ramp ?dcr_ap ?dcr_pr ?dcr_l)
(
(type_Airplane ?dcr_ap) (type_Plane_Ramp ?dcr_pr) (type_Location ?dcr_l)
(Ramp_Connected ?dcr_pr ?dcr_ap) (At_Equipment ?dcr_pr ?dcr_l) (At_Vehicle ?dcr_ap ?dcr_l)
)
((Ramp_Connected ?dcr_pr ?dcr_ap))
((Available ?dcr_pr))
)
(:operator (!connect_chute ?cc_h)
(
(type_Hopper_Vehicle ?cc_h)
(not (Chute_Connected ?cc_h))
)
()
((Chute_Connected ?cc_h))
)
(:operator (!disconnect_chute ?dc_h)
(
(type_Hopper_Vehicle ?dc_h)
(Chute_Connected ?dc_h)
)
((Chute_Connected ?dc_h))
()
)
(:operator (!fill_hopper ?fh_p ?fh_hv ?fh_l)
(
(type_Package ?fh_p) (type_Hopper_Vehicle ?fh_hv) (type_Location ?fh_l)
(Chute_Connected ?fh_hv) (At_Vehicle ?fh_hv ?fh_l) (At_Package ?fh_p ?fh_l) (PV_Compatible ?fh_p ?fh_hv)
)
((At_Package ?fh_p ?fh_l))
((At_Package ?fh_p ?fh_hv))
)
(:operator (!empty_hopper ?eh_p ?eh_hv ?eh_l)
(
(type_Package ?eh_p) (type_Hopper_Vehicle ?eh_hv) (type_Location ?eh_l)
(Chute_Connected ?eh_hv) (At_Vehicle ?eh_hv ?eh_l) (At_Package ?eh_p ?eh_hv)
)
((At_Package ?eh_p ?eh_hv))
((At_Package ?eh_p ?eh_l))
)
(:operator (!pick_up_package_ground ?pupg_p ?pupg_c ?pupg_l)
(
(type_Package ?pupg_p) (type_Crane ?pupg_c) (type_Location ?pupg_l)
(Empty ?pupg_c) (Available ?pupg_c) (At_Equipment ?pupg_c ?pupg_l) (At_Package ?pupg_p ?pupg_l)
)
((Empty ?pupg_c) (At_Package ?pupg_p ?pupg_l))
((At_Package ?pupg_p ?pupg_c))
)
(:operator (!put_down_package_ground ?pdpg_p ?pdpg_c ?pdpg_l)
(
(type_Package ?pdpg_p) (type_Crane ?pdpg_c) (type_Location ?pdpg_l)
(Available ?pdpg_c) (At_Equipment ?pdpg_c ?pdpg_l) (At_Package ?pdpg_p ?pdpg_c)
)
((At_Package ?pdpg_p ?pdpg_c))
((At_Package ?pdpg_p ?pdpg_l) (Empty ?pdpg_c))
)
(:operator (!pick_up_package_vehicle ?pupv_p ?pupv_c ?pupv_fv ?pupv_l)
(
(type_Package ?pupv_p) (type_Crane ?pupv_c) (type_Flatbed_Vehicle ?pupv_fv) (type_Location ?pupv_l)
(Empty ?pupv_c) (Available ?pupv_c) (At_Equipment ?pupv_c ?pupv_l) (At_Package ?pupv_p ?pupv_fv) (At_Vehicle ?pupv_fv ?pupv_l)
)
((Empty ?pupv_c) (At_Package ?pupv_p ?pupv_fv))
((At_Package ?pupv_p ?pupv_c))
)
(:operator (!put_down_package_vehicle ?pdpv_p ?pdpv_c ?pdpv_fv ?pdpv_l)
(
(type_Package ?pdpv_p) (type_Crane ?pdpv_c) (type_Flatbed_Vehicle ?pdpv_fv) (type_Location ?pdpv_l)
(Available ?pdpv_c) (At_Package ?pdpv_p ?pdpv_c) (At_Equipment ?pdpv_c ?pdpv_l) (At_Vehicle ?pdpv_fv ?pdpv_l) (PV_Compatible ?pdpv_p ?pdpv_fv)
)
((At_Package ?pdpv_p ?pdpv_c))
((Empty ?pdpv_c) (At_Package ?pdpv_p ?pdpv_fv))
)
(:operator (!open_door ?od_rv)
(
(type_Regular_Vehicle ?od_rv)
(not (Door_Open ?od_rv))
)
()
((Door_Open ?od_rv))
)
(:operator (!close_door ?cd_rv)
(
(type_Regular_Vehicle ?cd_rv)
(Door_Open ?cd_rv)
)
((Door_Open ?cd_rv))
()
)
(:operator (!load_package ?lp_p ?lp_v ?lp_l)
(
(type_Package ?lp_p) (type_Vehicle ?lp_v) (type_Location ?lp_l)
(At_Package ?lp_p ?lp_l) (At_Vehicle ?lp_v ?lp_l) (PV_Compatible ?lp_p ?lp_v)
)
((At_Package ?lp_p ?lp_l))
((At_Package ?lp_p ?lp_v))
)
(:operator (!unload_package ?up_p ?up_v ?up_l)
(
(type_Package ?up_p) (type_Vehicle ?up_v) (type_Location ?up_l)
(At_Package ?up_p ?up_v) (At_Vehicle ?up_v ?up_l)
)
((At_Package ?up_p ?up_v))
((At_Package ?up_p ?up_l))
)
(:operator (!move_vehicle_no_traincar ?hmnt_v ?hmnt_o ?hmnt_r ?hmnt_d)
(
(type_Vehicle ?hmnt_v) (type_Location ?hmnt_o) (type_Route ?hmnt_r) (type_Location ?hmnt_d)
(Connects ?hmnt_r ?hmnt_o ?hmnt_d) (Available ?hmnt_v) (Available ?hmnt_r) (RV_Compatible ?hmnt_r ?hmnt_v) (At_Vehicle ?hmnt_v ?hmnt_o)
)
((At_Vehicle ?hmnt_v ?hmnt_o))
((At_Vehicle ?hmnt_v ?hmnt_d))
)
(:method (__top)
__top_method
(
(type_sort_for_Essen ?var_for_Essen_1) (type_sort_for_HauptbahnhofMuenchen ?var_for_HauptbahnhofMuenchen_2) (type_sort_for_HauptbahnhofUlm ?var_for_HauptbahnhofUlm_3)
)
((transport ?var_for_Essen_1 ?var_for_HauptbahnhofUlm_3 ?var_for_HauptbahnhofMuenchen_2))
)
(:method (carry ?mccd_cd_p ?mccd_cd_lo ?mccd_cd_ld)
method_carry_cd
(
(type_Package ?mccd_cd_p) (type_Location ?mccd_cd_lo) (type_Location ?mccd_cd_ld)
(type_Location ?mccd_cd_ld) (type_Location ?mccd_cd_lo) (type_Package ?mccd_cd_p)
)
((carry_direct ?mccd_cd_p ?mccd_cd_lo ?mccd_cd_ld))
)
(:method (carry ?mch_hctt_p ?mch_hctt_o ?mch_hctt_d)
method_carry_cvh
(
(type_Package ?mch_hctt_p) (type_Location ?mch_hctt_o) (type_Location ?mch_hctt_d)
(type_City ?mch_hctt_cd) (type_City ?mch_hctt_co) (type_TCenter ?mch_hctt_d) (type_TCenter ?mch_hctt_o) (type_Package ?mch_hctt_p)
)
((helper_carry_tt ?mch_hctt_p ?mch_hctt_o ?mch_hctt_co ?mch_hctt_d ?mch_hctt_cd))
)
(:method (carry ?mccct_hcott_p ?mccct_hcott_o ?mccct_hcott_d)
method_carry_cd_cbtc
(
(type_Package ?mccct_hcott_p) (type_Location ?mccct_hcott_o) (type_Location ?mccct_hcott_d)
(type_City ?mccct_hcott_cd) (type_City ?mccct_hcott_co) (type_TCenter ?mccct_hcott_d) (type_City_Location ?mccct_hcott_o) (type_Package ?mccct_hcott_p) (type_TCenter ?mccct_hcott_t1)
)
((helper_carry_ott ?mccct_hcott_p ?mccct_hcott_o ?mccct_hcott_co ?mccct_hcott_t1 ?mccct_hcott_d ?mccct_hcott_cd))
)
(:method (carry ?mcctc_hcotd_p ?mcctc_hcotd_o ?mcctc_hcotd_d)
method_carry_cbtc_cd
(
(type_Package ?mcctc_hcotd_p) (type_Location ?mcctc_hcotd_o) (type_Location ?mcctc_hcotd_d)
(type_City ?mcctc_hcotd_cd) (type_City ?mcctc_hcotd_co) (type_Not_TCenter ?mcctc_hcotd_d) (type_TCenter ?mcctc_hcotd_o) (type_Package ?mcctc_hcotd_p) (type_TCenter ?mcctc_hcotd_t1)
)
((helper_carry_otd ?mcctc_hcotd_p ?mcctc_hcotd_o ?mcctc_hcotd_co ?mcctc_hcotd_t1 ?mcctc_hcotd_d ?mcctc_hcotd_cd))
)
(:method (carry ?mcccc_hcottd_p ?mcccc_hcottd_o ?mcccc_hcottd_d)
method_carry_cd_cbtc_cd
(
(type_Package ?mcccc_hcottd_p) (type_Location ?mcccc_hcottd_o) (type_Location ?mcccc_hcottd_d)
(type_City ?mcccc_hcottd_cd) (type_City ?mcccc_hcottd_co) (type_Not_TCenter ?mcccc_hcottd_d) (type_Not_TCenter ?mcccc_hcottd_o) (type_Package ?mcccc_hcottd_p) (type_TCenter ?mcccc_hcottd_t1) (type_TCenter ?mcccc_hcottd_t2)
)
((helper_carry_ottd ?mcccc_hcottd_p ?mcccc_hcottd_o ?mcccc_hcottd_co ?mcccc_hcottd_t1 ?mcccc_hcottd_t2 ?mcccc_hcottd_d ?mcccc_hcottd_cd))
)
(:method (carry ?mccc_hccc_p ?mccc_hccc_o ?mccc_hccc_d)
method_carry_cd_cd
(
(type_Package ?mccc_hccc_p) (type_Location ?mccc_hccc_o) (type_Location ?mccc_hccc_d)
(type_City ?mccc_hccc_cd) (type_City ?mccc_hccc_co) (type_Not_TCenter ?mccc_hccc_d) (type_Not_TCenter ?mccc_hccc_o) (type_Package ?mccc_hccc_p) (type_TCenter ?mccc_hccc_t)
)
((helper_carry_cc ?mccc_hccc_p ?mccc_hccc_o ?mccc_hccc_co ?mccc_hccc_t ?mccc_hccc_d ?mccc_hccc_cd))
)
(:method (carry_between_tcenters ?mcbtc_cd_p ?mcbtc_gtttc_to ?mcbtc_gtttc_td)
method_carry_between_tcenters_cd
(
(type_Package ?mcbtc_cd_p) (type_TCenter ?mcbtc_gtttc_to) (type_TCenter ?mcbtc_gtttc_td)
(type_Package ?mcbtc_cd_p) (type_TCenter ?mcbtc_gtttc_td) (type_TCenter ?mcbtc_gtttc_to)
)
(:unordered (!go_through_two_tcenters ?mcbtc_gtttc_to ?mcbtc_gtttc_td) (carry_direct ?mcbtc_cd_p ?mcbtc_gtttc_to ?mcbtc_gtttc_td))
)
(:method (carry_between_tcenters ?mcbth_tch_p ?mcbth_tch_tco ?mcbth_tch_tcd)
method_carry_between_tcenters_cvh
(
(type_Package ?mcbth_tch_p) (type_TCenter ?mcbth_tch_tco) (type_TCenter ?mcbth_tch_tcd)
(type_Package ?mcbth_tch_p) (type_TCenter ?mcbth_tch_tcd) (type_TCenter ?mcbth_tch_tco)
)
((carry_via_hub ?mcbth_tch_p ?mcbth_tch_tco ?mcbth_tch_tcd))
)
(:method (carry_direct ?mcd_hmcd_p ?mcd_hmcd_o ?mcd_hmcd_d)
method_carry_direct
(
(type_Package ?mcd_hmcd_p) (type_Location ?mcd_hmcd_o) (type_Location ?mcd_hmcd_d)
(type_Location ?mcd_hmcd_d) (type_Location ?mcd_hmcd_o) (type_Package ?mcd_hmcd_p) (type_Vehicle ?mcd_hmcd_v)
)
((helper_carry_direct ?mcd_hmcd_v ?mcd_hmcd_p ?mcd_hmcd_o ?mcd_hmcd_d))
)
(:method (carry_via_hub ?mcvhn_hcvhn_p ?mcvhn_hcvhn_tco ?mcvhn_hcvhn_tcd)
method_carry_via_hub_not_hazardous
(
(type_Package ?mcvhn_hcvhn_p) (type_TCenter ?mcvhn_hcvhn_tco) (type_TCenter ?mcvhn_hcvhn_tcd)
(type_City ?mcvhn_hcvhn_ctcd) (type_City ?mcvhn_hcvhn_ctco) (type_Hub ?mcvhn_hcvhn_h) (type_Package ?mcvhn_hcvhn_p) (type_Region ?mcvhn_hcvhn_rctcd) (type_Region ?mcvhn_hcvhn_rctco) (type_TCenter ?mcvhn_hcvhn_tcd) (type_TCenter ?mcvhn_hcvhn_tco)
)
((helper_carry_via_hub_not_hazardous ?mcvhn_hcvhn_p ?mcvhn_hcvhn_tco ?mcvhn_hcvhn_ctco ?mcvhn_hcvhn_rctco ?mcvhn_hcvhn_h ?mcvhn_hcvhn_tcd ?mcvhn_hcvhn_ctcd ?mcvhn_hcvhn_rctcd))
)
(:method (carry_via_hub ?mcvhh_hcvhh_p ?mcvhh_hcvhh_tco ?mcvhh_hcvhh_tcd)
method_carry_via_hub_hazardous
(
(type_Package ?mcvhh_hcvhh_p) (type_TCenter ?mcvhh_hcvhh_tco) (type_TCenter ?mcvhh_hcvhh_tcd)
(type_City ?mcvhh_hcvhh_ch) (type_City ?mcvhh_hcvhh_ctcd) (type_City ?mcvhh_hcvhh_ctco) (type_Hub ?mcvhh_hcvhh_h) (type_Package ?mcvhh_hcvhh_p) (type_Region ?mcvhh_hcvhh_rctcd) (type_Region ?mcvhh_hcvhh_rctco) (type_TCenter ?mcvhh_hcvhh_tcd) (type_TCenter ?mcvhh_hcvhh_tco)
)
((helper_carry_via_hub_hazardous ?mcvhh_hcvhh_p ?mcvhh_hcvhh_tco ?mcvhh_hcvhh_ctco ?mcvhh_hcvhh_rctco ?mcvhh_hcvhh_h ?mcvhh_hcvhh_ch ?mcvhh_hcvhh_tcd ?mcvhh_hcvhh_ctcd ?mcvhh_hcvhh_rctcd))
)
(:method (deliver ?mddp_dp_p)
method_deliver_dp
(
(type_Package ?mddp_dp_p)
(type_Package ?mddp_dp_p)
)
((!deliver_p ?mddp_dp_p))
)
(:method (deliver ?mddv_dv_v)
method_deliver_dv
(
(type_Package ?mddv_dv_v)
(type_Valuable ?mddv_dv_v)
)
((!deliver_v ?mddv_dv_v))
)
(:method (deliver ?mddh_dh_h)
method_deliver_dh
(
(type_Package ?mddh_dh_h)
(type_Hazardous ?mddh_dh_h)
)
((!deliver_h ?mddh_dh_h))
)
(:method (helper_carry_cc ?mhccc_cdd_p ?mhccc_gttc_lo ?mhccc_gttc_co ?mhccc_gttc_tc ?mhccc_gttc_ld ?mhccc_gttc_cd)
method_helper_carry_cd_cd
(
(type_Package ?mhccc_cdd_p) (type_Not_TCenter ?mhccc_gttc_lo) (type_City ?mhccc_gttc_co) (type_TCenter ?mhccc_gttc_tc) (type_Not_TCenter ?mhccc_gttc_ld) (type_City ?mhccc_gttc_cd)
(type_Package ?mhccc_cdd_p) (type_City ?mhccc_gttc_cd) (type_City ?mhccc_gttc_co) (type_Not_TCenter ?mhccc_gttc_ld) (type_Not_TCenter ?mhccc_gttc_lo) (type_TCenter ?mhccc_gttc_tc)
)
((carry_direct ?mhccc_cdd_p ?mhccc_gttc_lo ?mhccc_gttc_tc) (!go_through_tcenter_cc ?mhccc_gttc_lo ?mhccc_gttc_ld ?mhccc_gttc_co ?mhccc_gttc_cd ?mhccc_gttc_tc) (carry_direct ?mhccc_cdd_p ?mhccc_gttc_tc ?mhccc_gttc_ld))
)
(:method (helper_carry_direct ?mhcd_ult_v ?mhcd_ult_p ?mhcd_mvd_lo ?mhcd_ult_l)
method_helper_carry_direct
(
(type_Vehicle ?mhcd_ult_v) (type_Package ?mhcd_ult_p) (type_Location ?mhcd_mvd_lo) (type_Location ?mhcd_ult_l)
(type_Location ?mhcd_mvd_lo) (type_Location ?mhcd_mvo_lo) (type_Location ?mhcd_ult_l) (type_Package ?mhcd_ult_p) (type_Vehicle ?mhcd_ult_v)
)
((move ?mhcd_ult_v ?mhcd_mvo_lo ?mhcd_mvd_lo) (load_top ?mhcd_ult_p ?mhcd_ult_v ?mhcd_mvd_lo) (move ?mhcd_ult_v ?mhcd_mvd_lo ?mhcd_ult_l) (unload_top ?mhcd_ult_p ?mhcd_ult_v ?mhcd_ult_l))
)
(:method (helper_carry_direct ?mhcdo_ult_v ?mhcdo_ult_p ?mhcdo_m_lo ?mhcdo_ult_l)
method_helper_carry_direct_noMoveFirst
(
(type_Vehicle ?mhcdo_ult_v) (type_Package ?mhcdo_ult_p) (type_Location ?mhcdo_m_lo) (type_Location ?mhcdo_ult_l)
(type_Location ?mhcdo_m_lo) (type_Location ?mhcdo_ult_l) (type_Package ?mhcdo_ult_p) (type_Vehicle ?mhcdo_ult_v)
)
((load_top ?mhcdo_ult_p ?mhcdo_ult_v ?mhcdo_m_lo) (move ?mhcdo_ult_v ?mhcdo_m_lo ?mhcdo_ult_l) (unload_top ?mhcdo_ult_p ?mhcdo_ult_v ?mhcdo_ult_l))
)
(:method (helper_carry_otd ?mhcctc_cd_p ?mhcctc_gtttccotd_o ?mhcctc_gtttccotd_co ?mhcctc_gtttccotd_t1 ?mhcctc_gtttccotd_cl ?mhcctc_gtttccotd_cd)
method_helper_carry_cbtc_cd
(
(type_Package ?mhcctc_cd_p) (type_TCenter ?mhcctc_gtttccotd_o) (type_City ?mhcctc_gtttccotd_co) (type_TCenter ?mhcctc_gtttccotd_t1) (type_Not_TCenter ?mhcctc_gtttccotd_cl) (type_City ?mhcctc_gtttccotd_cd)
(type_Package ?mhcctc_cd_p) (type_City ?mhcctc_gtttccotd_cd) (type_Not_TCenter ?mhcctc_gtttccotd_cl) (type_City ?mhcctc_gtttccotd_co) (type_TCenter ?mhcctc_gtttccotd_o) (type_TCenter ?mhcctc_gtttccotd_t1)
)
((carry_between_tcenters ?mhcctc_cd_p ?mhcctc_gtttccotd_o ?mhcctc_gtttccotd_t1) (!go_through_two_tcenters_cities_otd ?mhcctc_gtttccotd_cl ?mhcctc_gtttccotd_co ?mhcctc_gtttccotd_cd ?mhcctc_gtttccotd_o ?mhcctc_gtttccotd_t1) (carry_direct ?mhcctc_cd_p ?mhcctc_gtttccotd_t1 ?mhcctc_gtttccotd_cl))
)
(:method (helper_carry_ott ?mhccct_cbt_p ?mhccct_gtttccott_cl ?mhccct_gtttccott_co ?mhccct_gtttccott_to ?mhccct_gtttccott_td ?mhccct_gtttccott_cd)
method_helper_carry_cd_cbtc
(
(type_Package ?mhccct_cbt_p) (type_City_Location ?mhccct_gtttccott_cl) (type_City ?mhccct_gtttccott_co) (type_TCenter ?mhccct_gtttccott_to) (type_TCenter ?mhccct_gtttccott_td) (type_City ?mhccct_gtttccott_cd)
(type_Package ?mhccct_cbt_p) (type_City ?mhccct_gtttccott_cd) (type_City_Location ?mhccct_gtttccott_cl) (type_City ?mhccct_gtttccott_co) (type_TCenter ?mhccct_gtttccott_td) (type_TCenter ?mhccct_gtttccott_to)
)
((carry_direct ?mhccct_cbt_p ?mhccct_gtttccott_cl ?mhccct_gtttccott_to) (!go_through_two_tcenters_cities_ott ?mhccct_gtttccott_cl ?mhccct_gtttccott_co ?mhccct_gtttccott_cd ?mhccct_gtttccott_to ?mhccct_gtttccott_td) (carry_between_tcenters ?mhccct_cbt_p ?mhccct_gtttccott_to ?mhccct_gtttccott_td))
)
(:method (helper_carry_ottd ?mhcccc_cdd_p ?mhcccc_gtttc_lo ?mhcccc_gtttc_co ?mhcccc_gtttc_t1 ?mhcccc_gtttc_t2 ?mhcccc_gtttc_ld ?mhcccc_gtttc_cd)
method_helper_carry_cd_cbtc_cd
(
(type_Package ?mhcccc_cdd_p) (type_Not_TCenter ?mhcccc_gtttc_lo) (type_City ?mhcccc_gtttc_co) (type_TCenter ?mhcccc_gtttc_t1) (type_TCenter ?mhcccc_gtttc_t2) (type_Not_TCenter ?mhcccc_gtttc_ld) (type_City ?mhcccc_gtttc_cd)
(type_Package ?mhcccc_cdd_p) (type_City ?mhcccc_gtttc_cd) (type_City ?mhcccc_gtttc_co) (type_Not_TCenter ?mhcccc_gtttc_ld) (type_Not_TCenter ?mhcccc_gtttc_lo) (type_TCenter ?mhcccc_gtttc_t1) (type_TCenter ?mhcccc_gtttc_t2)
)
((carry_direct ?mhcccc_cdd_p ?mhcccc_gtttc_lo ?mhcccc_gtttc_t1) (!go_through_two_tcenters_cities_ottd ?mhcccc_gtttc_lo ?mhcccc_gtttc_ld ?mhcccc_gtttc_co ?mhcccc_gtttc_cd ?mhcccc_gtttc_t1 ?mhcccc_gtttc_t2) (carry_between_tcenters ?mhcccc_cdd_p ?mhcccc_gtttc_t1 ?mhcccc_gtttc_t2) (carry_direct ?mhcccc_cdd_p ?mhcccc_gtttc_t2 ?mhcccc_gtttc_ld))
)
(:method (helper_carry_tt ?mhch_tch_p ?mhch_gtttctt_to ?mhch_gtttctt_co ?mhch_gtttctt_td ?mhch_gtttctt_cd)
method_helper_carry_cvh
(
(type_Package ?mhch_tch_p) (type_TCenter ?mhch_gtttctt_to) (type_City ?mhch_gtttctt_co) (type_TCenter ?mhch_gtttctt_td) (type_City ?mhch_gtttctt_cd)
(type_City ?mhch_gtttctt_cd) (type_City ?mhch_gtttctt_co) (type_TCenter ?mhch_gtttctt_td) (type_TCenter ?mhch_gtttctt_to) (type_Package ?mhch_tch_p)
)
((carry_via_hub ?mhch_tch_p ?mhch_gtttctt_to ?mhch_gtttctt_td) (!go_through_two_tcenters_tt ?mhch_gtttctt_to ?mhch_gtttctt_td ?mhch_gtttctt_co ?mhch_gtttctt_cd))
)
(:method (helper_carry_via_hub_hazardous ?mhcvhh_cd2_p ?mhcvhh_gtttcvhh_to ?mhcvhh_gtttcvhh_co ?mhcvhh_gtttcvhh_ro ?mhcvhh_gtttcvhh_h ?mhcvhh_gtttcvhh_ch ?mhcvhh_gtttcvhh_td ?mhcvhh_gtttcvhh_cd ?mhcvhh_gtttcvhh_rd)
method_helper_carry_via_hub_hazardous
(
(type_Package ?mhcvhh_cd2_p) (type_TCenter ?mhcvhh_gtttcvhh_to) (type_City ?mhcvhh_gtttcvhh_co) (type_Region ?mhcvhh_gtttcvhh_ro) (type_Hub ?mhcvhh_gtttcvhh_h) (type_City ?mhcvhh_gtttcvhh_ch) (type_TCenter ?mhcvhh_gtttcvhh_td) (type_City ?mhcvhh_gtttcvhh_cd) (type_Region ?mhcvhh_gtttcvhh_rd)
(type_Package ?mhcvhh_cd2_p) (type_City ?mhcvhh_gtttcvhh_cd) (type_City ?mhcvhh_gtttcvhh_ch) (type_City ?mhcvhh_gtttcvhh_co) (type_Hub ?mhcvhh_gtttcvhh_h) (type_Region ?mhcvhh_gtttcvhh_rd) (type_Region ?mhcvhh_gtttcvhh_ro) (type_TCenter ?mhcvhh_gtttcvhh_td) (type_TCenter ?mhcvhh_gtttcvhh_to)
)
((carry_direct ?mhcvhh_cd2_p ?mhcvhh_gtttcvhh_to ?mhcvhh_gtttcvhh_h) (!go_through_two_tcenters_via_hub_hazardous ?mhcvhh_gtttcvhh_to ?mhcvhh_gtttcvhh_td ?mhcvhh_gtttcvhh_h ?mhcvhh_gtttcvhh_co ?mhcvhh_gtttcvhh_ch ?mhcvhh_gtttcvhh_cd ?mhcvhh_gtttcvhh_ro ?mhcvhh_gtttcvhh_rd) (carry_direct ?mhcvhh_cd2_p ?mhcvhh_gtttcvhh_h ?mhcvhh_gtttcvhh_td))
)
(:method (helper_carry_via_hub_not_hazardous ?mhcvhn_cd2_p ?mhcvhn_gtttcvhnh_to ?mhcvhn_gtttcvhnh_co ?mhcvhn_gtttcvhnh_ro ?mhcvhn_gtttcvhnh_h ?mhcvhn_gtttcvhnh_td ?mhcvhn_gtttcvhnh_cd ?mhcvhn_gtttcvhnh_rd)
method_helper_carry_via_hub_not_hazardous
(
(type_Package ?mhcvhn_cd2_p) (type_TCenter ?mhcvhn_gtttcvhnh_to) (type_City ?mhcvhn_gtttcvhnh_co) (type_Region ?mhcvhn_gtttcvhnh_ro) (type_Hub ?mhcvhn_gtttcvhnh_h) (type_TCenter ?mhcvhn_gtttcvhnh_td) (type_City ?mhcvhn_gtttcvhnh_cd) (type_Region ?mhcvhn_gtttcvhnh_rd)
(type_Package ?mhcvhn_cd2_p) (type_City ?mhcvhn_gtttcvhnh_cd) (type_City ?mhcvhn_gtttcvhnh_co) (type_Hub ?mhcvhn_gtttcvhnh_h) (type_Region ?mhcvhn_gtttcvhnh_rd) (type_Region ?mhcvhn_gtttcvhnh_ro) (type_TCenter ?mhcvhn_gtttcvhnh_td) (type_TCenter ?mhcvhn_gtttcvhnh_to)
)
((carry_direct ?mhcvhn_cd2_p ?mhcvhn_gtttcvhnh_to ?mhcvhn_gtttcvhnh_h) (!go_through_two_tcenters_via_hub_not_hazardous ?mhcvhn_gtttcvhnh_to ?mhcvhn_gtttcvhnh_td ?mhcvhn_gtttcvhnh_co ?mhcvhn_gtttcvhnh_cd ?mhcvhn_gtttcvhnh_ro ?mhcvhn_gtttcvhnh_rd ?mhcvhn_gtttcvhnh_h) (carry_direct ?mhcvhn_cd2_p ?mhcvhn_gtttcvhnh_h ?mhcvhn_gtttcvhnh_td))
)
(:method (helper_move_traincar ?mhmt_dtc_tc ?mhmt_dtc_t ?mhmt_md_lo ?mhmt_dtc_l)
method_helper_move_traincar
(
(type_Traincar ?mhmt_dtc_tc) (type_Train ?mhmt_dtc_t) (type_Location ?mhmt_md_lo) (type_Location ?mhmt_dtc_l)
(type_Location ?mhmt_dtc_l) (type_Train ?mhmt_dtc_t) (type_Traincar ?mhmt_dtc_tc) (type_Location ?mhmt_md_lo) (type_Location ?mhmt_mo_lo)
)
((move ?mhmt_dtc_t ?mhmt_mo_lo ?mhmt_md_lo) (!attach_train_car ?mhmt_dtc_t ?mhmt_dtc_tc ?mhmt_md_lo) (move ?mhmt_dtc_t ?mhmt_md_lo ?mhmt_dtc_l) (!detach_train_car ?mhmt_dtc_t ?mhmt_dtc_tc ?mhmt_dtc_l))
)
(:method (helper_move_traincar ?mhmtn_dtc_tc ?mhmtn_dtc_t ?mhmtn_md_lo ?mhmtn_dtc_l)
method_helper_move_traincar_noMoveFirst
(
(type_Traincar ?mhmtn_dtc_tc) (type_Train ?mhmtn_dtc_t) (type_Location ?mhmtn_md_lo) (type_Location ?mhmtn_dtc_l)
(type_Location ?mhmtn_dtc_l) (type_Train ?mhmtn_dtc_t) (type_Traincar ?mhmtn_dtc_tc) (type_Location ?mhmtn_md_lo)
)
((!attach_train_car ?mhmtn_dtc_t ?mhmtn_dtc_tc ?mhmtn_md_lo) (move ?mhmtn_dtc_t ?mhmtn_md_lo ?mhmtn_dtc_l) (!detach_train_car ?mhmtn_dtc_t ?mhmtn_dtc_tc ?mhmtn_dtc_l))
)
(:method (load ?mlr_lp_p ?mlr_cd_rv ?mlr_lp_l)
method_load_regular
(
(type_Package ?mlr_lp_p) (type_Vehicle ?mlr_cd_rv) (type_Location ?mlr_lp_l)
(type_Regular_Vehicle ?mlr_cd_rv) (type_Location ?mlr_lp_l) (type_Package ?mlr_lp_p)
)
((!open_door ?mlr_cd_rv) (!load_package ?mlr_lp_p ?mlr_cd_rv ?mlr_lp_l) (!close_door ?mlr_cd_rv))
)
(:method (load ?mlf_pdpv_p ?mlf_pdpv_fv ?mlf_pdpv_l)
method_load_flatbed
(
(type_Package ?mlf_pdpv_p) (type_Vehicle ?mlf_pdpv_fv) (type_Location ?mlf_pdpv_l)
(type_Crane ?mlf_pdpv_c) (type_Flatbed_Vehicle ?mlf_pdpv_fv) (type_Location ?mlf_pdpv_l) (type_Package ?mlf_pdpv_p)
)
((!pick_up_package_ground ?mlf_pdpv_p ?mlf_pdpv_c ?mlf_pdpv_l) (!put_down_package_vehicle ?mlf_pdpv_p ?mlf_pdpv_c ?mlf_pdpv_fv ?mlf_pdpv_l))
)
(:method (load ?mlh_fh_p ?mlh_dc_h ?mlh_fh_l)
method_load_hopper
(
(type_Package ?mlh_fh_p) (type_Vehicle ?mlh_dc_h) (type_Location ?mlh_fh_l)
(type_Hopper_Vehicle ?mlh_dc_h) (type_Location ?mlh_fh_l) (type_Package ?mlh_fh_p)
)
((!connect_chute ?mlh_dc_h) (!fill_hopper ?mlh_fh_p ?mlh_dc_h ?mlh_fh_l) (!disconnect_chute ?mlh_dc_h))
)
(:method (load ?mlt_dch_l ?mlt_dch_tv ?mlt_ft_lo)
method_load_tanker
(
(type_Package ?mlt_dch_l) (type_Vehicle ?mlt_dch_tv) (type_Location ?mlt_ft_lo)
(type_Liquid ?mlt_dch_l) (type_Tanker_Vehicle ?mlt_dch_tv) (type_Location ?mlt_ft_lo)
)
((!connect_hose ?mlt_dch_tv ?mlt_dch_l) (!open_valve ?mlt_dch_tv) (!fill_tank ?mlt_dch_tv ?mlt_dch_l ?mlt_ft_lo) (!close_valve ?mlt_dch_tv) (!disconnect_hose ?mlt_dch_tv ?mlt_dch_l))
)
(:method (load ?mll_ll_p ?mll_rr_v ?mll_ll_l)
method_load_livestock
(
(type_Package ?mll_ll_p) (type_Vehicle ?mll_rr_v) (type_Location ?mll_ll_l)
(type_Location ?mll_ll_l) (type_Livestock_Package ?mll_ll_p) (type_Vehicle ?mll_rr_v)
)
((!lower_ramp ?mll_rr_v) (!fill_trough ?mll_rr_v) (!load_livestock ?mll_ll_p ?mll_rr_v ?mll_ll_l) (!raise_ramp ?mll_rr_v))
)
(:method (load ?mlc_lc_c ?mlc_rr_v ?mlc_lc_l)
method_load_cars
(
(type_Package ?mlc_lc_c) (type_Vehicle ?mlc_rr_v) (type_Location ?mlc_lc_l)
(type_Cars ?mlc_lc_c) (type_Location ?mlc_lc_l) (type_Vehicle ?mlc_rr_v)
)
((!lower_ramp ?mlc_rr_v) (!load_cars ?mlc_lc_c ?mlc_rr_v ?mlc_lc_l) (!raise_ramp ?mlc_rr_v))
)
(:method (load ?mla_lp_p ?mla_dcr_ap ?mla_dcr_l)
method_load_airplane
(
(type_Package ?mla_lp_p) (type_Vehicle ?mla_dcr_ap) (type_Location ?mla_dcr_l)
(type_Airplane ?mla_dcr_ap) (type_Location ?mla_dcr_l) (type_Plane_Ramp ?mla_dcr_pr) (type_Package ?mla_lp_p)
)
((!attach_conveyor_ramp ?mla_dcr_ap ?mla_dcr_pr ?mla_dcr_l) (!open_door ?mla_dcr_ap) (!load_package ?mla_lp_p ?mla_dcr_ap ?mla_dcr_l) (!close_door ?mla_dcr_ap) (!detach_conveyor_ramp ?mla_dcr_ap ?mla_dcr_pr ?mla_dcr_l))
)
(:method (load_top ?mlmn_l_p ?mlmn_l_v ?mlmn_l_l)
method_load_top_normal
(
(type_Package ?mlmn_l_p) (type_Vehicle ?mlmn_l_v) (type_Location ?mlmn_l_l)
(type_Location ?mlmn_l_l) (type_Package ?mlmn_l_p) (type_Vehicle ?mlmn_l_v)
)
((load ?mlmn_l_p ?mlmn_l_v ?mlmn_l_l))
)
(:method (load_top ?mlmh_l_p ?mlmh_l_v ?mlmh_l_l)
method_load_top_hazardous
(
(type_Package ?mlmh_l_p) (type_Vehicle ?mlmh_l_v) (type_Location ?mlmh_l_l)
(type_Location ?mlmh_l_l) (type_Package ?mlmh_l_p) (type_Vehicle ?mlmh_l_v)
)
((!affix_warning_signs ?mlmh_l_v) (load ?mlmh_l_p ?mlmh_l_v ?mlmh_l_l))
)
(:method (load_top ?mlmv_l_p ?mlmv_pci_a ?mlmv_l_l)
method_load_top_valuable
(
(type_Package ?mlmv_l_p) (type_Vehicle ?mlmv_pci_a) (type_Location ?mlmv_l_l)
(type_Location ?mlmv_l_l) (type_Package ?mlmv_l_p) (type_Armored ?mlmv_pci_a)
)
((!post_guard_outside ?mlmv_pci_a) (load ?mlmv_l_p ?mlmv_pci_a ?mlmv_l_l) (!post_guard_inside ?mlmv_pci_a))
)
(:method (move ?mmnt_mvnt_v ?mmnt_mvnt_o ?mmnt_mvnt_d)
method_move_no_traincar
(
(type_Vehicle ?mmnt_mvnt_v) (type_Location ?mmnt_mvnt_o) (type_Location ?mmnt_mvnt_d)
(type_Location ?mmnt_mvnt_d) (type_Location ?mmnt_mvnt_o) (type_Route ?mmnt_mvnt_r) (type_Vehicle ?mmnt_mvnt_v)
)
((!move_vehicle_no_traincar ?mmnt_mvnt_v ?mmnt_mvnt_o ?mmnt_mvnt_r ?mmnt_mvnt_d))
)
(:method (move ?mmt_hmt_v ?mmt_hmt_o ?mmt_hmt_d)
method_move_traincar
(
(type_Vehicle ?mmt_hmt_v) (type_Location ?mmt_hmt_o) (type_Location ?mmt_hmt_d)
(type_Location ?mmt_hmt_d) (type_Location ?mmt_hmt_o) (type_Train ?mmt_hmt_t) (type_Traincar ?mmt_hmt_v)
)
((helper_move_traincar ?mmt_hmt_v ?mmt_hmt_t ?mmt_hmt_o ?mmt_hmt_d))
)
(:method (pickup ?mpn_cf_p)
method_pickup_normal
(
(type_Package ?mpn_cf_p)
(type_Package ?mpn_cf_p)
)
((!collect_fees ?mpn_cf_p))
)
(:method (pickup ?mph_op_h)
method_pickup_hazardous
(
(type_Package ?mph_op_h)
(type_Hazardous ?mph_op_h)
)
((!collect_fees ?mph_op_h) (!obtain_permit ?mph_op_h))
)
(:method (pickup ?mpv_ci_v)
method_pickup_valuable
(
(type_Package ?mpv_ci_v)
(type_Valuable ?mpv_ci_v)
)
((!collect_fees ?mpv_ci_v) (!collect_insurance ?mpv_ci_v))
)
(:method (transport ?mtpcd_de_p ?mtpcd_ca_lo ?mtpcd_ca_ld)
method_transport_pi_ca_de
(
(type_Package ?mtpcd_de_p) (type_Location ?mtpcd_ca_lo) (type_Location ?mtpcd_ca_ld)
(type_Location ?mtpcd_ca_ld) (type_Location ?mtpcd_ca_lo) (type_Package ?mtpcd_de_p)
)
((pickup ?mtpcd_de_p) (carry ?mtpcd_de_p ?mtpcd_ca_lo ?mtpcd_ca_ld) (deliver ?mtpcd_de_p))
)
(:method (unload ?mur_up_p ?mur_cd_rv ?mur_up_l)
method_unload_regular
(
(type_Package ?mur_up_p) (type_Vehicle ?mur_cd_rv) (type_Location ?mur_up_l)
(type_Regular_Vehicle ?mur_cd_rv) (type_Location ?mur_up_l) (type_Package ?mur_up_p)
)
((!open_door ?mur_cd_rv) (!unload_package ?mur_up_p ?mur_cd_rv ?mur_up_l) (!close_door ?mur_cd_rv))
)
(:method (unload ?muf_pdpg_p ?muf_pupv_fv ?muf_pdpg_l)
method_unload_flatbed
(
(type_Package ?muf_pdpg_p) (type_Vehicle ?muf_pupv_fv) (type_Location ?muf_pdpg_l)
(type_Crane ?muf_pdpg_c) (type_Location ?muf_pdpg_l) (type_Package ?muf_pdpg_p) (type_Flatbed_Vehicle ?muf_pupv_fv)
)
((!pick_up_package_vehicle ?muf_pdpg_p ?muf_pdpg_c ?muf_pupv_fv ?muf_pdpg_l) (!put_down_package_ground ?muf_pdpg_p ?muf_pdpg_c ?muf_pdpg_l))
)
(:method (unload ?muh_eh_p ?muh_dc_h ?muh_eh_l)
method_unload_hopper
(
(type_Package ?muh_eh_p) (type_Vehicle ?muh_dc_h) (type_Location ?muh_eh_l)
(type_Hopper_Vehicle ?muh_dc_h) (type_Location ?muh_eh_l) (type_Package ?muh_eh_p)
)
((!connect_chute ?muh_dc_h) (!empty_hopper ?muh_eh_p ?muh_dc_h ?muh_eh_l) (!disconnect_chute ?muh_dc_h))
)
(:method (unload ?mut_dch_l ?mut_dch_tv ?mut_et_lo)
method_unload_tanker
(
(type_Package ?mut_dch_l) (type_Vehicle ?mut_dch_tv) (type_Location ?mut_et_lo)
(type_Liquid ?mut_dch_l) (type_Tanker_Vehicle ?mut_dch_tv) (type_Location ?mut_et_lo)
)
((!connect_hose ?mut_dch_tv ?mut_dch_l) (!open_valve ?mut_dch_tv) (!empty_tank ?mut_dch_tv ?mut_dch_l ?mut_et_lo) (!close_valve ?mut_dch_tv) (!disconnect_hose ?mut_dch_tv ?mut_dch_l))
)
(:method (unload ?mul_ull_p ?mul_rr_v ?mul_ull_l)
method_unload_livestock
(
(type_Package ?mul_ull_p) (type_Vehicle ?mul_rr_v) (type_Location ?mul_ull_l)
(type_Vehicle ?mul_rr_v) (type_Location ?mul_ull_l) (type_Livestock_Package ?mul_ull_p)
)
((!lower_ramp ?mul_rr_v) (!unload_livestock ?mul_ull_p ?mul_rr_v ?mul_ull_l) (!do_clean_interior ?mul_rr_v) (!raise_ramp ?mul_rr_v))
)
(:method (unload ?muc_uc_c ?muc_rr_v ?muc_uc_l)
method_unload_cars
(
(type_Package ?muc_uc_c) (type_Vehicle ?muc_rr_v) (type_Location ?muc_uc_l)
(type_Vehicle ?muc_rr_v) (type_Cars ?muc_uc_c) (type_Location ?muc_uc_l)
)
((!lower_ramp ?muc_rr_v) (!unload_cars ?muc_uc_c ?muc_rr_v ?muc_uc_l) (!raise_ramp ?muc_rr_v))
)
(:method (unload ?mua_up_p ?mua_dcr_ap ?mua_dcr_l)
method_unload_airplane
(
(type_Package ?mua_up_p) (type_Vehicle ?mua_dcr_ap) (type_Location ?mua_dcr_l)
(type_Airplane ?mua_dcr_ap) (type_Location ?mua_dcr_l) (type_Plane_Ramp ?mua_dcr_pr) (type_Package ?mua_up_p)
)
((!attach_conveyor_ramp ?mua_dcr_ap ?mua_dcr_pr ?mua_dcr_l) (!open_door ?mua_dcr_ap) (!unload_package ?mua_up_p ?mua_dcr_ap ?mua_dcr_l) (!close_door ?mua_dcr_ap) (!detach_conveyor_ramp ?mua_dcr_ap ?mua_dcr_pr ?mua_dcr_l))
)
(:method (unload_top ?mumn_ul_p ?mumn_ul_v ?mumn_ul_l)
method_unload_top_normal
(
(type_Package ?mumn_ul_p) (type_Vehicle ?mumn_ul_v) (type_Location ?mumn_ul_l)
(type_Location ?mumn_ul_l) (type_Package ?mumn_ul_p) (type_Vehicle ?mumn_ul_v)
)
((unload ?mumn_ul_p ?mumn_ul_v ?mumn_ul_l))
)
(:method (unload_top ?mumh_ul_p ?mumh_ul_v ?mumh_ul_l)
method_unload_top_hazardous
(
(type_Package ?mumh_ul_p) (type_Vehicle ?mumh_ul_v) (type_Location ?mumh_ul_l)
(type_Location ?mumh_ul_l) (type_Package ?mumh_ul_p) (type_Vehicle ?mumh_ul_v)
)
((unload ?mumh_ul_p ?mumh_ul_v ?mumh_ul_l) (!decontaminate_interior ?mumh_ul_v) (!remove_warning_signs ?mumh_ul_v))
)
(:method (unload_top ?mumv_ul_p ?mumv_ul_v ?mumv_ul_l)
method_unload_top_valuable
(
(type_Package ?mumv_ul_p) (type_Vehicle ?mumv_ul_v) (type_Location ?mumv_ul_l)
(type_Location ?mumv_ul_l) (type_Package ?mumv_ul_p) (type_Vehicle ?mumv_ul_v)
)
((!post_guard_outside ?mumv_ul_v) (unload ?mumv_ul_p ?mumv_ul_v ?mumv_ul_l) (!remove_guard ?mumv_ul_v))
)
))
|
3513645a502ca2aed23b3066cf46663c5463d47b4073f8e758ac150faa9b2fba | pauek/arc-sbcl | cps.lisp |
(in-package :arc/test)
(deftest t-equal/cps
(chk (%equal/cps '(a b) '(a b)))
(chk (%equal/cps '(#\a 1) '(#\a 1)))
(chk (%equal/cps '(#:k) '(#:z)))
(chk (not (%equal/cps '(#:i #:i) '(#:a #:b))))
(chk (not (%equal/cps '(#:i #:j) '(#:a #:a))))
(chk (%equal/cps '(fn (#:k1) a b c) '(fn (#:k2) a b c)))
(chk (%equal/cps '(#:k2 #:k3 1 #\a "b") '(#:a2 #:a3 1 #\a "b")))
(chk (not (%equal/cps '(#:k2 #:k3 1 #\a "b") '(#:a3 #:a3 1 #\a "b")))))
(deftest c-simple
(chkcps "a" 'a)
(chkcps "#\\a" #\a)
(chkcps "\"hi\"" "hi")
(chkcps "(+ 1 x)" '(+ 1 x))
(chkcps "(sqrt (+ 1 x))" '(sqrt (+ 1 x)))
(chkcps "(+ '(1 2) '(3 4))" '(+ '(1 2) '(3 4))))
(deftest c-backq
(chkcps "`(,a)" '`(,a))
(chkcps "(fn (a) `(,a))" '(fn (#:k a) (#:k `(,a))))
(chkcps "(fn (a b) `(1 ,a ,@b))" '(fn (#:k a b) (#:k `(1 ,a ,@b))))
(chkcps "(fn (x) `(a ,(b x)))" '(fn (#:k x)
(b (fn (#:b)
(#:k `(a ,#:b)))
x))))
(deftest c-block
(chkcps "(fn () a)" '(fn (#:k) (#:k a)))
(chkcps "(fn () a b)" '(fn (#:k) (#:k b)))
(chkcps "(fn () a b c)" '(fn (#:k) (#:k c)))
(chkcps "(fn (x) (+ 1 (sqrt x)))" '(fn (#:k x) (#:k (+ 1 (sqrt x)))))
(chkcps "(fn () (a 1) b)" '(fn (#:k) (a (fn (#:r) (#:k b)) 1)))
(chkcps "(fn () a (b 1) c)" '(fn (#:k) (b (fn (#:r) (#:k c)) 1)))
(chkcps "(fn () a (b 1) c d)" '(fn (#:k) (b (fn (#:r) (#:k d)) 1)))
(chkcps "(fn () (a 1))" '(fn (#:k) (a (fn (#:r) (#:k #:r)) 1)))
(chkcps "(fn () (a 1) (b 2))"
'(fn (#:k) (a (fn (#:r1) (b (fn (#:r2) (#:k #:r2)) 2)) 1))))
(deftest c-funcall
(chkcps "(+)" '(+))
(chkcps "(a (+))" '(a (fn (#:k) #:k) (+)))
(chkcps "(a (b))" '(b (fn (#:k1) (a (fn (#:k2) #:k2) #:k1))))
(chkcps "(a (+ 1))" '(a (fn (#:k) #:k) (+ 1)))
(chkcps "(a (+ 1) (b 2))" '(b (fn (#:k1)
(a (fn (#:k2) #:k2)
(+ 1)
#:k1))
2))
(chkcps "(+ 1 (a 1 (b 2) 3))"
'(b (fn (#:k1) (a (fn (#:k2) (+ 1 #:k2)) 1 #:k1 3)) 2))
(chkcps "(+ 1 (- 2 (hi (sqrt x))))"
'(hi (fn (#:k)
(+ 1 (- 2 #:k)))
(sqrt x)))
(chkcps "(a (b x))"
'(b (fn (#:r1) (a (fn (#:r2) #:r2) #:r1)) x))
(chkcps "(+ (a x) 2)"
'(a (fn (#:r1) (+ #:r1 2)) x))
(chkcps "(+ (a 1) (b x))"
'(a (fn (#:r1)
(b (fn (#:r2)
(+ #:r1 #:r2))
x))
1))
(chkcps "(* (a (+ (b 1) 1)) 2)"
'(b (fn (#:k1)
(a (fn (#:k2)
(* #:k2 2))
(+ #:k1 1)))
1))
(chkcps "(fn (x f) (f (+ 1 x)))"
'(fn (#:k x f)
(f (fn (#:r1)
(#:k #:r1))
(+ 1 x)))))
(deftest c-if
(chkcps "(if a b)"
'((fn (#:k) (if a (#:k b) (#:k nil)))
(fn (#:r) #:r)))
(chkcps "(if a b c)"
'((fn (#:k) (if a (#:k b) (#:k c)))
(fn (#:r) #:r)))
(chkcps "(if (< a b) c d)"
'((fn (#:k) (if (< a b) (#:k c) (#:k d)))
(fn (#:r) #:r)))
(chkcps "(if (a 1) x y)"
'((fn (#:k) (a (fn (#:a) (if #:a (#:k x) (#:k y))) 1))
(fn (#:r) #:r)))
(chkcps "(if a (x 2) y)"
'((fn (#:k) (if a (x (fn (#:x) (#:k #:x)) 2) (#:k y)))
(fn (#:r) #:r)))
(chkcps "(+ 1 (if a x y))"
'((fn (#:k)
(if a (#:k x) (#:k y)))
(fn (#:r) (+ 1 #:r))))
(chkcps "(* 2 i (if (a 1) x y))"
'((fn (#:k)
(a (fn (#:r1)
(if #:r1 (#:k x) (#:k y)))
1))
(fn (#:x) (* 2 i #:x))))
(chkcps "(if x 0 y 1 (a 0) 2 3)"
'((fn (#:k)
(if x (#:k 0)
y (#:k 1)
(a (fn (#:a)
(if #:a (#:k 2) (#:k 3)))
0)))
(fn (#:r) #:r)))
(chkcps "(if a b c d e)"
'((fn (#:k)
(if a (#:k b)
c (#:k d)
(#:k e)))
(fn (#:r) #:r)))
(chkcps "(if x 0 (a 1) 2 y 3 4)"
'((fn (#:k)
(if x (#:k 0)
(a (fn (#:a)
(if #:a (#:k 2)
y (#:k 3) (#:k 4)))
1)))
(fn (#:r) #:r)))
(chkcps "(if (a 1) x (b 2) y z)"
'((fn (#:k)
(a (fn (#:a)
(if #:a (#:k x)
(b (fn (#:b)
(if #:b (#:k y) (#:k z)))
2)))
1))
(fn (#:r) #:r))))
(deftest c-set
(chkcps "(set a 1)"
'(set a 1))
(chkcps "(set a 1 b 2)"
'(:do (set a 1) (set b 2)))
(chkcps "(+ 1 (set a (b 2)))"
'(b (fn (#:b) (+ 1 (set a #:b))) 2))
(chkcps "(set a 1 b (c 2))"
'(:do (set a 1) (c (fn (#:c) (set b #:c)) 2)))
(chkcps "(+ 1 (set a 1 b (c 2)))"
'(:do (set a 1) (c (fn (#:c) (+ 1 (set b #:c))) 2))))
(deftest c-complex
(chkcps "(set a (if (b 1) 5 10))"
'((fn (#:i)
(b (fn (#:b)
(if #:b (#:i 5) (#:i 10)))
1))
(fn (#:r) (set a #:r))))
(chkcps "((fn ()
(if a b c)
(set x 10)))"
'((fn (#:k)
((fn (#:i) (if a (#:i b) (#:i c)))
(fn (#:n) (#:k (set x 10)))))
(fn (#:r) #:r))))
| null | https://raw.githubusercontent.com/pauek/arc-sbcl/c1a5be2a55b9b3e1327409f71d9bc985577198d6/test/cps.lisp | lisp |
(in-package :arc/test)
(deftest t-equal/cps
(chk (%equal/cps '(a b) '(a b)))
(chk (%equal/cps '(#\a 1) '(#\a 1)))
(chk (%equal/cps '(#:k) '(#:z)))
(chk (not (%equal/cps '(#:i #:i) '(#:a #:b))))
(chk (not (%equal/cps '(#:i #:j) '(#:a #:a))))
(chk (%equal/cps '(fn (#:k1) a b c) '(fn (#:k2) a b c)))
(chk (%equal/cps '(#:k2 #:k3 1 #\a "b") '(#:a2 #:a3 1 #\a "b")))
(chk (not (%equal/cps '(#:k2 #:k3 1 #\a "b") '(#:a3 #:a3 1 #\a "b")))))
(deftest c-simple
(chkcps "a" 'a)
(chkcps "#\\a" #\a)
(chkcps "\"hi\"" "hi")
(chkcps "(+ 1 x)" '(+ 1 x))
(chkcps "(sqrt (+ 1 x))" '(sqrt (+ 1 x)))
(chkcps "(+ '(1 2) '(3 4))" '(+ '(1 2) '(3 4))))
(deftest c-backq
(chkcps "`(,a)" '`(,a))
(chkcps "(fn (a) `(,a))" '(fn (#:k a) (#:k `(,a))))
(chkcps "(fn (a b) `(1 ,a ,@b))" '(fn (#:k a b) (#:k `(1 ,a ,@b))))
(chkcps "(fn (x) `(a ,(b x)))" '(fn (#:k x)
(b (fn (#:b)
(#:k `(a ,#:b)))
x))))
(deftest c-block
(chkcps "(fn () a)" '(fn (#:k) (#:k a)))
(chkcps "(fn () a b)" '(fn (#:k) (#:k b)))
(chkcps "(fn () a b c)" '(fn (#:k) (#:k c)))
(chkcps "(fn (x) (+ 1 (sqrt x)))" '(fn (#:k x) (#:k (+ 1 (sqrt x)))))
(chkcps "(fn () (a 1) b)" '(fn (#:k) (a (fn (#:r) (#:k b)) 1)))
(chkcps "(fn () a (b 1) c)" '(fn (#:k) (b (fn (#:r) (#:k c)) 1)))
(chkcps "(fn () a (b 1) c d)" '(fn (#:k) (b (fn (#:r) (#:k d)) 1)))
(chkcps "(fn () (a 1))" '(fn (#:k) (a (fn (#:r) (#:k #:r)) 1)))
(chkcps "(fn () (a 1) (b 2))"
'(fn (#:k) (a (fn (#:r1) (b (fn (#:r2) (#:k #:r2)) 2)) 1))))
(deftest c-funcall
(chkcps "(+)" '(+))
(chkcps "(a (+))" '(a (fn (#:k) #:k) (+)))
(chkcps "(a (b))" '(b (fn (#:k1) (a (fn (#:k2) #:k2) #:k1))))
(chkcps "(a (+ 1))" '(a (fn (#:k) #:k) (+ 1)))
(chkcps "(a (+ 1) (b 2))" '(b (fn (#:k1)
(a (fn (#:k2) #:k2)
(+ 1)
#:k1))
2))
(chkcps "(+ 1 (a 1 (b 2) 3))"
'(b (fn (#:k1) (a (fn (#:k2) (+ 1 #:k2)) 1 #:k1 3)) 2))
(chkcps "(+ 1 (- 2 (hi (sqrt x))))"
'(hi (fn (#:k)
(+ 1 (- 2 #:k)))
(sqrt x)))
(chkcps "(a (b x))"
'(b (fn (#:r1) (a (fn (#:r2) #:r2) #:r1)) x))
(chkcps "(+ (a x) 2)"
'(a (fn (#:r1) (+ #:r1 2)) x))
(chkcps "(+ (a 1) (b x))"
'(a (fn (#:r1)
(b (fn (#:r2)
(+ #:r1 #:r2))
x))
1))
(chkcps "(* (a (+ (b 1) 1)) 2)"
'(b (fn (#:k1)
(a (fn (#:k2)
(* #:k2 2))
(+ #:k1 1)))
1))
(chkcps "(fn (x f) (f (+ 1 x)))"
'(fn (#:k x f)
(f (fn (#:r1)
(#:k #:r1))
(+ 1 x)))))
(deftest c-if
(chkcps "(if a b)"
'((fn (#:k) (if a (#:k b) (#:k nil)))
(fn (#:r) #:r)))
(chkcps "(if a b c)"
'((fn (#:k) (if a (#:k b) (#:k c)))
(fn (#:r) #:r)))
(chkcps "(if (< a b) c d)"
'((fn (#:k) (if (< a b) (#:k c) (#:k d)))
(fn (#:r) #:r)))
(chkcps "(if (a 1) x y)"
'((fn (#:k) (a (fn (#:a) (if #:a (#:k x) (#:k y))) 1))
(fn (#:r) #:r)))
(chkcps "(if a (x 2) y)"
'((fn (#:k) (if a (x (fn (#:x) (#:k #:x)) 2) (#:k y)))
(fn (#:r) #:r)))
(chkcps "(+ 1 (if a x y))"
'((fn (#:k)
(if a (#:k x) (#:k y)))
(fn (#:r) (+ 1 #:r))))
(chkcps "(* 2 i (if (a 1) x y))"
'((fn (#:k)
(a (fn (#:r1)
(if #:r1 (#:k x) (#:k y)))
1))
(fn (#:x) (* 2 i #:x))))
(chkcps "(if x 0 y 1 (a 0) 2 3)"
'((fn (#:k)
(if x (#:k 0)
y (#:k 1)
(a (fn (#:a)
(if #:a (#:k 2) (#:k 3)))
0)))
(fn (#:r) #:r)))
(chkcps "(if a b c d e)"
'((fn (#:k)
(if a (#:k b)
c (#:k d)
(#:k e)))
(fn (#:r) #:r)))
(chkcps "(if x 0 (a 1) 2 y 3 4)"
'((fn (#:k)
(if x (#:k 0)
(a (fn (#:a)
(if #:a (#:k 2)
y (#:k 3) (#:k 4)))
1)))
(fn (#:r) #:r)))
(chkcps "(if (a 1) x (b 2) y z)"
'((fn (#:k)
(a (fn (#:a)
(if #:a (#:k x)
(b (fn (#:b)
(if #:b (#:k y) (#:k z)))
2)))
1))
(fn (#:r) #:r))))
(deftest c-set
(chkcps "(set a 1)"
'(set a 1))
(chkcps "(set a 1 b 2)"
'(:do (set a 1) (set b 2)))
(chkcps "(+ 1 (set a (b 2)))"
'(b (fn (#:b) (+ 1 (set a #:b))) 2))
(chkcps "(set a 1 b (c 2))"
'(:do (set a 1) (c (fn (#:c) (set b #:c)) 2)))
(chkcps "(+ 1 (set a 1 b (c 2)))"
'(:do (set a 1) (c (fn (#:c) (+ 1 (set b #:c))) 2))))
(deftest c-complex
(chkcps "(set a (if (b 1) 5 10))"
'((fn (#:i)
(b (fn (#:b)
(if #:b (#:i 5) (#:i 10)))
1))
(fn (#:r) (set a #:r))))
(chkcps "((fn ()
(if a b c)
(set x 10)))"
'((fn (#:k)
((fn (#:i) (if a (#:i b) (#:i c)))
(fn (#:n) (#:k (set x 10)))))
(fn (#:r) #:r))))
|
|
ed035fedaf1c77a84ef63d292cb509a9c9c872f2b09eaafe0ba80b4727b068d0 | atdixon/thurber | user_score_opt.clj | (ns game.user-score-opt
(:require [thurber :as th]
[clojure.tools.logging :as log]
[deercreeklabs.lancaster :as lan])
(:import (org.apache.beam.sdk.io TextIO)
(org.apache.beam.sdk.values KV)
(org.apache.beam.sdk.transforms Sum)
(thurber.java TCoder)
(org.apache.beam.sdk.coders CustomCoder KvCoder StringUtf8Coder VarIntCoder AvroCoder)
(java.io OutputStream InputStream)
(java.nio ByteBuffer)
(org.apache.avro Schema Schema$Parser)))
;;
;; Optimization #1: To minimize payload of message bytes (and therefore storage demands for the batch or
streaming job , we will use Avro for ser / de instead of Nippy .
;;
Nippy will happily ser / de defrecord types ; each message payload will contain the full classname
of the defrecord as overhead . In many cases , especially for even types with many fields , simply using
nippy plus defrecord will be a sufficient optimization .
;;
For payload with few fields a simple Clojure map will beat the defrecord serialization .
;;
Avro is the most optimal choice as neither field names nor a type name needs to be encoded
;; in each payload.
;;
(defrecord GameActionInfo [user team score timestamp])
(lan/def-record-schema game-action-info-schema
[:user lan/string-schema]
[:team lan/string-schema]
[:score lan/int-schema]
[:timestamp lan/long-schema])
(def ^:private game-action-info-coder-impl
(proxy [CustomCoder] []
(encode [val ^OutputStream out]
(let [^bytes record-bytes (lan/serialize game-action-info-schema val)
size (count record-bytes)]
(.write out (-> (ByteBuffer/allocate 4) (.putInt size) (.array)))
(.write out record-bytes)))
(decode [^InputStream in]
(let [^bytes size-bytes (byte-array 4)
_ (.read in size-bytes)
size (.getInt (ByteBuffer/wrap size-bytes))
^bytes record-bytes (byte-array size)
_ (.read in record-bytes)]
(lan/deserialize-same game-action-info-schema record-bytes)))))
(def game-action-info-coder
(TCoder. #'game-action-info-coder-impl))
(defn- ^{:th/coder game-action-info-coder} parse-event [^String elem]
(try
;; Optimization #2: Use low-level primitive array operations on a type-hinted array to avoid
overhead with Clojure 's polymorphic suboptimal aget , etc .
(let [^"[Ljava.lang.Object;" parts (.split elem "," -1)]
(if (>= (alength parts) 4)
(->GameActionInfo
;; Optimization #2/a: clojure.core/aget here needs the array type hint above to pick the optimal
;; primitive invocation!
(.trim ^String (aget parts 0))
(.trim ^String (aget parts 1))
(Integer/parseInt (.trim ^String (aget parts 2)))
(Long/parseLong (.trim ^String (aget parts 3))))
(log/warnf "parse error on %s, missing part" elem)))
(catch NumberFormatException e
(log/warnf "parse error on %s, %s" elem (.getMessage e)))))
(def ^:private kv-string-int-coder
(KvCoder/of (StringUtf8Coder/of) (VarIntCoder/of)))
(defn- ^{:th/coder kv-string-int-coder} ->field-and-score-kv [field elem]
(KV/of (field elem) (:score elem)))
(defn ->extract-sum-and-score-xf [field]
(th/compose "extract-sum-and-score"
(th/partial #'->field-and-score-kv field)
(Sum/integersPerKey)))
(defn- ->write-to-text-xf [output row-formatter]
(th/compose "write-to-text"
row-formatter
(-> (TextIO/write)
(.to ^String output))))
(defn- create-pipeline [opts]
(let [pipeline (th/create-pipeline opts)
conf (th/get-custom-config pipeline)]
(doto pipeline
(th/apply!
(-> (TextIO/read)
(.from ^String (:input conf)))
#'parse-event
(->extract-sum-and-score-xf :user)
(->write-to-text-xf (:output conf)
Optimization # 3 : Use explicit String coder where we know we have ,
;; instead of default nippy coder.
(th/fn* ^{:th/coder (StringUtf8Coder/of)} format-row [^KV kv]
(format "user: %s, total_score: %d" (.getKey kv) (.getValue kv))))))))
(defn demo! [& args]
(-> (create-pipeline
(concat
args
(th/->beam-args
{:custom-config
{:input "gs-beam-samples/game/gaming_data*.csv"
:output "gs-demo/user-score-opt-"}})))
(.run)))
| null | https://raw.githubusercontent.com/atdixon/thurber/38b86a683e446f2deed55fad157476ae05940d50/demo/game/user_score_opt.clj | clojure |
Optimization #1: To minimize payload of message bytes (and therefore storage demands for the batch or
each message payload will contain the full classname
in each payload.
Optimization #2: Use low-level primitive array operations on a type-hinted array to avoid
Optimization #2/a: clojure.core/aget here needs the array type hint above to pick the optimal
primitive invocation!
instead of default nippy coder. | (ns game.user-score-opt
(:require [thurber :as th]
[clojure.tools.logging :as log]
[deercreeklabs.lancaster :as lan])
(:import (org.apache.beam.sdk.io TextIO)
(org.apache.beam.sdk.values KV)
(org.apache.beam.sdk.transforms Sum)
(thurber.java TCoder)
(org.apache.beam.sdk.coders CustomCoder KvCoder StringUtf8Coder VarIntCoder AvroCoder)
(java.io OutputStream InputStream)
(java.nio ByteBuffer)
(org.apache.avro Schema Schema$Parser)))
streaming job , we will use Avro for ser / de instead of Nippy .
of the defrecord as overhead . In many cases , especially for even types with many fields , simply using
nippy plus defrecord will be a sufficient optimization .
For payload with few fields a simple Clojure map will beat the defrecord serialization .
Avro is the most optimal choice as neither field names nor a type name needs to be encoded
(defrecord GameActionInfo [user team score timestamp])
(lan/def-record-schema game-action-info-schema
[:user lan/string-schema]
[:team lan/string-schema]
[:score lan/int-schema]
[:timestamp lan/long-schema])
(def ^:private game-action-info-coder-impl
(proxy [CustomCoder] []
(encode [val ^OutputStream out]
(let [^bytes record-bytes (lan/serialize game-action-info-schema val)
size (count record-bytes)]
(.write out (-> (ByteBuffer/allocate 4) (.putInt size) (.array)))
(.write out record-bytes)))
(decode [^InputStream in]
(let [^bytes size-bytes (byte-array 4)
_ (.read in size-bytes)
size (.getInt (ByteBuffer/wrap size-bytes))
^bytes record-bytes (byte-array size)
_ (.read in record-bytes)]
(lan/deserialize-same game-action-info-schema record-bytes)))))
(def game-action-info-coder
(TCoder. #'game-action-info-coder-impl))
(defn- ^{:th/coder game-action-info-coder} parse-event [^String elem]
(try
overhead with Clojure 's polymorphic suboptimal aget , etc .
(let [^"[Ljava.lang.Object;" parts (.split elem "," -1)]
(if (>= (alength parts) 4)
(->GameActionInfo
(.trim ^String (aget parts 0))
(.trim ^String (aget parts 1))
(Integer/parseInt (.trim ^String (aget parts 2)))
(Long/parseLong (.trim ^String (aget parts 3))))
(log/warnf "parse error on %s, missing part" elem)))
(catch NumberFormatException e
(log/warnf "parse error on %s, %s" elem (.getMessage e)))))
(def ^:private kv-string-int-coder
(KvCoder/of (StringUtf8Coder/of) (VarIntCoder/of)))
(defn- ^{:th/coder kv-string-int-coder} ->field-and-score-kv [field elem]
(KV/of (field elem) (:score elem)))
(defn ->extract-sum-and-score-xf [field]
(th/compose "extract-sum-and-score"
(th/partial #'->field-and-score-kv field)
(Sum/integersPerKey)))
(defn- ->write-to-text-xf [output row-formatter]
(th/compose "write-to-text"
row-formatter
(-> (TextIO/write)
(.to ^String output))))
(defn- create-pipeline [opts]
(let [pipeline (th/create-pipeline opts)
conf (th/get-custom-config pipeline)]
(doto pipeline
(th/apply!
(-> (TextIO/read)
(.from ^String (:input conf)))
#'parse-event
(->extract-sum-and-score-xf :user)
(->write-to-text-xf (:output conf)
Optimization # 3 : Use explicit String coder where we know we have ,
(th/fn* ^{:th/coder (StringUtf8Coder/of)} format-row [^KV kv]
(format "user: %s, total_score: %d" (.getKey kv) (.getValue kv))))))))
(defn demo! [& args]
(-> (create-pipeline
(concat
args
(th/->beam-args
{:custom-config
{:input "gs-beam-samples/game/gaming_data*.csv"
:output "gs-demo/user-score-opt-"}})))
(.run)))
|
c8f474af613ae3791124845ba3350a9cd9ee65eaabcd4ac705c573c401a61e0d | heraldry/heraldicon | charge_group.cljs | (ns heraldicon.frontend.component.charge-group
(:require
[heraldicon.context :as c]
[heraldicon.frontend.component.core :as component]
[heraldicon.frontend.component.element :as component.element]
[heraldicon.frontend.component.tree :as tree]
[heraldicon.frontend.element.charge-group-preset-select :as charge-group-preset-select]
[heraldicon.frontend.element.core :as element]
[heraldicon.frontend.element.submenu :as submenu]
[heraldicon.frontend.js-event :as js-event]
[heraldicon.frontend.language :refer [tr]]
[heraldicon.frontend.macros :as macros]
[heraldicon.frontend.tooltip :as tooltip]
[heraldicon.heraldry.charge-group.core :as charge-group]
[heraldicon.heraldry.charge.options :as charge.options]
[heraldicon.heraldry.default :as default]
[heraldicon.heraldry.tincture :as tincture]
[heraldicon.interface :as interface]
[heraldicon.localization.string :as string]
[heraldicon.math.vector :as v]
[heraldicon.static :as static]
[re-frame.core :as rf]))
(macros/reg-event-db ::cycle-charge-index
(fn [db [_ path num-charges]]
(let [slots-path (drop-last path)
slot-index (last path)
slots (get-in db slots-path)
current-value (get-in db path)
new-value (cond
(nil? current-value) 0
(= current-value (dec num-charges)) nil
(> current-value (dec num-charges)) 0
:else (inc current-value))]
(assoc-in db slots-path (assoc slots slot-index new-value)))))
(macros/reg-event-db ::remove-charge
(fn [db [_ {:keys [path]}]]
(let [elements-path (drop-last path)
strips-context (-> path
(->> (drop-last 2))
vec
(conj :strips))
slots-path (-> path
(->> (drop-last 2))
vec
(conj :slots))
index (last path)]
(-> db
(update-in elements-path (fn [elements]
(vec (concat (subvec elements 0 index)
(subvec elements (inc index))))))
(update-in strips-context (fn [strips]
(mapv (fn [strip]
(update strip :slots (fn [slots]
(mapv (fn [charge-index]
(cond
(= charge-index index) 0
(> charge-index index) (dec charge-index)
:else charge-index))
slots))))
strips)))
(update-in slots-path (fn [slots]
(mapv (fn [charge-index]
(cond
(= charge-index index) 0
(> charge-index index) (dec charge-index)
:else charge-index))
slots)))
(tree/element-order-changed elements-path index nil)))))
(macros/reg-event-db ::move-charge-up
(fn [db [_ {:keys [path]}]]
(let [elements-path (drop-last path)
strips-context (-> path
(->> (drop-last 2))
vec
(conj :strips))
slots-path (-> path
(->> (drop-last 2))
vec
(conj :slots))
index (last path)]
(-> db
(update-in elements-path (fn [elements]
(let [num-elements (count elements)]
(if (>= index num-elements)
elements
(-> elements
(subvec 0 index)
(conj (get elements (inc index)))
(conj (get elements index))
(concat (subvec elements (+ index 2)))
vec)))))
(update-in strips-context (fn [strips]
(mapv (fn [strip]
(update strip :slots (fn [slots]
(mapv (fn [charge-index]
(cond
(= charge-index index) (inc charge-index)
(= charge-index (inc index)) (dec charge-index)
:else charge-index))
slots))))
strips)))
(update-in slots-path (fn [slots]
(mapv (fn [charge-index]
(cond
(= charge-index index) (inc charge-index)
(= charge-index (inc index)) (dec charge-index)
:else charge-index))
slots)))
(tree/element-order-changed elements-path index (inc index))))))
(macros/reg-event-db ::add-strip
(fn [db [_ {:keys [path]} value]]
(let [elements (-> (get-in db path)
(conj value)
vec)]
(assoc-in db path elements))))
(macros/reg-event-db ::move-charge-down
(fn [db [_ {:keys [path]}]]
(let [elements-path (drop-last path)
strips-context (-> path
(->> (drop-last 2))
vec
(conj :strips))
slots-path (-> path
(->> (drop-last 2))
vec
(conj :slots))
index (last path)]
(-> db
(update-in elements-path (fn [elements]
(if (zero? index)
elements
(-> elements
(subvec 0 (dec index))
(conj (get elements index))
(conj (get elements (dec index)))
(concat (subvec elements (inc index)))
vec))))
(update-in strips-context (fn [strips]
(mapv (fn [strip]
(update strip :slots (fn [slots]
(mapv (fn [charge-index]
(cond
(= charge-index (dec index)) (inc charge-index)
(= charge-index index) (dec charge-index)
:else charge-index))
slots))))
strips)))
(update-in slots-path (fn [slots]
(mapv (fn [charge-index]
(cond
(= charge-index (dec index)) (inc charge-index)
(= charge-index index) (dec charge-index)
:else charge-index))
slots)))
(tree/element-order-changed elements-path index (dec index))))))
(def ^:private preview-tinctures
[:azure :or :vert :gules :purpure :sable])
(defn- preview-form [context]
(let [{:keys [slot-positions
slot-spacing]} (charge-group/calculate-points
(-> context
(c/set-key :parent-environment-override {:width 200
:height 200})
(c/set-key :parent-shape "M-100,-100 h200 v200 h-200 z")))
num-charges (interface/get-list-size (c/++ context :charges))
dot-size (/ (min (:width slot-spacing)
(:height slot-spacing))
2
1.05)]
[:div
[:svg {:style {:width "10em"
:height "10em"}
:viewBox "0 0 200 200"
:preserveAspectRatio "xMidYMin meet"}
[:g
[:rect {:x 0
:y 0
:width 200
:height 200
:style {:stroke "#000"
:fill "none"}}]
(into [:g {:transform "translate(100,100)"}]
(map-indexed (fn [idx {:keys [point charge-index slot-path]}]
(let [color (if (nil? charge-index)
"#fff"
(-> charge-index
(mod (count preview-tinctures))
(->> (get preview-tinctures))
(tincture/pick context)))]
^{:key idx}
[:g {:transform (str "translate(" (v/->str point) ")")
:on-click (js-event/handled
#(rf/dispatch [::cycle-charge-index slot-path num-charges]))
:style {:cursor "pointer"}}
[:circle {:r dot-size
:style {:stroke "#000"
:stroke-width 0.5
:fill color}}]
(when (>= charge-index (count preview-tinctures))
[:circle {:r (* 2 (quot charge-index (count preview-tinctures)))
:style {:stroke "#000"
:stroke-width 0.5
:fill "#fff"}}])])))
slot-positions)]]
[tooltip/info :string.tooltip/charge-group-preview]]))
(defn- strip-form [context type-str]
(let [num-slots (interface/get-list-size (c/++ context :slots))
stretch (interface/get-sanitized-data (c/++ context :stretch))
offset (interface/get-sanitized-data (c/++ context :offset))
title (string/combine
", "
[(string/str-tr num-slots
" " (if (= num-slots 1)
:string.submenu-summary/slot
:string.submenu-summary/slots))
(when-not (= stretch 1)
:string.submenu-summary/stretched)
(when-not (zero? offset)
:string.submenu-summary/shifted)])]
[:div {:style {:position "relative"}}
[submenu/submenu context type-str [tr title] {:style {:width "20em"}
:class "submenu-strip-form"}
(element/elements
context
[:slots
:stretch
:offset])]]))
(defn- form [context]
(let [charge-group-type (interface/get-raw-data (c/++ context :type))
strip-type? (#{:heraldry.charge-group.type/rows
:heraldry.charge-group.type/columns}
charge-group-type)
type-str (case charge-group-type
:heraldry.charge-group.type/rows :string.option/row
:heraldry.charge-group.type/columns :string.option/column
nil)
type-plural-str (case charge-group-type
:heraldry.charge-group.type/rows :string.charge-group.type/rows
:heraldry.charge-group.type/columns :string.charge-group.type/columns
nil)]
[:div {:style {:display "table-cell"
:vertical-align "top"}}
[element/element (c/++ context :adapt-to-ordinaries?)]
[charge-group-preset-select/charge-group-preset-select context]
[element/element (c/++ context :anchor)]
[preview-form context]
(element/elements
context
[:type
:spacing
:stretch
:strip-angle
:radius
:arc-angle
:start-angle
:arc-stretch
:distance
:offset
:rotate-charges?
:slots])
(when strip-type?
(let [strips-context (c/++ context :strips)
num-strips (interface/get-list-size strips-context)]
[:div.ui-setting
[:label [tr type-plural-str]
" "
[:button {:on-click (js-event/handled
#(rf/dispatch [::add-strip
strips-context default/charge-group-strip]))}
[:i.fas.fa-plus] " " [tr :string.button/add]]]
[:div.option.charge-group-strips
(into [:ul]
(map (fn [idx]
(let [strip-context (c/++ strips-context idx)]
^{:key idx}
[:li
[:div.no-select {:style {:padding-right "10px"
:white-space "nowrap"}}
[:a (if (zero? idx)
{:class "disabled"}
{:on-click (js-event/handled
#(rf/dispatch [::component.element/move strip-context (dec idx)]))})
[:i.fas.fa-chevron-up]]
" "
[:a (if (= idx (dec num-strips))
{:class "disabled"}
{:on-click (js-event/handled
#(rf/dispatch [::component.element/move strip-context (inc idx)]))})
[:i.fas.fa-chevron-down]]]
[:div
[strip-form strip-context type-str]]
[:div {:style {:padding-left "10px"}}
[:a (if (< num-strips 2)
{:class "disabled"}
{:on-click (js-event/handled
#(rf/dispatch [::component.element/remove strip-context]))})
[:i.far.fa-trash-alt]]]])))
(range num-strips))]]))
[element/element (c/++ context :manual-blazon)]]))
(defmethod component/node :heraldry/charge-group [context]
(let [charges-context (c/++ context :charges)
num-charges (interface/get-list-size charges-context)]
{:title (string/str-tr :string.charge-group/charge-group-of " " (if (= num-charges 1)
(charge.options/title (c/++ context :charges 0))
:string.charge-group/various))
:icon {:default (static/static-url
(str "/svg/charge-group-preset-three.svg"))
:selected (static/static-url
(str "/svg/charge-group-preset-three-selected.svg"))}
:buttons [{:icon "fas fa-plus"
:title :string.button/add
:menu [{:title :string.entity/charge
:handler #(rf/dispatch [::component.element/add charges-context default/charge])}]}]
:nodes (concat (->> (range num-charges)
(map (fn [idx]
(let [charge-context (c/++ charges-context idx)]
{:context charge-context
:buttons [{:icon "fas fa-chevron-up"
:disabled? (zero? idx)
:title :string.tooltip/move-down
:handler #(rf/dispatch [::move-charge-down charge-context])}
{:icon "fas fa-chevron-down"
:disabled? (= idx (dec num-charges))
:title :string.tooltip/move-up
:handler #(rf/dispatch [::move-charge-up charge-context])}
{:icon "far fa-trash-alt"
:disabled? (= num-charges 1)
:title :string.tooltip/remove
:handler #(rf/dispatch [::remove-charge charge-context])}]})))
vec))}))
(defmethod component/form :heraldry/charge-group [_context]
form)
| null | https://raw.githubusercontent.com/heraldry/heraldicon/71126cfd7ba342dea0d1bb849be92cbe5290a6fd/src/heraldicon/frontend/component/charge_group.cljs | clojure | (ns heraldicon.frontend.component.charge-group
(:require
[heraldicon.context :as c]
[heraldicon.frontend.component.core :as component]
[heraldicon.frontend.component.element :as component.element]
[heraldicon.frontend.component.tree :as tree]
[heraldicon.frontend.element.charge-group-preset-select :as charge-group-preset-select]
[heraldicon.frontend.element.core :as element]
[heraldicon.frontend.element.submenu :as submenu]
[heraldicon.frontend.js-event :as js-event]
[heraldicon.frontend.language :refer [tr]]
[heraldicon.frontend.macros :as macros]
[heraldicon.frontend.tooltip :as tooltip]
[heraldicon.heraldry.charge-group.core :as charge-group]
[heraldicon.heraldry.charge.options :as charge.options]
[heraldicon.heraldry.default :as default]
[heraldicon.heraldry.tincture :as tincture]
[heraldicon.interface :as interface]
[heraldicon.localization.string :as string]
[heraldicon.math.vector :as v]
[heraldicon.static :as static]
[re-frame.core :as rf]))
(macros/reg-event-db ::cycle-charge-index
(fn [db [_ path num-charges]]
(let [slots-path (drop-last path)
slot-index (last path)
slots (get-in db slots-path)
current-value (get-in db path)
new-value (cond
(nil? current-value) 0
(= current-value (dec num-charges)) nil
(> current-value (dec num-charges)) 0
:else (inc current-value))]
(assoc-in db slots-path (assoc slots slot-index new-value)))))
(macros/reg-event-db ::remove-charge
(fn [db [_ {:keys [path]}]]
(let [elements-path (drop-last path)
strips-context (-> path
(->> (drop-last 2))
vec
(conj :strips))
slots-path (-> path
(->> (drop-last 2))
vec
(conj :slots))
index (last path)]
(-> db
(update-in elements-path (fn [elements]
(vec (concat (subvec elements 0 index)
(subvec elements (inc index))))))
(update-in strips-context (fn [strips]
(mapv (fn [strip]
(update strip :slots (fn [slots]
(mapv (fn [charge-index]
(cond
(= charge-index index) 0
(> charge-index index) (dec charge-index)
:else charge-index))
slots))))
strips)))
(update-in slots-path (fn [slots]
(mapv (fn [charge-index]
(cond
(= charge-index index) 0
(> charge-index index) (dec charge-index)
:else charge-index))
slots)))
(tree/element-order-changed elements-path index nil)))))
(macros/reg-event-db ::move-charge-up
(fn [db [_ {:keys [path]}]]
(let [elements-path (drop-last path)
strips-context (-> path
(->> (drop-last 2))
vec
(conj :strips))
slots-path (-> path
(->> (drop-last 2))
vec
(conj :slots))
index (last path)]
(-> db
(update-in elements-path (fn [elements]
(let [num-elements (count elements)]
(if (>= index num-elements)
elements
(-> elements
(subvec 0 index)
(conj (get elements (inc index)))
(conj (get elements index))
(concat (subvec elements (+ index 2)))
vec)))))
(update-in strips-context (fn [strips]
(mapv (fn [strip]
(update strip :slots (fn [slots]
(mapv (fn [charge-index]
(cond
(= charge-index index) (inc charge-index)
(= charge-index (inc index)) (dec charge-index)
:else charge-index))
slots))))
strips)))
(update-in slots-path (fn [slots]
(mapv (fn [charge-index]
(cond
(= charge-index index) (inc charge-index)
(= charge-index (inc index)) (dec charge-index)
:else charge-index))
slots)))
(tree/element-order-changed elements-path index (inc index))))))
(macros/reg-event-db ::add-strip
(fn [db [_ {:keys [path]} value]]
(let [elements (-> (get-in db path)
(conj value)
vec)]
(assoc-in db path elements))))
(macros/reg-event-db ::move-charge-down
(fn [db [_ {:keys [path]}]]
(let [elements-path (drop-last path)
strips-context (-> path
(->> (drop-last 2))
vec
(conj :strips))
slots-path (-> path
(->> (drop-last 2))
vec
(conj :slots))
index (last path)]
(-> db
(update-in elements-path (fn [elements]
(if (zero? index)
elements
(-> elements
(subvec 0 (dec index))
(conj (get elements index))
(conj (get elements (dec index)))
(concat (subvec elements (inc index)))
vec))))
(update-in strips-context (fn [strips]
(mapv (fn [strip]
(update strip :slots (fn [slots]
(mapv (fn [charge-index]
(cond
(= charge-index (dec index)) (inc charge-index)
(= charge-index index) (dec charge-index)
:else charge-index))
slots))))
strips)))
(update-in slots-path (fn [slots]
(mapv (fn [charge-index]
(cond
(= charge-index (dec index)) (inc charge-index)
(= charge-index index) (dec charge-index)
:else charge-index))
slots)))
(tree/element-order-changed elements-path index (dec index))))))
(def ^:private preview-tinctures
[:azure :or :vert :gules :purpure :sable])
(defn- preview-form [context]
(let [{:keys [slot-positions
slot-spacing]} (charge-group/calculate-points
(-> context
(c/set-key :parent-environment-override {:width 200
:height 200})
(c/set-key :parent-shape "M-100,-100 h200 v200 h-200 z")))
num-charges (interface/get-list-size (c/++ context :charges))
dot-size (/ (min (:width slot-spacing)
(:height slot-spacing))
2
1.05)]
[:div
[:svg {:style {:width "10em"
:height "10em"}
:viewBox "0 0 200 200"
:preserveAspectRatio "xMidYMin meet"}
[:g
[:rect {:x 0
:y 0
:width 200
:height 200
:style {:stroke "#000"
:fill "none"}}]
(into [:g {:transform "translate(100,100)"}]
(map-indexed (fn [idx {:keys [point charge-index slot-path]}]
(let [color (if (nil? charge-index)
"#fff"
(-> charge-index
(mod (count preview-tinctures))
(->> (get preview-tinctures))
(tincture/pick context)))]
^{:key idx}
[:g {:transform (str "translate(" (v/->str point) ")")
:on-click (js-event/handled
#(rf/dispatch [::cycle-charge-index slot-path num-charges]))
:style {:cursor "pointer"}}
[:circle {:r dot-size
:style {:stroke "#000"
:stroke-width 0.5
:fill color}}]
(when (>= charge-index (count preview-tinctures))
[:circle {:r (* 2 (quot charge-index (count preview-tinctures)))
:style {:stroke "#000"
:stroke-width 0.5
:fill "#fff"}}])])))
slot-positions)]]
[tooltip/info :string.tooltip/charge-group-preview]]))
(defn- strip-form [context type-str]
(let [num-slots (interface/get-list-size (c/++ context :slots))
stretch (interface/get-sanitized-data (c/++ context :stretch))
offset (interface/get-sanitized-data (c/++ context :offset))
title (string/combine
", "
[(string/str-tr num-slots
" " (if (= num-slots 1)
:string.submenu-summary/slot
:string.submenu-summary/slots))
(when-not (= stretch 1)
:string.submenu-summary/stretched)
(when-not (zero? offset)
:string.submenu-summary/shifted)])]
[:div {:style {:position "relative"}}
[submenu/submenu context type-str [tr title] {:style {:width "20em"}
:class "submenu-strip-form"}
(element/elements
context
[:slots
:stretch
:offset])]]))
(defn- form [context]
(let [charge-group-type (interface/get-raw-data (c/++ context :type))
strip-type? (#{:heraldry.charge-group.type/rows
:heraldry.charge-group.type/columns}
charge-group-type)
type-str (case charge-group-type
:heraldry.charge-group.type/rows :string.option/row
:heraldry.charge-group.type/columns :string.option/column
nil)
type-plural-str (case charge-group-type
:heraldry.charge-group.type/rows :string.charge-group.type/rows
:heraldry.charge-group.type/columns :string.charge-group.type/columns
nil)]
[:div {:style {:display "table-cell"
:vertical-align "top"}}
[element/element (c/++ context :adapt-to-ordinaries?)]
[charge-group-preset-select/charge-group-preset-select context]
[element/element (c/++ context :anchor)]
[preview-form context]
(element/elements
context
[:type
:spacing
:stretch
:strip-angle
:radius
:arc-angle
:start-angle
:arc-stretch
:distance
:offset
:rotate-charges?
:slots])
(when strip-type?
(let [strips-context (c/++ context :strips)
num-strips (interface/get-list-size strips-context)]
[:div.ui-setting
[:label [tr type-plural-str]
" "
[:button {:on-click (js-event/handled
#(rf/dispatch [::add-strip
strips-context default/charge-group-strip]))}
[:i.fas.fa-plus] " " [tr :string.button/add]]]
[:div.option.charge-group-strips
(into [:ul]
(map (fn [idx]
(let [strip-context (c/++ strips-context idx)]
^{:key idx}
[:li
[:div.no-select {:style {:padding-right "10px"
:white-space "nowrap"}}
[:a (if (zero? idx)
{:class "disabled"}
{:on-click (js-event/handled
#(rf/dispatch [::component.element/move strip-context (dec idx)]))})
[:i.fas.fa-chevron-up]]
" "
[:a (if (= idx (dec num-strips))
{:class "disabled"}
{:on-click (js-event/handled
#(rf/dispatch [::component.element/move strip-context (inc idx)]))})
[:i.fas.fa-chevron-down]]]
[:div
[strip-form strip-context type-str]]
[:div {:style {:padding-left "10px"}}
[:a (if (< num-strips 2)
{:class "disabled"}
{:on-click (js-event/handled
#(rf/dispatch [::component.element/remove strip-context]))})
[:i.far.fa-trash-alt]]]])))
(range num-strips))]]))
[element/element (c/++ context :manual-blazon)]]))
(defmethod component/node :heraldry/charge-group [context]
(let [charges-context (c/++ context :charges)
num-charges (interface/get-list-size charges-context)]
{:title (string/str-tr :string.charge-group/charge-group-of " " (if (= num-charges 1)
(charge.options/title (c/++ context :charges 0))
:string.charge-group/various))
:icon {:default (static/static-url
(str "/svg/charge-group-preset-three.svg"))
:selected (static/static-url
(str "/svg/charge-group-preset-three-selected.svg"))}
:buttons [{:icon "fas fa-plus"
:title :string.button/add
:menu [{:title :string.entity/charge
:handler #(rf/dispatch [::component.element/add charges-context default/charge])}]}]
:nodes (concat (->> (range num-charges)
(map (fn [idx]
(let [charge-context (c/++ charges-context idx)]
{:context charge-context
:buttons [{:icon "fas fa-chevron-up"
:disabled? (zero? idx)
:title :string.tooltip/move-down
:handler #(rf/dispatch [::move-charge-down charge-context])}
{:icon "fas fa-chevron-down"
:disabled? (= idx (dec num-charges))
:title :string.tooltip/move-up
:handler #(rf/dispatch [::move-charge-up charge-context])}
{:icon "far fa-trash-alt"
:disabled? (= num-charges 1)
:title :string.tooltip/remove
:handler #(rf/dispatch [::remove-charge charge-context])}]})))
vec))}))
(defmethod component/form :heraldry/charge-group [_context]
form)
|
|
5e2324a122f79c44d60953b3c9b8c95d579626e213e6e2dbb4188836679eb4c1 | typedclojure/typedclojure | ignore.clj | Copyright ( c ) , contributors .
;; The use and distribution terms for this software are covered by the
;; Eclipse Public License 1.0 (-1.0.php)
;; which can be found in the file epl-v10.html at the root of this distribution.
;; By using this software in any fashion, you are agreeing to be bound by
;; the terms of this license.
;; You must not remove this notice, or any other, from this software.
(ns typed.cljc.checker.check.ignore
(:require [typed.cljc.checker.check-below :as below]
[typed.cljc.checker.type-rep :as r]
[typed.cljc.checker.utils :as u]))
(defn tc-ignore-expr [expr expected]
(-> expr
(assoc u/expr-type (below/maybe-check-below
(r/ret r/-any)
expected))))
| null | https://raw.githubusercontent.com/typedclojure/typedclojure/e3bbf031b1b109e4b17daf96617fed0d025cdc74/typed/clj.checker/src/typed/cljc/checker/check/ignore.clj | clojure | The use and distribution terms for this software are covered by the
Eclipse Public License 1.0 (-1.0.php)
which can be found in the file epl-v10.html at the root of this distribution.
By using this software in any fashion, you are agreeing to be bound by
the terms of this license.
You must not remove this notice, or any other, from this software. | Copyright ( c ) , contributors .
(ns typed.cljc.checker.check.ignore
(:require [typed.cljc.checker.check-below :as below]
[typed.cljc.checker.type-rep :as r]
[typed.cljc.checker.utils :as u]))
(defn tc-ignore-expr [expr expected]
(-> expr
(assoc u/expr-type (below/maybe-check-below
(r/ret r/-any)
expected))))
|
bfa18ab717f17009537d3c5597fcc5a1347b42852a83648adde12230fa174f44 | CorticalComputer/Book_NeuroevolutionThroughErlang | cortex.erl | This source code and work is provided and developed by DXNN Research Group WWW.DXNNResearch . COM
%%
Copyright ( C ) 2012 by , DXNN Research Group ,
%All rights reserved.
%
This code is licensed under the version 3 of the GNU General Public License . Please see the LICENSE file that accompanies this project for the terms of use .
-module(cortex).
-compile(export_all).
-include("records.hrl").
-record(state,{id,exoself_pid,spids,npids,apids,cycle_acc=0,fitness_acc=0,endflag=0,status}).
gen(ExoSelf_PId,Node)->
spawn(Node,?MODULE,prep,[ExoSelf_PId]).
prep(ExoSelf_PId) ->
{V1,V2,V3} = now(),
random:seed(V1,V2,V3),
receive
{ExoSelf_PId,Id,SPIds,NPIds,APIds} ->
put(start_time,now()),
[SPId ! {self(),sync} || SPId <- SPIds],
loop(Id,ExoSelf_PId,SPIds,{APIds,APIds},NPIds,1,0,0,active)
end.
The gen/2 function spawns the cortex element , which immediately starts to wait for its initial state message from the same process that spawned it , exoself . The initial state message contains the sensor , actuator , and neuron PId lists . Before dropping into the main loop , CycleAcc , FitnessAcc , and HFAcc ( HaltFlag Acc ) , are all set to 0 , and the status of the cortex is set to ac- tive , prompting it to begin the synchronization process and call the sensors to action .
loop(Id,ExoSelf_PId,SPIds,{[APId|APIds],MAPIds},NPIds,CycleAcc,FitnessAcc,EFAcc,active) ->
receive
{APId,sync,Fitness,EndFlag} ->
loop(Id,ExoSelf_PId,SPIds,{APIds,MAPIds},NPIds,CycleAcc,FitnessAcc+Fitness,EFAcc+EndFlag,active);
terminate ->
io:format("Cortex:~p is terminating.~n",[Id]),
[PId ! {self(),terminate} || PId <- SPIds],
[PId ! {self(),terminate} || PId <- MAPIds],
[PId ! {self(),termiante} || PId <- NPIds]
end;
loop(Id,ExoSelf_PId,SPIds,{[],MAPIds},NPIds,CycleAcc,FitnessAcc,EFAcc,active)->
case EFAcc > 0 of
true ->
TimeDif=timer:now_diff(now(),get(start_time)),
ExoSelf_PId ! {self(),evaluation_completed,FitnessAcc,CycleAcc,TimeDif},
loop(Id,ExoSelf_PId,SPIds,{MAPIds,MAPIds},NPIds,CycleAcc,FitnessAcc,EFAcc,inactive);
false ->
[PId ! {self(),sync} || PId <- SPIds],
loop(Id,ExoSelf_PId,SPIds,{MAPIds,MAPIds},NPIds,CycleAcc+1,FitnessAcc,EFAcc,active)
end;
loop(Id,ExoSelf_PId,SPIds,{MAPIds,MAPIds},NPIds,_CycleAcc,_FitnessAcc,_EFAcc,inactive)->
receive
{ExoSelf_PId,reactivate}->
put(start_time,now()),
[SPId ! {self(),sync} || SPId <- SPIds],
loop(Id,ExoSelf_PId,SPIds,{MAPIds,MAPIds},NPIds,1,0,0,active);
{ExoSelf_PId,terminate}->
ok
end.
The cortex ’s goal is to synchronize the NN system ’s sensors and actuators . When the actuators have received all their control signals , they forward the sync messages , the Fitness , and the HaltFlag messages to the cortex . The cortex accumulates these Fitness and HaltFlag signals , and if any of the HaltFlag signals have been set to 1 , HFAcc will be greater than 0 , signifying that the cortex should halt . When EFAcc > 0 , the cortex calculates the total amount of time it has ran ( TimeDiff ) , and forwards to exoself the values : FitnessAcc , CycleAcc , and TimeDiff . Afterwards , the cortex enters the inactive mode and awaits further instructions from the exoself . If none of the HaltFlags were set to 0 , then the value HFAcc = = 0 , and the cortex triggers off another Sense - Think - Act cycle . The reason the cortex process stores 2 copies of the actuator PIds : the APIds , and the MemoryAPIds ( MAPIds ) , is so that once all the actuators have sent it the sync messages , it can restore the APIds list from the MAPIds .
| null | https://raw.githubusercontent.com/CorticalComputer/Book_NeuroevolutionThroughErlang/81b96e3d7985624a6183cb313a7f9bff0a7e14c5/Ch_7/cortex.erl | erlang |
All rights reserved.
| This source code and work is provided and developed by DXNN Research Group WWW.DXNNResearch . COM
Copyright ( C ) 2012 by , DXNN Research Group ,
This code is licensed under the version 3 of the GNU General Public License . Please see the LICENSE file that accompanies this project for the terms of use .
-module(cortex).
-compile(export_all).
-include("records.hrl").
-record(state,{id,exoself_pid,spids,npids,apids,cycle_acc=0,fitness_acc=0,endflag=0,status}).
gen(ExoSelf_PId,Node)->
spawn(Node,?MODULE,prep,[ExoSelf_PId]).
prep(ExoSelf_PId) ->
{V1,V2,V3} = now(),
random:seed(V1,V2,V3),
receive
{ExoSelf_PId,Id,SPIds,NPIds,APIds} ->
put(start_time,now()),
[SPId ! {self(),sync} || SPId <- SPIds],
loop(Id,ExoSelf_PId,SPIds,{APIds,APIds},NPIds,1,0,0,active)
end.
The gen/2 function spawns the cortex element , which immediately starts to wait for its initial state message from the same process that spawned it , exoself . The initial state message contains the sensor , actuator , and neuron PId lists . Before dropping into the main loop , CycleAcc , FitnessAcc , and HFAcc ( HaltFlag Acc ) , are all set to 0 , and the status of the cortex is set to ac- tive , prompting it to begin the synchronization process and call the sensors to action .
loop(Id,ExoSelf_PId,SPIds,{[APId|APIds],MAPIds},NPIds,CycleAcc,FitnessAcc,EFAcc,active) ->
receive
{APId,sync,Fitness,EndFlag} ->
loop(Id,ExoSelf_PId,SPIds,{APIds,MAPIds},NPIds,CycleAcc,FitnessAcc+Fitness,EFAcc+EndFlag,active);
terminate ->
io:format("Cortex:~p is terminating.~n",[Id]),
[PId ! {self(),terminate} || PId <- SPIds],
[PId ! {self(),terminate} || PId <- MAPIds],
[PId ! {self(),termiante} || PId <- NPIds]
end;
loop(Id,ExoSelf_PId,SPIds,{[],MAPIds},NPIds,CycleAcc,FitnessAcc,EFAcc,active)->
case EFAcc > 0 of
true ->
TimeDif=timer:now_diff(now(),get(start_time)),
ExoSelf_PId ! {self(),evaluation_completed,FitnessAcc,CycleAcc,TimeDif},
loop(Id,ExoSelf_PId,SPIds,{MAPIds,MAPIds},NPIds,CycleAcc,FitnessAcc,EFAcc,inactive);
false ->
[PId ! {self(),sync} || PId <- SPIds],
loop(Id,ExoSelf_PId,SPIds,{MAPIds,MAPIds},NPIds,CycleAcc+1,FitnessAcc,EFAcc,active)
end;
loop(Id,ExoSelf_PId,SPIds,{MAPIds,MAPIds},NPIds,_CycleAcc,_FitnessAcc,_EFAcc,inactive)->
receive
{ExoSelf_PId,reactivate}->
put(start_time,now()),
[SPId ! {self(),sync} || SPId <- SPIds],
loop(Id,ExoSelf_PId,SPIds,{MAPIds,MAPIds},NPIds,1,0,0,active);
{ExoSelf_PId,terminate}->
ok
end.
The cortex ’s goal is to synchronize the NN system ’s sensors and actuators . When the actuators have received all their control signals , they forward the sync messages , the Fitness , and the HaltFlag messages to the cortex . The cortex accumulates these Fitness and HaltFlag signals , and if any of the HaltFlag signals have been set to 1 , HFAcc will be greater than 0 , signifying that the cortex should halt . When EFAcc > 0 , the cortex calculates the total amount of time it has ran ( TimeDiff ) , and forwards to exoself the values : FitnessAcc , CycleAcc , and TimeDiff . Afterwards , the cortex enters the inactive mode and awaits further instructions from the exoself . If none of the HaltFlags were set to 0 , then the value HFAcc = = 0 , and the cortex triggers off another Sense - Think - Act cycle . The reason the cortex process stores 2 copies of the actuator PIds : the APIds , and the MemoryAPIds ( MAPIds ) , is so that once all the actuators have sent it the sync messages , it can restore the APIds list from the MAPIds .
|
6643ea6496f3ffbf52d3439160d1461d9ef93bccdc848779c37f4046d68f4def | digital-asset/ghc | T13506.hs | # LANGUAGE FlexibleInstances , FunctionalDependencies , MultiParamTypeClasses #
module Bug where
class FunDep lista a | lista -> a
instance FunDep [a] a
singleton :: FunDep lista a => a -> lista
singleton _ = undefined
-- this error is expected:
Could n't match type ' ' with ' ( ) '
-- arising from a functional dependency between
constraint ' FunDep [ ] ( ) ' arising from a use of ' singleton '
instance ' FunDep [ a ] a '
illTyped :: [Char]
illTyped = singleton ()
[ W ] FunDep [ ] ( )
-- but this one is not:
Could n't match type ' ( ) ' with ' '
-- arising from a functional dependency between constraints:
' FunDep [ Char ] ' arising from a use of ' singleton ' ( in ' wellTyped ' )
' FunDep [ ] ( ) ' arising from a use of ' singleton ' ( in ' ' )
wellTyped :: [Char]
wellTyped = singleton 'a'
[ W ] FunDep [ Char ]
| null | https://raw.githubusercontent.com/digital-asset/ghc/323dc6fcb127f77c08423873efc0a088c071440a/testsuite/tests/typecheck/should_fail/T13506.hs | haskell | this error is expected:
arising from a functional dependency between
but this one is not:
arising from a functional dependency between constraints: | # LANGUAGE FlexibleInstances , FunctionalDependencies , MultiParamTypeClasses #
module Bug where
class FunDep lista a | lista -> a
instance FunDep [a] a
singleton :: FunDep lista a => a -> lista
singleton _ = undefined
Could n't match type ' ' with ' ( ) '
constraint ' FunDep [ ] ( ) ' arising from a use of ' singleton '
instance ' FunDep [ a ] a '
illTyped :: [Char]
illTyped = singleton ()
[ W ] FunDep [ ] ( )
Could n't match type ' ( ) ' with ' '
' FunDep [ Char ] ' arising from a use of ' singleton ' ( in ' wellTyped ' )
' FunDep [ ] ( ) ' arising from a use of ' singleton ' ( in ' ' )
wellTyped :: [Char]
wellTyped = singleton 'a'
[ W ] FunDep [ Char ]
|
01842220a7bcb9657cc200d69d558a436b907b98421b7314227d33a8467d2a87 | fabricate-site/fabricate | html_test.clj | (ns site.fabricate.prototype.html-test
(:require [site.fabricate.prototype.html :as html :refer :all]
[site.fabricate.prototype.schema :as schema]
[malli.core :as m :refer [validate]]
[malli.error :as me]
[malli.generator :as mg]
[clojure.set :as set]
[clojure.pprint :as pprint]
[clojure.test :as t]))
(defmethod t/assert-expr 'valid-schema? [msg form]
`(let [schema# ~(nth form 1)
form# (m/form schema#)
data# ~(nth form 2)
result# (m/validate schema# data#)
schema-name# (last form#)]
(t/do-report
{:type (if result# :pass :fail)
:message ~msg
:expected (str (with-out-str (pprint/pprint data#))
" conforms to schema for "
schema-name#)
:actual (if (not result#)
(m/explain schema# data#)
result#)})
result#))
(def example-forms
"Some forms used to test the validity of the HTML schema"
{:a [:a {:href ""} "a link"]
:data [:data {:value "0311ab"} "A sample post"]
:del [:del "some deleted text"]
:dl [:dl {:id "definitions"}
[:dt ":dl - Definition List"]
[:dd "A HTML element with a list of definitions"]]
:figure [:figure [:figcaption "a picture"]
[:img {:src "/some-picture.png"}]]
:ul [:ul [:li "some text"] [:li "more text"]]
:bdo [:bdo {:dir "rtl"} "right to left text"]
:time [:time {:datetime "2020-12-31"}]
:img [:img {:src "/sample.jpg"}]
:head [:head [:title "a page"] [:script {:type "text/javascript" :src "/intro.js"}] [:style "h1 {size: 3rem}"]]
:span [:span [:img {:src "/sample.jpg"}]]
#_#_:script [:script {:type "text/javascript" :src "code.js"}]
:q [:q {:cite "Anonymous"} "If you can't convince, confuse!"]
:script [:script {:src "/resources/klipse.js" :type "text/javascript"} ""]
#_#_:wbr [:wbr]
:hr [:hr]
:br [:br]
:wbr [:wbr]
:abbr [:abbr {:title "ACME Corporation"} "ACME"]
:ol [:ol [:li "item"] [:li "item2"]]
:hgroup [:hgroup [:h1 "big header"] [:h4 "small header"]]
:link [:link {:rel "stylesheet" :href "/main.css"}]
:details [:details [:summary [:span "summarized text"]] [:p "text"]]
:table [:table
[:caption "an example table"]
[:colgroup [:col]]
[:thead [:tr [:td "label"]]]
[:tbody [:tr [:td "a cell"]]]]
:article [:article [:section "something"]]})
(t/deftest schema
(t/testing "content schemas"
(t/is (valid-schema?
(#'site.fabricate.prototype.html/->hiccup-schema :p global-attributes
[:* atomic-element])
[:p {:id "something"} "text in a paragraph"]))
(t/is (valid-schema? (schema/subschema html ::html/p)
[:p "something" [:a {:href ""} "text"]])
"Phrasing subtags should be respected.")
(t/is (valid-schema?
(schema/subschema html "a-phrasing")
[:a {:href ""} [:ins "something" [:del "something" [:em "something else"]]]])
"Phrasing subtags should be respected.")
(t/is (valid-schema?
(schema/subschema html "ins-phrasing")
[:ins [:ins [:ins [:em "text"]]]])
"Phrasing subtags should be respected")
(t/is (valid-schema?
(schema/subschema html "ins-phrasing")
[:ins [:ins "text"]])
"Phrasing subtags should be respected")
(t/is (valid-schema?
(schema/subschema html "del-phrasing")
[:del [:em "text"]])
"Phrasing subtags should be respected")
(t/is (valid-schema?
(schema/subschema html ::html/em)
[:em [:ins [:ins [:em "text"]]]])
"Phrasing subtags should be respected")
(t/is (valid-schema?
(schema/subschema html ::html/em)
[:em [:a {:href ""}] "something"])
"Phrasing subtags should be respected")
(t/is (not (m/validate
(schema/subschema html "ins-phrasing")
[:ins [:ins [:ins [:p "text"]]]])))
(t/is (valid-schema?
(schema/subschema html "a-phrasing")
[:a {:href ""} "link" [:em "text"]])
"Phrasing subtags should be respected")
(t/is (valid-schema?
(schema/subschema html ::html/p)
[:p "text" [:img {:src "/picture.jpg"}]]))
(t/is (valid-schema?
(schema/subschema html ::html/em)
[:em "text" [:br] "more text"]))
(t/is (valid-schema?
(schema/subschema html ::html/em)
[:em {:id "something"} "text" "more text"]))
(doseq [elem (set/union flow-tags phrasing-tags heading-tags)]
(t/testing (str "schema for element: <" (name elem) ">")
(let [data (get example-forms elem
[elem "sample string"])
schema (schema/subschema
html (ns-kw 'site.fabricate.prototype.html elem))]
(t/is (valid-schema? schema data)))))
(t/is (palpable? [:p "text"]))
(t/is (not (palpable? [:p])))
(t/is (valid-schema? (schema/subschema html ::html/element) [:div [:div [:div [:p "text"]]]])))
(t/testing "example forms"
(doseq [[k v] example-forms]
(let [schema (schema/subschema html (ns-kw 'site.fabricate.prototype.html k))]
(t/testing (str "schema for element: <" (symbol k) ">")
(t/is (valid-schema? schema v))))))
(t/testing "page structure"
(doseq [[tag element] example-forms]
(let [example-page [:html [:head] [:body element]]]
(t/testing (str "schema for element: <" (symbol tag) ">")
(when (not= :head tag)
(t/is (valid-schema? html example-page)))))))
(comment
(map (fn [[k v]] [k (valid-schema? htmls v)]) example-forms))
(t/testing "atomic elements"
(t/is (m/validate global-attributes {:class "a"
:href ""}))
(t/is (m/validate global-attributes {:title "some page"
:href "/relative-page.html"}))))
(t/deftest processing
(t/testing "Parse + unparse for elements"
(let [unparse-element (m/unparser element)]
(doseq [[tag elem] example-forms]
(if (not= :head tag)
(t/is (= elem (-> elem parse-element unparse-element))
(str "<" (name tag) "> should parse and unparse correctly")))))) )
| null | https://raw.githubusercontent.com/fabricate-site/fabricate/d2999e6306b54c28ca67aa49bf74102889d4ec3c/test/site/fabricate/prototype/html_test.clj | clojure | (ns site.fabricate.prototype.html-test
(:require [site.fabricate.prototype.html :as html :refer :all]
[site.fabricate.prototype.schema :as schema]
[malli.core :as m :refer [validate]]
[malli.error :as me]
[malli.generator :as mg]
[clojure.set :as set]
[clojure.pprint :as pprint]
[clojure.test :as t]))
(defmethod t/assert-expr 'valid-schema? [msg form]
`(let [schema# ~(nth form 1)
form# (m/form schema#)
data# ~(nth form 2)
result# (m/validate schema# data#)
schema-name# (last form#)]
(t/do-report
{:type (if result# :pass :fail)
:message ~msg
:expected (str (with-out-str (pprint/pprint data#))
" conforms to schema for "
schema-name#)
:actual (if (not result#)
(m/explain schema# data#)
result#)})
result#))
(def example-forms
"Some forms used to test the validity of the HTML schema"
{:a [:a {:href ""} "a link"]
:data [:data {:value "0311ab"} "A sample post"]
:del [:del "some deleted text"]
:dl [:dl {:id "definitions"}
[:dt ":dl - Definition List"]
[:dd "A HTML element with a list of definitions"]]
:figure [:figure [:figcaption "a picture"]
[:img {:src "/some-picture.png"}]]
:ul [:ul [:li "some text"] [:li "more text"]]
:bdo [:bdo {:dir "rtl"} "right to left text"]
:time [:time {:datetime "2020-12-31"}]
:img [:img {:src "/sample.jpg"}]
:head [:head [:title "a page"] [:script {:type "text/javascript" :src "/intro.js"}] [:style "h1 {size: 3rem}"]]
:span [:span [:img {:src "/sample.jpg"}]]
#_#_:script [:script {:type "text/javascript" :src "code.js"}]
:q [:q {:cite "Anonymous"} "If you can't convince, confuse!"]
:script [:script {:src "/resources/klipse.js" :type "text/javascript"} ""]
#_#_:wbr [:wbr]
:hr [:hr]
:br [:br]
:wbr [:wbr]
:abbr [:abbr {:title "ACME Corporation"} "ACME"]
:ol [:ol [:li "item"] [:li "item2"]]
:hgroup [:hgroup [:h1 "big header"] [:h4 "small header"]]
:link [:link {:rel "stylesheet" :href "/main.css"}]
:details [:details [:summary [:span "summarized text"]] [:p "text"]]
:table [:table
[:caption "an example table"]
[:colgroup [:col]]
[:thead [:tr [:td "label"]]]
[:tbody [:tr [:td "a cell"]]]]
:article [:article [:section "something"]]})
(t/deftest schema
(t/testing "content schemas"
(t/is (valid-schema?
(#'site.fabricate.prototype.html/->hiccup-schema :p global-attributes
[:* atomic-element])
[:p {:id "something"} "text in a paragraph"]))
(t/is (valid-schema? (schema/subschema html ::html/p)
[:p "something" [:a {:href ""} "text"]])
"Phrasing subtags should be respected.")
(t/is (valid-schema?
(schema/subschema html "a-phrasing")
[:a {:href ""} [:ins "something" [:del "something" [:em "something else"]]]])
"Phrasing subtags should be respected.")
(t/is (valid-schema?
(schema/subschema html "ins-phrasing")
[:ins [:ins [:ins [:em "text"]]]])
"Phrasing subtags should be respected")
(t/is (valid-schema?
(schema/subschema html "ins-phrasing")
[:ins [:ins "text"]])
"Phrasing subtags should be respected")
(t/is (valid-schema?
(schema/subschema html "del-phrasing")
[:del [:em "text"]])
"Phrasing subtags should be respected")
(t/is (valid-schema?
(schema/subschema html ::html/em)
[:em [:ins [:ins [:em "text"]]]])
"Phrasing subtags should be respected")
(t/is (valid-schema?
(schema/subschema html ::html/em)
[:em [:a {:href ""}] "something"])
"Phrasing subtags should be respected")
(t/is (not (m/validate
(schema/subschema html "ins-phrasing")
[:ins [:ins [:ins [:p "text"]]]])))
(t/is (valid-schema?
(schema/subschema html "a-phrasing")
[:a {:href ""} "link" [:em "text"]])
"Phrasing subtags should be respected")
(t/is (valid-schema?
(schema/subschema html ::html/p)
[:p "text" [:img {:src "/picture.jpg"}]]))
(t/is (valid-schema?
(schema/subschema html ::html/em)
[:em "text" [:br] "more text"]))
(t/is (valid-schema?
(schema/subschema html ::html/em)
[:em {:id "something"} "text" "more text"]))
(doseq [elem (set/union flow-tags phrasing-tags heading-tags)]
(t/testing (str "schema for element: <" (name elem) ">")
(let [data (get example-forms elem
[elem "sample string"])
schema (schema/subschema
html (ns-kw 'site.fabricate.prototype.html elem))]
(t/is (valid-schema? schema data)))))
(t/is (palpable? [:p "text"]))
(t/is (not (palpable? [:p])))
(t/is (valid-schema? (schema/subschema html ::html/element) [:div [:div [:div [:p "text"]]]])))
(t/testing "example forms"
(doseq [[k v] example-forms]
(let [schema (schema/subschema html (ns-kw 'site.fabricate.prototype.html k))]
(t/testing (str "schema for element: <" (symbol k) ">")
(t/is (valid-schema? schema v))))))
(t/testing "page structure"
(doseq [[tag element] example-forms]
(let [example-page [:html [:head] [:body element]]]
(t/testing (str "schema for element: <" (symbol tag) ">")
(when (not= :head tag)
(t/is (valid-schema? html example-page)))))))
(comment
(map (fn [[k v]] [k (valid-schema? htmls v)]) example-forms))
(t/testing "atomic elements"
(t/is (m/validate global-attributes {:class "a"
:href ""}))
(t/is (m/validate global-attributes {:title "some page"
:href "/relative-page.html"}))))
(t/deftest processing
(t/testing "Parse + unparse for elements"
(let [unparse-element (m/unparser element)]
(doseq [[tag elem] example-forms]
(if (not= :head tag)
(t/is (= elem (-> elem parse-element unparse-element))
(str "<" (name tag) "> should parse and unparse correctly")))))) )
|
|
8072e03d6aaf695a5f67abf18dff7e89e971d321b4ce4ef448a3cdc5d26ef13f | Clozure/ccl-tests | modules.lsp | ;-*- Mode: Lisp -*-
Author :
Created : Sat Apr 30 19:51:06 2005
;;;; Contains: Tests of *MODULES*, PROVIDE, and REQUIRE
(in-package :cl-test)
(deftest modules.1
(notnot (every #'stringp *modules*))
t)
(deftest modules.2
(let ((*modules* *modules*))
(provide "FOO")
(notnot (member "FOO" *modules* :test #'string=)))
t)
(deftest modules.3
(let ((*modules* *modules*))
(provide "FOO")
(provide "FOO")
(count "FOO" *modules* :test #'string=))
1)
(deftest modules.4
(let ((*modules* *modules*))
(provide "FOO")
(require "FOO")
(values)))
(deftest modules.5
(let ((*modules* *modules*))
(provide :|FOO|)
(notnot (member "FOO" *modules* :test #'string=)))
t)
(deftest modules.6
(let ((*modules* *modules*))
(provide "FOO")
(require :|FOO|)
(values)))
(deftest modules.7
(let ((*modules* *modules*)
(fn 'modules7-fun))
(when (fboundp fn) (fmakunbound fn))
(require "MODULES-7" #p"modules7.lsp")
(funcall fn))
:good)
(deftest modules.8
(let ((*modules* *modules*)
(fns '(modules8a-fun modules8b-fun)))
(dolist (fn fns)
(when (fboundp fn) (fmakunbound fn)))
(require "MODULES-8" '(#p"modules8a.lsp" #p"modules8b.lsp"))
(mapcar #'funcall fns))
(:good :also-good))
(deftest modules.9
(signals-error (require "AB7djaCgaaL") error)
t)
(deftest modules.10
(do-special-strings
(s "FOO")
(let ((*modules* *modules*))
(provide s)
(assert (member "FOO" *modules* :test #'string=))))
nil)
(deftest modules.11
(do-special-strings
(s "FOO")
(let ((*modules* *modules*))
(provide "FOO")
(require s)
(values)))
nil)
(deftest modules.12
(unless (member "Z" *modules* :test #'string=)
(let ((*modules* *modules*))
(provide #\Z)
(not (member "Z" *modules* :test #'string=))))
nil)
(deftest modules.13
(unless (member "Z" *modules* :test #'string=)
(let ((*modules* *modules*))
(provide "Z")
(require #\Z)
nil))
nil) | null | https://raw.githubusercontent.com/Clozure/ccl-tests/0478abddb34dbc16487a1975560d8d073a988060/ansi-tests/modules.lsp | lisp | -*- Mode: Lisp -*-
Contains: Tests of *MODULES*, PROVIDE, and REQUIRE | Author :
Created : Sat Apr 30 19:51:06 2005
(in-package :cl-test)
(deftest modules.1
(notnot (every #'stringp *modules*))
t)
(deftest modules.2
(let ((*modules* *modules*))
(provide "FOO")
(notnot (member "FOO" *modules* :test #'string=)))
t)
(deftest modules.3
(let ((*modules* *modules*))
(provide "FOO")
(provide "FOO")
(count "FOO" *modules* :test #'string=))
1)
(deftest modules.4
(let ((*modules* *modules*))
(provide "FOO")
(require "FOO")
(values)))
(deftest modules.5
(let ((*modules* *modules*))
(provide :|FOO|)
(notnot (member "FOO" *modules* :test #'string=)))
t)
(deftest modules.6
(let ((*modules* *modules*))
(provide "FOO")
(require :|FOO|)
(values)))
(deftest modules.7
(let ((*modules* *modules*)
(fn 'modules7-fun))
(when (fboundp fn) (fmakunbound fn))
(require "MODULES-7" #p"modules7.lsp")
(funcall fn))
:good)
(deftest modules.8
(let ((*modules* *modules*)
(fns '(modules8a-fun modules8b-fun)))
(dolist (fn fns)
(when (fboundp fn) (fmakunbound fn)))
(require "MODULES-8" '(#p"modules8a.lsp" #p"modules8b.lsp"))
(mapcar #'funcall fns))
(:good :also-good))
(deftest modules.9
(signals-error (require "AB7djaCgaaL") error)
t)
(deftest modules.10
(do-special-strings
(s "FOO")
(let ((*modules* *modules*))
(provide s)
(assert (member "FOO" *modules* :test #'string=))))
nil)
(deftest modules.11
(do-special-strings
(s "FOO")
(let ((*modules* *modules*))
(provide "FOO")
(require s)
(values)))
nil)
(deftest modules.12
(unless (member "Z" *modules* :test #'string=)
(let ((*modules* *modules*))
(provide #\Z)
(not (member "Z" *modules* :test #'string=))))
nil)
(deftest modules.13
(unless (member "Z" *modules* :test #'string=)
(let ((*modules* *modules*))
(provide "Z")
(require #\Z)
nil))
nil) |
036fee7311e686734f85dc5d992681af53909a68fa7a12c0fd1c839c334e587a | pascal-knodel/haskell-craft | E'5'18.hs | --
--
--
-----------------
Exercise 5.18 .
-----------------
--
--
--
module E'5'18 where
doubleAll :: [Integer] -> [Integer]
doubleAll xs
= [ 2 * x | x <- xs ]
-- Other solution for "doubleAll":
doubleAll' :: [Integer] -> [Integer]
doubleAll' xs
= map (2 *) xs
| null | https://raw.githubusercontent.com/pascal-knodel/haskell-craft/c03d6eb857abd8b4785b6de075b094ec3653c968/Chapter%205/E'5'18.hs | haskell |
---------------
---------------
Other solution for "doubleAll": | Exercise 5.18 .
module E'5'18 where
doubleAll :: [Integer] -> [Integer]
doubleAll xs
= [ 2 * x | x <- xs ]
doubleAll' :: [Integer] -> [Integer]
doubleAll' xs
= map (2 *) xs
|
17891b9e7148c97a5e7497d9afcbc20271203321d95ff3b87a054e7a9dd6f770 | pixlsus/registry.gimp.org_static | script-fu-make-anaglyph.scm | ;
; make-anaglyph
;
;
( )
; On-line portfolio: <>.
; This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation ; either version 2 of the License , or
; (at your option) any later version.
;
; This program is distributed in the hope that it will be useful,
; but WITHOUT ANY WARRANTY; without even the implied warranty of
; MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
; GNU General Public License for more details.
;
; This program creates stereoscopic 3D anaglyph photos from a stereo pair.
In order to make use of this program you will first need a pair of images
; from slightly different angles or the same object. You then need to place
both images as different layers in the same image window in Gimp with the
; right image as the Bottom layer and the left image as the next layer above
; it and no other layers in the image (if you use this script in other cir-
; cumstances then it probably won't work properly). The script finishes
leaving the two images as seperate layers so that final alignment adjust-
; ments can be made before merging the layers down and saving the anaglyph.
Any colours can be chosen for the two layers but it is recommended that
you only choose colours with individual RGB values of 0 or 255 and that
the two colours compliment each other . The default colours , and the most
commoly used colour combination , are Red ( 255 0 0 ) for the right image and
cyan ( 0 255 255 ) for the left image . Other possible pairs are :
Red ( 255 0 0 ) and Blue ( 0 0 255 )
Red ( 255 0 0 ) and Green ( 0 255 0 )
Blue ( 0 0 255 ) and Green ( 0 255 0 )
Blue ( 0 0 255 ) and Yellow ( 255 255 0 )
Green ( 0 255 0 ) and ( 255 0 255 )
; but be warned, not all colour pairs work equally well.
; To view the anaglyphs as 3D images you will need a pair of glasses in
; the colours that you have chosen with the colour of the left eye of
; the glasses matching the colour applied to the right image (e.g.
; with the default red/cyan combination the red side of the glasses
; goes over the left eye and the cyan side on the right eye).
; And finaly, if you haven't been involved in 3D photography before
; it is highly addictive and standard 2D images will look flat when
; compared to anaglyphs.
; Define the function:
(define (script-fu-make-anaglyph inImg inDrawable inTopColour inBottomColour)
(gimp-image-undo-group-start inImg)
(gimp-context-push)
(let*
(
; create local variables
(theLayersList
(cadr
(gimp-image-get-layers inImg)
)
)
(theTopImageLayer
(aref theLayersList 0)
)
(theBottomImageLayer
(aref theLayersList 1)
)
)
(gimp-context-set-foreground inTopColour)
(gimp-context-set-background inBottomColour)
(gimp-selection-all inImg)
(gimp-edit-bucket-fill theTopImageLayer FG-BUCKET-FILL SCREEN-MODE 100 0 FALSE 0 0)
(gimp-edit-bucket-fill theBottomImageLayer BG-BUCKET-FILL SCREEN-MODE 100 0 FALSE 0 0)
(gimp-layer-set-mode theTopImageLayer MULTIPLY-MODE)
)
(gimp-displays-flush)
(gimp-context-pop)
(gimp-image-undo-group-end inImg)
)
(script-fu-register
"script-fu-make-anaglyph" ;func name
"Make Anaglyph" ;menu label
"Converts two images to a two-colour Anaglyph. Right image as Background, Left image as Layer 1" ;description
"Steph Parker" ;author
"copyright 2008, Steph Parker" ;copyright notice
"27/6/2008" ;date created
"RGB, RGBA" ;image type that the script works on
SF-IMAGE "Image" 0
SF-DRAWABLE "Drawable" 0
SF-COLOR "Top Layer Color (Cyan):" '(0 255 255) ;color variable
SF-COLOR "Bottom Layer Color (Red):" '(255 0 0) ;color variable
)
(script-fu-menu-register "script-fu-make-anaglyph" "<Image>/Stereo")
| null | https://raw.githubusercontent.com/pixlsus/registry.gimp.org_static/ffcde7400f402728373ff6579947c6ffe87d1a5e/registry.gimp.org/files/script-fu-make-anaglyph.scm | scheme |
make-anaglyph
On-line portfolio: <>.
This program is free software; you can redistribute it and/or modify
either version 2 of the License , or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
This program creates stereoscopic 3D anaglyph photos from a stereo pair.
; from slightly different angles of the same object. You then need to place
right image as the Bottom layer and the left image as the next layer above
it and no other layers in the image (if you use this script in other cir-
cumstances then it probably won't work properly). The script finishes
ments can be made before merging the layers down and saving the anaglyph.
but be warned, not all colour pairs work equally well.
To view the anaglyphs as 3D images you will need a pair of glasses in
the colours that you have chosen with the colour of the left eye of
the glasses matching the colour applied to the right image (e.g.
with the default red/cyan combination the red side of the glasses
goes over the left eye and the cyan side on the right eye).
; And finally, if you haven't been involved in 3D photography before
it is highly addictive and standard 2D images will look flat when
compared to anaglyphs.
Define the function:
create local variables
func name
menu label
description
author
copyright notice
date created
image type that the script works on
color variable
color variable | ( )
it under the terms of the GNU General Public License as published by
In order to make use of this program you will first need a pair of images
both images as different layers in the same image window in Gimp with the
leaving the two images as seperate layers so that final alignment adjust-
Any colours can be chosen for the two layers but it is recommended that
you only choose colours with individual RGB values of 0 or 255 and that
the two colours compliment each other . The default colours , and the most
commoly used colour combination , are Red ( 255 0 0 ) for the right image and
cyan ( 0 255 255 ) for the left image . Other possible pairs are :
Red ( 255 0 0 ) and Blue ( 0 0 255 )
Red ( 255 0 0 ) and Green ( 0 255 0 )
Blue ( 0 0 255 ) and Green ( 0 255 0 )
Blue ( 0 0 255 ) and Yellow ( 255 255 0 )
Green ( 0 255 0 ) and ( 255 0 255 )
(define (script-fu-make-anaglyph inImg inDrawable inTopColour inBottomColour)
(gimp-image-undo-group-start inImg)
(gimp-context-push)
(let*
(
(theLayersList
(cadr
(gimp-image-get-layers inImg)
)
)
(theTopImageLayer
(aref theLayersList 0)
)
(theBottomImageLayer
(aref theLayersList 1)
)
)
(gimp-context-set-foreground inTopColour)
(gimp-context-set-background inBottomColour)
(gimp-selection-all inImg)
(gimp-edit-bucket-fill theTopImageLayer FG-BUCKET-FILL SCREEN-MODE 100 0 FALSE 0 0)
(gimp-edit-bucket-fill theBottomImageLayer BG-BUCKET-FILL SCREEN-MODE 100 0 FALSE 0 0)
(gimp-layer-set-mode theTopImageLayer MULTIPLY-MODE)
)
(gimp-displays-flush)
(gimp-context-pop)
(gimp-image-undo-group-end inImg)
)
(script-fu-register
SF-IMAGE "Image" 0
SF-DRAWABLE "Drawable" 0
)
(script-fu-menu-register "script-fu-make-anaglyph" "<Image>/Stereo")
|
5f3ebd0ccb8a2532830bc3dca2b6bf8e881e795e47f37c8d88b8c5025ce940e5 | mbenelli/klio | buffmap-test.scm | ; Test
(##namespace ("buffmap-test#"))
(##include "~~lib/gambit#.scm")
(##include "../klio/test.scm")
(load "../klio/ctypes")
(load "../klio/buffmap")
(##namespace
("buffmap#" make-var build-accessors))
(define buffer
(u8vector ; type little-endian big-endian
#x00 ; u8 0 0
u8 255 255
bits 01111111
u8 1 1
f32 1.0 4.600602988224807e-41
u8 2 2
u8 3 3
f64 1.0 3.03865e-319
; Layout description of `buffer`: each spec is (name type byte-offset) or,
; for single-bit fields, (name bit byte-offset bit-index).  make-var turns
; each spec into the variable descriptor consumed by build-accessors.
(define datamap
(map
(lambda (x)
(apply make-var x))
`((v0 byte 0)
(v1 byte 1)
(v2 bit 2 0)
(v3 bit 2 1)
(v4 bit 2 2)
(v5 bit 2 3)
(v6 bit 2 4)
(v7 bit 2 5)
(v8 bit 2 6)
(v9 bit 2 7)
(v10 byte 3)
(v11 f32 4)
(v12 byte 8)
(v13 byte 9)
(v14 f64 10))))
; Accessors over the same buffer: `get` uses the default (little-endian)
; interpretation, `get-be` the big-endian one.
(define get (build-accessors buffer datamap))
(define get-be (build-accessors buffer datamap 'big))
; Read every mapped variable back out of the buffer and check the decoded
; value under both endiannesses; the expected values mirror the annotations
; in the buffer definition above.
(define (run)
(test (get 'v0) 0)
(test (get 'v1) 255)
(test (get 'v2) 0)
(test (get 'v3) 1)
(test (get 'v4) 1)
(test (get 'v5) 1)
(test (get 'v6) 1)
(test (get 'v7) 1)
(test (get 'v8) 1)
(test (get 'v9) 1)
(test (get 'v10) 1)
(test (get 'v11) 1.0)
(test (get 'v12) 2)
(test (get 'v13) 3)
(test (get 'v14) 1.0)
(test (get-be 'v11) 4.600602988224807e-41)
(test (get-be 'v14) 3.03865e-319))
; Execute the suite at load time.
(run)
| null | https://raw.githubusercontent.com/mbenelli/klio/33c11700d6080de44a22a27a5147f97899583f6e/tests/buffmap-test.scm | scheme | Test
type little-endian big-endian
u8 0 0 |
(##namespace ("buffmap-test#"))
(##include "~~lib/gambit#.scm")
(##include "../klio/test.scm")
(load "../klio/ctypes")
(load "../klio/buffmap")
(##namespace
("buffmap#" make-var build-accessors))
(define buffer
u8 255 255
bits 01111111
u8 1 1
f32 1.0 4.600602988224807e-41
u8 2 2
u8 3 3
f64 1.0 3.03865e-319
(define datamap
(map
(lambda (x)
(apply make-var x))
`((v0 byte 0)
(v1 byte 1)
(v2 bit 2 0)
(v3 bit 2 1)
(v4 bit 2 2)
(v5 bit 2 3)
(v6 bit 2 4)
(v7 bit 2 5)
(v8 bit 2 6)
(v9 bit 2 7)
(v10 byte 3)
(v11 f32 4)
(v12 byte 8)
(v13 byte 9)
(v14 f64 10))))
(define get (build-accessors buffer datamap))
(define get-be (build-accessors buffer datamap 'big))
(define (run)
(test (get 'v0) 0)
(test (get 'v1) 255)
(test (get 'v2) 0)
(test (get 'v3) 1)
(test (get 'v4) 1)
(test (get 'v5) 1)
(test (get 'v6) 1)
(test (get 'v7) 1)
(test (get 'v8) 1)
(test (get 'v9) 1)
(test (get 'v10) 1)
(test (get 'v11) 1.0)
(test (get 'v12) 2)
(test (get 'v13) 3)
(test (get 'v14) 1.0)
(test (get-be 'v11) 4.600602988224807e-41)
(test (get-be 'v14) 3.03865e-319))
(run)
|
12644c588c766e7c2e56f8edeaabd479a4cb73115d8665fa8f79bd23923b8546 | exercism/erlang | raindrops_tests.erl | Generated with ' v0.2.0 '
%% Revision 1 of the exercises generator was used
%% -specifications/raw/42dd0cea20498fd544b152c4e2c0a419bb7e266a/exercises/raindrops/canonical-data.json
%% This file is automatically generated from the exercises canonical data.
%% EUnit tests for the Exercism "raindrops" exercise.
%%
%% NOTE: this file is machine-generated (see the generator header above);
%% regenerate from the canonical data rather than editing by hand.
%% Each quoted atom ending in `_test_' is an EUnit test *generator*
%% returning a {Description, Test} pair, and every case exercises
%% raindrops:convert/1, which must return a string.
-module(raindrops_tests).
-include_lib("erl_exercism/include/exercism.hrl").
-include_lib("eunit/include/eunit.hrl").
'1_the_sound_for_1_is_1_test_'() ->
{"the sound for 1 is 1",
?_assertEqual("1", raindrops:convert(1))}.
'2_the_sound_for_3_is_pling_test_'() ->
{"the sound for 3 is Pling",
?_assertEqual("Pling", raindrops:convert(3))}.
'3_the_sound_for_5_is_plang_test_'() ->
{"the sound for 5 is Plang",
?_assertEqual("Plang", raindrops:convert(5))}.
'4_the_sound_for_7_is_plong_test_'() ->
{"the sound for 7 is Plong",
?_assertEqual("Plong", raindrops:convert(7))}.
'5_the_sound_for_6_is_pling_as_it_has_a_factor_3_test_'() ->
{"the sound for 6 is Pling as it has a "
"factor 3",
?_assertEqual("Pling", raindrops:convert(6))}.
'6_2_to_the_power_3_does_not_make_a_raindrop_sound_as_3_is_the_exponent_not_the_base_test_'() ->
{"2 to the power 3 does not make a raindrop "
"sound as 3 is the exponent not the base",
?_assertEqual("8", raindrops:convert(8))}.
'7_the_sound_for_9_is_pling_as_it_has_a_factor_3_test_'() ->
{"the sound for 9 is Pling as it has a "
"factor 3",
?_assertEqual("Pling", raindrops:convert(9))}.
'8_the_sound_for_10_is_plang_as_it_has_a_factor_5_test_'() ->
{"the sound for 10 is Plang as it has "
"a factor 5",
?_assertEqual("Plang", raindrops:convert(10))}.
'9_the_sound_for_14_is_plong_as_it_has_a_factor_of_7_test_'() ->
{"the sound for 14 is Plong as it has "
"a factor of 7",
?_assertEqual("Plong", raindrops:convert(14))}.
'10_the_sound_for_15_is_pling_plang_as_it_has_factors_3_and_5_test_'() ->
{"the sound for 15 is PlingPlang as it "
"has factors 3 and 5",
?_assertEqual("PlingPlang", raindrops:convert(15))}.
'11_the_sound_for_21_is_pling_plong_as_it_has_factors_3_and_7_test_'() ->
{"the sound for 21 is PlingPlong as it "
"has factors 3 and 7",
?_assertEqual("PlingPlong", raindrops:convert(21))}.
'12_the_sound_for_25_is_plang_as_it_has_a_factor_5_test_'() ->
{"the sound for 25 is Plang as it has "
"a factor 5",
?_assertEqual("Plang", raindrops:convert(25))}.
'13_the_sound_for_27_is_pling_as_it_has_a_factor_3_test_'() ->
{"the sound for 27 is Pling as it has "
"a factor 3",
?_assertEqual("Pling", raindrops:convert(27))}.
'14_the_sound_for_35_is_plang_plong_as_it_has_factors_5_and_7_test_'() ->
{"the sound for 35 is PlangPlong as it "
"has factors 5 and 7",
?_assertEqual("PlangPlong", raindrops:convert(35))}.
'15_the_sound_for_49_is_plong_as_it_has_a_factor_7_test_'() ->
{"the sound for 49 is Plong as it has "
"a factor 7",
?_assertEqual("Plong", raindrops:convert(49))}.
'16_the_sound_for_52_is_52_test_'() ->
{"the sound for 52 is 52",
?_assertEqual("52", raindrops:convert(52))}.
'17_the_sound_for_105_is_pling_plang_plong_as_it_has_factors_3_5_and_7_test_'() ->
{"the sound for 105 is PlingPlangPlong "
"as it has factors 3, 5 and 7",
?_assertEqual("PlingPlangPlong",
raindrops:convert(105))}.
'18_the_sound_for_3125_is_plang_as_it_has_a_factor_5_test_'() ->
{"the sound for 3125 is Plang as it has "
"a factor 5",
?_assertEqual("Plang", raindrops:convert(3125))}.
| null | https://raw.githubusercontent.com/exercism/erlang/57ac2707dae643682950715e74eb271f732e2100/exercises/practice/raindrops/test/raindrops_tests.erl | erlang | Revision 1 of the exercises generator was used
-specifications/raw/42dd0cea20498fd544b152c4e2c0a419bb7e266a/exercises/raindrops/canonical-data.json
This file is automatically generated from the exercises canonical data. | Generated with ' v0.2.0 '
-module(raindrops_tests).
-include_lib("erl_exercism/include/exercism.hrl").
-include_lib("eunit/include/eunit.hrl").
'1_the_sound_for_1_is_1_test_'() ->
{"the sound for 1 is 1",
?_assertEqual("1", raindrops:convert(1))}.
'2_the_sound_for_3_is_pling_test_'() ->
{"the sound for 3 is Pling",
?_assertEqual("Pling", raindrops:convert(3))}.
'3_the_sound_for_5_is_plang_test_'() ->
{"the sound for 5 is Plang",
?_assertEqual("Plang", raindrops:convert(5))}.
'4_the_sound_for_7_is_plong_test_'() ->
{"the sound for 7 is Plong",
?_assertEqual("Plong", raindrops:convert(7))}.
'5_the_sound_for_6_is_pling_as_it_has_a_factor_3_test_'() ->
{"the sound for 6 is Pling as it has a "
"factor 3",
?_assertEqual("Pling", raindrops:convert(6))}.
'6_2_to_the_power_3_does_not_make_a_raindrop_sound_as_3_is_the_exponent_not_the_base_test_'() ->
{"2 to the power 3 does not make a raindrop "
"sound as 3 is the exponent not the base",
?_assertEqual("8", raindrops:convert(8))}.
'7_the_sound_for_9_is_pling_as_it_has_a_factor_3_test_'() ->
{"the sound for 9 is Pling as it has a "
"factor 3",
?_assertEqual("Pling", raindrops:convert(9))}.
'8_the_sound_for_10_is_plang_as_it_has_a_factor_5_test_'() ->
{"the sound for 10 is Plang as it has "
"a factor 5",
?_assertEqual("Plang", raindrops:convert(10))}.
'9_the_sound_for_14_is_plong_as_it_has_a_factor_of_7_test_'() ->
{"the sound for 14 is Plong as it has "
"a factor of 7",
?_assertEqual("Plong", raindrops:convert(14))}.
'10_the_sound_for_15_is_pling_plang_as_it_has_factors_3_and_5_test_'() ->
{"the sound for 15 is PlingPlang as it "
"has factors 3 and 5",
?_assertEqual("PlingPlang", raindrops:convert(15))}.
'11_the_sound_for_21_is_pling_plong_as_it_has_factors_3_and_7_test_'() ->
{"the sound for 21 is PlingPlong as it "
"has factors 3 and 7",
?_assertEqual("PlingPlong", raindrops:convert(21))}.
'12_the_sound_for_25_is_plang_as_it_has_a_factor_5_test_'() ->
{"the sound for 25 is Plang as it has "
"a factor 5",
?_assertEqual("Plang", raindrops:convert(25))}.
'13_the_sound_for_27_is_pling_as_it_has_a_factor_3_test_'() ->
{"the sound for 27 is Pling as it has "
"a factor 3",
?_assertEqual("Pling", raindrops:convert(27))}.
'14_the_sound_for_35_is_plang_plong_as_it_has_factors_5_and_7_test_'() ->
{"the sound for 35 is PlangPlong as it "
"has factors 5 and 7",
?_assertEqual("PlangPlong", raindrops:convert(35))}.
'15_the_sound_for_49_is_plong_as_it_has_a_factor_7_test_'() ->
{"the sound for 49 is Plong as it has "
"a factor 7",
?_assertEqual("Plong", raindrops:convert(49))}.
'16_the_sound_for_52_is_52_test_'() ->
{"the sound for 52 is 52",
?_assertEqual("52", raindrops:convert(52))}.
'17_the_sound_for_105_is_pling_plang_plong_as_it_has_factors_3_5_and_7_test_'() ->
{"the sound for 105 is PlingPlangPlong "
"as it has factors 3, 5 and 7",
?_assertEqual("PlingPlangPlong",
raindrops:convert(105))}.
'18_the_sound_for_3125_is_plang_as_it_has_a_factor_5_test_'() ->
{"the sound for 3125 is Plang as it has "
"a factor 5",
?_assertEqual("Plang", raindrops:convert(3125))}.
|
e5f669dbd84522ba542537a6f86c2c49937096207f6416665e010fa58e4e364f | deadpendency/deadpendency | DependencyAssessmentViolation.hs | # OPTIONS_GHC -fno - warn - orphans #
module Common.HtmlReport.Instances.Assessment.DependencyAssessmentViolation (parsePackageDeprecated, parseDeprecatedFor) where
import Common.HtmlReport.HtmlReport
import Common.HtmlReport.HtmlReportDecodeError
import Common.HtmlReport.Instances.PackageLink
import Common.Model.Assessment.DependencyAssessmentViolation
import Common.Model.Dependency.DependencyName
import Common.Model.Ecosystem.Registry
import Common.Model.Report.PackageLink
import Common.Parsing.Megaparsec
import Data.Vector qualified as V
import Lucid
import Text.Megaparsec qualified as M
import Text.Megaparsec.Char qualified as M
import Text.Megaparsec.Char.Lexer qualified as M
-- | Render each violation as the HTML fragment used in report bodies.
-- Most cases are plain sentences; 'DAVPackageDeprecated' delegates to
-- 'packageDeprecated' for a richer fragment.  Dates are formatted with
-- @%Y-%m-%d@.
--
-- NOTE: every literal emitted here must stay in sync with the matching
-- parser in the 'FromHtmlReportBody' instance below, which round-trips
-- this exact wording.
instance ToHtmlReportBody DependencyAssessmentViolation where
toHtmlReportBody =
\case
DAVNoRecentCommits errorAtMonths maybeLastCommitTime ->
-- the "Last commit: <date>" suffix is only emitted when a date is known
toHtml $ "No commits within the last " <> show @Text errorAtMonths <> " months." <> maybe "" ((<>) " Last commit: " . pack . formatTime defaultTimeLocale "%Y-%m-%d") maybeLastCommitTime
DAVFewYearlyCommits errorAtCount commitCount -> toHtml $ "Only " <> show @Text commitCount <> " commit(s) within the last year. Expected more than " <> show @Text errorAtCount <> "."
DAVNoRecentPackageRelease errorAtMonths lastReleaseDate -> toHtml $ "Last package release over " <> show @Text errorAtMonths <> " months ago. Last release: " <> pack (formatTime defaultTimeLocale "%Y-%m-%d" lastReleaseDate)
DAVRepoArchived -> "Repository is archived."
DAVPackageDeprecated registry deprecationType maybeDeprecationMessage deprecatedForNames -> packageDeprecated registry deprecationType maybeDeprecationMessage deprecatedForNames
DAVIsFork -> "The repository is a fork."
DAVSingleRecentAuthor -> "All commits within the last year were authored by a single person."
DAVRepoNotIdentified -> "A source repository was not identified."
DAVRepoNotFound -> "Source repository was not found."
-- | Build the HTML fragment for a deprecated\/abandoned\/relocated package:
-- a leading sentence (@Package has been \<kind\> in \<registry\>.@) optionally
-- followed by a @\<ul\>@ containing a \"More Info\" item (the registry's
-- deprecation message, italicised) and\/or a \"Recommended Replacement(s)\"
-- item (comma-separated links to the suggested packages).  The list is
-- omitted entirely when it would be empty.
--
-- NOTE(review): \"flagged abandonded\" is misspelled, but
-- 'parsePackageDeprecated' matches the same misspelled text, so the two
-- sides must be changed together or HTML round-tripping breaks.
packageDeprecated :: Registry -> DAVDeprecationType -> Maybe Text -> V.Vector DependencyName -> Html ()
packageDeprecated registry davDeprecationType maybeDeprecationMessage deprecatedForNames =
let prefixHtml = toHtml @Text "Package has been"
deprecationTypeHtml =
toHtml @Text $
case davDeprecationType of
DAVDTDeprecated -> "deprecated"
DAVDTAbandoned -> "flagged abandonded"
DAVDTRelocated -> "relocated"
registryHtml = toHtml $ registryAsText registry
hasMoreInfo = isJust maybeDeprecationMessage
moreInfoHtml = case maybeDeprecationMessage of
Just deprecationMessage -> li_ $ "More Info: " <> i_ (toHtml deprecationMessage)
Nothing -> mempty
packageLinks = PackageLink registry <$> deprecatedForNames
hasDeprecatedFor = not (V.null deprecatedForNames)
deprecatedForHtml =
if hasDeprecatedFor
then li_ $ "Recommended Replacement(s): " <> fold (intersperseV ", " (fmap (renderPackageLinkWithName False) packageLinks))
else mempty
-- only emit the <ul> when it would hold at least one item
additionalInfoList =
if hasMoreInfo || hasDeprecatedFor
then ul_ (moreInfoHtml <> deprecatedForHtml)
else mempty
in prefixHtml <> " " <> deprecationTypeHtml <> " in " <> registryHtml <> "." <> additionalInfoList
-- | Parse a violation back out of its rendered HTML body.  Each alternative
-- is tried in turn; 'M.try' makes a partially-matched alternative backtrack
-- so the next parser sees the full input.  When nothing matches, the decode
-- error carries the unmatched input for diagnosis.
instance FromHtmlReportBody DependencyAssessmentViolation where
fromHtmlReportBody input =
let parsers =
( M.try parseNoRecentCommits
<|> M.try parseFewYearlyCommits
<|> M.try parseRepoArchived
<|> M.try parsePackageDeprecated
<|> M.try parseNoRecentPackageRelease
<|> M.try parseRepoIsAFork
<|> M.try parseRepoNotIdentified
<|> M.try parseRepoNotFound
<|> parseSingleRecentAuthor
)
maybeFailureResult = mParseMaybe (M.try parsers) input
in maybeToRight
(HtmlReportDecodeError $ "Unable to match violation: " <> input)
maybeFailureResult
-- | Parse the HTML produced for 'DAVNoRecentCommits':
--
-- > No commits within the last <n> months.[ Last commit: YYYY-MM-DD]
--
-- The trailing @Last commit@ date is optional and, when present, is read
-- back with the same @%Y-%m-%d@ format the renderer uses.
parseNoRecentCommits :: MParser DependencyAssessmentViolation
parseNoRecentCommits = do
  _ <- M.string "No commits within the last "
  errorAtMonths <- M.decimal
  _ <- M.string " months."
  maybeLastCommitDateTime <- M.optional $ do
    _ <- M.string " Last commit: "
    M.some M.anySingle >>= parseTimeM False defaultTimeLocale "%Y-%m-%d"
  -- The optional date can be passed through as-is; the previous case
  -- analysis rebuilt an identical value in both branches.
  pure $ DAVNoRecentCommits errorAtMonths maybeLastCommitDateTime
-- | Parse the HTML produced for 'DAVFewYearlyCommits':
--
-- > Only <observed> commit(s) within the last year. Expected more than <threshold>.
parseFewYearlyCommits :: MParser DependencyAssessmentViolation
parseFewYearlyCommits = do
  _ <- M.string "Only "
  observedCommits <- M.decimal
  _ <- M.string " commit(s) within the last year. Expected more than "
  expectedThreshold <- M.decimal
  _ <- M.char '.'
  pure (DAVFewYearlyCommits expectedThreshold observedCommits)
-- | Match the fixed sentence rendered for 'DAVRepoArchived'.
parseRepoArchived :: MParser DependencyAssessmentViolation
parseRepoArchived = M.string "Repository is archived." $> DAVRepoArchived
-- | Match the fixed sentence rendered for 'DAVRepoNotIdentified'.
parseRepoNotIdentified :: MParser DependencyAssessmentViolation
parseRepoNotIdentified = M.string "A source repository was not identified." $> DAVRepoNotIdentified
-- | Match the fixed sentence rendered for 'DAVRepoNotFound'.
parseRepoNotFound :: MParser DependencyAssessmentViolation
parseRepoNotFound = M.string "Source repository was not found." $> DAVRepoNotFound
-- | Parse the HTML produced by 'packageDeprecated'.  Reads the deprecation
-- kind and registry from the leading sentence, then scans ahead (with
-- backtracking 'M.try', so an absent section consumes nothing) for the
-- optional italicised \"More Info\" message and the optional
-- \"Recommended Replacement(s)\" link list; a missing replacement list
-- yields an empty vector.
--
-- NOTE(review): \"flagged abandonded\" mirrors the misspelling emitted by
-- 'packageDeprecated'; change both together or round-tripping breaks.
parsePackageDeprecated :: MParser DependencyAssessmentViolation
parsePackageDeprecated = do
M.string "Package has been "
davDeprecationType <-
(M.string "deprecated" $> DAVDTDeprecated)
<|> (M.string "flagged abandonded" $> DAVDTAbandoned)
<|> (M.string "relocated" $> DAVDTRelocated)
M.string " in "
registry <- registryParser <* M.char '.'
maybeDeprecationMessage <- M.optional $ fmap pack $ M.try $ M.skipManyTill M.anySingle (M.string "More Info: <i>") *> M.someTill M.anySingle (M.string "</i>")
deprecatedForNames <- fmap (fromMaybe V.empty) $ M.optional $ M.try $ M.skipManyTill M.anySingle (M.string "Recommended Replacement(s): ") *> parseDeprecatedFor
pure $ DAVPackageDeprecated registry davDeprecationType maybeDeprecationMessage deprecatedForNames
-- | Parse a run of registry anchor links, each optionally followed by the
-- @", "@ separator and the whole run terminated by the closing @</li>@
-- tag, into the dependency names the links point at.
parseDeprecatedFor :: MParser (V.Vector DependencyName)
parseDeprecatedFor = do
  links <- M.someTill (parserRegistryAnchor <* M.optional (M.string ", ")) (M.string "</li>")
  pure (V.fromList (fmap _dependencyName links))
-- | Parse the HTML produced for 'DAVNoRecentPackageRelease':
--
-- > Last package release over <n> months ago. Last release: YYYY-MM-DD
--
-- The remainder of the input is consumed as the date text and re-parsed
-- with the renderer's @%Y-%m-%d@ format.
parseNoRecentPackageRelease :: MParser DependencyAssessmentViolation
parseNoRecentPackageRelease = do
M.string "Last package release over "
errorAtMonths <- M.decimal
M.string " months ago. Last release: "
lastReleaseDateTime <- M.some M.anySingle >>= parseTimeM False defaultTimeLocale "%Y-%m-%d"
pure $ DAVNoRecentPackageRelease errorAtMonths lastReleaseDateTime
-- | Match the fixed sentence rendered for 'DAVIsFork'.
parseRepoIsAFork :: MParser DependencyAssessmentViolation
parseRepoIsAFork = M.string "The repository is a fork." $> DAVIsFork
-- | Match the fixed sentence rendered for 'DAVSingleRecentAuthor'.
parseSingleRecentAuthor :: MParser DependencyAssessmentViolation
parseSingleRecentAuthor = M.string "All commits within the last year were authored by a single person." $> DAVSingleRecentAuthor
| null | https://raw.githubusercontent.com/deadpendency/deadpendency/170d6689658f81842168b90aa3d9e235d416c8bd/apps/common/src/Common/HtmlReport/Instances/Assessment/DependencyAssessmentViolation.hs | haskell | # OPTIONS_GHC -fno - warn - orphans #
module Common.HtmlReport.Instances.Assessment.DependencyAssessmentViolation (parsePackageDeprecated, parseDeprecatedFor) where
import Common.HtmlReport.HtmlReport
import Common.HtmlReport.HtmlReportDecodeError
import Common.HtmlReport.Instances.PackageLink
import Common.Model.Assessment.DependencyAssessmentViolation
import Common.Model.Dependency.DependencyName
import Common.Model.Ecosystem.Registry
import Common.Model.Report.PackageLink
import Common.Parsing.Megaparsec
import Data.Vector qualified as V
import Lucid
import Text.Megaparsec qualified as M
import Text.Megaparsec.Char qualified as M
import Text.Megaparsec.Char.Lexer qualified as M
instance ToHtmlReportBody DependencyAssessmentViolation where
toHtmlReportBody =
\case
DAVNoRecentCommits errorAtMonths maybeLastCommitTime ->
toHtml $ "No commits within the last " <> show @Text errorAtMonths <> " months." <> maybe "" ((<>) " Last commit: " . pack . formatTime defaultTimeLocale "%Y-%m-%d") maybeLastCommitTime
DAVFewYearlyCommits errorAtCount commitCount -> toHtml $ "Only " <> show @Text commitCount <> " commit(s) within the last year. Expected more than " <> show @Text errorAtCount <> "."
DAVNoRecentPackageRelease errorAtMonths lastReleaseDate -> toHtml $ "Last package release over " <> show @Text errorAtMonths <> " months ago. Last release: " <> pack (formatTime defaultTimeLocale "%Y-%m-%d" lastReleaseDate)
DAVRepoArchived -> "Repository is archived."
DAVPackageDeprecated registry deprecationType maybeDeprecationMessage deprecatedForNames -> packageDeprecated registry deprecationType maybeDeprecationMessage deprecatedForNames
DAVIsFork -> "The repository is a fork."
DAVSingleRecentAuthor -> "All commits within the last year were authored by a single person."
DAVRepoNotIdentified -> "A source repository was not identified."
DAVRepoNotFound -> "Source repository was not found."
packageDeprecated :: Registry -> DAVDeprecationType -> Maybe Text -> V.Vector DependencyName -> Html ()
packageDeprecated registry davDeprecationType maybeDeprecationMessage deprecatedForNames =
let prefixHtml = toHtml @Text "Package has been"
deprecationTypeHtml =
toHtml @Text $
case davDeprecationType of
DAVDTDeprecated -> "deprecated"
DAVDTAbandoned -> "flagged abandonded"
DAVDTRelocated -> "relocated"
registryHtml = toHtml $ registryAsText registry
hasMoreInfo = isJust maybeDeprecationMessage
moreInfoHtml = case maybeDeprecationMessage of
Just deprecationMessage -> li_ $ "More Info: " <> i_ (toHtml deprecationMessage)
Nothing -> mempty
packageLinks = PackageLink registry <$> deprecatedForNames
hasDeprecatedFor = not (V.null deprecatedForNames)
deprecatedForHtml =
if hasDeprecatedFor
then li_ $ "Recommended Replacement(s): " <> fold (intersperseV ", " (fmap (renderPackageLinkWithName False) packageLinks))
else mempty
additionalInfoList =
if hasMoreInfo || hasDeprecatedFor
then ul_ (moreInfoHtml <> deprecatedForHtml)
else mempty
in prefixHtml <> " " <> deprecationTypeHtml <> " in " <> registryHtml <> "." <> additionalInfoList
instance FromHtmlReportBody DependencyAssessmentViolation where
fromHtmlReportBody input =
let parsers =
( M.try parseNoRecentCommits
<|> M.try parseFewYearlyCommits
<|> M.try parseRepoArchived
<|> M.try parsePackageDeprecated
<|> M.try parseNoRecentPackageRelease
<|> M.try parseRepoIsAFork
<|> M.try parseRepoNotIdentified
<|> M.try parseRepoNotFound
<|> parseSingleRecentAuthor
)
maybeFailureResult = mParseMaybe (M.try parsers) input
in maybeToRight
(HtmlReportDecodeError $ "Unable to match violation: " <> input)
maybeFailureResult
parseNoRecentCommits :: MParser DependencyAssessmentViolation
parseNoRecentCommits = do
M.string "No commits within the last "
errorAtMonths <- M.decimal
M.string " months."
maybeLastCommitDateTime <- M.optional $ do
_ <- M.string " Last commit: "
M.some M.anySingle >>= parseTimeM False defaultTimeLocale "%Y-%m-%d"
case maybeLastCommitDateTime of
Just lastCommitDateTime -> pure $ DAVNoRecentCommits errorAtMonths (Just lastCommitDateTime)
Nothing -> pure $ DAVNoRecentCommits errorAtMonths Nothing
parseFewYearlyCommits :: MParser DependencyAssessmentViolation
parseFewYearlyCommits = do
M.string "Only "
commitCount <- M.decimal
M.string " commit(s) within the last year. Expected more than "
errorAtCount <- M.decimal
M.char '.'
pure $ DAVFewYearlyCommits errorAtCount commitCount
parseRepoArchived :: MParser DependencyAssessmentViolation
parseRepoArchived = M.string "Repository is archived." $> DAVRepoArchived
parseRepoNotIdentified :: MParser DependencyAssessmentViolation
parseRepoNotIdentified = M.string "A source repository was not identified." $> DAVRepoNotIdentified
parseRepoNotFound :: MParser DependencyAssessmentViolation
parseRepoNotFound = M.string "Source repository was not found." $> DAVRepoNotFound
parsePackageDeprecated :: MParser DependencyAssessmentViolation
parsePackageDeprecated = do
M.string "Package has been "
davDeprecationType <-
(M.string "deprecated" $> DAVDTDeprecated)
<|> (M.string "flagged abandonded" $> DAVDTAbandoned)
<|> (M.string "relocated" $> DAVDTRelocated)
M.string " in "
registry <- registryParser <* M.char '.'
maybeDeprecationMessage <- M.optional $ fmap pack $ M.try $ M.skipManyTill M.anySingle (M.string "More Info: <i>") *> M.someTill M.anySingle (M.string "</i>")
deprecatedForNames <- fmap (fromMaybe V.empty) $ M.optional $ M.try $ M.skipManyTill M.anySingle (M.string "Recommended Replacement(s): ") *> parseDeprecatedFor
pure $ DAVPackageDeprecated registry davDeprecationType maybeDeprecationMessage deprecatedForNames
parseDeprecatedFor :: MParser (V.Vector DependencyName)
parseDeprecatedFor = do
names <- _dependencyName <<$>> M.someTill (parserRegistryAnchor <* M.optional (M.string ", ")) (M.string "</li>")
pure $
V.fromList names
parseNoRecentPackageRelease :: MParser DependencyAssessmentViolation
parseNoRecentPackageRelease = do
M.string "Last package release over "
errorAtMonths <- M.decimal
M.string " months ago. Last release: "
lastReleaseDateTime <- M.some M.anySingle >>= parseTimeM False defaultTimeLocale "%Y-%m-%d"
pure $ DAVNoRecentPackageRelease errorAtMonths lastReleaseDateTime
parseRepoIsAFork :: MParser DependencyAssessmentViolation
parseRepoIsAFork = M.string "The repository is a fork." $> DAVIsFork
parseSingleRecentAuthor :: MParser DependencyAssessmentViolation
parseSingleRecentAuthor = M.string "All commits within the last year were authored by a single person." $> DAVSingleRecentAuthor
|
|
2e52513c61230146f946700ebb152a858bbba6612f9820e4fe8e5d3e1305bf5b | b0-system/b0 | b0_cli.ml | ---------------------------------------------------------------------------
Copyright ( c ) 2020 The b0 programmers . All rights reserved .
Distributed under the ISC license , see terms at the end of the file .
---------------------------------------------------------------------------
Copyright (c) 2020 The b0 programmers. All rights reserved.
Distributed under the ISC license, see terms at the end of the file.
---------------------------------------------------------------------------*)
open B0_std
open Cmdliner
(* Reusable Cmdliner option fragments shared by b0 subcommands: repeatable
   include/exclude selectors for build units and packs.  Exclusion options
   take precedence over inclusion options. *)
module Arg = struct
(** [units ?docs ?doc ()] is the repeatable [-u]/[--unit UNIT] option
    listing units to use. *)
let units ?docs ?(doc = "Use unit $(docv).") () =
Arg.(value & opt_all string [] & info ["u"; "unit"] ?docs ~doc ~docv:"UNIT")
(** [x_units ?docs ?doc ()] is the repeatable [-x]/[--x-unit UNIT] option
    listing units to exclude; exclusion wins over inclusion. *)
let x_units ?docs ?(doc = "Exclude unit $(docv). Takes over inclusion.") () =
let docv = "UNIT" in
Arg.(value & opt_all string [] & info ["x"; "x-unit"] ?docs ~doc ~docv)
(** [packs ?docs ?doc ()] is the repeatable [-p]/[--pack PACK] option
    listing packs to use. *)
let packs ?docs ?(doc = "Use pack $(docv).") () =
Arg.(value & opt_all string [] & info ["p"; "pack"] ?docs ~doc ~docv:"PACK")
(** [x_packs ?docs ?doc ()] is the repeatable [-X]/[--x-pack PACK] option
    listing packs to exclude; exclusion wins over inclusion. *)
let x_packs ?docs ?(doc = "Exclude pack $(docv). Takes over inclusion.") () =
let docv = "PACK" in
Arg.(value & opt_all string [] & info ["X"; "x-pack"] ?docs ~doc ~docv)
end
---------------------------------------------------------------------------
Copyright ( c ) 2020 The b0 programmers
Permission to use , copy , modify , and/or distribute this software for any
purpose with or without fee is hereby granted , provided that the above
copyright notice and this permission notice appear in all copies .
THE SOFTWARE IS PROVIDED " AS IS " AND THE AUTHOR DISCLAIMS ALL WARRANTIES
WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
MERCHANTABILITY AND FITNESS . IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
ANY SPECIAL , DIRECT , INDIRECT , OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
RESULTING FROM LOSS OF USE , DATA OR PROFITS , WHETHER IN AN
ACTION OF CONTRACT , NEGLIGENCE OR OTHER TORTIOUS ACTION , ARISING OUT OF
OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE .
---------------------------------------------------------------------------
Copyright (c) 2020 The b0 programmers
Permission to use, copy, modify, and/or distribute this software for any
purpose with or without fee is hereby granted, provided that the above
copyright notice and this permission notice appear in all copies.
THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
---------------------------------------------------------------------------*)
| null | https://raw.githubusercontent.com/b0-system/b0/cbe12b8a55da6b50ab01ed058b339dbed3cfe894/src/b0_cli.ml | ocaml | ---------------------------------------------------------------------------
Copyright ( c ) 2020 The b0 programmers . All rights reserved .
Distributed under the ISC license , see terms at the end of the file .
---------------------------------------------------------------------------
Copyright (c) 2020 The b0 programmers. All rights reserved.
Distributed under the ISC license, see terms at the end of the file.
---------------------------------------------------------------------------*)
open B0_std
open Cmdliner
module Arg = struct
let units ?docs ?(doc = "Use unit $(docv).") () =
Arg.(value & opt_all string [] & info ["u"; "unit"] ?docs ~doc ~docv:"UNIT")
let x_units ?docs ?(doc = "Exclude unit $(docv). Takes over inclusion.") () =
let docv = "UNIT" in
Arg.(value & opt_all string [] & info ["x"; "x-unit"] ?docs ~doc ~docv)
let packs ?docs ?(doc = "Use pack $(docv).") () =
Arg.(value & opt_all string [] & info ["p"; "pack"] ?docs ~doc ~docv:"PACK")
let x_packs ?docs ?(doc = "Exclude pack $(docv). Takes over inclusion.") () =
let docv = "PACK" in
Arg.(value & opt_all string [] & info ["X"; "x-pack"] ?docs ~doc ~docv)
end
---------------------------------------------------------------------------
Copyright ( c ) 2020 The b0 programmers
Permission to use , copy , modify , and/or distribute this software for any
purpose with or without fee is hereby granted , provided that the above
copyright notice and this permission notice appear in all copies .
THE SOFTWARE IS PROVIDED " AS IS " AND THE AUTHOR DISCLAIMS ALL WARRANTIES
WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
MERCHANTABILITY AND FITNESS . IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
ANY SPECIAL , DIRECT , INDIRECT , OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
RESULTING FROM LOSS OF USE , DATA OR PROFITS , WHETHER IN AN
ACTION OF CONTRACT , NEGLIGENCE OR OTHER TORTIOUS ACTION , ARISING OUT OF
OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE .
---------------------------------------------------------------------------
Copyright (c) 2020 The b0 programmers
Permission to use, copy, modify, and/or distribute this software for any
purpose with or without fee is hereby granted, provided that the above
copyright notice and this permission notice appear in all copies.
THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
---------------------------------------------------------------------------*)
|
|
8d1569a6e5f16ae39313ea6894f092b943ab3a615e72915bed1d91fdfeb7c94e | Metaxal/quickscript-extra | regexp-replace.rkt | #lang racket/base
(require framework/gui-utils
racket/gui
racket/help
quickscript)
(script-help-string "Replace patterns in the selected text using regular expressions.")
(editor-set-x-selection-mode #t)
(define str-out #f)
(define str-in #f)
(define f (new dialog% [label "Regexp Replace"]
[min-width 500]))
(define hp-help (new horizontal-panel% [parent f]))
(define msg-help
(new message% [parent hp-help]
[label "Replace the selected text using an extended regular expression"]))
(define bt-help (new button% [parent hp-help] [label "Regexp Help"]
[callback (thunk* (help "Regular expressions N:Printing N:Reading"))]))
(define templates
'(("– Templates –" . #f)
; title from to protect-from protect-to
("Remove trailing spaces" "\\s*$" "" #f #f)
("Remove leading spaces" "^\\s*" "" #f #f)
("Comment out" "^" ";" #f #f)
("Uncomment" "^;" "" #f #f)
("Markdown quotes -> @racket[]" "`([^`]+)`" "@racket[\\1]" #f #f)
("Markdown item -> @item{}" "\\s*\\*\\s*(.*)\\s*" "@item{\\1}" #f #f)
))
(define ch-templates
(new choice% [parent f]
[label #f #;"Templates:"]
[choices (map car templates)]
[callback (λ (ch ev)
(define sel (send ch get-string-selection))
(define l (and sel (dict-ref templates sel)))
(when l
(send t1 set-value (first l))
(send t2 set-value (second l))
(send cb1 set-value (third l))
(send cb2 set-value (fourth l))))]))
(define hp1 (new horizontal-panel% [parent f]))
(define t1 (new text-field% [parent hp1] [label "Replace:"]))
(define cb1 (new check-box% [parent hp1] [label "Not regexp"]))
(define hp2 (new horizontal-panel% [parent f]))
(define t2 (new text-field% [parent hp2] [label "Replace:"]))
; Hack: Setting the label afterwards ensures both fields have the same size.
(send t2 set-label "With:")
(define cb2 (new check-box% [parent hp2] [label "Not regexp"]))
(define (ok-pressed b ev)
(send f show #f)
(define t1-re ((if (send cb1 get-value) regexp-quote pregexp)
(send t1 get-value)))
(define t2-re ((if (send cb2 get-value) regexp-replace-quote values)
(send t2 get-value)))
(define new-lines
; apply the regexes only per line
(for/list ([line (regexp-split #rx"\n" str-in)])
(regexp-replace* t1-re line t2-re)))
(set! str-out (string-join new-lines "\n"))
;(set! str-out (regexp-replace* t1-re str-in t2-re)) ; problems with that, e.g., with "\n"
)
(define (cancel-pressed b ev)
(send f show #f))
(define-values (bt-ok bt-cancel)
(gui-utils:ok/cancel-buttons f ok-pressed cancel-pressed))
;; Performs a (extended) regexp-replace* on the selection.
;; The "from" and "to" patterns are asked in a dialog box.
;; If protect? is checked, the "from" pattern is regexp-quoted.
(define-script regexp-replace-selection
#:label "Regex replace"
#:menu-path ("Sele&ction")
#:help-string "Replace patterns in the selection using regular expressions"
#:shortcut #\h
#:shortcut-prefix (ctl)
#:persistent
(λ (str)
(set! str-in str)
(set! str-out #f)
(send t1 focus)
(send (send t1 get-editor) select-all)
(send f show #t)
str-out))
( item - callback " See the manual in the Script / Help \s * menu for \nmore information . " )
; for protect , test with \s * and \1
;
(item-callback "See the manual in the Script/Help \s* menu for \nmore information.")
; for protect, test with \s* and \1
;|#
| null | https://raw.githubusercontent.com/Metaxal/quickscript-extra/526b2eccd9f73ea30bbc013741fdd77f4e2724cf/scripts/regexp-replace.rkt | racket | title from to protect-from protect-to
"Templates:"]
Hack: Setting the label afterwards ensures both fields have the same size.
apply the regexes only per line
(set! str-out (regexp-replace* t1-re str-in t2-re)) ; problems with that, e.g., with "\n"
Performs a (extended) regexp-replace* on the selection.
The "from" and "to" patterns are asked in a dialog box.
If protect? is checked, the "from" pattern is regexp-quoted.
for protect , test with \s * and \1
for protect, test with \s* and \1
|# | #lang racket/base
(require framework/gui-utils
racket/gui
racket/help
quickscript)
(script-help-string "Replace patterns in the selected text using regular expressions.")
(editor-set-x-selection-mode #t)
(define str-out #f)
(define str-in #f)
(define f (new dialog% [label "Regexp Replace"]
[min-width 500]))
(define hp-help (new horizontal-panel% [parent f]))
(define msg-help
(new message% [parent hp-help]
[label "Replace the selected text using an extended regular expression"]))
(define bt-help (new button% [parent hp-help] [label "Regexp Help"]
[callback (thunk* (help "Regular expressions N:Printing N:Reading"))]))
(define templates
'(("– Templates –" . #f)
("Remove trailing spaces" "\\s*$" "" #f #f)
("Remove leading spaces" "^\\s*" "" #f #f)
("Comment out" "^" ";" #f #f)
("Uncomment" "^;" "" #f #f)
("Markdown quotes -> @racket[]" "`([^`]+)`" "@racket[\\1]" #f #f)
("Markdown item -> @item{}" "\\s*\\*\\s*(.*)\\s*" "@item{\\1}" #f #f)
))
(define ch-templates
(new choice% [parent f]
[choices (map car templates)]
[callback (λ (ch ev)
(define sel (send ch get-string-selection))
(define l (and sel (dict-ref templates sel)))
(when l
(send t1 set-value (first l))
(send t2 set-value (second l))
(send cb1 set-value (third l))
(send cb2 set-value (fourth l))))]))
(define hp1 (new horizontal-panel% [parent f]))
(define t1 (new text-field% [parent hp1] [label "Replace:"]))
(define cb1 (new check-box% [parent hp1] [label "Not regexp"]))
(define hp2 (new horizontal-panel% [parent f]))
(define t2 (new text-field% [parent hp2] [label "Replace:"]))
(send t2 set-label "With:")
(define cb2 (new check-box% [parent hp2] [label "Not regexp"]))
(define (ok-pressed b ev)
(send f show #f)
(define t1-re ((if (send cb1 get-value) regexp-quote pregexp)
(send t1 get-value)))
(define t2-re ((if (send cb2 get-value) regexp-replace-quote values)
(send t2 get-value)))
(define new-lines
(for/list ([line (regexp-split #rx"\n" str-in)])
(regexp-replace* t1-re line t2-re)))
(set! str-out (string-join new-lines "\n"))
)
(define (cancel-pressed b ev)
(send f show #f))
(define-values (bt-ok bt-cancel)
(gui-utils:ok/cancel-buttons f ok-pressed cancel-pressed))
(define-script regexp-replace-selection
#:label "Regex replace"
#:menu-path ("Sele&ction")
#:help-string "Replace patterns in the selection using regular expressions"
#:shortcut #\h
#:shortcut-prefix (ctl)
#:persistent
(λ (str)
(set! str-in str)
(set! str-out #f)
(send t1 focus)
(send (send t1 get-editor) select-all)
(send f show #t)
str-out))
( item - callback " See the manual in the Script / Help \s * menu for \nmore information . " )
(item-callback "See the manual in the Script/Help \s* menu for \nmore information.")
|
c4ede86531fdd762b74813b63a12896ca1ef75e7135ab64bb6a69346b21a7be8 | softwarelanguageslab/maf | R5RS_various_infinite-2-1.scm | ; Changes:
* removed : 0
* added : 1
* swaps : 0
; * negated predicates: 0
; * swapped branches: 0
; * calls to id fun: 0
(letrec ((t (lambda (x)
(t (+ x 1)))))
(<change>
()
(t 0))
(t 0)) | null | https://raw.githubusercontent.com/softwarelanguageslab/maf/11acedf56b9bf0c8e55ddb6aea754b6766d8bb40/test/changes/scheme/generated/R5RS_various_infinite-2-1.scm | scheme | Changes:
* negated predicates: 0
* swapped branches: 0
* calls to id fun: 0 | * removed : 0
* added : 1
* swaps : 0
(letrec ((t (lambda (x)
(t (+ x 1)))))
(<change>
()
(t 0))
(t 0)) |
91f94e2fa7f6d2ea3f3a1d2bb6b203a9da1cf859ec210b44791ec8295025beb8 | pezipink/fairylog | expander.rkt |
Copyright , 2019
#lang racket/base
(require (for-syntax syntax/parse
racket/string
racket/base
racket/list
racket/syntax
racket/string
racket/function
syntax/srcloc
syntax/location
racket/list))
(require syntax/parse/define syntax/location)
(begin-for-syntax
; true when expanding inside an always block with a sensitivity list
(define is-always-sens #f)
(define (toggle-always-sens)
(set! is-always-sens (not is-always-sens)))
(define declared-enums (make-hash))
(define current-module "")
(define (set-current-module name)
; (printf "setting current module ~a\n" name)
(set! current-module name))
(define (enum-exists? ctx enum-name)
(let ([gn (datum->syntax ctx (string->symbol (string-append "global-enum-" enum-name)))])
(if (syntax-local-value gn (λ () #f))
#t
(hash-has-key? declared-enums enum-name))))
(define (enum-key-exists? ctx enum-name key)
( printf " enum key exists\n " )
(let ([enum-name
(if (symbol? enum-name)
(symbol->string enum-name)
enum-name)]
[key
(if (symbol? key)
(symbol->string key)
key)])
(let ([gn (datum->syntax ctx (string->symbol (string-append "global-enum-" enum-name)))])
(if (syntax-local-value gn (λ () #f))
(member key (map car (syntax-local-value gn)))
(member key (map car (hash-ref declared-enums enum-name)))))
))
(define (get-enum-keys ctx enum-name)
(map car (hash-ref declared-enums (symbol->string enum-name))))
(define (get-enum-value ctx enum-name key)
(let* ([enum-name
(if (symbol? enum-name)
(symbol->string enum-name)
enum-name)]
[key
(if (symbol? key)
(symbol->string key)
key)]
[gn (datum->syntax ctx (string->symbol (string-append "global-enum-" enum-name)))])
(if (syntax-local-value gn (λ () #f))
(let
([pair (memf (λ (p) (equal? (car p) key)) (syntax-local-value gn))])
(cdr (car pair)))
(let*
([pairs (hash-ref declared-enums enum-name)]
[pair (memf (λ (p) (equal? (car p) key)) pairs)])
(cdr (car pair))))))
(define (add-enum enum-name vals)
(printf "enum ~a\n" (symbol->string enum-name) )
(for ([kvp vals])
(printf "~a : ~x\n" (car kvp) (cdr kvp)))
(hash-set! declared-enums (symbol->string enum-name) vals))
(define-syntax-class enum
#:description "a declared enum"
#:opaque
(pattern x:id #:when (enum-exists? (attribute x) (symbol->string (syntax-e (attribute x))))))
(define-syntax-class enum-kvp
#:description "a name and numeric value pair"
#:opaque
(pattern [x:id y]
#:with y-evaled (eval (syntax-e (attribute y)))
#:with pair (cons
(format "~a" (syntax-e (attribute x)))
(syntax-e (attribute y-evaled)))))
(define-syntax-class enum-literal
#:description "enum literal in the form enum.value"
(pattern x:id
#:do
[(define split
(string-split
(symbol->string (syntax-e (attribute x)))
"."))]
#:when (eq? (length split) 2 )
#:cut
#:fail-unless (enum-exists? (attribute x) (car split))
(format "the enum ~a does not exist" (car split))
#:fail-unless (enum-key-exists? (attribute x) (car split) (car (cdr split)))
(format "the value ~a does not exist for enum ~a"
(car (cdr split))
(car split))
#:with value (datum->syntax this-syntax (get-enum-value (attribute x) (car split) (car (cdr split))))
#:with compiled
(datum->syntax this-syntax
(format "~a (~a)" (symbol->string (syntax-e (attribute x)))
(get-enum-value (attribute x) (car split) (car (cdr split)))))
#:with bits (datum->syntax this-syntax (string-length (format "~b" (get-enum-value (attribute x)(car split) (car (cdr split))))))
))
;important note: these mutable structs do not work "globally", they are for
;local expansion purposes only. the modules and ports are also exposed via
;static bindings for other files to see.
(struct port-meta (name direction type) #:transparent)
(struct func-meta (name size-int) #:transparent #:mutable)
(struct module-meta (name ports functions) #:transparent #:mutable)
(define module-metadata (make-hash))
(define (add-module name ports)
(if (hash-has-key? module-metadata name)
(error "module ~a already exists" name)
(hash-set! module-metadata name (module-meta name ports '()))))
(define (module-exists? name-stx)
;here we check for a static binding to this works across files.
(if (syntax-local-value name-stx (λ () #f))
#t
(hash-ref module-metadata (symbol->string (syntax-e name-stx)))))
(define (module-port-names name-stx)
(if (syntax-local-value name-stx (λ () #f))
(map (compose symbol->string port-meta-name)
(module-meta-ports (syntax-local-value name-stx)))
(map (compose symbol->string port-meta-name)
(module-meta-ports
(hash-ref module-metadata
(symbol->string (syntax-e name-stx)))))))
(define (module-has-port? name-stx port-name)
;uses static binding data
(if (syntax-local-value name-stx (λ () #f))
(memf (λ (port) (equal? (symbol->string (port-meta-name port)) port-name))
(module-meta-ports (syntax-local-value name-stx)))
(memf (λ (port) (equal? (symbol->string (port-meta-name port)) port-name))
(module-meta-ports (hash-ref module-metadata
(symbol->string (syntax-e name-stx)))))))
(define (module-has-function? module-name function-name)
;uses local data
(memf (λ (func) (equal? (func-meta-name func) function-name))
(module-meta-functions (hash-ref module-metadata module-name))))
(define (add-module-function module-name function-name size)
(let* ([mod (hash-ref module-metadata module-name)]
[fs (module-meta-functions mod)])
(set-module-meta-functions! mod (cons (func-meta function-name size) fs))))
(define-syntax-class module-param
#:description "a module initializer"
(pattern [port-name:id port-value:bound-usage]
#:with name (datum->syntax this-syntax (symbol->string (syntax-e #'port-name)))
#:with value(datum->syntax this-syntax #'port-value.compiled))
(pattern [port-name:id port-value:expr]
#:with name (datum->syntax this-syntax (symbol->string (syntax-e #'port-name)))
#:with value(datum->syntax this-syntax #'(expression port-value))))
(define scoped-bindings-stack (box (list (make-hash))))
(define (push-scoped-stack)
(let* ([lst (unbox scoped-bindings-stack)]
[new-lst (cons (make-hash) lst)])
(set-box! scoped-bindings-stack new-lst)))
(define (pop-scoped-stack)
(let* ([lst (unbox scoped-bindings-stack)]
[new-lst (cdr lst)])
(set-box! scoped-bindings-stack new-lst)))
(define (peek-scoped-stack)
(let ([lst (unbox scoped-bindings-stack)])
(car lst)))
(struct binding-meta ( stx-size stx-arity-list))
(define (add-scoped-binding stx-name binding-meta stx)
(let ([name (syntax-e stx-name)]
[scoped (peek-scoped-stack)])
(when (and (in-scope? name) (not (equal? name "global")))
(writeln
(format "warning: ~a is already in scope at ~a"
name (source-location->string stx))))
(hash-set! scoped name binding-meta)))
(define (remove-scoped-binding stx-name)
(let ([name (syntax-e stx-name)]
[scoped (peek-scoped-stack)])
(hash-remove! scoped name)))
(define (in-scope? name)
(define (aux lst)
(cond
[(empty? lst) #f]
[(hash-has-key? (car lst) name) #t]
[else (aux (cdr lst))]))
(aux (unbox scoped-bindings-stack)))
(define (get-binding-size name)
(let ([name2 (if (syntax? name) (symbol->string (syntax-e name)) name)])
(define (aux lst)
(cond
[(empty? lst)
(begin
'none)]
[(hash-has-key? (car lst) name2)
(begin
(binding-meta-stx-size (hash-ref (car lst) name2)))]
[else (aux (cdr lst))]))
(aux (unbox scoped-bindings-stack))))
(define (get-binding-arities name)
(let ([name2 (if (syntax? name) (symbol->string (syntax-e name)) name)])
(define (aux lst)
(cond
[(empty? lst)
(begin
'none)]
[(hash-has-key? (car lst) name2)
(begin
(binding-meta-stx-arity-list (hash-ref (car lst) name2)))]
[else (aux (cdr lst))]))
(aux (unbox scoped-bindings-stack))))
(define-syntax-class scoped-binding
#:description "identifier in scope"
#:commit
(pattern x:id
#:with name (symbol->string (syntax-e #'x))
#:with name-stx (datum->syntax this-syntax (symbol->string (syntax-e #'x)))
#:fail-unless (in-scope? (symbol->string (syntax-e #'x))) "identifier is not in scope."
#:with size-int (get-binding-size (symbol->string (syntax-e #'x)))
#:with arities (get-binding-arities (symbol->string (syntax-e #'x)))
#:with is-array?
(let* ([a (get-binding-arities (symbol->string (syntax-e #'x)))]
[b (if (syntax? a)(list?(syntax-e a)) #f)] )
(and (syntax? a) (list? (syntax-e a)))
)))
(define-syntax-class binding
#:description "identifier name"
(pattern x:id
#:with name (symbol->string (syntax-e #'x))))
(define-syntax-class scoped-function
(pattern x:id
#:with name (symbol->string (syntax-e #'x))
#:with name-stx (datum->syntax this-syntax (symbol->string (syntax-e #'x)))
#:when (module-has-function? current-module (symbol->string (syntax-e #'x)))
)
)
(define-syntax-class inner-usage
(pattern x:scoped-binding
#:with name #'x.name
#:with size-int #'x.size-int
#:with compiled
#'x.name-stx)
(pattern x:expr
#:with size-int #'(expression x)
#:with compiled #'(expression x)))
(define-syntax-class bound-usage
#:description "identifier in scope with or without size, or array access"
#:commit
;arrays:
;when accessing an array, verilog says you must use all the dimensions.
;following that, you can further index into the bits using the normal
;range syntax.
;to start with no range checking of arrays. but we must still know
;the length of the array to know if they have supplied a range at the
end or not ( up to two expressions )
(pattern [s:scoped-binding
x:inner-usage ...+]
#:with x-count (length (syntax->list #'(x ...)))
#:with name #'s.name
#:with oob #'#f ;todo; out of bounds checks
#:with compiled
;todo: report these errors properly, not using exceptions!!
;todo: range checking on arities.
(if (syntax-e #'s.is-array?)
(cond
[(< (syntax-e #'x-count) (length (syntax-e #'s.arities)))
(error "you must specify all the array's dimensions" #'s)]
[(= (syntax-e #'x-count) (length (syntax-e #'s.arities)))
#'`(name ("[" ,x.compiled "]") ...)]
[else
(let-values
([(left right)
(split-at
(syntax->list #'(x ...))
(length (syntax-e #'s.arities)))])
(syntax-parse (list left right)
[((z:inner-usage ...) (ya:inner-usage yb:inner-usage))
#'`(name ("[" z.compiled "]") ...
"[" ya.compiled " : " yb.compiled "]"
)]
[((z:inner-usage ...) (ya:inner-usage))
#'`(name ("[" z.compiled "]") ...
"[" ya.compiled "]"
)]
[((z:inner-usage ...) ())
#'`(name ("[" z.compiled "]") ...)]))])
(cond
[(> (syntax-e #'x-count) 2) (error "not an array\n" #'s)]
[(= (syntax-e #'x-count) 2)
(syntax-parse #'(x ...)
[(x:inner-usage y:inner-usage)
#'`(name "[" ,x.compiled " : " ,y.compiled "]")])]
[else
#'`(name ("[" ,x.compiled "]") ...)]))
#:with name-stx #'compiled
#:with size-int
;since it is not possible to compile an array expression without
;all the indexes, we need only return the atual data size
;OR whatever the range equates to. for non-arrays, the size will
;be either one for a signle bit select or the size of the range.
(if (syntax-e #'s.is-array?)
(let-values
([(left right)
(split-at (syntax->list #'(x ...))
(length (syntax-e #'s.arities)))])
(syntax-parse (list left right)
[((z:inner-usage ...) (msb:inner-usage lsb:inner-usage))
#'(+ (- msb.size-int lsb.size-int) 1)]
[((z:inner-usage ...) (ya:inner-usage))
;single bit
#'1]
[((z:inner-usage ...) ())
;indexed - return size of array data
#'s.size-int]))
(syntax-parse #'(x ...)
[(msb:inner-usage lsb:inner-usage)
; (printf "here size is ~a ~a \n" #'msb.size-int #'lsb.size-int
;)
#'(+ (- msb.size-int lsb.size-int) 1)]
[(x:inner-usage)
#'1])
))
(pattern s:scoped-binding
#:with name #'s.name
#:with size (datum->syntax this-syntax "")
#:with size-int #'s.size-int
#:with oob #'#f
#:with compiled (datum->syntax this-syntax (symbol->string (syntax-e (attribute s))))
#:with name-stx #'compiled) ;used in error reporting
))
(define-syntax (push-binding stx)
(syntax-parse stx
[(_ id size)
(add-scoped-binding #'id (binding-meta #'size #'#f) stx)
#'(void)]
[(_ id size arity-list)
(add-scoped-binding #'id (binding-meta #'size #'arity-list) stx)
#'(void)]))
(define-syntax (pop-scoped-stack stx)
(syntax-parse stx
[(_)
(pop-scoped-stack)
#'(void)]))
(define-syntax (toggle-always-sens stx)
(syntax-parse stx
[(_)
(toggle-always-sens)
#'(void)]))
(begin-for-syntax
(define (syntax->error-syntax stx)
(datum->syntax stx
(format "~a:~a:~a"
(syntax-source stx)
(syntax-line stx)
(syntax-column stx))))
(define (is-hex-literal? str)
(regexp-match #px"^[$][0-9A-Fa-f_ZzXx]+$" str))
(define (is-binary-literal? str)
(regexp-match #px"^[%][01_ZzXx]+$" str))
(define (is-hex-string? str)
(regexp-match #px"^[0-9A-Fa-f_ZzXx]+$" str))
(define (is-binary-string? str)
(regexp-match #px"^[$][01_ZzXx]+$" str))
(define (is-number-literal-candidate? str)
;todo: need better literal checking
; eg check literal with base is not greater than size
; check literals characters properly - binary only 01xz_ etc
(let ([parsed
(regexp-match #px"^([0-9]+)_(2|8|10|16)_(-)?([0-9A-Fa-f_ZzXx]+$)" str)])
(if (eq? parsed #f)
#f
(cdr parsed)))) ; outputs size base negative? value
(define (string-replace-many str from to)
(for/fold ([str str])
([f from])
(string-replace str f to)))
(define-syntax-class number-literal
#:datum-literals (_)
(pattern x:integer
#:with base 10
#:with bits
(datum->syntax this-syntax
(string-length (format "~b" (syntax-e (attribute x))))) ;easy way out!
#:with compiled
(datum->syntax this-syntax
(format "~a" (syntax-e (attribute x)))))
;hex literals
(pattern x:id
#:do [(define str (symbol->string (syntax-e (attribute x))))]
#:when (is-hex-literal? str)
#:do [(define cleaned (string-replace
(string-replace str "_" "") "$" ""))]
#:with base 16
; for hex, leading zeroes are counted towards the length
#:with bits (datum->syntax this-syntax (* 4 (string-length cleaned)))
#:with compiled
(datum->syntax this-syntax
(format "~a'h~a"
(syntax-e (attribute bits))
(substring str 1))))
;binary literals
(pattern x:id
#:do [(define str (symbol->string (syntax-e (attribute x))))]
#:when (is-binary-literal? str)
#:do [(define cleaned (string-replace
(string-replace str "_" "") "%" ""))]
#:with base 2
; for binary, leading zeroes are counted towards the length
#:with bits (datum->syntax this-syntax (string-length cleaned))
#:with compiled
(datum->syntax this-syntax
(format "~a'b~a"
(syntax-e (attribute bits))
(substring str 1) )))
;full literal syntax
(pattern x:id
#:do [(define str
(is-number-literal-candidate?
(symbol->string (syntax-e (attribute x)))))]
#:when (list? str)
#:do [(define radix (string->number (list-ref str 1)))
(define radix-str
(case (string->number (list-ref str 1))
[(2) "'b"]
[(8) "'o"]
[(10) "'d"]
[(16) "'h"]))
(define size (string->number (list-ref str 0)))
(define literal (list-ref str 3))]
#:with base radix-str
#:with bits size
#:do [(let* ([n (string-replace-many literal '["X" "x" "Z" "z"]"0")]
[l
;for all but decimal we count the leading zeroes as well
;todo: this needs work, probably want tot just parse and count binary instead?
(case radix
[(2) (string-length n)]
[(8) (* (string-length n) 3)]
[(16) (string-length (format "~b" (string->number n 16))
)]
[(10) (string-length (format "~b" (string->number n 10))
)])])
(when (> l size)
(printf "warning: number literal ~a does not fit into the specified size at ~a\\n"
(symbol->string (syntax-e (attribute x))) #'x)))]
#:with compiled
(datum->syntax this-syntax
(format "~a~a~a~a"
(case (list-ref str 2)
[(#f) ""]
[else "-"])
size radix-str literal))))
(define-syntax-class edge-type
(pattern #:posedge)
(pattern #:negedge))
(define-syntax-class sensitivity
#:no-delimit-cut
(pattern [edge:edge-type ~! signal:bound-usage]
#:with edge-type (datum->syntax this-syntax (keyword->string (syntax-e #'edge)))
#:with compiled #'signal.compiled)
(pattern [signal:bound-usage]
#:with edge-type (datum->syntax this-syntax "")
#:with compiled #'signal.compiled)
)
(define-syntax-class direction-option
(pattern #:input)
(pattern #:output)
(pattern #:inout))
(define-syntax-class type-option
(pattern #:wire)
(pattern #:wand)
(pattern #:wor)
(pattern #:tri)
(pattern #:reg)
(pattern #:integer)
(pattern #:time)
(pattern #:real))
(define-syntax-class function-param
#:description "a function parameter"
(pattern [name-sym:id
(~optional [x (~optional y)])]
#:with name (datum->syntax this-syntax (symbol->string (syntax-e #'name-sym)))
#:with direction (datum->syntax this-syntax "input")
#:with type (datum->syntax this-syntax "wire")
#:with arity-list #'#f
#:with default #'""
#:with size-int
(cond
[(and (attribute x) (attribute y))
#'(+ (- x y) 1)]
[(attribute x)
#'x]
[else #'1])
#:with size
(cond
[(and (attribute x) (attribute y))
#'`("[" ,x ":" ,y "]")]
[(attribute x)
#'`("[" ,(- x 1) ":0" "]")]
[else #'""])))
(define-syntax-class param
#:description "a module parameter"
(pattern [name-sym:id
direction-opt:direction-option
type-opt:type-option
(~optional [x (~optional y)])
(~optional default-value)]
#:with name (datum->syntax this-syntax (symbol->string (syntax-e #'name-sym)))
#:with direction (datum->syntax this-syntax (keyword->string (syntax-e #'direction-opt)))
#:with type (datum->syntax this-syntax (keyword->string (syntax-e #'type-opt)))
#:with default
(if (attribute default-value)
#'`(" = " ,(expression default-value))
#'"")
#:with size-int
(cond
[(and (attribute x) (attribute y))
#'(+ (- x y) 1)]
[(attribute x)
#'x]
[else #'1])
#:with size
(cond
[(and (attribute x) (attribute y))
#'`("[" ,x ":" ,y "]")]
[(attribute x)
#'`("[" ,(- x 1) ":0" "]")]
[else #'""])))
(define-syntax-class local-param
#:datum-literals (array)
(pattern [name-sym:id
type-opt:type-option
[x (~optional y)]
(~optional (array x2:expr ...+))]
#:with name (datum->syntax this-syntax (symbol->string (syntax-e #'name-sym)))
#:with type (datum->syntax this-syntax (keyword->string (syntax-e #'type-opt)))
#:with default ;arrays dont have defaults, instead the
;additional array syntax appears here.
(cond
[(and (attribute x2))
#'`(
(
"[0:" ,(- x2 1) "]"
) ...
)]
[else #'""])
#:with arity-list
(if (attribute x2)
(syntax->list #'(x2 ...))
#'#f)
; actual data size, not array dimensions.
#:with size-int
(cond
[(and (attribute x) (attribute y))
#'(+ (- x y) 1)]
[(attribute x)
#'x]
[else #'1])
#:with size
(cond
[(and (attribute x) (attribute y))
#'`("[" ,x ":" ,y "]")]
[(attribute x)
#'`("[" ,(- x 1) ": 0" "]")]
[else #'""]))
(pattern [name-sym:id
type-opt:type-option
(~optional [x (~optional y)])
(~optional
default-value:expr)]
#:with name (datum->syntax this-syntax (symbol->string (syntax-e #'name-sym)))
#:with type (datum->syntax this-syntax (keyword->string (syntax-e #'type-opt)))
#:with default
(if (attribute default-value)
#'`(" = " ,(expression default-value))
#'"")
#:with arity-list #'#f
#:with size-int
(cond
[(and (attribute x) (attribute y))
#'(+ (- x y) 1)]
[(attribute x)
#'x]
[else #'1])
#:with size
(cond
[(and (attribute x) (attribute y))
#'`("[" ,x ":" ,y "]")]
[(attribute x)
#'`("[" ,(- x 1) ": 0" "]")]
[else #'""]))))
(define-syntax-parser expression
#:datum-literals
(set ~delay if case else when concat
\|\| \| \~\| ! ~ + - * / % << >> >>> == != >= <= < > && & ~& ^ ~^ )
[(_ x:integer)
#'x]
[(_ x:number-literal )
#'x.compiled]
[(_ x:bound-usage)
#:with err-prefix (syntax->error-syntax #'x)
#'`(
,(when x.oob
(printf "~a: warning - the expression '~a' is out of range\n" err-prefix x.compiled))
,x.compiled)]
[(_ x:enum-literal)
#'x.value]
[(_ (f:scoped-function ~! params ... last-param))
#'`(
,f.name-stx "("
( ,(expression params ) ",") ...
,(expression last-param)
")")]
[(_ (~delay ~! x y))
#'`("#" ,(expression x) " " ,(expression y))]
[(_ (when ~! test true-expr))
special case one - line when in RHS of expression - ternary
#'(~begin (when test true-expr))]
[(_ (concat ~! x y ...+))
#'`("{" ,(expression x) ( ", ",(expression y)) ... "}" )]
[(_ (if ~!
(~describe "condional test for if" test)
(~describe "true expression for if" true-expr)
(~describe "false expression for if" false-expr)))
#'`("("
,(expression test)
" ? "
,(expression true-expr)
" : "
,(expression false-expr)
")")]
[(_ (case val
[test true-expr]
[test2 expr2] ...+
[else def-expr]))
#'`(
"("
,(expression (== val test))
" ? "
,(expression true-expr)
" : "
,(expression (case val [test2 expr2] ... [else def-expr]))
")")]
[(_ (case val [test true-expr]
[else def-expr]))
#'`(
"("
,(expression (== val test))
" ? "
,(expression true-expr)
" : "
,(expression def-expr)
")")]
[(_ (case ~! val [test true-expr] ...+))
#:fail-when #t "you must supply an else branch of a case when used as an epxression"
#'(void)]
; unary
[(_ ( (~and op (~or + - ! & ~& ~ \| \~\| ^ ~^)) x))
#:with op-str (datum->syntax this-syntax (symbol->string (syntax-e #'op)))
#'`(,op-str ,(expression x))]
; binary
[(_ ( (~and op (~or + - * / % << >> >>> == != < > <= >= && & \|\| \| ^ ~^)) x y ))
#:with op-str (datum->syntax this-syntax (symbol->string (syntax-e #'op)))
#'`(
"("
,(expression x)
" "
,op-str
" "
,(expression y)
")")]
[(_ ( (~and op (~or + - * / % << >> >>> == != <= >= && & \|\| \| ^ ~^)) x y z ... ))
#:with op-str (datum->syntax this-syntax (symbol->string (syntax-e #'op)))
#'`(
"("
,(expression x)
" "
,op-str
" ("
,(expression (op y z ...))
")) " )]
;setters and bounds / truncation checking
[(_ (set (~or x:scoped-binding x:bound-usage) y:number-literal))
#:with op (if is-always-sens #'" <= " #'" = ")
#'`(
,(when (> y.bits x.size-int)
(printf "\"warning: the literal '~a' does not fit into '~a' and will be truncated\"\n" y.compiled x.name-stx))
,(expression x)
op
,(expression y))]
[(_ (set (~or x:scoped-binding x:bound-usage) y:enum-literal))
#:with op (if is-always-sens #'" <= " #'" = ")
#'`(
,(when (> y.bits x.size-int)
(printf "\"warning: the enum literal '~a' does not fit into '~a' and will be truncated\"\n" y.compiled x.name-stx))
,(expression x)
op
,(expression y))]
[(_ (set (~or x:scoped-binding x:bound-usage) (~or y:scoped-binding y:bound-usage)))
#:with op (if is-always-sens #'" <= " #'" = ")
#'`(
,(when (> y.size-int x.size-int)
(printf "\"warning: the expression '~a' does not fit into '~a' and will be truncated\"\n" y.name-stx x.name-stx))
,(expression x)
op
,(expression y))]
[(_ (set (~or x:scoped-binding x:bound-usage) y:expr))
#:with op (if is-always-sens #'" <= " #'" = ")
#:with name (datum->syntax this-syntax (format "~a" #'y))
#'`(
,(when (and (number? (expression y))(> (string-length (format "~b" (expression y))) x.size-int))
(printf "\"warning: the expression '~a' does not fit into '~a' and will be truncated\"\n" name x.name-stx))
,(expression x)
op
,(expression y))]
[(_ (set x y))
#:with op (if is-always-sens #'" <= " #'" = ")
#'`(
,(expression x)
op
,(expression y))]
[(_ x:expr)
#'x]
)
(define-syntax-parser ~case
#:datum-literals (else)
[(_ test:bound-usage [lhs:number-literal rhs (~optional comment:string #:defaults ([comment #'""]))] ...)
#'`(
tab
"case ("
,test.compiled
")\n"
inc-tab
(
tab
,lhs.compiled
" : // "
comment
"\n"
,(~begin rhs)
"\n"
) ...
dec-tab
tab
"endcase\n")]
[(_ test:bound-usage [lhs:number-literal rhs (~optional comment:string #:defaults ([comment #'""]))] ...
[else else-expr:expr])
#'`(
tab
"case ("
,test.compiled
")\n"
inc-tab
(
tab
,lhs.compiled
" : // "
comment
"\n"
,(~begin rhs)
"\n"
) ...
tab
"default : \n"
,(~begin else-expr)
"\n"
dec-tab
tab
"endcase\n")]
)
(define-syntax-parser ~cond
#:datum-literals (else)
[(_ [first-test first-outcome] [expr-test expr-outcome] ...
[else else-outcome])
#'`(
,(~cond
[first-test first-outcome]
[expr-test expr-outcome] ...)
tab
"else\n"
inc-tab
,(~begin else-outcome)
"\n"
dec-tab
)]
[(_ [first-test first-outcome])
#'`(
tab
"if("
,(expression first-test)
")\n"
inc-tab
,(~begin first-outcome)
dec-tab
"\n"
)]
[(_ [first-test first-outcome] [expr-test expr-outcome] ...)
#'`(
tab
"if("
,(expression first-test)
")\n"
inc-tab
,(~begin first-outcome)
"\n"
dec-tab
(tab
"else if("
,(expression expr-test)
")\n"
inc-tab
,(~begin expr-outcome)
"\n"
dec-tab
"\n") ...
)])
(define-syntax-parser ~if
[(_ (~describe "condional test for if" test-expr)
(~describe "true expression for if" true-expr)
(~describe "false expression for if" false-expr))
#'(~cond
[test-expr true-expr]
[else false-expr])])
(define-syntax-parser ~when
[(_ test-expr true-expr)
#'(~cond
[test-expr true-expr])])
(define-syntax-parser list->enum
[(_ name vals)
;todo: add global support here
(add-enum (syntax-e #'name) (eval #'vals))
#'(void)])
(define-syntax-parser enum
[(_ name kvp:enum-kvp ...+)
#:fail-when (check-duplicate-identifier
(syntax->list #'(kvp.x ...)))
"duplicate enum name"
#:fail-when (check-duplicates
(syntax->datum #'(kvp.y-evaled ...)))
"duplicate enum value"
(if (syntax-property this-syntax 'module)
(begin
;a local enum only need exist for this module during this expansion
(add-enum (syntax-e #'name) (syntax->datum #'(kvp.pair ...)))
#'(void))
;otherwise we create a static binding for the enum data
;prefixing the name with global-enum
(with-syntax ([g-name (datum->syntax this-syntax (string->symbol
(string-append "global-enum-"
(symbol->string
(syntax-e #'name)))))])
(printf "ADDING ENUM ~a\n" #'g-name)
#'(define-syntax g-name
'(kvp.pair ...)
)))]
[(_ name keys:id ...+)
#:fail-when (check-duplicate-identifier
(syntax->list #'(keys ...)))
"duplicate enum name"
(with-syntax
([(kvps ...)
(for/list
([n (in-naturals)]
[x (syntax->list #'(keys ...))])
(cons (format "~a" (syntax-e x)) n))])
(if (syntax-property this-syntax 'module)
(begin
(add-enum (syntax-e #'name)(syntax->datum #'(kvps ...)))
#'(void))
(with-syntax ([g-name (datum->syntax this-syntax (string->symbol
(string-append "global-enum-"
(symbol->string
(syntax-e #'name)))))])
#'(define-syntax g-name
'(kvps ...)
))
)
)])
;; ~match-set : exhaustive enum dispatch where every arm assigns into
;; the same `target`.  Expansion-time checks (each a #:fail-when):
;;   1. no duplicate keys among the arms,
;;   2. every key exists in the enum,
;;   3. every enum key is covered (exhaustiveness).
;; Keys are then replaced by their numeric enum values and the whole
;; form lowers to a ~case over `test`.
(define-syntax-parser ~match-set
  [(_ target:bound-usage test:expr enum-name:enum
      [key value] ...)
   #:fail-when (check-duplicate-identifier (syntax->list #'(key ...)))
   "duplicate enum value"
   #:fail-when
   ;; keys not present in the enum -> fail, highlighting the offenders
   (let ([results (filter (λ (v) (not (enum-key-exists? #'enum-name (syntax-e #'enum-name) v)))
                          (syntax->datum #'(key ...)))])
     (if (not (eq? results '()))
         (with-syntax ([res results]) #'res)
         #f))
   "some identifiers do not exist in enum"
   #:fail-when
   ;; enum keys with no arm -> fail, highlighting the missing cases
   (let*
       ([keys (map (λ (v) (format "~a" v)) (syntax->datum #'(key ...)))]
        [results (filter (λ (v) (not (member v keys)))
                         (get-enum-keys #'enum-name (syntax-e #'enum-name)))])
     (if (not (eq? results '()))
         (with-syntax ([res results]) #'res)
         #f))
   "missing cases in the enum"
   (with-syntax ([(enum-vals ...) (map (λ (v) (get-enum-value #'enum-name (syntax-e #'enum-name) v))
                                       (syntax->datum #'(key ...)))])
     #'(~case test [enum-vals (set target value)] ...))])
;; ~match : exhaustive enum dispatch with arbitrary expression arms
;; (cf. ~match-set, whose arms all assign into one target).  Performs
;; the same three expansion-time checks: duplicate keys, unknown keys,
;; and full coverage of the enum.  Each key becomes its numeric value,
;; and the key's name string is forwarded as the ~case arm comment so
;; the generated Verilog stays readable.
(define-syntax-parser ~match
  [(_ test:expr enum-name:enum
      [key expr] ...)
   #:fail-when (check-duplicate-identifier (syntax->list #'(key ...)))
   "duplicate enum value"
   #:fail-when
   ;; keys not present in the enum -> fail, highlighting the offenders
   (let ([results (filter (λ (v) (not (enum-key-exists? #'enum-name (syntax-e #'enum-name) v)))
                          (syntax->datum #'(key ...)))])
     (if (not (eq? results '()))
         (with-syntax ([res results]) #'res)
         #f))
   "some identifiers do not exist in enum"
   #:fail-when
   ;; enum keys with no arm -> fail, highlighting the missing cases
   (let*
       ([keys (map (λ (v) (format "~a" v)) (syntax->datum #'(key ...)))]
        [results (filter (λ (v) (not (member v keys)))
                         (get-enum-keys #'enum-name (syntax-e #'enum-name)))])
     (if (not (eq? results '()))
         (with-syntax ([res results]) #'res)
         #f))
   "missing cases in the enum"
   (with-syntax
     ([(enum-vals ...) (map (λ (v) (get-enum-value #'enum-name (syntax-e #'enum-name) v))
                            (syntax->datum #'(key ...)))]
      [(key-str ...) (map (λ (v) (symbol->string v))
                          (syntax->datum #'(key ...)))] )
     #'(~case test [enum-vals expr key-str] ...))])
;; ~case-set : sugar over ~case where every arm assigns into the same
;; destination:  (~case-set d e [k v] ...)  ==  (~case e [k (set d v)] ...)
(define-syntax-parser ~case-set
  [(_ dest:bound-usage scrutinee:expr
      [lit:number-literal rhs] ...)
   #'(~case scrutinee [lit (set dest rhs)] ...)])
;; ~begin-line : normalize a single statement inside a begin block.
;; Bare (expression ...) forms and (set ...) forms get wrapped as full
;; statement lines (tab ... ";\n"); anything else (cond/when/match/...)
;; is assumed to emit its own layout and is passed through untouched.
(define-syntax-parser ~begin-line
  #:datum-literals (~cond locals expression ~when if set ~match-set ~match ~case-set)
  [(_ (expression expr ...))
   #'`(tab
       ,(expression expr ...)
       ";\n")]
  [(_ (set [x:bound-usage y] ...))
   ;; batched assignment: one statement line per [x y] pair
   #'`(
       (tab
        ,(expression (set x y))
        ";\n")...)]
  [(_ (set x:bound-usage y))
   ;; single assignment statement
   #'`(
       (tab
        ,(expression (set x y))
        ";\n"))]
  [(_ x:expr)
   ;; pass-through: the form emits its own tabs/newlines
   #'x] )
;; inc : statement-level increment sugar; (inc x) emits "x = x + 1;"
;; (or "x <= x + 1;" inside a sensitivity-list always block, since
;; `set` chooses the assignment operator) as a full statement line.
(define-syntax-parser inc
  [( _ x:scoped-binding)
   #'`(tab
       ,(expression (set x (+ x 1)))
       ";\n"
       )])
;; ~begin : emit a Verilog begin/end block.  The first clause accepts a
;; leading identifier and emits a named block ("begin <name>"); both
;; clauses run each statement through ~begin-line and indent the body
;; one level via the inc-tab/dec-tab markers consumed by code-gen.
(define-syntax-parser ~begin
  [(_ block-name:id expr ...+)
   #:with name (datum->syntax this-syntax (symbol->string (syntax-e #'block-name)))
   #'`(
       tab
       "begin "
       ,name
       "\n"
       inc-tab
       ,(~begin-line expr) ...
       dec-tab
       tab
       "end \n"
       )]
  [(_ expr ...)
   #'`(
       tab
       "begin\n"
       inc-tab
       ,(~begin-line expr) ...
       dec-tab
       tab
       "end \n"
       )])
;; locals : declare local nets/registers inside a module body.  For each
;; parameter it registers the name (with bit width and array arity) on
;; the expander's scope stack via push-binding, then emits the Verilog
;; declaration text "type size name default;".
(define-syntax-parser locals
  [(_ params:local-param ...)
   #'`(
       (
        tab
        ,(push-binding params.name params.size-int params.arity-list) ...
        (
         ,params.type
         " "
         ,params.size
         " "
         ,params.name
         " "
         ,params.default
         ";\n") ...))])
;; assign : Verilog continuous assignment, "assign lhs = rhs;".
;; First clause handles a batch of [lhs rhs] pairs (one assign line
;; each); the second handles a single pair.
(define-syntax-parser assign
  [(_ [x:bound-usage y:expr] ...)
   #'`(
       ("assign "
        ,x.compiled
        " = "
        ,(expression y)
        ";\n") ... )]
  [(_ x:bound-usage y:expr)
   #'`("assign "
       ,x.compiled
       " = "
       ,(expression y)
       ";\n")])
;; always-line : identity wrapper for one statement inside an `always`
;; block; kept as an extension point for per-line processing.
(define-syntax-parser always-line
  [(_ stmt) #'stmt])
;; always : emit a Verilog `always` block.
;; Forms:
;;   (always (or [#:edge sig] ...+) stmt ...)  sensitivity items joined " or "
;;   (always ([#:edge sig] ...+) stmt ...)     sensitivity items joined " , "
;;   (always * stmt ...)                       combinational, @(*)
;;   (always stmt ...)                         no sensitivity list
;; The two sensitivity-list forms toggle the expander-side
;; is-always-sens flag on so that `set` lowers to non-blocking (<=)
;; assignments; the trailing ,(toggle-always-sens) in the template
;; toggles it back off.
;; Fixed: removed a leftover expansion-time debug (printf "always\n")
;; from the `or` clause — no other clause printed, and it wrote to
;; stdout on every expansion.
(define-syntax-parser always
  #:datum-literals (* or)
  [(_ (or sens:sensitivity rest:sensitivity ...) expr ...)
   (toggle-always-sens)
   #'`(
       tab
       "always @("
       ,sens.edge-type
       " "
       ,sens.compiled
       (
        " or "
        ,rest.edge-type
        " "
        ,rest.compiled
        ) ...
       ")\n"
       inc-tab
       ,(always-line expr) ...
       dec-tab
       ,(toggle-always-sens))]
  [(_ (sens:sensitivity rest:sensitivity ...) expr ...)
   (toggle-always-sens)
   #'`(
       tab
       "always @("
       ,sens.edge-type
       " "
       ,sens.compiled
       (
        " , "
        ,rest.edge-type
        " "
        ,rest.compiled) ...
       ")\n"
       inc-tab
       ,(always-line expr) ...
       dec-tab
       ,(toggle-always-sens)
       )]
  [(_ * expr ...)
   #'`(
       tab
       "always @(*)\n"
       inc-tab
       ,(always-line expr) ...
       dec-tab
       )]
  [(_ expr ...)
   #'`(
       tab
       "always\n"
       inc-tab
       ,(always-line expr) ...
       dec-tab
       )]
  )
;; ~module-line : wraps each top-level form inside a vmod body, tagging
;; the produced syntax with the 'module property so nested macros
;; (enum, ...) can tell they are expanding inside a module.
;; The (vmod m ...) clause handles module *instantiation*: it verifies
;; at expansion time that the target module exists and that the port
;; list matches the module's declared ports exactly (no unknown ports,
;; no missing ports), then emits "m ( .port(expr), ... );".
;; Fixes:
;;  - the "doesn't exist" message used a one-placeholder format string
;;    with two arguments, which made `format` itself raise instead of
;;    reporting the real error;
;;  - error messages now interpolate (syntax-e #'m) rather than the raw
;;    syntax object;
;;  - removed a dead i-name binding built from an unbound identifier.
(define-syntax-parser ~module-line
  #:datum-literals (set vmod)
  [(_ mod-id (vmod m:id ~! p:module-param ... l:module-param ~!))
   #:fail-unless (module-exists? #'m)
   (format "the module '~a' doesn't exist" (syntax-e #'m))
   #:fail-unless
   (andmap (λ (name) (module-has-port? #'m name))
           (syntax->datum #'(p.name ... l.name)))
   (format "instantiation of module ~a contains invalid port names: ~a"
           (syntax-e #'m)
           (filter (λ (name) (not (module-has-port? #'m name)))
                   (syntax->datum #'(p.name ... l.name))))
   #:fail-unless
   (andmap (λ (name) (member name (syntax->datum #'(p.name ... l.name))))
           (module-port-names #'m))
   (format "instantiation of module ~a is missing the following ports: ~a"
           (syntax-e #'m)
           (filter
            (λ (name)
              (not (member name (syntax->datum #'(p.name ... l.name)))))
            (module-port-names #'m)))
   (with-syntax ([m-name (symbol->string (syntax-e #'m))])
     ;; emit:  modname (
     ;;          .port(expr),
     ;;          ...
     ;;          .last(expr)
     ;;        );
     (syntax-property
      #'`(
          ,m-name
          " (\n"
          inc-tab
          (
           "." ,p.name "(" ,(expression p.value) "),\n"
           ) ...
          "." ,l.name "(" ,(expression l.value) ")\n"
          dec-tab
          ");\n"
          )
      'module
      #'mod-id))
   ]
  [(_ mod-id x)
   ;; default: pass the form through, tagged with the enclosing module
   (syntax-property #'x 'module #'mod-id)])
;; function : declare a Verilog function.
;;   (function [msb lsb] name (params ...) body ...)
;; The optional leading [x y] (or [x]) gives the output width; it
;; defaults to a single bit.  At expansion time this registers the
;; function (name + result width) on the current module so `expression`
;; can recognise calls to it, pushes a fresh scope for the parameters,
;; and emits "function <size> <name>; ... endfunction".
(define-syntax-parser function
  [(_ (~optional [x (~optional y)])
      name-sym:id
      ; output size
      (p:function-param ...)
      expression ...)
   #:with name (datum->syntax this-syntax (symbol->string (syntax-e #'name-sym)))
   ;; result width as an integer: [x y] -> x-y+1, [x] -> x, absent -> 1
   #:with size-int
   (cond
     [(and (attribute x) (attribute y))
      #'(+ (- x y) 1)]
     [(attribute x)
      #'x]
     [else #'1])
   ;; result range as Verilog text: "[x:y]", "[x-1:0]", or "" for 1 bit
   #:with size
   (cond
     [(and (attribute x) (attribute y))
      #'`("[" ,x ":" ,y "]")]
     [(attribute x)
      #'`("[" ,(- x 1) ":0" "]")]
     [else #'""])
   (push-scoped-stack)
   (add-module-function current-module (symbol->string (syntax-e #'name-sym))
                        (syntax-e #'size-int))
   #'`(
       "function " ,size " " ,name ";\n"
       inc-tab
       tab
       ;push the name and size of the function as it is used
       ;to set the return value. sticking to Verilog style for now.
       ,(push-binding name size-int #f)
       ,(push-binding p.name p.size-int p.arity-list) ...
       (tab
        ,p.direction
        " "
        ,p.size
        " "
        ,p.name
        ";\n") ...
       ,(~begin
         expression ...)
       dec-tab
       ,(pop-scoped-stack)
       "endfunction\n")])
;; Map of output filename -> output port, shared by the code generator.
(define out-ports (make-hash))

;; Ensure an output port for `filename` is open and registered.
;; First open for a file truncates/replaces it; a reopen after the
;; port has been closed appends, so multiple modules in one source
;; file accumulate into the same .v output.
(define (ensure-port-open filename)
  (cond
    [(not (hash-has-key? out-ports filename))
     (hash-set! out-ports filename
                (open-output-file #:mode 'binary #:exists 'replace filename))]
    [(port-closed? (hash-ref out-ports filename))
     (hash-set! out-ports filename
                (open-output-file #:mode 'binary #:exists 'append filename))]
    [else (void)]))
;; Return the open output port for `filename`, opening it on demand
;; via ensure-port-open.
(define (get-port filename)
  (ensure-port-open filename)
  (hash-ref out-ports filename))
;; Close every output port that code generation has opened.
(define (ensure-ports-closed)
  (for-each close-output-port (hash-values out-ports)))
;; Custom #%module-begin for the language: run all module-level forms,
;; then close every generated-output port so the .v files are flushed
;; when the module finishes.
(define-syntax-parser #%module-begin
  [(_ exprs ...)
   #'(#%plain-module-begin
      exprs ...
      ;todo: we need a nicer way of dealing with knowing when files are done with
      (ensure-ports-closed)
      )])
;; test? : expansion-time probe.  Forces resolution of `name`'s static
;; (transformer) binding via syntax-local-value — which raises if the
;; identifier has no transformer binding — then expands to (void).
(define-syntax-parser test?
  [(_ name)
   (syntax-local-value #'name)
   #'(void)])
;; vmod : define a Verilog module.
;;   (vmod name (port ... last-port) body ...)
;; Expansion-time effects:
;;   - pushes a new scope and makes `name` the current module,
;;   - records the module's ports in the expander-side registry,
;;   - defines a static binding `name-sym` holding a module-meta (and
;;     provides it) so other files can validate instantiations.
;; Runtime effect: opens <source-file>.v next to the source file and
;; writes the generated Verilog through code-gen.  Body forms are
;; expanded via ~module-line so they carry the 'module property.
(define-syntax-parser vmod
  [(_ name-sym:id
      (p:param ... last:param) ;inputs
      expression ... )
   #:with name (datum->syntax this-syntax (symbol->string (syntax-e #'name-sym)))
   (push-scoped-stack)
   (set-current-module (symbol->string (syntax-e #'name-sym)))
   ;; register ports in the local (expansion-scoped) module registry
   (add-module (syntax-e #'name)
               (map (λ (lst) ;todo: we don't need this anymore, really
                      (port-meta
                       (list-ref lst 0)
                       (list-ref lst 1)
                       (list-ref lst 2)
                       ))
                    (syntax->datum #'(p ... last))))
   ;; output filename: same directory as the source, .rkt -> .v
   (let*
       ([fn (string-replace
             (path->string (syntax-source-file-name #'name-sym)) ".rkt" ".v")])
     (with-syntax
       ([nf (datum->syntax this-syntax
                           (build-path (syntax-source-directory this-syntax) fn))])
       (syntax-property
        #'(begin
            (ensure-port-open nf)
            ;; static binding so other files can see this module's ports
            (define-syntax name-sym
              (module-meta name
                           (map (λ (lst)
                                  (port-meta
                                   (list-ref lst 0)
                                   (list-ref lst 1)
                                   (list-ref lst 2)))
                                (syntax->datum #'(p ... last))) '()))
            (provide name-sym)
            (code-gen nf
                      `(
                        ,(format "module ~a (\n" name)
                        inc-tab
                        ;port declarations
                        (tab
                         ,p.direction
                         " "
                         ,p.type
                         " "
                         ,p.size
                         " "
                         ,p.name
                         " "
                         ,p.default
                         ",\n") ...
                        tab
                        ,last.direction
                        " "
                        ,last.type
                        " "
                        ,last.size
                        " "
                        ,last.name
                        " "
                        ,last.default
                        ");"
                        ;; ports become scoped bindings for the body
                        ,(push-binding p.name p.size-int) ...
                        ,(push-binding last.name last.size-int)
                        "\n"
                        dec-tab
                        ,(~module-line name-sym expression) ...
                        "endmodule\n"
                        ,(pop-scoped-stack)
                        )))
        'module
        #'name-sym
        )))])
;; always-pos : always block sensitive to the rising edge of `clk`,
;; with the body wrapped in a single begin/end.
(define-syntax-parser always-pos
  [(_ clk body ...)
   #'(always ([#:posedge clk]) (~begin body ...))])
;; always-neg : always block sensitive to the falling edge of `clk`,
;; with the body wrapped in a single begin/end.
(define-syntax-parser always-neg
  [(_ clk body ...)
   #'(always ([#:negedge clk]) (~begin body ...))])
;; initial-begin : Verilog "initial" block wrapping the statements
;; in a begin/end.
(define-syntax-parser initial-begin
  [(_ stmt ...)
   #'`("initial " ,(~begin stmt ...))])
;; code-gen : walk the nested emission list produced by the macros and
;; write it to `fn`'s output port.  Strings/integers are written as-is;
;; the symbols 'inc-tab / 'dec-tab adjust the indent level and 'tab
;; writes two spaces per level; nested lists recurse; '() and void
;; placeholders are skipped; anything else is reported as unknown.
(define (code-gen fn input)
  (define indent-level 0)
  (define (emit s) (display s (get-port fn)))
  (define (walk items)
    (for-each
     (λ (item)
       (cond
         [(or (string? item) (integer? item)) (emit item)]
         [(eq? 'inc-tab item) (set! indent-level (+ 1 indent-level))]
         [(eq? 'tab item) (emit (make-string (* 2 indent-level) #\space))]
         [(eq? 'dec-tab item) (set! indent-level (- indent-level 1))]
         [(null? item) '()]
         [(list? item) (walk item)]
         [(void? item) '()]
         [else (printf "unknown ~a\n" item)]))
     items))
  (walk input))
;; Export everything at phase 0 and phase 1, re-export
;; syntax/parse/define but hide define-syntax-parser behind the
;; friendlier name `macro` for user programs.
(provide
 (all-defined-out)
 (for-syntax (all-defined-out))
 (except-out (all-from-out syntax/parse/define)
             define-syntax-parser)
 (rename-out
  [define-syntax-parser macro]))
| null | https://raw.githubusercontent.com/pezipink/fairylog/f0c1d0d82e2ed9ff02486ddd91a0ede5c5483ef7/expander.rkt | racket | true when expanding inside an always block with a sensitivity list
(printf "setting current module ~a\n" name)
important note: these mutable structs do not work "globally", they are for
local expansion purposes only. the modules and ports are also exposed via
static bindings for other files to see.
here we check for a static binding so that this works across files.
uses static binding data
uses local data
arrays:
when accessing an array, verilog says you must use all the dimensions.
following that, you can further index into the bits using the normal
range syntax.
to start with there is no range checking of arrays, but we must still know
the length of the array to know whether a range has been supplied at the end.
todo: out-of-bounds checks
todo: report these errors properly, not using exceptions!!
todo: range checking on arities.
since it is not possible to compile an array expression without
all the indexes, we need only return the actual data size
OR whatever the range equates to. for non-arrays, the size will
be either one for a single bit select or the size of the range.
single bit
indexed - return size of array data
(printf "here size is ~a ~a \n" #'msb.size-int #'lsb.size-int
)
used in error reporting
todo: need better literal checking
eg check literal with base is not greater than size
check literals characters properly - binary only 01xz_ etc
outputs size base negative? value
easy way out!
hex literals
for hex, leading zeroes are counted towards the length
binary literals
for binary, leading zeroes are counted towards the length
full literal syntax
for all but decimal we count the leading zeroes as well
todo: this needs work; probably want to just parse and count binary instead?
arrays dont have defaults, instead the
additional array syntax appears here.
actual data size, not array dimensions.
unary
binary
setters and bounds / truncation checking
todo: add global support here
a local enum only need exist for this module during this expansion
otherwise we create a static binding for the enum data
prefixing the name with global-enum
[(_ mod-id (set [x:bound-usage y] ...))
(syntax-property
#'`((tab
,(expression (set x y))
"a;\n") ...)
'module
#'mod-id)
]
[(_ mod-id (set x:bound-usage y))
(syntax-property
#'`(tab
,(expression (set x y))
"b;\n")
'module
#'mod-id)
]
output size
push the name and size of the function as it is used
todo: if already in hash, open for append
todo: we need a nicer way of dealing with knowing when files are done with
inputs
todo: we don't need this anymore, really
port declarations |
Copyright, 2019
#lang racket/base
(require (for-syntax syntax/parse
racket/string
racket/base
racket/list
racket/syntax
racket/string
racket/function
syntax/srcloc
syntax/location
racket/list))
(require syntax/parse/define syntax/location)
(begin-for-syntax
(define is-always-sens #f)
(define (toggle-always-sens)
(set! is-always-sens (not is-always-sens)))
(define declared-enums (make-hash))
(define current-module "")
(define (set-current-module name)
(set! current-module name))
(define (enum-exists? ctx enum-name)
(let ([gn (datum->syntax ctx (string->symbol (string-append "global-enum-" enum-name)))])
(if (syntax-local-value gn (λ () #f))
#t
(hash-has-key? declared-enums enum-name))))
(define (enum-key-exists? ctx enum-name key)
( printf " enum key exists\n " )
(let ([enum-name
(if (symbol? enum-name)
(symbol->string enum-name)
enum-name)]
[key
(if (symbol? key)
(symbol->string key)
key)])
(let ([gn (datum->syntax ctx (string->symbol (string-append "global-enum-" enum-name)))])
(if (syntax-local-value gn (λ () #f))
(member key (map car (syntax-local-value gn)))
(member key (map car (hash-ref declared-enums enum-name)))))
))
(define (get-enum-keys ctx enum-name)
(map car (hash-ref declared-enums (symbol->string enum-name))))
(define (get-enum-value ctx enum-name key)
(let* ([enum-name
(if (symbol? enum-name)
(symbol->string enum-name)
enum-name)]
[key
(if (symbol? key)
(symbol->string key)
key)]
[gn (datum->syntax ctx (string->symbol (string-append "global-enum-" enum-name)))])
(if (syntax-local-value gn (λ () #f))
(let
([pair (memf (λ (p) (equal? (car p) key)) (syntax-local-value gn))])
(cdr (car pair)))
(let*
([pairs (hash-ref declared-enums enum-name)]
[pair (memf (λ (p) (equal? (car p) key)) pairs)])
(cdr (car pair))))))
(define (add-enum enum-name vals)
(printf "enum ~a\n" (symbol->string enum-name) )
(for ([kvp vals])
(printf "~a : ~x\n" (car kvp) (cdr kvp)))
(hash-set! declared-enums (symbol->string enum-name) vals))
(define-syntax-class enum
#:description "a declared enum"
#:opaque
(pattern x:id #:when (enum-exists? (attribute x) (symbol->string (syntax-e (attribute x))))))
(define-syntax-class enum-kvp
#:description "a name and numeric value pair"
#:opaque
(pattern [x:id y]
#:with y-evaled (eval (syntax-e (attribute y)))
#:with pair (cons
(format "~a" (syntax-e (attribute x)))
(syntax-e (attribute y-evaled)))))
(define-syntax-class enum-literal
#:description "enum literal in the form enum.value"
(pattern x:id
#:do
[(define split
(string-split
(symbol->string (syntax-e (attribute x)))
"."))]
#:when (eq? (length split) 2 )
#:cut
#:fail-unless (enum-exists? (attribute x) (car split))
(format "the enum ~a does not exist" (car split))
#:fail-unless (enum-key-exists? (attribute x) (car split) (car (cdr split)))
(format "the value ~a does not exist for enum ~a"
(car (cdr split))
(car split))
#:with value (datum->syntax this-syntax (get-enum-value (attribute x) (car split) (car (cdr split))))
#:with compiled
(datum->syntax this-syntax
(format "~a (~a)" (symbol->string (syntax-e (attribute x)))
(get-enum-value (attribute x) (car split) (car (cdr split)))))
#:with bits (datum->syntax this-syntax (string-length (format "~b" (get-enum-value (attribute x)(car split) (car (cdr split))))))
))
(struct port-meta (name direction type) #:transparent)
(struct func-meta (name size-int) #:transparent #:mutable)
(struct module-meta (name ports functions) #:transparent #:mutable)
(define module-metadata (make-hash))
(define (add-module name ports)
(if (hash-has-key? module-metadata name)
(error "module ~a already exists" name)
(hash-set! module-metadata name (module-meta name ports '()))))
(define (module-exists? name-stx)
(if (syntax-local-value name-stx (λ () #f))
#t
(hash-ref module-metadata (symbol->string (syntax-e name-stx)))))
(define (module-port-names name-stx)
(if (syntax-local-value name-stx (λ () #f))
(map (compose symbol->string port-meta-name)
(module-meta-ports (syntax-local-value name-stx)))
(map (compose symbol->string port-meta-name)
(module-meta-ports
(hash-ref module-metadata
(symbol->string (syntax-e name-stx)))))))
(define (module-has-port? name-stx port-name)
(if (syntax-local-value name-stx (λ () #f))
(memf (λ (port) (equal? (symbol->string (port-meta-name port)) port-name))
(module-meta-ports (syntax-local-value name-stx)))
(memf (λ (port) (equal? (symbol->string (port-meta-name port)) port-name))
(module-meta-ports (hash-ref module-metadata
(symbol->string (syntax-e name-stx)))))))
(define (module-has-function? module-name function-name)
(memf (λ (func) (equal? (func-meta-name func) function-name))
(module-meta-functions (hash-ref module-metadata module-name))))
(define (add-module-function module-name function-name size)
(let* ([mod (hash-ref module-metadata module-name)]
[fs (module-meta-functions mod)])
(set-module-meta-functions! mod (cons (func-meta function-name size) fs))))
(define-syntax-class module-param
#:description "a module initializer"
(pattern [port-name:id port-value:bound-usage]
#:with name (datum->syntax this-syntax (symbol->string (syntax-e #'port-name)))
#:with value(datum->syntax this-syntax #'port-value.compiled))
(pattern [port-name:id port-value:expr]
#:with name (datum->syntax this-syntax (symbol->string (syntax-e #'port-name)))
#:with value(datum->syntax this-syntax #'(expression port-value))))
(define scoped-bindings-stack (box (list (make-hash))))
(define (push-scoped-stack)
(let* ([lst (unbox scoped-bindings-stack)]
[new-lst (cons (make-hash) lst)])
(set-box! scoped-bindings-stack new-lst)))
(define (pop-scoped-stack)
(let* ([lst (unbox scoped-bindings-stack)]
[new-lst (cdr lst)])
(set-box! scoped-bindings-stack new-lst)))
(define (peek-scoped-stack)
(let ([lst (unbox scoped-bindings-stack)])
(car lst)))
(struct binding-meta ( stx-size stx-arity-list))
(define (add-scoped-binding stx-name binding-meta stx)
(let ([name (syntax-e stx-name)]
[scoped (peek-scoped-stack)])
(when (and (in-scope? name) (not (equal? name "global")))
(writeln
(format "warning: ~a is already in scope at ~a"
name (source-location->string stx))))
(hash-set! scoped name binding-meta)))
(define (remove-scoped-binding stx-name)
(let ([name (syntax-e stx-name)]
[scoped (peek-scoped-stack)])
(hash-remove! scoped name)))
(define (in-scope? name)
(define (aux lst)
(cond
[(empty? lst) #f]
[(hash-has-key? (car lst) name) #t]
[else (aux (cdr lst))]))
(aux (unbox scoped-bindings-stack)))
(define (get-binding-size name)
(let ([name2 (if (syntax? name) (symbol->string (syntax-e name)) name)])
(define (aux lst)
(cond
[(empty? lst)
(begin
'none)]
[(hash-has-key? (car lst) name2)
(begin
(binding-meta-stx-size (hash-ref (car lst) name2)))]
[else (aux (cdr lst))]))
(aux (unbox scoped-bindings-stack))))
(define (get-binding-arities name)
(let ([name2 (if (syntax? name) (symbol->string (syntax-e name)) name)])
(define (aux lst)
(cond
[(empty? lst)
(begin
'none)]
[(hash-has-key? (car lst) name2)
(begin
(binding-meta-stx-arity-list (hash-ref (car lst) name2)))]
[else (aux (cdr lst))]))
(aux (unbox scoped-bindings-stack))))
(define-syntax-class scoped-binding
#:description "identifier in scope"
#:commit
(pattern x:id
#:with name (symbol->string (syntax-e #'x))
#:with name-stx (datum->syntax this-syntax (symbol->string (syntax-e #'x)))
#:fail-unless (in-scope? (symbol->string (syntax-e #'x))) "identifier is not in scope."
#:with size-int (get-binding-size (symbol->string (syntax-e #'x)))
#:with arities (get-binding-arities (symbol->string (syntax-e #'x)))
#:with is-array?
(let* ([a (get-binding-arities (symbol->string (syntax-e #'x)))]
[b (if (syntax? a)(list?(syntax-e a)) #f)] )
(and (syntax? a) (list? (syntax-e a)))
)))
(define-syntax-class binding
#:description "identifier name"
(pattern x:id
#:with name (symbol->string (syntax-e #'x))))
(define-syntax-class scoped-function
(pattern x:id
#:with name (symbol->string (syntax-e #'x))
#:with name-stx (datum->syntax this-syntax (symbol->string (syntax-e #'x)))
#:when (module-has-function? current-module (symbol->string (syntax-e #'x)))
)
)
(define-syntax-class inner-usage
(pattern x:scoped-binding
#:with name #'x.name
#:with size-int #'x.size-int
#:with compiled
#'x.name-stx)
(pattern x:expr
#:with size-int #'(expression x)
#:with compiled #'(expression x)))
(define-syntax-class bound-usage
#:description "identifier in scope with or without size, or array access"
#:commit
end or not ( up to two expressions )
(pattern [s:scoped-binding
x:inner-usage ...+]
#:with x-count (length (syntax->list #'(x ...)))
#:with name #'s.name
#:with compiled
(if (syntax-e #'s.is-array?)
(cond
[(< (syntax-e #'x-count) (length (syntax-e #'s.arities)))
(error "you must specify all the array's dimensions" #'s)]
[(= (syntax-e #'x-count) (length (syntax-e #'s.arities)))
#'`(name ("[" ,x.compiled "]") ...)]
[else
(let-values
([(left right)
(split-at
(syntax->list #'(x ...))
(length (syntax-e #'s.arities)))])
(syntax-parse (list left right)
[((z:inner-usage ...) (ya:inner-usage yb:inner-usage))
#'`(name ("[" z.compiled "]") ...
"[" ya.compiled " : " yb.compiled "]"
)]
[((z:inner-usage ...) (ya:inner-usage))
#'`(name ("[" z.compiled "]") ...
"[" ya.compiled "]"
)]
[((z:inner-usage ...) ())
#'`(name ("[" z.compiled "]") ...)]))])
(cond
[(> (syntax-e #'x-count) 2) (error "not an array\n" #'s)]
[(= (syntax-e #'x-count) 2)
(syntax-parse #'(x ...)
[(x:inner-usage y:inner-usage)
#'`(name "[" ,x.compiled " : " ,y.compiled "]")])]
[else
#'`(name ("[" ,x.compiled "]") ...)]))
#:with name-stx #'compiled
#:with size-int
(if (syntax-e #'s.is-array?)
(let-values
([(left right)
(split-at (syntax->list #'(x ...))
(length (syntax-e #'s.arities)))])
(syntax-parse (list left right)
[((z:inner-usage ...) (msb:inner-usage lsb:inner-usage))
#'(+ (- msb.size-int lsb.size-int) 1)]
[((z:inner-usage ...) (ya:inner-usage))
#'1]
[((z:inner-usage ...) ())
#'s.size-int]))
(syntax-parse #'(x ...)
[(msb:inner-usage lsb:inner-usage)
#'(+ (- msb.size-int lsb.size-int) 1)]
[(x:inner-usage)
#'1])
))
(pattern s:scoped-binding
#:with name #'s.name
#:with size (datum->syntax this-syntax "")
#:with size-int #'s.size-int
#:with oob #'#f
#:with compiled (datum->syntax this-syntax (symbol->string (syntax-e (attribute s))))
))
(define-syntax (push-binding stx)
(syntax-parse stx
[(_ id size)
(add-scoped-binding #'id (binding-meta #'size #'#f) stx)
#'(void)]
[(_ id size arity-list)
(add-scoped-binding #'id (binding-meta #'size #'arity-list) stx)
#'(void)]))
(define-syntax (pop-scoped-stack stx)
(syntax-parse stx
[(_)
(pop-scoped-stack)
#'(void)]))
(define-syntax (toggle-always-sens stx)
(syntax-parse stx
[(_)
(toggle-always-sens)
#'(void)]))
(begin-for-syntax
(define (syntax->error-syntax stx)
(datum->syntax stx
(format "~a:~a:~a"
(syntax-source stx)
(syntax-line stx)
(syntax-column stx))))
(define (is-hex-literal? str)
(regexp-match #px"^[$][0-9A-Fa-f_ZzXx]+$" str))
(define (is-binary-literal? str)
(regexp-match #px"^[%][01_ZzXx]+$" str))
(define (is-hex-string? str)
(regexp-match #px"^[0-9A-Fa-f_ZzXx]+$" str))
(define (is-binary-string? str)
(regexp-match #px"^[$][01_ZzXx]+$" str))
(define (is-number-literal-candidate? str)
(let ([parsed
(regexp-match #px"^([0-9]+)_(2|8|10|16)_(-)?([0-9A-Fa-f_ZzXx]+$)" str)])
(if (eq? parsed #f)
#f
(define (string-replace-many str from to)
(for/fold ([str str])
([f from])
(string-replace str f to)))
(define-syntax-class number-literal
#:datum-literals (_)
(pattern x:integer
#:with base 10
#:with bits
(datum->syntax this-syntax
#:with compiled
(datum->syntax this-syntax
(format "~a" (syntax-e (attribute x)))))
(pattern x:id
#:do [(define str (symbol->string (syntax-e (attribute x))))]
#:when (is-hex-literal? str)
#:do [(define cleaned (string-replace
(string-replace str "_" "") "$" ""))]
#:with base 16
#:with bits (datum->syntax this-syntax (* 4 (string-length cleaned)))
#:with compiled
(datum->syntax this-syntax
(format "~a'h~a"
(syntax-e (attribute bits))
(substring str 1))))
(pattern x:id
#:do [(define str (symbol->string (syntax-e (attribute x))))]
#:when (is-binary-literal? str)
#:do [(define cleaned (string-replace
(string-replace str "_" "") "%" ""))]
#:with base 2
#:with bits (datum->syntax this-syntax (string-length cleaned))
#:with compiled
(datum->syntax this-syntax
(format "~a'b~a"
(syntax-e (attribute bits))
(substring str 1) )))
(pattern x:id
#:do [(define str
(is-number-literal-candidate?
(symbol->string (syntax-e (attribute x)))))]
#:when (list? str)
#:do [(define radix (string->number (list-ref str 1)))
(define radix-str
(case (string->number (list-ref str 1))
[(2) "'b"]
[(8) "'o"]
[(10) "'d"]
[(16) "'h"]))
(define size (string->number (list-ref str 0)))
(define literal (list-ref str 3))]
#:with base radix-str
#:with bits size
#:do [(let* ([n (string-replace-many literal '["X" "x" "Z" "z"]"0")]
[l
(case radix
[(2) (string-length n)]
[(8) (* (string-length n) 3)]
[(16) (string-length (format "~b" (string->number n 16))
)]
[(10) (string-length (format "~b" (string->number n 10))
)])])
(when (> l size)
(printf "warning: number literal ~a does not fit into the specified size at ~a\\n"
(symbol->string (syntax-e (attribute x))) #'x)))]
#:with compiled
(datum->syntax this-syntax
(format "~a~a~a~a"
(case (list-ref str 2)
[(#f) ""]
[else "-"])
size radix-str literal))))
(define-syntax-class edge-type
(pattern #:posedge)
(pattern #:negedge))
(define-syntax-class sensitivity
#:no-delimit-cut
(pattern [edge:edge-type ~! signal:bound-usage]
#:with edge-type (datum->syntax this-syntax (keyword->string (syntax-e #'edge)))
#:with compiled #'signal.compiled)
(pattern [signal:bound-usage]
#:with edge-type (datum->syntax this-syntax "")
#:with compiled #'signal.compiled)
)
(define-syntax-class direction-option
(pattern #:input)
(pattern #:output)
(pattern #:inout))
(define-syntax-class type-option
(pattern #:wire)
(pattern #:wand)
(pattern #:wor)
(pattern #:tri)
(pattern #:reg)
(pattern #:integer)
(pattern #:time)
(pattern #:real))
(define-syntax-class function-param
#:description "a function parameter"
(pattern [name-sym:id
(~optional [x (~optional y)])]
#:with name (datum->syntax this-syntax (symbol->string (syntax-e #'name-sym)))
#:with direction (datum->syntax this-syntax "input")
#:with type (datum->syntax this-syntax "wire")
#:with arity-list #'#f
#:with default #'""
#:with size-int
(cond
[(and (attribute x) (attribute y))
#'(+ (- x y) 1)]
[(attribute x)
#'x]
[else #'1])
#:with size
(cond
[(and (attribute x) (attribute y))
#'`("[" ,x ":" ,y "]")]
[(attribute x)
#'`("[" ,(- x 1) ":0" "]")]
[else #'""])))
(define-syntax-class param
#:description "a module parameter"
(pattern [name-sym:id
direction-opt:direction-option
type-opt:type-option
(~optional [x (~optional y)])
(~optional default-value)]
#:with name (datum->syntax this-syntax (symbol->string (syntax-e #'name-sym)))
#:with direction (datum->syntax this-syntax (keyword->string (syntax-e #'direction-opt)))
#:with type (datum->syntax this-syntax (keyword->string (syntax-e #'type-opt)))
#:with default
(if (attribute default-value)
#'`(" = " ,(expression default-value))
#'"")
#:with size-int
(cond
[(and (attribute x) (attribute y))
#'(+ (- x y) 1)]
[(attribute x)
#'x]
[else #'1])
#:with size
(cond
[(and (attribute x) (attribute y))
#'`("[" ,x ":" ,y "]")]
[(attribute x)
#'`("[" ,(- x 1) ":0" "]")]
[else #'""])))
(define-syntax-class local-param
#:datum-literals (array)
(pattern [name-sym:id
type-opt:type-option
[x (~optional y)]
(~optional (array x2:expr ...+))]
#:with name (datum->syntax this-syntax (symbol->string (syntax-e #'name-sym)))
#:with type (datum->syntax this-syntax (keyword->string (syntax-e #'type-opt)))
(cond
[(and (attribute x2))
#'`(
(
"[0:" ,(- x2 1) "]"
) ...
)]
[else #'""])
#:with arity-list
(if (attribute x2)
(syntax->list #'(x2 ...))
#'#f)
#:with size-int
(cond
[(and (attribute x) (attribute y))
#'(+ (- x y) 1)]
[(attribute x)
#'x]
[else #'1])
#:with size
(cond
[(and (attribute x) (attribute y))
#'`("[" ,x ":" ,y "]")]
[(attribute x)
#'`("[" ,(- x 1) ": 0" "]")]
[else #'""]))
(pattern [name-sym:id
type-opt:type-option
(~optional [x (~optional y)])
(~optional
default-value:expr)]
#:with name (datum->syntax this-syntax (symbol->string (syntax-e #'name-sym)))
#:with type (datum->syntax this-syntax (keyword->string (syntax-e #'type-opt)))
#:with default
(if (attribute default-value)
#'`(" = " ,(expression default-value))
#'"")
#:with arity-list #'#f
#:with size-int
(cond
[(and (attribute x) (attribute y))
#'(+ (- x y) 1)]
[(attribute x)
#'x]
[else #'1])
#:with size
(cond
[(and (attribute x) (attribute y))
#'`("[" ,x ":" ,y "]")]
[(attribute x)
#'`("[" ,(- x 1) ": 0" "]")]
[else #'""]))))
(define-syntax-parser expression
#:datum-literals
(set ~delay if case else when concat
\|\| \| \~\| ! ~ + - * / % << >> >>> == != >= <= < > && & ~& ^ ~^ )
[(_ x:integer)
#'x]
[(_ x:number-literal )
#'x.compiled]
[(_ x:bound-usage)
#:with err-prefix (syntax->error-syntax #'x)
#'`(
,(when x.oob
(printf "~a: warning - the expression '~a' is out of range\n" err-prefix x.compiled))
,x.compiled)]
[(_ x:enum-literal)
#'x.value]
[(_ (f:scoped-function ~! params ... last-param))
#'`(
,f.name-stx "("
( ,(expression params ) ",") ...
,(expression last-param)
")")]
[(_ (~delay ~! x y))
#'`("#" ,(expression x) " " ,(expression y))]
[(_ (when ~! test true-expr))
special case one - line when in RHS of expression - ternary
#'(~begin (when test true-expr))]
[(_ (concat ~! x y ...+))
#'`("{" ,(expression x) ( ", ",(expression y)) ... "}" )]
[(_ (if ~!
(~describe "condional test for if" test)
(~describe "true expression for if" true-expr)
(~describe "false expression for if" false-expr)))
#'`("("
,(expression test)
" ? "
,(expression true-expr)
" : "
,(expression false-expr)
")")]
[(_ (case val
[test true-expr]
[test2 expr2] ...+
[else def-expr]))
#'`(
"("
,(expression (== val test))
" ? "
,(expression true-expr)
" : "
,(expression (case val [test2 expr2] ... [else def-expr]))
")")]
[(_ (case val [test true-expr]
[else def-expr]))
#'`(
"("
,(expression (== val test))
" ? "
,(expression true-expr)
" : "
,(expression def-expr)
")")]
[(_ (case ~! val [test true-expr] ...+))
#:fail-when #t "you must supply an else branch of a case when used as an epxression"
#'(void)]
[(_ ( (~and op (~or + - ! & ~& ~ \| \~\| ^ ~^)) x))
#:with op-str (datum->syntax this-syntax (symbol->string (syntax-e #'op)))
#'`(,op-str ,(expression x))]
[(_ ( (~and op (~or + - * / % << >> >>> == != < > <= >= && & \|\| \| ^ ~^)) x y ))
#:with op-str (datum->syntax this-syntax (symbol->string (syntax-e #'op)))
#'`(
"("
,(expression x)
" "
,op-str
" "
,(expression y)
")")]
[(_ ( (~and op (~or + - * / % << >> >>> == != <= >= && & \|\| \| ^ ~^)) x y z ... ))
#:with op-str (datum->syntax this-syntax (symbol->string (syntax-e #'op)))
#'`(
"("
,(expression x)
" "
,op-str
" ("
,(expression (op y z ...))
")) " )]
[(_ (set (~or x:scoped-binding x:bound-usage) y:number-literal))
#:with op (if is-always-sens #'" <= " #'" = ")
#'`(
,(when (> y.bits x.size-int)
(printf "\"warning: the literal '~a' does not fit into '~a' and will be truncated\"\n" y.compiled x.name-stx))
,(expression x)
op
,(expression y))]
[(_ (set (~or x:scoped-binding x:bound-usage) y:enum-literal))
#:with op (if is-always-sens #'" <= " #'" = ")
#'`(
,(when (> y.bits x.size-int)
(printf "\"warning: the enum literal '~a' does not fit into '~a' and will be truncated\"\n" y.compiled x.name-stx))
,(expression x)
op
,(expression y))]
[(_ (set (~or x:scoped-binding x:bound-usage) (~or y:scoped-binding y:bound-usage)))
#:with op (if is-always-sens #'" <= " #'" = ")
#'`(
,(when (> y.size-int x.size-int)
(printf "\"warning: the expression '~a' does not fit into '~a' and will be truncated\"\n" y.name-stx x.name-stx))
,(expression x)
op
,(expression y))]
[(_ (set (~or x:scoped-binding x:bound-usage) y:expr))
#:with op (if is-always-sens #'" <= " #'" = ")
#:with name (datum->syntax this-syntax (format "~a" #'y))
#'`(
,(when (and (number? (expression y))(> (string-length (format "~b" (expression y))) x.size-int))
(printf "\"warning: the expression '~a' does not fit into '~a' and will be truncated\"\n" name x.name-stx))
,(expression x)
op
,(expression y))]
[(_ (set x y))
#:with op (if is-always-sens #'" <= " #'" = ")
#'`(
,(expression x)
op
,(expression y))]
[(_ x:expr)
#'x]
)
(define-syntax-parser ~case
#:datum-literals (else)
[(_ test:bound-usage [lhs:number-literal rhs (~optional comment:string #:defaults ([comment #'""]))] ...)
#'`(
tab
"case ("
,test.compiled
")\n"
inc-tab
(
tab
,lhs.compiled
" : // "
comment
"\n"
,(~begin rhs)
"\n"
) ...
dec-tab
tab
"endcase\n")]
[(_ test:bound-usage [lhs:number-literal rhs (~optional comment:string #:defaults ([comment #'""]))] ...
[else else-expr:expr])
#'`(
tab
"case ("
,test.compiled
")\n"
inc-tab
(
tab
,lhs.compiled
" : // "
comment
"\n"
,(~begin rhs)
"\n"
) ...
tab
"default : \n"
,(~begin else-expr)
"\n"
dec-tab
tab
"endcase\n")]
)
(define-syntax-parser ~cond
#:datum-literals (else)
[(_ [first-test first-outcome] [expr-test expr-outcome] ...
[else else-outcome])
#'`(
,(~cond
[first-test first-outcome]
[expr-test expr-outcome] ...)
tab
"else\n"
inc-tab
,(~begin else-outcome)
"\n"
dec-tab
)]
[(_ [first-test first-outcome])
#'`(
tab
"if("
,(expression first-test)
")\n"
inc-tab
,(~begin first-outcome)
dec-tab
"\n"
)]
[(_ [first-test first-outcome] [expr-test expr-outcome] ...)
#'`(
tab
"if("
,(expression first-test)
")\n"
inc-tab
,(~begin first-outcome)
"\n"
dec-tab
(tab
"else if("
,(expression expr-test)
")\n"
inc-tab
,(~begin expr-outcome)
"\n"
dec-tab
"\n") ...
)])
(define-syntax-parser ~if
[(_ (~describe "condional test for if" test-expr)
(~describe "true expression for if" true-expr)
(~describe "false expression for if" false-expr))
#'(~cond
[test-expr true-expr]
[else false-expr])])
(define-syntax-parser ~when
[(_ test-expr true-expr)
#'(~cond
[test-expr true-expr])])
(define-syntax-parser list->enum
[(_ name vals)
(add-enum (syntax-e #'name) (eval #'vals))
#'(void)])
(define-syntax-parser enum
[(_ name kvp:enum-kvp ...+)
#:fail-when (check-duplicate-identifier
(syntax->list #'(kvp.x ...)))
"duplicate enum name"
#:fail-when (check-duplicates
(syntax->datum #'(kvp.y-evaled ...)))
"duplicate enum value"
(if (syntax-property this-syntax 'module)
(begin
(add-enum (syntax-e #'name) (syntax->datum #'(kvp.pair ...)))
#'(void))
(with-syntax ([g-name (datum->syntax this-syntax (string->symbol
(string-append "global-enum-"
(symbol->string
(syntax-e #'name)))))])
(printf "ADDING ENUM ~a\n" #'g-name)
#'(define-syntax g-name
'(kvp.pair ...)
)))]
[(_ name keys:id ...+)
#:fail-when (check-duplicate-identifier
(syntax->list #'(keys ...)))
"duplicate enum name"
(with-syntax
([(kvps ...)
(for/list
([n (in-naturals)]
[x (syntax->list #'(keys ...))])
(cons (format "~a" (syntax-e x)) n))])
(if (syntax-property this-syntax 'module)
(begin
(add-enum (syntax-e #'name)(syntax->datum #'(kvps ...)))
#'(void))
(with-syntax ([g-name (datum->syntax this-syntax (string->symbol
(string-append "global-enum-"
(symbol->string
(syntax-e #'name)))))])
#'(define-syntax g-name
'(kvps ...)
))
)
)])
(define-syntax-parser ~match-set
[(_ target:bound-usage test:expr enum-name:enum
[key value] ...)
#:fail-when (check-duplicate-identifier (syntax->list #'(key ...)))
"duplicate enum value"
#:fail-when
(let ([results (filter (λ (v) (not (enum-key-exists? #'enum-name (syntax-e #'enum-name) v)))
(syntax->datum #'(key ...)))])
(if (not (eq? results '()))
(with-syntax ([res results]) #'res)
#f))
"some identifiers do not exist in enum"
#:fail-when
(let*
([keys (map (λ (v) (format "~a" v)) (syntax->datum #'(key ...)))]
[results (filter (λ (v) (not (member v keys)))
(get-enum-keys #'enum-name (syntax-e #'enum-name)))])
(if (not (eq? results '()))
(with-syntax ([res results]) #'res)
#f))
"missing cases in the enum"
(with-syntax([(enum-vals ...) (map (λ (v) (get-enum-value #'enum-name (syntax-e #'enum-name) v))
(syntax->datum #'(key ...)))])
#'(~case test [enum-vals (set target value)] ...))]
)
(define-syntax-parser ~match
[(_ test:expr enum-name:enum
[key expr] ...)
#:fail-when (check-duplicate-identifier (syntax->list #'(key ...)))
"duplicate enum value"
#:fail-when
(let ([results (filter (λ (v) (not (enum-key-exists? #'enum-name (syntax-e #'enum-name) v)))
(syntax->datum #'(key ...)))])
(if (not (eq? results '()))
(with-syntax ([res results]) #'res)
#f))
"some identifiers do not exist in enum"
#:fail-when
(let*
([keys (map (λ (v) (format "~a" v)) (syntax->datum #'(key ...)))]
[results (filter (λ (v) (not (member v keys)))
(get-enum-keys #'enum-name (syntax-e #'enum-name)))])
(if (not (eq? results '()))
(with-syntax ([res results]) #'res)
#f))
"missing cases in the enum"
(with-syntax
([(enum-vals ...) (map (λ (v) (get-enum-value #'enum-name (syntax-e #'enum-name) v))
(syntax->datum #'(key ...)))]
[(key-str ...) (map (λ (v) (symbol->string v))
(syntax->datum #'(key ...)))] )
#'(~case test [enum-vals expr key-str] ...))]
)
(define-syntax-parser ~case-set
[(_ target:bound-usage test:expr
[key:number-literal value] ...)
#'(~case test [key (set target value)] ...)])
(define-syntax-parser ~begin-line
#:datum-literals (~cond locals expression ~when if set ~match-set ~match ~case-set)
[(_ (expression expr ...))
#'`(tab
,(expression expr ...)
";\n")]
[(_ (set [x:bound-usage y] ...))
#'`(
(tab
,(expression (set x y))
";\n")...)]
[(_ (set x:bound-usage y))
#'`(
(tab
,(expression (set x y))
";\n"))]
[(_ x:expr)
#'x] )
(define-syntax-parser inc
[( _ x:scoped-binding)
#'`(tab
,(expression (set x (+ x 1)))
";\n"
)])
(define-syntax-parser ~begin
[(_ block-name:id expr ...+)
#:with name (datum->syntax this-syntax (symbol->string (syntax-e #'block-name)))
#'`(
tab
"begin "
,name
"\n"
inc-tab
,(~begin-line expr) ...
dec-tab
tab
"end \n"
)]
[(_ expr ...)
#'`(
tab
"begin\n"
inc-tab
,(~begin-line expr) ...
dec-tab
tab
"end \n"
)])
(define-syntax-parser locals
[(_ params:local-param ...)
#'`(
(
tab
,(push-binding params.name params.size-int params.arity-list) ...
(
,params.type
" "
,params.size
" "
,params.name
" "
,params.default
";\n") ...))])
(define-syntax-parser assign
[(_ [x:bound-usage y:expr] ...)
#'`(
("assign "
,x.compiled
" = "
,(expression y)
";\n") ... )]
[(_ x:bound-usage y:expr)
#'`("assign "
,x.compiled
" = "
,(expression y)
";\n")])
(define-syntax-parser always-line
[(_ expr)
#'expr])
(define-syntax-parser always
#:datum-literals (* or)
[(_ (or sens:sensitivity rest:sensitivity ...) expr ...)
(printf "always\n")
(toggle-always-sens)
#'`(
tab
"always @("
,sens.edge-type
" "
,sens.compiled
(
" or "
,rest.edge-type
" "
,rest.compiled
) ...
")\n"
inc-tab
,(always-line expr) ...
dec-tab
,(toggle-always-sens))]
[(_ (sens:sensitivity rest:sensitivity ...) expr ...)
(toggle-always-sens)
#'`(
tab
"always @("
,sens.edge-type
" "
,sens.compiled
(
" , "
,rest.edge-type
" "
,rest.compiled) ...
")\n"
inc-tab
,(always-line expr) ...
dec-tab
,(toggle-always-sens)
)]
[(_ * expr ...)
#'`(
tab
"always @(*)\n"
inc-tab
,(always-line expr) ...
dec-tab
)]
[(_ expr ...)
#'`(
tab
"always\n"
inc-tab
,(always-line expr) ...
dec-tab
)]
)
(define-syntax-parser ~module-line
#:datum-literals (set vmod)
[(_ mod-id (vmod m:id ~! p:module-param ... l:module-param ~!))
#:fail-unless (module-exists? #'m)
(format "the module '~a' doesn't exist" #'m (symbol->string (syntax-e #'m)))
#:fail-unless
(andmap (λ (name) (module-has-port? #'m name))
(syntax->datum #'(p.name ... l.name)))
(format "instantiation of module ~a contains invalid port names: ~a"
#'m
(filter (λ (name) (not (module-has-port? #'m name)))
(syntax->datum #'(p.name ... l.name))))
#:fail-unless
(andmap (λ (name) (member name (syntax->datum #'(p.name ... l.name))))
(module-port-names #'m))
(format "instantiation of module ~a is missing the following ports: ~a"
#'m
(filter
(λ (name)
(not (member name (syntax->datum #'(p.name ... l.name)))))
(module-port-names #'m)))
(with-syntax([m-name (symbol->string (syntax-e #'m))]
[i-name (symbol->string (syntax-e #'x))])
(syntax-property
#'`(
,m-name
" (\n"
inc-tab
(
"." ,p.name "(" ,(expression p.value) "),\n"
) ...
"." ,l.name "(" ,(expression l.value) ")\n"
dec-tab
");\n"
)
'module
#'mod-id))
]
[(_ mod-id x)
(syntax-property #'x 'module #'mod-id)])
(define-syntax-parser function
[(_ (~optional [x (~optional y)])
name-sym:id
(p:function-param ...)
expression ...)
#:with name (datum->syntax this-syntax (symbol->string (syntax-e #'name-sym)))
#:with size-int
(cond
[(and (attribute x) (attribute y))
#'(+ (- x y) 1)]
[(attribute x)
#'x]
[else #'1])
#:with size
(cond
[(and (attribute x) (attribute y))
#'`("[" ,x ":" ,y "]")]
[(attribute x)
#'`("[" ,(- x 1) ":0" "]")]
[else #'""])
(push-scoped-stack)
(add-module-function current-module (symbol->string (syntax-e #'name-sym))
(syntax-e #'size-int))
#'`(
"function " ,size " " ,name ";\n"
inc-tab
tab
to set the return value . sticking to Verilog style for now .
,(push-binding name size-int #f)
,(push-binding p.name p.size-int p.arity-list) ...
(tab
,p.direction
" "
,p.size
" "
,p.name
";\n") ...
,(~begin
expression ...)
dec-tab
,(pop-scoped-stack)
"endfunction\n")])
(define out-ports (make-hash))
(define (ensure-port-open filename)
(if (hash-has-key? out-ports filename)
(let ([p (hash-ref out-ports filename)])
(when (port-closed? p)
(hash-set! out-ports filename
(open-output-file #:mode 'binary #:exists 'append filename))))
(hash-set! out-ports filename
(open-output-file #:mode 'binary #:exists 'replace filename))))
(define (get-port filename)
(ensure-port-open filename)
(hash-ref out-ports filename))
(define (ensure-ports-closed)
(for ([p (hash-values out-ports)])
(close-output-port p)))
(define-syntax-parser #%module-begin
[(_ exprs ...)
#'(#%plain-module-begin
exprs ...
(ensure-ports-closed)
)])
(define-syntax-parser test?
[(_ name)
(syntax-local-value #'name)
#'(void)])
(define-syntax-parser vmod
[(_ name-sym:id
expression ... )
#:with name (datum->syntax this-syntax (symbol->string (syntax-e #'name-sym)))
(push-scoped-stack)
(set-current-module (symbol->string (syntax-e #'name-sym)))
(add-module (syntax-e #'name)
(port-meta
(list-ref lst 0)
(list-ref lst 1)
(list-ref lst 2)
))
(syntax->datum #'(p ... last))))
(let*
([fn (string-replace
(path->string (syntax-source-file-name #'name-sym)) ".rkt" ".v")])
(with-syntax
([nf (datum->syntax this-syntax
(build-path (syntax-source-directory this-syntax) fn))])
(syntax-property
#'(begin
(ensure-port-open nf)
(define-syntax name-sym
(module-meta name
(map (λ (lst)
(port-meta
(list-ref lst 0)
(list-ref lst 1)
(list-ref lst 2)))
(syntax->datum #'(p ... last))) '()))
(provide name-sym)
(code-gen nf
`(
,(format "module ~a (\n" name)
inc-tab
(tab
,p.direction
" "
,p.type
" "
,p.size
" "
,p.name
" "
,p.default
",\n") ...
tab
,last.direction
" "
,last.type
" "
,last.size
" "
,last.name
" "
,last.default
");"
,(push-binding p.name p.size-int) ...
,(push-binding last.name last.size-int)
"\n"
dec-tab
,(~module-line name-sym expression) ...
"endmodule\n"
,(pop-scoped-stack)
)))
'module
#'name-sym
)))])
(define-syntax-parser always-pos
[(_ clock exprs ...)
#'(always ([#:posedge clock]) (~begin exprs ...))])
(define-syntax-parser always-neg
[(_ clock exprs ...)
#'(always ([#:negedge clock]) (~begin exprs ...))])
(define-syntax-parser initial-begin
[(_ exprs ...) #'`("initial " ,(~begin exprs ...))])
(define (code-gen fn input)
(define tab 0)
(define (aux in)
(for ([sym in])
(cond
[(or (string? sym) (integer? sym))
(begin
(display sym (get-port fn)))]
[(eq? 'inc-tab sym) (set! tab (+ 1 tab))]
[(eq? 'tab sym) (display (make-string (* 2 tab) #\ ) (get-port fn))]
[(eq? 'dec-tab sym) (set! tab (- tab 1))]
[(eq? '() sym) '()]
[(list? sym) (aux sym)]
[(void? sym) '()]
[else (printf "unknown ~a\n" sym) ])))
(aux input)
)
(provide
(all-defined-out)
(for-syntax (all-defined-out))
(except-out (all-from-out syntax/parse/define)
define-syntax-parser)
(rename-out
[define-syntax-parser macro]))
|
e2d0c09d052a2f99ee2d9043713d86e69f03772f72450d1f21ff433102513099 | mitchellwrosen/planet-mitchell | Dynamic.hs | module Dynamic
( -- * Dynamic
Dynamic(..)
, toDyn
, fromDynamic
, dynApply
, dynTypeRep
) where
import Data.Dynamic
| null | https://raw.githubusercontent.com/mitchellwrosen/planet-mitchell/18dd83204e70fffcd23fe12dd3a80f70b7fa409b/planet-mitchell/src/Dynamic.hs | haskell | * Dynamic | module Dynamic
Dynamic(..)
, toDyn
, fromDynamic
, dynApply
, dynTypeRep
) where
import Data.Dynamic
|
d00955b15d704d06849affcad4906e1767935a9a9ec39a3aca8df6189fdde85b | braveclojure/training | ex11_read_eval_macros.clj | (ns training.exercises.ex11-read-eval-macros)
;; ========================================
;; Read and Eval
;; ========================================
The reader reads text to produce a Clojure data structure . When you
write Clojure , you 're writing text that represents data structures .
(read-string "(+ 1 2)")
(= '(+ 1 2) (read-string "(+ 1 2)"))
(= 3 (read-string "(+ 1 2)"))
;; The evaluator evaluates those data structures
(eval (read-string "(+ 1 2)"))
(eval '(+ 1 2))
;; What's with the '?
map
'map
(quote map)
(map inc [1 2 3])
'(map inc [1 2 3])
;; You can manipulate data structures before they get evald
(eval (list '+ 1 2))
(defn infix
[expr]
(let [x (first expr)
op (second expr)
y (last expr)]
(list op x y)))
(eval (infix '(1 + 2)))
;; aside: you can use destructuring
(defn infix'
[[x op y]]
(list op x y))
Macros let you manipulate the data structures emitted by the reader ,
;; sending the result to the evaluator
(defmacro infix-macro
[x op y]
(list op x y))
(infix-macro 1 + 2)
;; You try:
;; * Write some code to handle postfix evaluation, like:
;; (eval (postfix 1 2 +))
;; ========================================
;; Eval rules
;; ========================================
;; Data that's not a list or symbol evals to itself:
(eval true)
(eval false)
(eval {})
(eval 1)
(eval #{1 2})
;; empty lists also eval to themselves
(eval ())
;;;;
;; Let's get to know symbols!
;;;;
In general , Clojure resolves a symbol by :
;;
1 . Looking up whether the symbol names a special form . If it does n’t . . .
2 . Looking up whether the symbol corresponds to a local binding . If it does n’t . . .
3 . Trying to find a namespace mapping introduced by def . If it does n’t . . .
4 . Throwing an exception
;; if is a special form
(if true :a :b)
(let [x 5]
(+ x 3))
(def x 15)
x
(let [x 5]
(let [x 6]
(+ x 3)))
(defn exclaim
[exclamation]
(str exclamation "!"))
(read-string "+")
(type (read-string "+"))
(list (read-string "+") 1 2)
(eval (list (read-string "+") 1 2))
;; Evaling lists
;; function calls
(+ 1 2)
;; special forms
(if true 1 2)
;; Evaling macros
(read-string "(1 + 1)")
;; Why will this fail?
(comment (eval (read-string "(1 + 1)")))
;; You can manipulate the data before evaling it
(let [infix (read-string "(1 + 1)")]
(list (second infix) (first infix) (last infix)))
;; ========================================
;; Writing macros
;; ========================================
Macro anatomy
1 .
2 . macro name
3 . macro arguments . When the macro is called ,
;; these arguments are unevaluated data.
4 . macro body - works exactly like a function body
(defmacro infix-m
[[x op y]]
(list op x y))
Macros have to return a list . Why does n't this work ?
(defmacro broken-infix
[[x op y]]
(op x y))
(broken-infix (1 + 2))
;; This doesn't work because, in the macro body, you're applying `op`
;; to the `x` and `y`, not returning the list '(op x y). The return
;; value of the macro is ('+ 1 2), which attempts to apply the plus
_ symbol _ to the arguments 1 and 2 , and the return value of that is
2 . 2 is then passed to the evaluator .
;;
;; Instead, you want the macro to return the list '(+ 1 2) so that the
;; evaluator will handle it correctly.
;; Check macros with macroexpand and macroexpand-1:
(macroexpand-1 '(broken-infix (1 + 2)))
(macroexpand-1 '(when true (pr "when") (pr "true")))
;; simple quoting
(defmacro when'
"Evaluates test. If logical true, evaluates body in an implicit do."
{:added "1.0"}
[test & body]
(list 'if test (cons 'do body)))
;; ========================================
;; Syntax Quoting
;; ========================================
;; * Uses fully-qualified symbols
;; * Allows unquoting and unquote splicing
;; Fully-qualified symbols
'+
`+
;; recursively syntax quotes all elements
`(+ 1 (- 2 3))
;; You can unquote values
;; When you unqoute something, it's evaluated, and the result is
;; placed in the resulting data structure returned by syntax quote
(def flibbity :a)
`(get {:a 1} flibbity)
`(get {:a 1} ~flibbity)
`(+ 1 (inc 1))
`(+ 1 ~(inc 1))
;; Unquote splicing evaluates a form which should return a sequence,
;; then "unwraps" it
(defmacro wait
[timeout & body]
`(do (Thread/sleep ~timeout) ~@body))
(macroexpand-1 '(wait 500
(println "waited!")
(reduce + [1 2 3])))
Without unquote splicing , ~body is a list
(defmacro bad-wait
[timeout & body]
`(do (Thread/sleep ~timeout) ~body))
(macroexpand-1 '(bad-wait 500
(println "waited!")
(reduce + [1 2 3])))
;; expands to:
(comment
(do (java.lang.Thread/sleep 500)
((println "waited!") (reduce + [1 2 3]))))
;; ========================================
Macro pitfalls
;; ========================================
;; Variable capture: macro introduces a binding that shadows an
;; existing binding
(def message "Good job!")
;; This macro will shadow `message`
(defmacro with-mischief
[& stuff-to-do]
(concat (list 'let ['message "Oh, big deal!"])
stuff-to-do))
(with-mischief
(println "Here's how I feel about that thing you did: " message))
(macroexpand-1 '(with-mischief
(println "Here's how I feel about that thing you did: " message)))
get around this with
(gensym)
(gensym 'message)
(defmacro without-mischief
[& stuff-to-do]
(let [macro-message (gensym 'message)]
`(let [~macro-message "Oh, big deal!"]
~@stuff-to-do
(println "I still need to say: " ~macro-message))))
(without-mischief
(println "Here's how I feel about that thing you did: " message))
;; autogensyms are a convenience.
;; All instances of the same autogensym
;; in a syntax quote evaluate to the same symbol
`(blarg# blarg#)
(defmacro without-mischief'
[& stuff-to-do]
`(let [message# "Oh, big deal!"]
~@stuff-to-do
(println "I still need to say: " message#)))
(macroexpand-1 '(without-mischief'
(println "Here's how I feel about that thing you did: " message)))
;; expands to:
(comment
(clojure.core/let
[message__21129__auto__ "Oh, big deal!"]
(println "Here's how I feel about that thing you did: " message)
(clojure.core/println "I still need to say: " message__21129__auto__)))
;; ========================================
;; When to use macros?
;; ========================================
;; * When you're beginning, use them whenever you feel like it. Then
;; try to do the same thing with functions.
;; * Use them when you need new syntax - new evaluation rules not
;; provided out of the box. Examples:
(-> "Catface Meowmers"
(clojure.string/lower-case)
(clojure.string/split #" "))
(comment
(if-valid
rest-params validation-map errors
false
[true (errors-map errors)]))
;; You try:
;; Remember this?
(comment
(def character
{:name "Smooches McCutes"
:attributes {:intelligence 10
:strength 4
:dexterity 5}})
(def c-int (comp :intelligence :attributes))
(def c-str (comp :strength :attributes))
(def c-dex (comp :dexterity :attributes)))
write a macro , defattrs , which lets you define c - int , c - str , c - dex
;; more succinctly, like this:
(comment
(defattrs
c-int :intelligence
c-str :strength
c-dex :dexterity)
= > 10
)
;; Bonus
;; `and` is a macro:
(comment
(defmacro and
"Evaluates exprs one at a time, from left to right. If a form
returns logical false (nil or false), and returns that value and
doesn't evaluate any of the other expressions, otherwise it returns
the value of the last expr. (and) returns true."
{:added "1.0"}
([] true)
([x] x)
([x & next]
`(let [and# ~x]
(if and# (and ~@next) and#)))))
;; study it till you understand it, and optionally implement or as a macro
| null | https://raw.githubusercontent.com/braveclojure/training/5b7fb9059c17b2166c2e66850094f424319e55eb/exercises/src/training/exercises/ex11_read_eval_macros.clj | clojure | ========================================
Read and Eval
========================================
The evaluator evaluates those data structures
What's with the '?
You can manipulate data structures before they get evald
aside: you can use destructuring
sending the result to the evaluator
You try:
* Write some code to handle postfix evaluation, like:
(eval (postfix 1 2 +))
========================================
Eval rules
========================================
Data that's not a list or symbol evals to itself:
empty lists also eval to themselves
Let's get to know symbols!
if is a special form
Evaling lists
function calls
special forms
Evaling macros
Why will this fail?
You can manipulate the data before evaling it
========================================
Writing macros
========================================
these arguments are unevaluated data.
This doesn't work because, in the macro body, you're applying `op`
to the `x` and `y`, not returning the list '(op x y). The return
value of the macro is ('+ 1 2), which attempts to apply the plus
Instead, you want the macro to return the list '(+ 1 2) so that the
evaluator will handle it correctly.
Check macros with macroexpand and macroexpand-1:
simple quoting
========================================
Syntax Quoting
========================================
* Uses fully-qualified symbols
* Allows unquoting and unquote splicing
Fully-qualified symbols
recursively syntax quotes all elements
You can unquote values
When you unqoute something, it's evaluated, and the result is
placed in the resulting data structure returned by syntax quote
Unquote splicing evaluates a form which should return a sequence,
then "unwraps" it
expands to:
========================================
========================================
Variable capture: macro introduces a binding that shadows an
existing binding
This macro will shadow `message`
autogensyms are a convenience.
All instances of the same autogensym
in a syntax quote evaluate to the same symbol
expands to:
========================================
When to use macros?
========================================
* When you're beginning, use them whenever you feel like it. Then
try to do the same thing with functions.
* Use them when you need new syntax - new evaluation rules not
provided out of the box. Examples:
You try:
Remember this?
more succinctly, like this:
Bonus
`and` is a macro:
study it till you understand it, and optionally implement or as a macro | (ns training.exercises.ex11-read-eval-macros)
The reader reads text to produce a Clojure data structure . When you
write Clojure , you 're writing text that represents data structures .
(read-string "(+ 1 2)")
(= '(+ 1 2) (read-string "(+ 1 2)"))
(= 3 (read-string "(+ 1 2)"))
(eval (read-string "(+ 1 2)"))
(eval '(+ 1 2))
map
'map
(quote map)
(map inc [1 2 3])
'(map inc [1 2 3])
(eval (list '+ 1 2))
(defn infix
[expr]
(let [x (first expr)
op (second expr)
y (last expr)]
(list op x y)))
(eval (infix '(1 + 2)))
(defn infix'
[[x op y]]
(list op x y))
Macros let you manipulate the data structures emitted by the reader ,
(defmacro infix-macro
[x op y]
(list op x y))
(infix-macro 1 + 2)
(eval true)
(eval false)
(eval {})
(eval 1)
(eval #{1 2})
(eval ())
In general , Clojure resolves a symbol by :
1 . Looking up whether the symbol names a special form . If it does n’t . . .
2 . Looking up whether the symbol corresponds to a local binding . If it does n’t . . .
3 . Trying to find a namespace mapping introduced by def . If it does n’t . . .
4 . Throwing an exception
(if true :a :b)
(let [x 5]
(+ x 3))
(def x 15)
x
(let [x 5]
(let [x 6]
(+ x 3)))
(defn exclaim
[exclamation]
(str exclamation "!"))
(read-string "+")
(type (read-string "+"))
(list (read-string "+") 1 2)
(eval (list (read-string "+") 1 2))
(+ 1 2)
(if true 1 2)
(read-string "(1 + 1)")
(comment (eval (read-string "(1 + 1)")))
(let [infix (read-string "(1 + 1)")]
(list (second infix) (first infix) (last infix)))
Macro anatomy
1 .
2 . macro name
3 . macro arguments . When the macro is called ,
4 . macro body - works exactly like a function body
(defmacro infix-m
[[x op y]]
(list op x y))
Macros have to return a list . Why does n't this work ?
(defmacro broken-infix
[[x op y]]
(op x y))
(broken-infix (1 + 2))
_ symbol _ to the arguments 1 and 2 , and the return value of that is
2 . 2 is then passed to the evaluator .
(macroexpand-1 '(broken-infix (1 + 2)))
(macroexpand-1 '(when true (pr "when") (pr "true")))
(defmacro when'
"Evaluates test. If logical true, evaluates body in an implicit do."
{:added "1.0"}
[test & body]
(list 'if test (cons 'do body)))
'+
`+
`(+ 1 (- 2 3))
(def flibbity :a)
`(get {:a 1} flibbity)
`(get {:a 1} ~flibbity)
`(+ 1 (inc 1))
`(+ 1 ~(inc 1))
(defmacro wait
[timeout & body]
`(do (Thread/sleep ~timeout) ~@body))
(macroexpand-1 '(wait 500
(println "waited!")
(reduce + [1 2 3])))
Without unquote splicing , ~body is a list
(defmacro bad-wait
[timeout & body]
`(do (Thread/sleep ~timeout) ~body))
(macroexpand-1 '(bad-wait 500
(println "waited!")
(reduce + [1 2 3])))
(comment
(do (java.lang.Thread/sleep 500)
((println "waited!") (reduce + [1 2 3]))))
Macro pitfalls
(def message "Good job!")
(defmacro with-mischief
[& stuff-to-do]
(concat (list 'let ['message "Oh, big deal!"])
stuff-to-do))
(with-mischief
(println "Here's how I feel about that thing you did: " message))
(macroexpand-1 '(with-mischief
(println "Here's how I feel about that thing you did: " message)))
get around this with
(gensym)
(gensym 'message)
(defmacro without-mischief
[& stuff-to-do]
(let [macro-message (gensym 'message)]
`(let [~macro-message "Oh, big deal!"]
~@stuff-to-do
(println "I still need to say: " ~macro-message))))
(without-mischief
(println "Here's how I feel about that thing you did: " message))
`(blarg# blarg#)
(defmacro without-mischief'
[& stuff-to-do]
`(let [message# "Oh, big deal!"]
~@stuff-to-do
(println "I still need to say: " message#)))
(macroexpand-1 '(without-mischief'
(println "Here's how I feel about that thing you did: " message)))
(comment
(clojure.core/let
[message__21129__auto__ "Oh, big deal!"]
(println "Here's how I feel about that thing you did: " message)
(clojure.core/println "I still need to say: " message__21129__auto__)))
(-> "Catface Meowmers"
(clojure.string/lower-case)
(clojure.string/split #" "))
(comment
(if-valid
rest-params validation-map errors
false
[true (errors-map errors)]))
(comment
(def character
{:name "Smooches McCutes"
:attributes {:intelligence 10
:strength 4
:dexterity 5}})
(def c-int (comp :intelligence :attributes))
(def c-str (comp :strength :attributes))
(def c-dex (comp :dexterity :attributes)))
write a macro , defattrs , which lets you define c - int , c - str , c - dex
(comment
(defattrs
c-int :intelligence
c-str :strength
c-dex :dexterity)
= > 10
)
(comment
(defmacro and
"Evaluates exprs one at a time, from left to right. If a form
returns logical false (nil or false), and returns that value and
doesn't evaluate any of the other expressions, otherwise it returns
the value of the last expr. (and) returns true."
{:added "1.0"}
([] true)
([x] x)
([x & next]
`(let [and# ~x]
(if and# (and ~@next) and#)))))
|
f5fddcd2bd68cffeadbdeb312778cabdad4fd49e6ec63f880cc6e1cb2287a565 | janestreet/async_smtp | envelope_selector.ml | open! Core.Core_stable
module Stable = struct
module Base = struct
module V1 = struct
type t =
(* When adding to this type, don't forget to add to examples below. *)
[ Email_message.Email_selector.Stable.Base.V1.t
| `envelope_sender of Re2.Stable.V1_no_options.t
| `exists_envelope_recipient of Re2.Stable.V1_no_options.t
| `all_envelope_recipients of Re2.Stable.V1_no_options.t
]
[@@deriving bin_shape, sexp]
let%expect_test _ =
print_endline [%bin_digest: t];
[%expect {| 60bd581ef4f767466e97e74e3c15f1b8 |}]
;;
end
end
module V1 = struct
type t = Base.V1.t Blang.V1.t [@@deriving bin_shape, sexp]
let%expect_test _ =
print_endline [%bin_digest: t];
[%expect {| 343a90f661b8f1e5dbe9d2c82f93ce4c |}]
;;
end
end
open Core
open Email_message
module Regex = Re2
module Base = struct
type t = Stable.Base.V1.t [@@deriving sexp_of]
let matches' t envelope =
match t with
| #Email_selector.Base.t as t ->
Email_selector.Base.matches' t (Envelope_bodiless.headers envelope)
| `envelope_sender regex ->
Regex.matches regex (Envelope_bodiless.string_sender envelope)
| `exists_envelope_recipient regex ->
List.exists (Envelope_bodiless.string_recipients envelope) ~f:(fun recipient ->
Regex.matches regex recipient)
| `all_envelope_recipients regex ->
List.for_all (Envelope_bodiless.string_recipients envelope) ~f:(fun recipient ->
Regex.matches regex recipient)
;;
let matches t envelope =
let bodiless, _ = Envelope.split_bodiless envelope in
matches' t bodiless
;;
let examples : t list =
[ `envelope_sender (Regex.of_string ".*@janestreet.com")
; `exists_envelope_recipient (Regex.of_string ".*@janestreet.com")
; `all_envelope_recipients (Regex.of_string ".*@janestreet.com")
]
;;
end
type t = Base.t Blang.t [@@deriving sexp_of]
let matches' t envelope = Blang.eval t (fun base -> Base.matches' base envelope)
let matches t envelope =
let bodiless, _ = Envelope.split_bodiless envelope in
matches' t bodiless
;;
let example : t =
(Email_selector.Base.examples :> Base.t list) @ Base.examples
|> List.map ~f:Blang.base
|> Blang.and_
;;
| null | https://raw.githubusercontent.com/janestreet/async_smtp/7b0633e75a84e1cca91441ebacfb8200c308b759/types/envelope_selector.ml | ocaml | When adding to this type, don't forget to add to examples below. | open! Core.Core_stable
module Stable = struct
module Base = struct
module V1 = struct
type t =
[ Email_message.Email_selector.Stable.Base.V1.t
| `envelope_sender of Re2.Stable.V1_no_options.t
| `exists_envelope_recipient of Re2.Stable.V1_no_options.t
| `all_envelope_recipients of Re2.Stable.V1_no_options.t
]
[@@deriving bin_shape, sexp]
let%expect_test _ =
print_endline [%bin_digest: t];
[%expect {| 60bd581ef4f767466e97e74e3c15f1b8 |}]
;;
end
end
module V1 = struct
type t = Base.V1.t Blang.V1.t [@@deriving bin_shape, sexp]
let%expect_test _ =
print_endline [%bin_digest: t];
[%expect {| 343a90f661b8f1e5dbe9d2c82f93ce4c |}]
;;
end
end
open Core
open Email_message
module Regex = Re2
module Base = struct
type t = Stable.Base.V1.t [@@deriving sexp_of]
let matches' t envelope =
match t with
| #Email_selector.Base.t as t ->
Email_selector.Base.matches' t (Envelope_bodiless.headers envelope)
| `envelope_sender regex ->
Regex.matches regex (Envelope_bodiless.string_sender envelope)
| `exists_envelope_recipient regex ->
List.exists (Envelope_bodiless.string_recipients envelope) ~f:(fun recipient ->
Regex.matches regex recipient)
| `all_envelope_recipients regex ->
List.for_all (Envelope_bodiless.string_recipients envelope) ~f:(fun recipient ->
Regex.matches regex recipient)
;;
let matches t envelope =
let bodiless, _ = Envelope.split_bodiless envelope in
matches' t bodiless
;;
let examples : t list =
[ `envelope_sender (Regex.of_string ".*@janestreet.com")
; `exists_envelope_recipient (Regex.of_string ".*@janestreet.com")
; `all_envelope_recipients (Regex.of_string ".*@janestreet.com")
]
;;
end
type t = Base.t Blang.t [@@deriving sexp_of]
let matches' t envelope = Blang.eval t (fun base -> Base.matches' base envelope)
let matches t envelope =
let bodiless, _ = Envelope.split_bodiless envelope in
matches' t bodiless
;;
let example : t =
(Email_selector.Base.examples :> Base.t list) @ Base.examples
|> List.map ~f:Blang.base
|> Blang.and_
;;
|
ef1560655d7b466bfb2d2af5dd351afb48568c08d48f78111f692d011fbd224e | pyr/riemann-extra | webhook.clj | (ns org.spootnik.riemann.webhook
(:require [clj-http.client :as client])
(:require [cheshire.core :as json]))
(defn post
"POST to a webhook URL."
[request url]
(client/post url
{:body (json/generate-string request)
:socket-timeout 5000
:conn-timeout 5000
:content-type :json
:accept :json
:throw-entire-message? true}))
(defn format-event
"Formats an event for PD. event-type is one of :trigger, :acknowledge,
:resolve"
[event]
{:description (str (:host event) " "
(:service event) " is "
(:state event) " ("
(:metric event) ")")
:details event})
(defn webhook
[url]
(fn [event]
(post (format-event event) url))) | null | https://raw.githubusercontent.com/pyr/riemann-extra/6a70763a4a86ed4fe82382486113de0f6b765e28/src/org/spootnik/riemann/webhook.clj | clojure | (ns org.spootnik.riemann.webhook
(:require [clj-http.client :as client])
(:require [cheshire.core :as json]))
(defn post
"POST to a webhook URL."
[request url]
(client/post url
{:body (json/generate-string request)
:socket-timeout 5000
:conn-timeout 5000
:content-type :json
:accept :json
:throw-entire-message? true}))
(defn format-event
"Formats an event for PD. event-type is one of :trigger, :acknowledge,
:resolve"
[event]
{:description (str (:host event) " "
(:service event) " is "
(:state event) " ("
(:metric event) ")")
:details event})
(defn webhook
[url]
(fn [event]
(post (format-event event) url))) |
|
f82e83886ec5740559f224a7ec5bfa548ec424a06c87ed8ee33ddd0d34c97745 | fugue/fregot | Test.hs | |
Copyright : ( c ) 2020 Fugue , Inc.
License : Apache License , version 2.0
Maintainer :
Stability : experimental
Portability : POSIX
Copyright : (c) 2020 Fugue, Inc.
License : Apache License, version 2.0
Maintainer :
Stability : experimental
Portability : POSIX
-}
{-# LANGUAGE OverloadedStrings #-}
# LANGUAGE TemplateHaskell #
module Fregot.Main.Test
( Options
, parseOptions
, main
) where
import Control.Lens ((&), (.~), (^.))
import Control.Lens.TH (makeLenses)
import Control.Monad.Extended (foldMapM, forM_)
import qualified Control.Monad.Parachute as Parachute
import Data.Bifunctor (bimap)
import qualified Data.IORef as IORef
import Data.List (sortOn)
import qualified Fregot.Error as Error
import qualified Fregot.Find as Find
import qualified Fregot.Interpreter as Interpreter
import Fregot.Main.GlobalOptions
import Fregot.Names
import qualified Fregot.Parser as Parser
import qualified Fregot.Sources as Sources
import Fregot.Test
import qualified Options.Applicative as OA
import System.Exit (ExitCode (..))
import qualified System.IO as IO
data Options = Options
{ _paths :: [DestinationPrefix FilePath]
} deriving (Show)
$(makeLenses ''Options)
parseOptions :: OA.Parser Options
parseOptions = Options
<$> (OA.some $ fmap parseDestinationPrefix $ OA.strArgument $
OA.metavar "PATHS" <>
OA.help "Rego files or directories to test")
main :: GlobalOptions -> Options -> IO ExitCode
main gopts opts = do
sources <- Sources.newHandle
interpreter <- Interpreter.newHandle
(Interpreter.defaultConfig
& Interpreter.dumpTags .~ gopts ^. dumpTags
& Interpreter.strictBuiltinErrors .~ gopts ^. strictBuiltinErrors)
sources
regoPaths <- Find.findPrefixedRegoFiles (opts ^. paths)
(errors, mbResult) <- Parachute.runParachuteT $ do
forM_ regoPaths $ Interpreter.loadFileByExtension
interpreter Parser.defaultParserOptions
Interpreter.compileRules interpreter
tests <- sortOn (bimap unPackageName unVar) . filter isTest <$>
Interpreter.readAllRules interpreter
foldMapM (\t -> runTest interpreter t) tests
sources' <- IORef.readIORef sources
forM_ mbResult (printTestResults IO.stdout sources')
Error.hPutErrors IO.stderr sources' (gopts ^. format) errors
return $! case mbResult of
_ | Error.severe errors -> ExitFailure 1
Just tr | null (tr ^. failed) && null (tr ^. errored) -> ExitSuccess
_ -> ExitFailure 1
| null | https://raw.githubusercontent.com/fugue/fregot/c3d87f37c43558761d5f6ac758d2f1a4117adb3e/lib/Fregot/Main/Test.hs | haskell | # LANGUAGE OverloadedStrings # | |
Copyright : ( c ) 2020 Fugue , Inc.
License : Apache License , version 2.0
Maintainer :
Stability : experimental
Portability : POSIX
Copyright : (c) 2020 Fugue, Inc.
License : Apache License, version 2.0
Maintainer :
Stability : experimental
Portability : POSIX
-}
# LANGUAGE TemplateHaskell #
module Fregot.Main.Test
( Options
, parseOptions
, main
) where
import Control.Lens ((&), (.~), (^.))
import Control.Lens.TH (makeLenses)
import Control.Monad.Extended (foldMapM, forM_)
import qualified Control.Monad.Parachute as Parachute
import Data.Bifunctor (bimap)
import qualified Data.IORef as IORef
import Data.List (sortOn)
import qualified Fregot.Error as Error
import qualified Fregot.Find as Find
import qualified Fregot.Interpreter as Interpreter
import Fregot.Main.GlobalOptions
import Fregot.Names
import qualified Fregot.Parser as Parser
import qualified Fregot.Sources as Sources
import Fregot.Test
import qualified Options.Applicative as OA
import System.Exit (ExitCode (..))
import qualified System.IO as IO
data Options = Options
{ _paths :: [DestinationPrefix FilePath]
} deriving (Show)
$(makeLenses ''Options)
parseOptions :: OA.Parser Options
parseOptions = Options
<$> (OA.some $ fmap parseDestinationPrefix $ OA.strArgument $
OA.metavar "PATHS" <>
OA.help "Rego files or directories to test")
main :: GlobalOptions -> Options -> IO ExitCode
main gopts opts = do
sources <- Sources.newHandle
interpreter <- Interpreter.newHandle
(Interpreter.defaultConfig
& Interpreter.dumpTags .~ gopts ^. dumpTags
& Interpreter.strictBuiltinErrors .~ gopts ^. strictBuiltinErrors)
sources
regoPaths <- Find.findPrefixedRegoFiles (opts ^. paths)
(errors, mbResult) <- Parachute.runParachuteT $ do
forM_ regoPaths $ Interpreter.loadFileByExtension
interpreter Parser.defaultParserOptions
Interpreter.compileRules interpreter
tests <- sortOn (bimap unPackageName unVar) . filter isTest <$>
Interpreter.readAllRules interpreter
foldMapM (\t -> runTest interpreter t) tests
sources' <- IORef.readIORef sources
forM_ mbResult (printTestResults IO.stdout sources')
Error.hPutErrors IO.stderr sources' (gopts ^. format) errors
return $! case mbResult of
_ | Error.severe errors -> ExitFailure 1
Just tr | null (tr ^. failed) && null (tr ^. errored) -> ExitSuccess
_ -> ExitFailure 1
|
a910f450fc4dcd3068a65ada9615873ca2b9a94b0b424e32fb5a70b1bdd6d47b | FundingCircle/fc4-framework | watch_test.clj | (ns fc4.io.watch-test
(:require [clojure.java.io :refer [copy delete-file file writer]]
[clojure.string :refer [split-lines]]
[clojure.test :refer [deftest is testing use-fixtures]]
[fc4.io.watch :as e]
[fc4.io.util :as u]
[fc4.test-utils.io :refer [tmp-copy]]))
(defn count-substring
{:source "#Clojure"}
[txt sub]
(count (re-seq (re-pattern sub) txt)))
(defn append
[f v]
(with-open [w (writer f :append true)]
(.write w v)))
(defn no-debug
"Ensure that debug messages don’t get printed, so we can make assertions about
the output of the functions under test."
[f]
(reset! u/debug? false)
(f))
(use-fixtures :each no-debug)
(deftest watch
(testing "changing a single file once"
;; ...should trigger a single invocation of the process fn, even though the process fn changes
;; the file.
(let [yaml-file (tmp-copy "test/data/structurizr/express/diagram_valid_messy.yaml")
invocations (atom 0)
f (fn [fp]
(swap! invocations inc)
(when (< @invocations 5) ; failsafe
; change the file, which hopefully will not trigger the watch again
(append fp "ha!\n")))
watch (delay (e/start f [yaml-file])) ; watch needs to be started inside the with-out-str
output (with-out-str
(force watch)
(Thread/sleep 100)
(append yaml-file "yo!\n") ; change the file, triggering the watch
(Thread/sleep 3000))]
(e/stop @watch)
(is (= 1 @invocations))
(is (= 1 (count-substring (slurp yaml-file) "ha!")))
(is (= 2 (count (split-lines output))) (str "output: " output))
(delete-file yaml-file)))
;; We had a bug wherein if an exception was thrown while rendering — and the
;; current workflow does use exceptions, to my regret — further changes to
;; that file would not trigger processing.
(testing "an error should not break the watch for that file"
(let [yaml-file (tmp-copy "test/data/structurizr/express/diagram_valid_messy.yaml")
invocations (atom 0)
f (fn [fp]
(try
(if (= @invocations 0)
(println "processing...🚨 ruh roh")
(println "processing...✅"))
(finally
(swap! invocations inc))))
watch (delay (e/start f [yaml-file])) ; watch needs to be started inside the with-out-str
output (with-out-str
(force watch)
(Thread/sleep 100)
(append yaml-file "ha!\n")
(Thread/sleep 2200)
(append yaml-file "ha!\n")
(Thread/sleep 300))]
(e/stop @watch)
(println output)
(is (= 2 @invocations))
(is (= 1 (count-substring output "✅")) (str "output: " output))
(is (= 1 (count-substring output "🚨")) (str "output: " output))
(is (= 3 (count (split-lines output))) (str "output: " output))
(delete-file yaml-file))))
| null | https://raw.githubusercontent.com/FundingCircle/fc4-framework/674af39e7edb2cbfd3e1941e6abe80fd87d93bed/test/fc4/io/watch_test.clj | clojure | ...should trigger a single invocation of the process fn, even though the process fn changes
the file.
failsafe
change the file, which hopefully will not trigger the watch again
watch needs to be started inside the with-out-str
change the file, triggering the watch
We had a bug wherein if an exception was thrown while rendering — and the
current workflow does use exceptions, to my regret — further changes to
that file would not trigger processing.
watch needs to be started inside the with-out-str | (ns fc4.io.watch-test
(:require [clojure.java.io :refer [copy delete-file file writer]]
[clojure.string :refer [split-lines]]
[clojure.test :refer [deftest is testing use-fixtures]]
[fc4.io.watch :as e]
[fc4.io.util :as u]
[fc4.test-utils.io :refer [tmp-copy]]))
(defn count-substring
{:source "#Clojure"}
[txt sub]
(count (re-seq (re-pattern sub) txt)))
(defn append
[f v]
(with-open [w (writer f :append true)]
(.write w v)))
(defn no-debug
"Ensure that debug messages don’t get printed, so we can make assertions about
the output of the functions under test."
[f]
(reset! u/debug? false)
(f))
(use-fixtures :each no-debug)
(deftest watch
(testing "changing a single file once"
(let [yaml-file (tmp-copy "test/data/structurizr/express/diagram_valid_messy.yaml")
invocations (atom 0)
f (fn [fp]
(swap! invocations inc)
(append fp "ha!\n")))
output (with-out-str
(force watch)
(Thread/sleep 100)
(Thread/sleep 3000))]
(e/stop @watch)
(is (= 1 @invocations))
(is (= 1 (count-substring (slurp yaml-file) "ha!")))
(is (= 2 (count (split-lines output))) (str "output: " output))
(delete-file yaml-file)))
(testing "an error should not break the watch for that file"
(let [yaml-file (tmp-copy "test/data/structurizr/express/diagram_valid_messy.yaml")
invocations (atom 0)
f (fn [fp]
(try
(if (= @invocations 0)
(println "processing...🚨 ruh roh")
(println "processing...✅"))
(finally
(swap! invocations inc))))
output (with-out-str
(force watch)
(Thread/sleep 100)
(append yaml-file "ha!\n")
(Thread/sleep 2200)
(append yaml-file "ha!\n")
(Thread/sleep 300))]
(e/stop @watch)
(println output)
(is (= 2 @invocations))
(is (= 1 (count-substring output "✅")) (str "output: " output))
(is (= 1 (count-substring output "🚨")) (str "output: " output))
(is (= 3 (count (split-lines output))) (str "output: " output))
(delete-file yaml-file))))
|
22fb9523fc12f910d505d2efe70fdc85d8ae9275235dc6f6fe81fae324142261 | fujita-y/ypsilon | 28.scm | #!nobacktrace
(define-library (srfi 28) (import (core)) (export format))
| null | https://raw.githubusercontent.com/fujita-y/ypsilon/44260d99e24000f9847e79c94826c3d9b76872c2/sitelib/srfi/28.scm | scheme | #!nobacktrace
(define-library (srfi 28) (import (core)) (export format))
|
|
bfe72122f1d98fdba469555af1f9360bc10f2d418332960cf42eae92dd70349c | humorless/spark-movie-lens | profiles.clj | {:profiles/dev {:env {:database-url "jdbc:postgresql"}}
:profiles/test {:env {:database-url "jdbc:postgresql"}}}
| null | https://raw.githubusercontent.com/humorless/spark-movie-lens/3062227f0087f8437144cebc5f8cfbd2ee74e965/profiles.clj | clojure | {:profiles/dev {:env {:database-url "jdbc:postgresql"}}
:profiles/test {:env {:database-url "jdbc:postgresql"}}}
|
|
1c76757b2f5e40e029ab5ea649768a8322eef29025760c39efa2bb4e72214127 | martinsumner/kv_index_tictactree | mockvnode_SUITE.erl | -module(mockvnode_SUITE).
-include_lib("common_test/include/ct.hrl").
-export([all/0]).
-export([coveragefold_nativemedium/1,
coveragefold_nativesmall/1,
coveragefold_parallelmedium/1,
coveragefold_parallelmediumko/1,
coveragefold_parallelsmall/1,
loadexchangeandrebuild_stbucketko/1,
loadexchangeandrebuild_tuplebucketko/1,
loadexchangeandrebuild_stbucketso/1,
loadexchangeandrebuild_tuplebucketso/1]).
all() -> [
coveragefold_nativemedium,
coveragefold_nativesmall,
coveragefold_parallelmedium,
coveragefold_parallelmediumko,
coveragefold_parallelsmall,
loadexchangeandrebuild_stbucketso,
loadexchangeandrebuild_tuplebucketso,
loadexchangeandrebuild_stbucketko,
loadexchangeandrebuild_tuplebucketko
].
-include("testutil.hrl").
loadexchangeandrebuild_stbucketko(_Config) ->
mock_vnode_loadexchangeandrebuild_tester(false, parallel_ko),
mock_vnode_loadexchangeandrebuild_tester(false, parallel_ko).
loadexchangeandrebuild_stbucketso(_Config) ->
mock_vnode_loadexchangeandrebuild_tester(false, parallel_so).
loadexchangeandrebuild_tuplebucketko(_Config) ->
mock_vnode_loadexchangeandrebuild_tester(true, parallel_ko),
mock_vnode_loadexchangeandrebuild_tester(true, parallel_ko).
loadexchangeandrebuild_tuplebucketso(_Config) ->
mock_vnode_loadexchangeandrebuild_tester(true, parallel_so).
mock_vnode_loadexchangeandrebuild_tester(TupleBuckets, PType) ->
Load up two vnodes with same data , with the data in each node split
across 3 partitions ( n=1 ) .
%
The purpose if to perform exchanges to first highlight no differences ,
% and then once a difference is created, discover any difference
TestStartPoint = os:timestamp(),
LogProgress =
fun(Point) ->
io:format("Test reached point ~s in ~w s~n",
[Point,
timer:now_diff(os:timestamp(), TestStartPoint)
div 1000])
end,
LogProgress("T0"),
InitialKeyCount = 80000,
RootPath = testutil:reset_filestructure(),
MockPathN = filename:join(RootPath, "mock_native/"),
MockPathP = filename:join(RootPath, "mock_parallel/"),
IndexNs = [{1, 3}, {2, 3}, {3, 3}],
PreflistFun =
fun(_B, K) ->
Idx = erlang:phash2(K) rem length(IndexNs),
lists:nth(Idx + 1, IndexNs)
end,
Start up to two mock vnodes
- VNN is a native vnode ( where the AAE process will not keep a parallel
% key store)
- VNP is a parallel vnode ( where a separate AAE key store is required
% to be kept in parallel)
{ok, VNN} = mock_kv_vnode:open(MockPathN, native, IndexNs, PreflistFun),
{ok, VNP} = mock_kv_vnode:open(MockPathP, PType, IndexNs, PreflistFun),
RPid = self(),
LogNotRepairFun =
fun(KL) ->
lists:foreach(fun({{B, K}, VCCompare}) ->
io:format("Delta found in ~w ~s ~w~n",
[B,
binary_to_list(K),
VCCompare])
end,
KL)
end,
NullRepairFun = fun(_KL) -> ok end,
ReturnFun = fun(R) -> RPid ! {result, R} end,
io:format("Exchange between empty vnodes~n"),
{ok, _P0, GUID0} =
aae_exchange:start([{exchange_vnodesendfun(VNN), IndexNs}],
[{exchange_vnodesendfun(VNP), IndexNs}],
LogNotRepairFun,
ReturnFun),
io:format("Exchange id ~s~n", [GUID0]),
{ExchangeState0, 0} = testutil:start_receiver(),
true = ExchangeState0 == root_compare,
io:format("Same exchange - now using tree compare~n"),
GetBucketFun =
fun(I) ->
case TupleBuckets of
true ->
{?BUCKET_TYPE, integer_to_binary(I)};
false ->
integer_to_binary(I)
end
end,
Bucket1 = GetBucketFun(1),
Bucket2 = GetBucketFun(2),
Bucket3 = GetBucketFun(3),
Bucket4 = GetBucketFun(4),
{ok, _TC_P0, TC_GUID0} =
aae_exchange:start(partial,
[{exchange_vnodesendfun(VNN), IndexNs}],
[{exchange_vnodesendfun(VNP), IndexNs}],
LogNotRepairFun,
ReturnFun,
{filter, Bucket3, all,
small, all, all, pre_hash},
[{transition_pause_ms, 100},
{log_levels, [warn, error, critical]},
{purpose, test}]),
io:format("Exchange id for tree compare ~s~n", [TC_GUID0]),
{ExchangeStateTC0, 0} = testutil:start_receiver(),
true = ExchangeStateTC0 == tree_compare,
{ok, _TC_P1, TC_GUID1} =
aae_exchange:start(partial,
[{exchange_vnodesendfun(VNP), IndexNs}],
[{exchange_vnodesendfun(VNN), IndexNs}],
LogNotRepairFun,
ReturnFun,
{filter, Bucket3, all,
small, all, all, pre_hash},
[{transition_pause_ms, 100}]),
io:format("Exchange id for tree compare ~s~n", [TC_GUID1]),
{ExchangeStateTC1, 0} = testutil:start_receiver(),
true = ExchangeStateTC1 == tree_compare,
ObjList = testutil:gen_riakobjects(InitialKeyCount, [], TupleBuckets),
ReplaceList = testutil:gen_riakobjects(100, [], TupleBuckets),
some objects to replace the first 100 objects
DeleteList1 = lists:sublist(ObjList, 200, 100),
DeleteList2 =
lists:sublist(ObjList, 400, 10) ++
lists:sublist(ObjList, 500, 10) ++
lists:sublist(ObjList, 600, 10),
RehashList = lists:sublist(ObjList, 700, 10),
PutFun =
fun(Store1, Store2) ->
fun(Object) ->
PL = PreflistFun(null, Object#r_object.key),
mock_kv_vnode:put(Store1, Object, PL, [Store2])
end
end,
DeleteFun =
fun(Stores) ->
fun(Object) ->
PL = PreflistFun(null, Object#r_object.key),
lists:foreach(
fun(Store) ->
mock_kv_vnode:backend_delete(Store,
Object#r_object.bucket,
Object#r_object.key,
PL)
end,
Stores)
end
end,
RehashFun =
fun(Stores) ->
fun(Object) ->
PL = PreflistFun(null, Object#r_object.key),
lists:foreach(
fun(Store) ->
mock_kv_vnode:rehash(Store,
Object#r_object.bucket,
Object#r_object.key,
PL)
end,
Stores)
end
end,
LogProgress("T1"),
io:format("Load objects into both stores~n"),
PutFun1 = PutFun(VNN, VNP),
PutFun2 = PutFun(VNP, VNN),
{OL1, OL2A} = lists:split(InitialKeyCount div 2, ObjList),
{[RogueObjC1, RogueObjC2], OL2} = lists:split(2, OL2A),
% Keep some rogue objects to cause failures, by not putting them
% correctly into both vnodes. These aren't loaded yet
RogueObj1 = RogueObjC1#r_object{bucket = Bucket1},
RogueObj2 = RogueObjC2#r_object{bucket = Bucket2},
ok = lists:foreach(PutFun1, OL1),
ok = lists:foreach(PutFun2, OL2),
ok = lists:foreach(PutFun1, ReplaceList),
ok = lists:foreach(DeleteFun([VNN, VNP]), DeleteList1),
io:format("Exchange between equivalent vnodes~n"),
{ok, _P1, GUID1} =
aae_exchange:start([{exchange_vnodesendfun(VNN), IndexNs}],
[{exchange_vnodesendfun(VNP), IndexNs}],
LogNotRepairFun,
ReturnFun),
io:format("Exchange id ~s~n", [GUID1]),
{ExchangeState1, 0} = testutil:start_receiver(),
true = ExchangeState1 == root_compare,
io:format("Rehash some entries and confirm root_compare " ++
"still matches, as rehash doesn't do anything~n"),
ok = lists:foreach(RehashFun([VNN, VNP]), RehashList),
{ok, _P1a, GUID1a} =
aae_exchange:start([{exchange_vnodesendfun(VNN), IndexNs}],
[{exchange_vnodesendfun(VNP), IndexNs}],
LogNotRepairFun,
ReturnFun),
io:format("Exchange id ~s~n", [GUID1a]),
{ExchangeState1a, 0} = testutil:start_receiver(),
true = ExchangeState1a == root_compare,
io:format("Compare the two stores using an AAE fold - " ++
"and prove that AAE fold is working as expected~n"),
Bucket =
case TupleBuckets of
true ->
{?BUCKET_TYPE, integer_to_binary(3)};
false ->
integer_to_binary(3)
end,
StartKey = list_to_binary(string:right(integer_to_list(10), 6, $0)),
EndKey = list_to_binary(string:right(integer_to_list(50), 6, $0)),
Elements = [{sibcount, null}, {clock, null}, {hash, null}],
InitAcc = {[], 0},
FoldKRFun =
fun(FB, FK, FEs, {KCHAcc, SCAcc}) ->
true = FB == Bucket,
true = FK >= StartKey,
true = FK < EndKey,
{clock, FC} = lists:keyfind(clock, 1, FEs),
{hash, FH} = lists:keyfind(hash, 1, FEs),
{sibcount, FSC} = lists:keyfind(sibcount, 1, FEs),
{lists:usort([{FK, FC, FH}|KCHAcc]), SCAcc + FSC}
end,
{async, VNNF} =
mock_kv_vnode:fold_aae(VNN,
{key_range, Bucket, StartKey, EndKey},
all,
FoldKRFun,
InitAcc,
Elements),
{async, VNPF} =
mock_kv_vnode:fold_aae(VNP,
{key_range, Bucket, StartKey, EndKey},
all,
FoldKRFun,
InitAcc,
Elements),
{VNNF_KL, VNNF_SC} = VNNF(),
{VNPF_KL, VNPF_SC} = VNPF(),
true = VNNF_SC == 8,
true = VNPF_SC == 8,
true = lists:usort(VNNF_KL) == lists:usort(VNPF_KL),
true = length(VNNF_KL) == 8,
true = length(VNPF_KL) == 8,
[{K1, C1, H1}|Rest] = VNNF_KL,
[{K2, C2, H2}|_Rest] = Rest,
BinaryKey1 = aae_util:make_binarykey(Bucket, K1),
BinaryKey2 = aae_util:make_binarykey(Bucket, K2),
SegmentID1 =
leveled_tictac:get_segment(
element(1, leveled_tictac:tictac_hash(BinaryKey1, <<>>)),
small),
SegmentID2 =
leveled_tictac:get_segment(
element(1, leveled_tictac:tictac_hash(BinaryKey2, <<>>)),
small),
io:format("Looking for Segment IDs K1 ~w ~w K2 ~w ~w~n",
[K1, SegmentID1, K2, SegmentID2]),
{async, VNNF_SL} =
mock_kv_vnode:fold_aae(VNN,
{key_range, Bucket, StartKey, EndKey},
{segments, [SegmentID1, SegmentID2], small},
FoldKRFun,
InitAcc,
Elements),
{async, VNPF_SL} =
mock_kv_vnode:fold_aae(VNP,
{key_range, Bucket, StartKey, EndKey},
{segments, [SegmentID1, SegmentID2], small},
FoldKRFun,
InitAcc,
Elements),
{[{K1, C1, H1}, {K2, C2, H2}], 2} = VNNF_SL(),
{[{K1, C1, H1}, {K2, C2, H2}], 2} = VNPF_SL(),
io:format("Make change to one vnode only (the parallel one)~n"),
Idx1 = erlang:phash2(RogueObj1#r_object.key) rem length(IndexNs),
mock_kv_vnode:put(VNP, RogueObj1, lists:nth(Idx1 + 1, IndexNs), []),
io:format("Exchange between nodes to expose difference~n"),
{ok, _P2, GUID2} =
aae_exchange:start([{exchange_vnodesendfun(VNN), IndexNs}],
[{exchange_vnodesendfun(VNP), IndexNs}],
NullRepairFun,
ReturnFun),
io:format("Exchange id ~s~n", [GUID2]),
{ExchangeState2, 1} = testutil:start_receiver(),
true = ExchangeState2 == clock_compare,
LogProgress("T2"),
io:format("Make change to one vnode only (the native one)~n"),
Idx2 = erlang:phash2(RogueObj2#r_object.key) rem length(IndexNs),
mock_kv_vnode:put(VNN, RogueObj2, lists:nth(Idx2 + 1, IndexNs), []),
io:format("Exchange between nodes to expose differences" ++
"(one in VNN, one in VNP)~n"),
{ok, _P3, GUID3} =
aae_exchange:start([{exchange_vnodesendfun(VNN), IndexNs}],
[{exchange_vnodesendfun(VNP), IndexNs}],
NullRepairFun,
ReturnFun),
io:format("Exchange id ~s~n", [GUID3]),
{ExchangeState3, 2} = testutil:start_receiver(),
true = ExchangeState3 == clock_compare,
{RebuildN, false} = mock_kv_vnode:rebuild(VNN, false),
{RebuildP, false} = mock_kv_vnode:rebuild(VNP, false),
io:format("Discover Next rebuild times - should be in the future " ++
"as both stores were started empty, and hence without " ++
"the need to rebuild~n"),
io:format("Next rebuild vnn ~w vnp ~w~n", [RebuildN, RebuildP]),
true = RebuildN > os:timestamp(),
true = RebuildP > os:timestamp(),
ok = mock_kv_vnode:close(VNN),
ok = mock_kv_vnode:close(VNP),
io:format("Restart the vnodes, " ++
"confirm next rebuilds are still in the future~n"),
% Between startup and shutdown the next_rebuild will be rescheduled to
% a different time, as the look at the last rebuild time and schedule
% forward from there.
{ok, VNNa} = mock_kv_vnode:open(MockPathN, native, IndexNs, PreflistFun),
{ok, VNPa} = mock_kv_vnode:open(MockPathP, PType, IndexNs, PreflistFun),
{RebuildNa, false} = mock_kv_vnode:rebuild(VNNa, false),
{RebuildPa, false} = mock_kv_vnode:rebuild(VNPa, false),
io:format("Next rebuild vnn ~w vnp ~w~n", [RebuildNa, RebuildPa]),
true = RebuildNa > os:timestamp(),
true = RebuildPa > os:timestamp(),
Exchange between nodes to expose differences ( one in VNN , one in VNP )
io:format("Should still discover the same difference " ++
"as when they were close~n"),
{ok, _P3a, GUID3a} =
aae_exchange:start([{exchange_vnodesendfun(VNNa), IndexNs}],
[{exchange_vnodesendfun(VNPa), IndexNs}],
NullRepairFun,
ReturnFun),
io:format("Exchange id ~s~n", [GUID3a]),
{ExchangeState3a, 2} = testutil:start_receiver(),
true = ExchangeState3a == clock_compare,
LogProgress("T3"),
Exchange with a one hour modified range - should see same differences
io:format("Repeat exchange with 1 hour modified range~n"),
MRH = convert_ts(os:timestamp()),
{ok, _P3mr1, GUID3mr1} =
aae_exchange:start(full,
[{exchange_vnodesendfun(VNNa), IndexNs}],
[{exchange_vnodesendfun(VNPa), IndexNs}],
NullRepairFun,
ReturnFun,
{filter, all, all, large, all,
{MRH - (60 * 60), MRH},
pre_hash},
[]),
io:format("Exchange id ~s~n", [GUID3mr1]),
{ExchangeState3mr1, 2} = testutil:start_receiver(),
true = ExchangeState3mr1 == clock_compare,
Exchnage with an older modified range - see clock_compare but no
% differences
io:format("Repeat exchange, but with change outside of modified range~n"),
{ok, _P3mr2, GUID3mr2} =
aae_exchange:start(full,
[{exchange_vnodesendfun(VNNa), IndexNs}],
[{exchange_vnodesendfun(VNPa), IndexNs}],
NullRepairFun,
ReturnFun,
{filter, all, all, large, all,
{MRH - (2 * 60 * 60), MRH - (60 * 60)},
pre_hash},
[]),
io:format("Exchange id ~s~n", [GUID3mr2]),
{ExchangeState3mr2, 0} = testutil:start_receiver(),
true = ExchangeState3mr2 == clock_compare,
io:format("Repeat exchange with modified range and Bucket constraint~n"),
{ok, _P3bmr1, GUID3bmr1} =
aae_exchange:start(full,
[{exchange_vnodesendfun(VNNa), IndexNs}],
[{exchange_vnodesendfun(VNPa), IndexNs}],
NullRepairFun,
ReturnFun,
{filter, Bucket1, all, large, all,
{MRH - (60 * 60), MRH},
pre_hash},
[]),
io:format("Exchange id ~s~n", [GUID3bmr1]),
{ExchangeState3bmr1, 1} = testutil:start_receiver(),
true = ExchangeState3bmr1 == clock_compare,
io:format("Repeat exchange with modified range and Bucket constraint~n"),
{ok, _P3bmr2, GUID3bmr2} =
aae_exchange:start(full,
[{exchange_vnodesendfun(VNNa), IndexNs}],
[{exchange_vnodesendfun(VNPa), IndexNs}],
NullRepairFun,
ReturnFun,
{filter, Bucket2, all, large, all,
{MRH - (60 * 60), MRH},
pre_hash},
[]),
io:format("Exchange id ~s~n", [GUID3bmr2]),
{ExchangeState3bmr2, 1} = testutil:start_receiver(),
true = ExchangeState3bmr2 == clock_compare,
io:format("Repeat exchange with modified range and unmodified Bucket~n"),
{ok, _P3bmr3, GUID3bmr3} =
aae_exchange:start(full,
[{exchange_vnodesendfun(VNNa), IndexNs}],
[{exchange_vnodesendfun(VNPa), IndexNs}],
NullRepairFun,
ReturnFun,
{filter, Bucket, all, large, all,
{MRH - (60 * 60), MRH},
pre_hash},
[]),
io:format("Exchange id ~s~n", [GUID3bmr3]),
{ExchangeState3bmr3, 0} = testutil:start_receiver(),
true = ExchangeState3bmr3 == clock_compare,
LogProgress("T4"),
io:format("Prompts for a rebuild of both stores~n"),
% The rebuild is a rebuild of both
% the store and the tree in the case of the parallel vnode, and just the
% tree in the case of the native rebuild
{RebuildNb, true} = mock_kv_vnode:rebuild(VNNa, true),
true = RebuildNb > os:timestamp(),
% next rebuild was in the future, and is still scheduled as such
key thing that the ongoing rebuild status is now true ( the second
% element of the rebuild response)
io:format("Now poll to check to see when the rebuild is complete~n"),
wait_for_rebuild(VNNa),
Next rebuild times should now still be in the future
{RebuildNc, false} = mock_kv_vnode:rebuild(VNNa, false),
{RebuildPc, false} = mock_kv_vnode:rebuild(VNPa, false),
true = RebuildPc == RebuildPa, % Should not have changed
io:format("Following a completed rebuild - the exchange should still" ++
" work as before, spotting two differences~n"),
{ok, _P3b, GUID3b} =
aae_exchange:start([{exchange_vnodesendfun(VNNa), IndexNs}],
[{exchange_vnodesendfun(VNPa), IndexNs}],
NullRepairFun,
ReturnFun),
io:format("Exchange id ~s~n", [GUID3b]),
{ExchangeState3b, 2} = testutil:start_receiver(),
true = ExchangeState3b == clock_compare,
{RebuildPb, true} = mock_kv_vnode:rebuild(VNPa, true),
true = RebuildPb > os:timestamp(),
io:format("Rebuild in progress, exchange still working~n"),
% There should now be a rebuild in progress - but immediately check that
an exchange will still work ( spotting the same two differences as before
% following a clock_compare)
{ok, _P3c, GUID3c} =
aae_exchange:start([{exchange_vnodesendfun(VNNa), IndexNs}],
[{exchange_vnodesendfun(VNPa), IndexNs}],
NullRepairFun,
ReturnFun),
io:format("Exchange id ~s~n", [GUID3c]),
% This could receive {error, 0}. On complete of rebuild the leveled
% store is shutdown - and by design this closes all iterators. So this
% may crash if in the fetch_clock state
{ExchangeState3c, 2} =
case testutil:start_receiver() of
{error, 0} ->
aae_exchange:start([{exchange_vnodesendfun(VNNa), IndexNs}],
[{exchange_vnodesendfun(VNPa), IndexNs}],
NullRepairFun,
ReturnFun),
% Retry in the case this exchange times out on the rebuild
% completing faster than expected
testutil:start_receiver();
Other ->
Other
end,
true = ExchangeState3c == clock_compare,
io:format("Waiting for rebuild after exchange success~n"),
wait_for_rebuild(VNPa),
{RebuildNd, false} = mock_kv_vnode:rebuild(VNNa, false),
{RebuildPd, false} = mock_kv_vnode:rebuild(VNPa, false),
true = RebuildNd == RebuildNc,
true = RebuildPd > os:timestamp(),
io:format("Rebuild now complete - " ++
"should get the same result for an exchange~n"),
{ok, _P3d, GUID3d} =
aae_exchange:start([{exchange_vnodesendfun(VNNa), IndexNs}],
[{exchange_vnodesendfun(VNPa), IndexNs}],
NullRepairFun,
ReturnFun),
io:format("Exchange id ~s~n", [GUID3d]),
{ExchangeState3d, 2} = testutil:start_receiver(),
true = ExchangeState3d == clock_compare,
LogProgress("T5"),
io:format("Delete some keys - and see the size of the delta increase~n"),
ok = lists:foreach(DeleteFun([VNPa]), DeleteList2),
{ok, _P4a, GUID4a} =
aae_exchange:start([{exchange_vnodesendfun(VNNa), IndexNs}],
[{exchange_vnodesendfun(VNPa), IndexNs}],
NullRepairFun,
ReturnFun),
io:format("Exchange id ~s~n", [GUID4a]),
{ExchangeState4a, 32} = testutil:start_receiver(),
true = ExchangeState4a == clock_compare,
% Balance the deletions
ok = lists:foreach(DeleteFun([VNNa]), DeleteList2),
{ok, _P4b, GUID4b} =
aae_exchange:start([{exchange_vnodesendfun(VNNa), IndexNs}],
[{exchange_vnodesendfun(VNPa), IndexNs}],
NullRepairFun,
ReturnFun),
io:format("Exchange id ~s~n", [GUID4b]),
{ExchangeState4b, 2} = testutil:start_receiver(),
true = ExchangeState4b == clock_compare,
io:format("Same exchange - now using tree compare~n"),
CheckBucketList = [Bucket1, Bucket2],
CheckBucketFun =
fun(CheckBucket, Acc) ->
CBFilters = {filter, CheckBucket, all, large, all, all, pre_hash},
{ok, _TCCB_P, TCCB_GUID} =
aae_exchange:start(partial,
[{exchange_vnodesendfun(VNNa), IndexNs}],
[{exchange_vnodesendfun(VNPa), IndexNs}],
NullRepairFun,
ReturnFun,
CBFilters,
[]),
io:format("Exchange id for tree compare ~s~n", [TCCB_GUID]),
{ExchangeStateTCCB, CBN} = testutil:start_receiver(),
true = ExchangeStateTCCB == clock_compare,
io:format("~w differences found in bucket ~w~n",
[CBN, CheckBucket]),
Acc + CBN
end,
true = 2 == lists:foldl(CheckBucketFun, 0, CheckBucketList),
LogProgress("T5.1"),
io:format("Tree compare section - with large deltas~n"),
Next section is going to test tree_compare with large deltas , and
% with a genuine repair fun (one which actually repairs). Repairs
should happen in stages as the mismatched segment list will at first
be too large - so there will be a down selection in select_ids
%
A delta between buckets is created both ways , with bucket3 out of
sync one way , and bucket 4 out of sync the other way
true = InitialKeyCount > 2000,
RplObjListTC = testutil:gen_riakobjects(2000, [], TupleBuckets),
FilterBucket3Fun = fun(RObj) -> RObj#r_object.bucket == Bucket3 end,
FilterBucket4Fun = fun(RObj) -> RObj#r_object.bucket == Bucket4 end,
RplObjListTC3 = lists:filter(FilterBucket3Fun, RplObjListTC),
Only have changes in Bucket 3
RplObjListTC4 = lists:filter(FilterBucket4Fun, RplObjListTC),
Only have changes in Bucket 4
SingleSidedPutFun =
fun(MVN) ->
fun(RObj) ->
PL = PreflistFun(null, RObj#r_object.key),
mock_kv_vnode:put(MVN, RObj, PL, [])
end
end,
lists:foreach(SingleSidedPutFun(VNNa), RplObjListTC3),
lists:foreach(SingleSidedPutFun(VNPa), RplObjListTC4),
LogProgress("T5.2"),
NoRepairCheckB3 = CheckBucketFun(Bucket3, 0),
NoRepairCheckB4 = CheckBucketFun(Bucket4, 0),
true = length(RplObjListTC3) > NoRepairCheckB3,
true = length(RplObjListTC4) > NoRepairCheckB4,
% this should be less than, as a subset of mismatched segments will
% be passed to fetch clocks
true = 0 < NoRepairCheckB3,
true = 0 < NoRepairCheckB4,
LogProgress("T5.3"),
RepairListMapFun = fun(RObj) -> {RObj#r_object.key, RObj} end,
RepairListTC3 = lists:map(RepairListMapFun, RplObjListTC3),
RepairListTC4 = lists:map(RepairListMapFun, RplObjListTC4),
GenuineRepairFun =
fun(SourceVnode, TargetVnode, RepairList) ->
fun(KL) ->
SubRepairFun =
fun({{RepB, K}, _VCCompare}, Acc) ->
case lists:keyfind(K, 1, RepairList) of
{K, RObj} ->
PL = PreflistFun(null, K),
ok = mock_kv_vnode:read_repair(SourceVnode,
RObj,
PL,
[TargetVnode]),
Acc + 1;
false ->
io:format("Missing from repair list ~w ~w~n",
[RepB, K]),
Acc
end
end,
Repaired = lists:foldl(SubRepairFun, 0, KL),
io:format("~w keys repaired to vnode ~w~n",
[Repaired, TargetVnode])
end
end,
RepairFunTC3 = GenuineRepairFun(VNNa, VNPa, RepairListTC3),
RepairFunTC4 = GenuineRepairFun(VNPa, VNNa, RepairListTC4),
LogProgress("T5.4"),
RepairBucketFun =
fun(CheckBucket, TargettedRepairFun, KR, MR, Hash) ->
CBFilters = {filter, CheckBucket, KR, large, all, MR, Hash},
{ok, _TCCB_P, TCCB_GUID} =
aae_exchange:start(partial,
[{exchange_vnodesendfun(VNNa), IndexNs}],
[{exchange_vnodesendfun(VNPa), IndexNs}],
TargettedRepairFun,
ReturnFun,
CBFilters,
[]),
io:format("Exchange id for tree compare ~s~n", [TCCB_GUID]),
testutil:start_receiver()
end,
FoldRepair3Fun =
fun(_I, Acc) ->
case RepairBucketFun(Bucket3, RepairFunTC3, all, all, pre_hash) of
{clock_compare, Count3} ->
Acc + Count3;
{tree_compare, 0} ->
Acc
end
end,
TotalRepairs3 = lists:foldl(FoldRepair3Fun, 0, lists:seq(1, 6)),
io:format("Repaired ~w from list of length ~w~n",
[TotalRepairs3, length(RepairListTC3)]),
true = length(RepairListTC3) == TotalRepairs3,
LogProgress("T5.5"),
FoldRepair4Fun =
fun(_I, Acc) ->
case RepairBucketFun(Bucket4, RepairFunTC4,
all, all, {rehash, 5000}) of
{clock_compare, Count4} ->
Acc + Count4;
{tree_compare, 0} ->
Acc
end
end,
TotalRepairs4 = lists:foldl(FoldRepair4Fun, 0, lists:seq(1, 6)),
io:format("Repaired ~w from list of length ~w~n",
[TotalRepairs4, length(RepairListTC4)]),
true = length(RepairListTC4) == TotalRepairs4,
LogProgress("T6"),
io:format("Check with a key range~n"),
% Testing with a modified range requires more
% effort as the objects don't have a last modified date
LimiterCheckBucketFun =
fun(LimiterFilters) ->
{ok, _TCCB_P, TCCB_GUID} =
aae_exchange:start(partial,
[{exchange_vnodesendfun(VNNa), IndexNs}],
[{exchange_vnodesendfun(VNPa), IndexNs}],
NullRepairFun,
ReturnFun,
LimiterFilters,
[{transition_pause_ms, 100}]),
io:format("Exchange id for tree compare ~s~n", [TCCB_GUID]),
testutil:start_receiver()
end,
CheckFiltersB =
{filter, Bucket3, all, small, all, all, pre_hash},
% verify no hangover going into the key range test
true = {tree_compare, 0} == LimiterCheckBucketFun(CheckFiltersB),
RplObjListKR = testutil:gen_riakobjects(1000, [], TupleBuckets),
RplObjListKR3 = lists:filter(FilterBucket3Fun, RplObjListKR),
lists:foreach(SingleSidedPutFun(VNNa), RplObjListKR3),
RepairListKR3 = lists:map(RepairListMapFun, RplObjListKR3),
RLKR3_SL = lists:sublist(lists:ukeysort(1, RepairListKR3), 50, 50),
[{SK, _SObj}|_RestKRSL] = RLKR3_SL,
{EK, _EObj} = lists:last(RLKR3_SL),
io:format("StartKey ~s EndKey ~s in Range test~n",
[binary_to_list(SK), binary_to_list(EK)]),
CheckFiltersKR =
{filter, Bucket3, {SK, EK}, medium, all, all, pre_hash},
true = {clock_compare, 50} == LimiterCheckBucketFun(CheckFiltersKR),
RepairFunKR3 = GenuineRepairFun(VNNa, VNPa, RepairListKR3),
FoldRepair3FunKR =
fun(_I, Acc) ->
case RepairBucketFun(Bucket3, RepairFunKR3,
{SK, EK},
all, pre_hash) of
{clock_compare, Count3} ->
Acc + Count3;
{tree_compare, 0} ->
Acc
end
end,
Now repair those deltas - do the key range deltas first then the rest
TotalRepairsKR3 = lists:foldl(FoldRepair3FunKR, 0, lists:seq(1, 3)),
io:format("Total range repairs after key range test ~w~n",
[TotalRepairsKR3]),
true = 50 == TotalRepairsKR3,
AllRepairsKR3 = lists:foldl(FoldRepair3Fun, 0, lists:seq(1, 5)),
io:format("Total repairs after key range test ~w~n", [AllRepairsKR3]),
true = length(RplObjListKR3) - 50 == AllRepairsKR3,
LogProgress("T7"),
io:format("Tests with a modified range~n"),
Some tests with a modified range . Split a bunch of changes into two
lots . Apply those two lots in two distinct time ranges . Find the
% deltas by time range
MDR_TS1 = os:timestamp(),
timer:sleep(1000),
RplObjListMRa = testutil:gen_riakobjects(500, [], TupleBuckets),
RplObjListMRa3 = lists:filter(FilterBucket3Fun, RplObjListMRa),
MDR_TS2 = os:timestamp(),
timer:sleep(1000),
MDR_TS3 = os:timestamp(),
timer:sleep(1000),
RplObjListMRb = testutil:gen_riakobjects(100, [], TupleBuckets),
RplObjListMRb3 = lists:filter(FilterBucket3Fun, RplObjListMRb),
MDR_TS4 = os:timestamp(),
lists:foreach(SingleSidedPutFun(VNNa), RplObjListMRa3),
% add some deltas
lists:foreach(SingleSidedPutFun(VNPa), RplObjListMRb3),
% update some of those deltas
% updating the other vnode
% check between TS3 and TS4 - should only see 'b' changes
TS3_4_Range = {convert_ts(MDR_TS3), convert_ts(MDR_TS4)},
CheckFiltersMRb =
{filter, Bucket3, all, large, all, TS3_4_Range, pre_hash},
% verify no hangover going into the key range test
TS3_4_Result = LimiterCheckBucketFun(CheckFiltersMRb),
io:format("Exchange in second modified range resulted in ~w~n",
[TS3_4_Result]),
true = {clock_compare, length(RplObjListMRb3)} == TS3_4_Result,
% check between TS1 and TS2 - should only see 'a' changes, but
% not 'b' chnages as they have a higher last modified date
TS1_2_Range = {convert_ts(MDR_TS1), convert_ts(MDR_TS2)},
CheckFiltersMRa =
{filter, Bucket3, all, large, all, TS1_2_Range, pre_hash},
% verify no hangover going into the key range test
TS1_2_Result = LimiterCheckBucketFun(CheckFiltersMRa),
io:format("Exchange in first modified range resulted in ~w~n",
[TS1_2_Result]),
true = {clock_compare, length(RplObjListMRa3)} == TS1_2_Result,
    % Important to realise that as the second amendments were made
    % on the other side (VNPa) - VNPa will return no modified
    % objects, and as VNNa saw none of the second round of updates it
    % will see all of the first round of changes soon.
    % Conflicting updates to (b) - but to be applied to VNN not VNP
RplObjListMRc = testutil:gen_riakobjects(100, [], TupleBuckets),
RplObjListMRc3 = lists:filter(FilterBucket3Fun, RplObjListMRc),
lists:foreach(SingleSidedPutFun(VNNa), RplObjListMRc3),
TS1_2_Result_ii = LimiterCheckBucketFun(CheckFiltersMRa),
io:format("Exchange in first modified range resulted in ~w~n",
[TS1_2_Result_ii]),
true =
{clock_compare, length(RplObjListMRa3) - length(RplObjListMRc3)}
== TS1_2_Result_ii,
RepairListMR3 = lists:map(RepairListMapFun, RplObjListMRa3),
RepairFunMR3 = GenuineRepairFun(VNNa, VNPa, RepairListMR3),
FoldRepair3FunMR =
fun(_I, Acc) ->
case RepairBucketFun(Bucket3, RepairFunMR3,
all,
{convert_ts(MDR_TS1),
convert_ts(os:timestamp())},
pre_hash) of
{clock_compare, Count3} ->
Acc + Count3;
{tree_compare, 0} ->
Acc
end
end,
% Now repair those deltas - still using the modified range filter, but
% with the modified range being from TS1 to now
TotalRepairsMR3 = lists:foldl(FoldRepair3FunMR, 0, lists:seq(1, 6)),
io:format("Total range repairs after modified range test ~w~n",
[TotalRepairsMR3]),
true = length(RplObjListMRa3) == TotalRepairsMR3,
LogProgress("T8"),
% Shutdown and clear down files
ok = mock_kv_vnode:close(VNNa),
ok = mock_kv_vnode:close(VNPa),
RootPath = testutil:reset_filestructure().
%% Poll the vnode with increasing backoff intervals until it reports that
%% no rebuild is in progress.  Crashes (badmatch on false) if the rebuild
%% has still not completed once every poll interval has been exhausted.
wait_for_rebuild(Vnode) ->
    PollFun =
        fun Poll([]) ->
                % Exhausted every wait without seeing completion
                false;
            Poll([Wait|MoreWaits]) ->
                timer:sleep(Wait),
                % rebuild/2 with false does not prompt a rebuild - it
                % returns {NextRebuildTS, RebuildInProgress}
                {_NextRebuildTS, InProgress} =
                    mock_kv_vnode:rebuild(Vnode, false),
                case InProgress of
                    false -> true;
                    true -> Poll(MoreWaits)
                end
        end,
    % The rebuild has completed
    true = PollFun([1000, 2000, 3000, 5000, 8000, 13000, 21000]).
%% ct cases: run the coverage fold test for each combination of backend
%% type (native | parallel_so | parallel_ko), initial key count and
%% tuple-bucket setting
coveragefold_nativemedium(_Config) ->
    mock_vnode_coveragefolder(native, 50000, true).

coveragefold_nativesmall(_Config) ->
    mock_vnode_coveragefolder(native, 5000, false).

coveragefold_parallelsmall(_Config) ->
    mock_vnode_coveragefolder(parallel_so, 5000, true).

coveragefold_parallelmedium(_Config) ->
    mock_vnode_coveragefolder(parallel_so, 50000, false).

coveragefold_parallelmediumko(_Config) ->
    mock_vnode_coveragefolder(parallel_ko, 50000, false).
%% Load 4 mock vnodes (replicating a ring-size=4, n-val=2 ring) with
%% InitialKeyCount objects, confirm consistency between the two coverage
%% plans via an aae_exchange, then run AAE folds (sibling count, hash/size,
%% metadata) and a bucket list fold, checking the outputs.
%% Type is the vnode store type (native | parallel_so | parallel_ko);
%% TupleBuckets selects {?BUCKET_TYPE, Bucket} tuple bucket names.
mock_vnode_coveragefolder(Type, InitialKeyCount, TupleBuckets) ->
    % This should load a set of 4 vnodes, with the data partitioned across the
    % vnodes n=2 to provide for 2 different coverage plans.
    %
    % After the load, an exchange can confirm consistency between the coverage
    % plans. Then run some folds to make sure that the folds produce the
    % expected results
    RootPath = testutil:reset_filestructure(),
    MockPathN1 = filename:join(RootPath, "mock_native1/"),
    MockPathN2 = filename:join(RootPath, "mock_native2/"),
    MockPathN3 = filename:join(RootPath, "mock_native3/"),
    MockPathN4 = filename:join(RootPath, "mock_native4/"),
    IndexNs =
        [{1, 2}, {2, 2}, {3, 2}, {0, 2}],
    PreflistFun =
        fun(_B, K) ->
            Idx = erlang:phash2(K) rem length(IndexNs),
            lists:nth(Idx + 1, IndexNs)
        end,
    % Open four vnodes to take two of the preflists each
    % - this is intended to replicate a ring-size=4, n-val=2 ring
    {ok, VNN1} =
        mock_kv_vnode:open(MockPathN1, Type, [{1, 2}, {0, 2}], PreflistFun),
    {ok, VNN2} =
        mock_kv_vnode:open(MockPathN2, Type, [{2, 2}, {1, 2}], PreflistFun),
    {ok, VNN3} =
        mock_kv_vnode:open(MockPathN3, Type, [{3, 2}, {2, 2}], PreflistFun),
    {ok, VNN4} =
        mock_kv_vnode:open(MockPathN4, Type, [{0, 2}, {3, 2}], PreflistFun),
    % Mapping of preflists to [Primary, Secondary] vnodes
    RingN =
        [{{1, 2}, [VNN1, VNN2]}, {{2, 2}, [VNN2, VNN3]},
            {{3, 2}, [VNN3, VNN4]}, {{0, 2}, [VNN4, VNN1]}],
    % Add each key to the vnode at the head of the preflist, and then push the
    % change to the one at the tail.
    PutFun =
        fun(Ring) ->
            fun(Object) ->
                PL = PreflistFun(null, Object#r_object.key),
                {PL, [Primary, Secondary]} = lists:keyfind(PL, 1, Ring),
                mock_kv_vnode:put(Primary, Object, PL, [Secondary])
            end
        end,
    ObjList = testutil:gen_riakobjects(InitialKeyCount, [], TupleBuckets),
    ok = lists:foreach(PutFun(RingN), ObjList),
    % Provide two coverage plans, equivalent to normal ring coverage plans
    % with offset=0 and offset=1
    AllPrimariesMap =
        fun({IndexN, [Pri, _FB]}) ->
            {exchange_vnodesendfun(Pri), [IndexN]}
        end,
    AllSecondariesMap =
        fun({IndexN, [_Pri, FB]}) ->
            {exchange_vnodesendfun(FB), [IndexN]}
        end,
    AllPrimaries = lists:map(AllPrimariesMap, RingN),
    AllSecondaries = lists:map(AllSecondariesMap, RingN),
    RPid = self(),
    RepairFun = fun(_KL) -> null end,
    ReturnFun = fun(R) -> RPid ! {result, R} end,
    % Exchange between the two coverage plans - should match as each put
    % was applied to both the primary and the secondary vnode
    {ok, _P1, GUID1} =
        aae_exchange:start(AllPrimaries, AllSecondaries, RepairFun, ReturnFun),
    io:format("Exchange id ~s~n", [GUID1]),
    {ExchangeState1, 0} = testutil:start_receiver(),
    true = ExchangeState1 == root_compare,
    % Fold over a valid coverage plan to find siblings (there are none)
    SibCountFoldFun =
        fun(B, K, V, {NoSibAcc, SibAcc}) ->
            {sibcount, SC} = lists:keyfind(sibcount, 1, V),
            case SC of
                1 -> {NoSibAcc + 1, SibAcc};
                _ -> {NoSibAcc, [{B, K}|SibAcc]}
            end
        end,
    SWF1 = os:timestamp(),
    % A coverage plan requires only two of the four vnodes - VNN1 and VNN3
    % cover all preflists between them
    {async, Folder1} =
        mock_kv_vnode:fold_aae(VNN1, all, all, SibCountFoldFun,
                                {0, []}, [{sibcount, null}]),
    {async, Folder3} =
        mock_kv_vnode:fold_aae(VNN3, all, all, SibCountFoldFun,
                                Folder1(), [{sibcount, null}]),
    {SC1, SibL1} = Folder3(),
    io:format("Coverage fold took ~w with output ~w for store ~w~n",
                [timer:now_diff(os:timestamp(), SWF1),SC1, Type]),
    true = SC1 == InitialKeyCount,
    true = [] == SibL1,
    SWF2 = os:timestamp(),
    BucketListA =
        case TupleBuckets of
            true ->
                [{?BUCKET_TYPE, integer_to_binary(0)},
                    {?BUCKET_TYPE, integer_to_binary(1)}];
            false ->
                [integer_to_binary(0), integer_to_binary(1)]
        end,
    % Repeat the sibling count fold, but restricted to two of the five
    % buckets, and over the alternative (secondary) coverage plan
    {async, Folder2} =
        mock_kv_vnode:fold_aae(VNN2,
                                {buckets, BucketListA}, all,
                                SibCountFoldFun, {0, []},
                                [{sibcount, null}]),
    {async, Folder4} =
        mock_kv_vnode:fold_aae(VNN4,
                                {buckets, BucketListA}, all,
                                SibCountFoldFun, Folder2(),
                                [{sibcount, null}]),
    {SC2, SibL2} = Folder4(),
    io:format("Coverage fold took ~w with output ~w for store ~w~n",
                [timer:now_diff(os:timestamp(), SWF2),SC2, Type]),
    true = SC2 == 2 * (InitialKeyCount div 5),
    true = [] == SibL2,
    % A fold over two coverage plans to compare the list of {B, K, H, Sz}
    % tuples found within the coverage plans
    HashSizeFoldFun =
        fun(B, K, V, Acc) ->
            {hash, H} = lists:keyfind(hash, 1, V),
            {size, Sz} = lists:keyfind(size, 1, V),
            [{B, K, H, Sz}|Acc]
        end,
    {async, Folder1HS} =
        mock_kv_vnode:fold_aae(VNN1, all, all, HashSizeFoldFun,
                                [], [{hash, null}, {size, null}]),
    {async, Folder3HS} =
        mock_kv_vnode:fold_aae(VNN3, all, all, HashSizeFoldFun,
                                Folder1HS(), [{hash, null}, {size, null}]),
    BKHSzL1 = Folder3HS(),
    true = length(BKHSzL1) == InitialKeyCount,
    {async, Folder2HS} =
        mock_kv_vnode:fold_aae(VNN2, all, all, HashSizeFoldFun,
                                [], [{hash, null}, {size, null}]),
    {async, Folder4HS} =
        mock_kv_vnode:fold_aae(VNN4, all, all, HashSizeFoldFun,
                                Folder2HS(), [{hash, null}, {size, null}]),
    BKHSzL2 = Folder4HS(),
    true = length(BKHSzL2) == InitialKeyCount,
    % Both coverage plans must see exactly the same {B, K, H, Sz} set
    true = lists:usort(BKHSzL2) == lists:usort(BKHSzL1),
    % Fold over a coverage plan pulling the 'random' element out of each
    % object's metadata, to check its distribution across the key space
    RandMetadataFoldFun =
        fun(_B, _K, V, RandAcc) ->
            {md, MD} = lists:keyfind(md, 1, V),
            [MD_Dict] = fold_metabin(MD, []),
            %% The fold needs to cope with the metadata being in different
            %% formats between parallel and native stores.  Preference
            %% is to avoid this going forward - but this is the case
            %% given how ObjectSplitFun was initially implemented in Riak
            {random, X} = lists:keyfind(random, 1, MD_Dict),
            [X|RandAcc]
        end,
    {async, Folder2MDR} =
        mock_kv_vnode:fold_aae(VNN2, all, all, RandMetadataFoldFun,
                                [], [{md, null}]),
    {async, Folder4MDR} =
        mock_kv_vnode:fold_aae(VNN4, all, all, RandMetadataFoldFun,
                                Folder2MDR(), [{md, null}]),
    % Count occurrences of each random value (values are 1, 2 or 3, used
    % here as a 1-based index into the accumulator tuple)
    CountFun = fun(X, Acc) -> setelement(X, Acc, element(X, Acc) + 1) end,
    MDRAcc = lists:foldl(CountFun, {0, 0, 0}, Folder4MDR()),
    % Expect a roughly even split - each bucket of the distribution should
    % hold at least half of a perfect third of the keys
    MinVal = InitialKeyCount div 3 - InitialKeyCount div 6,
    {A, B, C} = MDRAcc,
    true = InitialKeyCount == A + B + C,
    true = (A > MinVal) and (B > MinVal) and (C > MinVal),
    % Bucket list across all four vnodes should dedupe to the five buckets
    % created by gen_riakobjects
    {async, BucketListF1} = mock_kv_vnode:bucketlist_aae(VNN1),
    {async, BucketListF2} = mock_kv_vnode:bucketlist_aae(VNN2),
    {async, BucketListF3} = mock_kv_vnode:bucketlist_aae(VNN3),
    {async, BucketListF4} = mock_kv_vnode:bucketlist_aae(VNN4),
    DedupedBL = lists:usort(BucketListF1() ++ BucketListF2()
                            ++ BucketListF3() ++ BucketListF4()),
    true = 5 == length(DedupedBL),
    % Shutdown and clear down files
    ok = mock_kv_vnode:close(VNN1),
    ok = mock_kv_vnode:close(VNN2),
    ok = mock_kv_vnode:close(VNN3),
    ok = mock_kv_vnode:close(VNN4),
    RootPath = testutil:reset_filestructure().
%% Unpack a binary of per-sibling object metadata into a list of metadata
%% dictionaries (binary_to_term applied to the trailing dictionary of each
%% length-prefixed entry).  Two framings are handled - one with a leading
%% 0:32 prefix and one without; presumably this reflects the differing
%% native/parallel store formats noted at the call site - TODO confirm.
%% Clause order matters: the 0:32-prefixed form must be tried first.
fold_metabin(<<>>, MDAcc) ->
    % All metadata consumed - return dictionaries in original order
    lists:reverse(MDAcc);
fold_metabin(<<0:32/integer,
                MetaLen:32/integer, MetaBin:MetaLen/binary,
                Rest/binary>>, MDAcc) ->
    % Entry layout: 12-byte last-modified, length-prefixed vtag, 1-byte
    % deleted flag, then the term_to_binary'd metadata dictionary
    <<_LastModBin:12/binary, VTagLen:8/integer, _VTagBin:VTagLen/binary,
        _Deleted:1/binary-unit:8, MetaDictBin/binary>> = MetaBin,
    fold_metabin(Rest, [binary_to_term(MetaDictBin)|MDAcc]);
fold_metabin(<<MetaLen:32/integer, MetaBin:MetaLen/binary,
                Rest/binary>>, MDAcc) ->
    % Same entry layout, without the leading 0:32 prefix
    <<_LastModBin:12/binary, VTagLen:8/integer, _VTagBin:VTagLen/binary,
        _Deleted:1/binary-unit:8, MetaDictBin/binary>> = MetaBin,
    fold_metabin(Rest, [binary_to_term(MetaDictBin)|MDAcc]).
%% Delegate to testutil to build the send fun aae_exchange uses to
%% message a mock vnode
exchange_vnodesendfun(MVN) -> testutil:exchange_vnodesendfun(MVN).
%% Convert an erlang:timestamp() triple to whole seconds since the epoch,
%% discarding the microsecond component.
convert_ts({MegaSecs, Secs, _MicroSecs}) ->
    MegaSecs * 1000000 + Secs.
and then once a difference is created, discover any difference
key store)
to be kept in parallel)
Keep some rogue objects to cause failures, by not putting them
correctly into both vnodes. These aren't loaded yet
Between startup and shutdown the next_rebuild will be rescheduled to
a different time, as the look at the last rebuild time and schedule
forward from there.
differences
The rebuild is a rebuild of both
the store and the tree in the case of the parallel vnode, and just the
tree in the case of the native rebuild
next rebuild was in the future, and is still scheduled as such
element of the rebuild response)
Should not have changed
There should now be a rebuild in progress - but immediately check that
following a clock_compare)
This could receive {error, 0}. On complete of rebuild the leveled
store is shutdown - and by design this closes all iterators. So this
may crash if in the fetch_clock state
Retry in the case this exchange times out on the rebuild
completing faster than expected
Balance the deletions
with a genuine repair fun (one which actually repairs). Repairs
this should be less than, as a subset of mismatched segments will
be passed to fetch clocks
Testing with a modified range requires more
effort as the objects don't have a last modified date
verify no hangover going into the key range test
deltas by time range
add some deltas
update some of those deltas
updating the other vnode
check between TS3 and TS4 - should only see 'b' changes
verify no hangover going into the key range test
check between TS1 and TS2 - should only see 'a' changes, but
not 'b' chnages as they have a higher last modified date
verify no hangover going into the key range test
on the other side (VNPa) - VNPa will return no modified
will see all of the firts round of changes soon.
Now repair those deltas - still using the modified range filter, but
with the modified range being from TS1 to now
Shutdown and clear down files
Waiting for rebuild status to be false
on both vnodes, which would indicate
that both rebuilds have completed
Both rebuilds have completed
After the load, an exchange can confirm consistency between the coverage
plans. Then run some folds to make sure that the folds produce the
expected results
- this is intended to replicate a ring-size=4, n-val=2 ring
Mapping of preflists to [Primary, Secondary] vnodes
Add each key to the vnode at the head of the preflist, and then push the
change to the one at the tail.
with offset=0 and offset=1
tuples found within the coverage plans
is to avoid this going forward - but this is the case | -module(mockvnode_SUITE).
-include_lib("common_test/include/ct.hrl").
-export([all/0]).
-export([coveragefold_nativemedium/1,
coveragefold_nativesmall/1,
coveragefold_parallelmedium/1,
coveragefold_parallelmediumko/1,
coveragefold_parallelsmall/1,
loadexchangeandrebuild_stbucketko/1,
loadexchangeandrebuild_tuplebucketko/1,
loadexchangeandrebuild_stbucketso/1,
loadexchangeandrebuild_tuplebucketso/1]).
all() -> [
coveragefold_nativemedium,
coveragefold_nativesmall,
coveragefold_parallelmedium,
coveragefold_parallelmediumko,
coveragefold_parallelsmall,
loadexchangeandrebuild_stbucketso,
loadexchangeandrebuild_tuplebucketso,
loadexchangeandrebuild_stbucketko,
loadexchangeandrebuild_tuplebucketko
].
-include("testutil.hrl").
loadexchangeandrebuild_stbucketko(_Config) ->
mock_vnode_loadexchangeandrebuild_tester(false, parallel_ko),
mock_vnode_loadexchangeandrebuild_tester(false, parallel_ko).
loadexchangeandrebuild_stbucketso(_Config) ->
mock_vnode_loadexchangeandrebuild_tester(false, parallel_so).
loadexchangeandrebuild_tuplebucketko(_Config) ->
mock_vnode_loadexchangeandrebuild_tester(true, parallel_ko),
mock_vnode_loadexchangeandrebuild_tester(true, parallel_ko).
loadexchangeandrebuild_tuplebucketso(_Config) ->
mock_vnode_loadexchangeandrebuild_tester(true, parallel_so).
mock_vnode_loadexchangeandrebuild_tester(TupleBuckets, PType) ->
Load up two vnodes with same data , with the data in each node split
across 3 partitions ( n=1 ) .
The purpose if to perform exchanges to first highlight no differences ,
TestStartPoint = os:timestamp(),
LogProgress =
fun(Point) ->
io:format("Test reached point ~s in ~w s~n",
[Point,
timer:now_diff(os:timestamp(), TestStartPoint)
div 1000])
end,
LogProgress("T0"),
InitialKeyCount = 80000,
RootPath = testutil:reset_filestructure(),
MockPathN = filename:join(RootPath, "mock_native/"),
MockPathP = filename:join(RootPath, "mock_parallel/"),
IndexNs = [{1, 3}, {2, 3}, {3, 3}],
PreflistFun =
fun(_B, K) ->
Idx = erlang:phash2(K) rem length(IndexNs),
lists:nth(Idx + 1, IndexNs)
end,
Start up to two mock vnodes
- VNN is a native vnode ( where the AAE process will not keep a parallel
- VNP is a parallel vnode ( where a separate AAE key store is required
{ok, VNN} = mock_kv_vnode:open(MockPathN, native, IndexNs, PreflistFun),
{ok, VNP} = mock_kv_vnode:open(MockPathP, PType, IndexNs, PreflistFun),
RPid = self(),
LogNotRepairFun =
fun(KL) ->
lists:foreach(fun({{B, K}, VCCompare}) ->
io:format("Delta found in ~w ~s ~w~n",
[B,
binary_to_list(K),
VCCompare])
end,
KL)
end,
NullRepairFun = fun(_KL) -> ok end,
ReturnFun = fun(R) -> RPid ! {result, R} end,
io:format("Exchange between empty vnodes~n"),
{ok, _P0, GUID0} =
aae_exchange:start([{exchange_vnodesendfun(VNN), IndexNs}],
[{exchange_vnodesendfun(VNP), IndexNs}],
LogNotRepairFun,
ReturnFun),
io:format("Exchange id ~s~n", [GUID0]),
{ExchangeState0, 0} = testutil:start_receiver(),
true = ExchangeState0 == root_compare,
io:format("Same exchange - now using tree compare~n"),
GetBucketFun =
fun(I) ->
case TupleBuckets of
true ->
{?BUCKET_TYPE, integer_to_binary(I)};
false ->
integer_to_binary(I)
end
end,
Bucket1 = GetBucketFun(1),
Bucket2 = GetBucketFun(2),
Bucket3 = GetBucketFun(3),
Bucket4 = GetBucketFun(4),
{ok, _TC_P0, TC_GUID0} =
aae_exchange:start(partial,
[{exchange_vnodesendfun(VNN), IndexNs}],
[{exchange_vnodesendfun(VNP), IndexNs}],
LogNotRepairFun,
ReturnFun,
{filter, Bucket3, all,
small, all, all, pre_hash},
[{transition_pause_ms, 100},
{log_levels, [warn, error, critical]},
{purpose, test}]),
io:format("Exchange id for tree compare ~s~n", [TC_GUID0]),
{ExchangeStateTC0, 0} = testutil:start_receiver(),
true = ExchangeStateTC0 == tree_compare,
{ok, _TC_P1, TC_GUID1} =
aae_exchange:start(partial,
[{exchange_vnodesendfun(VNP), IndexNs}],
[{exchange_vnodesendfun(VNN), IndexNs}],
LogNotRepairFun,
ReturnFun,
{filter, Bucket3, all,
small, all, all, pre_hash},
[{transition_pause_ms, 100}]),
io:format("Exchange id for tree compare ~s~n", [TC_GUID1]),
{ExchangeStateTC1, 0} = testutil:start_receiver(),
true = ExchangeStateTC1 == tree_compare,
ObjList = testutil:gen_riakobjects(InitialKeyCount, [], TupleBuckets),
ReplaceList = testutil:gen_riakobjects(100, [], TupleBuckets),
some objects to replace the first 100 objects
DeleteList1 = lists:sublist(ObjList, 200, 100),
DeleteList2 =
lists:sublist(ObjList, 400, 10) ++
lists:sublist(ObjList, 500, 10) ++
lists:sublist(ObjList, 600, 10),
RehashList = lists:sublist(ObjList, 700, 10),
PutFun =
fun(Store1, Store2) ->
fun(Object) ->
PL = PreflistFun(null, Object#r_object.key),
mock_kv_vnode:put(Store1, Object, PL, [Store2])
end
end,
DeleteFun =
fun(Stores) ->
fun(Object) ->
PL = PreflistFun(null, Object#r_object.key),
lists:foreach(
fun(Store) ->
mock_kv_vnode:backend_delete(Store,
Object#r_object.bucket,
Object#r_object.key,
PL)
end,
Stores)
end
end,
RehashFun =
fun(Stores) ->
fun(Object) ->
PL = PreflistFun(null, Object#r_object.key),
lists:foreach(
fun(Store) ->
mock_kv_vnode:rehash(Store,
Object#r_object.bucket,
Object#r_object.key,
PL)
end,
Stores)
end
end,
LogProgress("T1"),
io:format("Load objects into both stores~n"),
PutFun1 = PutFun(VNN, VNP),
PutFun2 = PutFun(VNP, VNN),
{OL1, OL2A} = lists:split(InitialKeyCount div 2, ObjList),
{[RogueObjC1, RogueObjC2], OL2} = lists:split(2, OL2A),
RogueObj1 = RogueObjC1#r_object{bucket = Bucket1},
RogueObj2 = RogueObjC2#r_object{bucket = Bucket2},
ok = lists:foreach(PutFun1, OL1),
ok = lists:foreach(PutFun2, OL2),
ok = lists:foreach(PutFun1, ReplaceList),
ok = lists:foreach(DeleteFun([VNN, VNP]), DeleteList1),
io:format("Exchange between equivalent vnodes~n"),
{ok, _P1, GUID1} =
aae_exchange:start([{exchange_vnodesendfun(VNN), IndexNs}],
[{exchange_vnodesendfun(VNP), IndexNs}],
LogNotRepairFun,
ReturnFun),
io:format("Exchange id ~s~n", [GUID1]),
{ExchangeState1, 0} = testutil:start_receiver(),
true = ExchangeState1 == root_compare,
io:format("Rehash some entries and confirm root_compare " ++
"still matches, as rehash doesn't do anything~n"),
ok = lists:foreach(RehashFun([VNN, VNP]), RehashList),
{ok, _P1a, GUID1a} =
aae_exchange:start([{exchange_vnodesendfun(VNN), IndexNs}],
[{exchange_vnodesendfun(VNP), IndexNs}],
LogNotRepairFun,
ReturnFun),
io:format("Exchange id ~s~n", [GUID1a]),
{ExchangeState1a, 0} = testutil:start_receiver(),
true = ExchangeState1a == root_compare,
io:format("Compare the two stores using an AAE fold - " ++
"and prove that AAE fold is working as expected~n"),
Bucket =
case TupleBuckets of
true ->
{?BUCKET_TYPE, integer_to_binary(3)};
false ->
integer_to_binary(3)
end,
StartKey = list_to_binary(string:right(integer_to_list(10), 6, $0)),
EndKey = list_to_binary(string:right(integer_to_list(50), 6, $0)),
Elements = [{sibcount, null}, {clock, null}, {hash, null}],
InitAcc = {[], 0},
FoldKRFun =
fun(FB, FK, FEs, {KCHAcc, SCAcc}) ->
true = FB == Bucket,
true = FK >= StartKey,
true = FK < EndKey,
{clock, FC} = lists:keyfind(clock, 1, FEs),
{hash, FH} = lists:keyfind(hash, 1, FEs),
{sibcount, FSC} = lists:keyfind(sibcount, 1, FEs),
{lists:usort([{FK, FC, FH}|KCHAcc]), SCAcc + FSC}
end,
{async, VNNF} =
mock_kv_vnode:fold_aae(VNN,
{key_range, Bucket, StartKey, EndKey},
all,
FoldKRFun,
InitAcc,
Elements),
{async, VNPF} =
mock_kv_vnode:fold_aae(VNP,
{key_range, Bucket, StartKey, EndKey},
all,
FoldKRFun,
InitAcc,
Elements),
{VNNF_KL, VNNF_SC} = VNNF(),
{VNPF_KL, VNPF_SC} = VNPF(),
true = VNNF_SC == 8,
true = VNPF_SC == 8,
true = lists:usort(VNNF_KL) == lists:usort(VNPF_KL),
true = length(VNNF_KL) == 8,
true = length(VNPF_KL) == 8,
[{K1, C1, H1}|Rest] = VNNF_KL,
[{K2, C2, H2}|_Rest] = Rest,
BinaryKey1 = aae_util:make_binarykey(Bucket, K1),
BinaryKey2 = aae_util:make_binarykey(Bucket, K2),
SegmentID1 =
leveled_tictac:get_segment(
element(1, leveled_tictac:tictac_hash(BinaryKey1, <<>>)),
small),
SegmentID2 =
leveled_tictac:get_segment(
element(1, leveled_tictac:tictac_hash(BinaryKey2, <<>>)),
small),
io:format("Looking for Segment IDs K1 ~w ~w K2 ~w ~w~n",
[K1, SegmentID1, K2, SegmentID2]),
{async, VNNF_SL} =
mock_kv_vnode:fold_aae(VNN,
{key_range, Bucket, StartKey, EndKey},
{segments, [SegmentID1, SegmentID2], small},
FoldKRFun,
InitAcc,
Elements),
{async, VNPF_SL} =
mock_kv_vnode:fold_aae(VNP,
{key_range, Bucket, StartKey, EndKey},
{segments, [SegmentID1, SegmentID2], small},
FoldKRFun,
InitAcc,
Elements),
{[{K1, C1, H1}, {K2, C2, H2}], 2} = VNNF_SL(),
{[{K1, C1, H1}, {K2, C2, H2}], 2} = VNPF_SL(),
io:format("Make change to one vnode only (the parallel one)~n"),
Idx1 = erlang:phash2(RogueObj1#r_object.key) rem length(IndexNs),
mock_kv_vnode:put(VNP, RogueObj1, lists:nth(Idx1 + 1, IndexNs), []),
io:format("Exchange between nodes to expose difference~n"),
{ok, _P2, GUID2} =
aae_exchange:start([{exchange_vnodesendfun(VNN), IndexNs}],
[{exchange_vnodesendfun(VNP), IndexNs}],
NullRepairFun,
ReturnFun),
io:format("Exchange id ~s~n", [GUID2]),
{ExchangeState2, 1} = testutil:start_receiver(),
true = ExchangeState2 == clock_compare,
LogProgress("T2"),
io:format("Make change to one vnode only (the native one)~n"),
Idx2 = erlang:phash2(RogueObj2#r_object.key) rem length(IndexNs),
mock_kv_vnode:put(VNN, RogueObj2, lists:nth(Idx2 + 1, IndexNs), []),
io:format("Exchange between nodes to expose differences" ++
"(one in VNN, one in VNP)~n"),
{ok, _P3, GUID3} =
aae_exchange:start([{exchange_vnodesendfun(VNN), IndexNs}],
[{exchange_vnodesendfun(VNP), IndexNs}],
NullRepairFun,
ReturnFun),
io:format("Exchange id ~s~n", [GUID3]),
{ExchangeState3, 2} = testutil:start_receiver(),
true = ExchangeState3 == clock_compare,
{RebuildN, false} = mock_kv_vnode:rebuild(VNN, false),
{RebuildP, false} = mock_kv_vnode:rebuild(VNP, false),
io:format("Discover Next rebuild times - should be in the future " ++
"as both stores were started empty, and hence without " ++
"the need to rebuild~n"),
io:format("Next rebuild vnn ~w vnp ~w~n", [RebuildN, RebuildP]),
true = RebuildN > os:timestamp(),
true = RebuildP > os:timestamp(),
ok = mock_kv_vnode:close(VNN),
ok = mock_kv_vnode:close(VNP),
io:format("Restart the vnodes, " ++
"confirm next rebuilds are still in the future~n"),
{ok, VNNa} = mock_kv_vnode:open(MockPathN, native, IndexNs, PreflistFun),
{ok, VNPa} = mock_kv_vnode:open(MockPathP, PType, IndexNs, PreflistFun),
{RebuildNa, false} = mock_kv_vnode:rebuild(VNNa, false),
{RebuildPa, false} = mock_kv_vnode:rebuild(VNPa, false),
io:format("Next rebuild vnn ~w vnp ~w~n", [RebuildNa, RebuildPa]),
true = RebuildNa > os:timestamp(),
true = RebuildPa > os:timestamp(),
Exchange between nodes to expose differences ( one in VNN , one in VNP )
io:format("Should still discover the same difference " ++
"as when they were close~n"),
{ok, _P3a, GUID3a} =
aae_exchange:start([{exchange_vnodesendfun(VNNa), IndexNs}],
[{exchange_vnodesendfun(VNPa), IndexNs}],
NullRepairFun,
ReturnFun),
io:format("Exchange id ~s~n", [GUID3a]),
{ExchangeState3a, 2} = testutil:start_receiver(),
true = ExchangeState3a == clock_compare,
LogProgress("T3"),
Exchange with a one hour modified range - should see same differences
io:format("Repeat exchange with 1 hour modified range~n"),
MRH = convert_ts(os:timestamp()),
{ok, _P3mr1, GUID3mr1} =
aae_exchange:start(full,
[{exchange_vnodesendfun(VNNa), IndexNs}],
[{exchange_vnodesendfun(VNPa), IndexNs}],
NullRepairFun,
ReturnFun,
{filter, all, all, large, all,
{MRH - (60 * 60), MRH},
pre_hash},
[]),
io:format("Exchange id ~s~n", [GUID3mr1]),
{ExchangeState3mr1, 2} = testutil:start_receiver(),
true = ExchangeState3mr1 == clock_compare,
Exchnage with an older modified range - see clock_compare but no
io:format("Repeat exchange, but with change outside of modified range~n"),
{ok, _P3mr2, GUID3mr2} =
aae_exchange:start(full,
[{exchange_vnodesendfun(VNNa), IndexNs}],
[{exchange_vnodesendfun(VNPa), IndexNs}],
NullRepairFun,
ReturnFun,
{filter, all, all, large, all,
{MRH - (2 * 60 * 60), MRH - (60 * 60)},
pre_hash},
[]),
io:format("Exchange id ~s~n", [GUID3mr2]),
{ExchangeState3mr2, 0} = testutil:start_receiver(),
true = ExchangeState3mr2 == clock_compare,
io:format("Repeat exchange with modified range and Bucket constraint~n"),
{ok, _P3bmr1, GUID3bmr1} =
aae_exchange:start(full,
[{exchange_vnodesendfun(VNNa), IndexNs}],
[{exchange_vnodesendfun(VNPa), IndexNs}],
NullRepairFun,
ReturnFun,
{filter, Bucket1, all, large, all,
{MRH - (60 * 60), MRH},
pre_hash},
[]),
io:format("Exchange id ~s~n", [GUID3bmr1]),
{ExchangeState3bmr1, 1} = testutil:start_receiver(),
true = ExchangeState3bmr1 == clock_compare,
io:format("Repeat exchange with modified range and Bucket constraint~n"),
{ok, _P3bmr2, GUID3bmr2} =
aae_exchange:start(full,
[{exchange_vnodesendfun(VNNa), IndexNs}],
[{exchange_vnodesendfun(VNPa), IndexNs}],
NullRepairFun,
ReturnFun,
{filter, Bucket2, all, large, all,
{MRH - (60 * 60), MRH},
pre_hash},
[]),
io:format("Exchange id ~s~n", [GUID3bmr2]),
{ExchangeState3bmr2, 1} = testutil:start_receiver(),
true = ExchangeState3bmr2 == clock_compare,
io:format("Repeat exchange with modified range and unmodified Bucket~n"),
{ok, _P3bmr3, GUID3bmr3} =
aae_exchange:start(full,
[{exchange_vnodesendfun(VNNa), IndexNs}],
[{exchange_vnodesendfun(VNPa), IndexNs}],
NullRepairFun,
ReturnFun,
{filter, Bucket, all, large, all,
{MRH - (60 * 60), MRH},
pre_hash},
[]),
io:format("Exchange id ~s~n", [GUID3bmr3]),
{ExchangeState3bmr3, 0} = testutil:start_receiver(),
true = ExchangeState3bmr3 == clock_compare,
LogProgress("T4"),
io:format("Prompts for a rebuild of both stores~n"),
{RebuildNb, true} = mock_kv_vnode:rebuild(VNNa, true),
true = RebuildNb > os:timestamp(),
key thing that the ongoing rebuild status is now true ( the second
io:format("Now poll to check to see when the rebuild is complete~n"),
wait_for_rebuild(VNNa),
Next rebuild times should now still be in the future
{RebuildNc, false} = mock_kv_vnode:rebuild(VNNa, false),
{RebuildPc, false} = mock_kv_vnode:rebuild(VNPa, false),
io:format("Following a completed rebuild - the exchange should still" ++
" work as before, spotting two differences~n"),
{ok, _P3b, GUID3b} =
aae_exchange:start([{exchange_vnodesendfun(VNNa), IndexNs}],
[{exchange_vnodesendfun(VNPa), IndexNs}],
NullRepairFun,
ReturnFun),
io:format("Exchange id ~s~n", [GUID3b]),
{ExchangeState3b, 2} = testutil:start_receiver(),
true = ExchangeState3b == clock_compare,
{RebuildPb, true} = mock_kv_vnode:rebuild(VNPa, true),
true = RebuildPb > os:timestamp(),
io:format("Rebuild in progress, exchange still working~n"),
an exchange will still work ( spotting the same two differences as before
{ok, _P3c, GUID3c} =
aae_exchange:start([{exchange_vnodesendfun(VNNa), IndexNs}],
[{exchange_vnodesendfun(VNPa), IndexNs}],
NullRepairFun,
ReturnFun),
io:format("Exchange id ~s~n", [GUID3c]),
{ExchangeState3c, 2} =
case testutil:start_receiver() of
{error, 0} ->
aae_exchange:start([{exchange_vnodesendfun(VNNa), IndexNs}],
[{exchange_vnodesendfun(VNPa), IndexNs}],
NullRepairFun,
ReturnFun),
testutil:start_receiver();
Other ->
Other
end,
true = ExchangeState3c == clock_compare,
io:format("Waiting for rebuild after exchange success~n"),
wait_for_rebuild(VNPa),
{RebuildNd, false} = mock_kv_vnode:rebuild(VNNa, false),
{RebuildPd, false} = mock_kv_vnode:rebuild(VNPa, false),
true = RebuildNd == RebuildNc,
true = RebuildPd > os:timestamp(),
io:format("Rebuild now complete - " ++
"should get the same result for an exchange~n"),
{ok, _P3d, GUID3d} =
aae_exchange:start([{exchange_vnodesendfun(VNNa), IndexNs}],
[{exchange_vnodesendfun(VNPa), IndexNs}],
NullRepairFun,
ReturnFun),
io:format("Exchange id ~s~n", [GUID3d]),
{ExchangeState3d, 2} = testutil:start_receiver(),
true = ExchangeState3d == clock_compare,
LogProgress("T5"),
io:format("Delete some keys - and see the size of the delta increase~n"),
ok = lists:foreach(DeleteFun([VNPa]), DeleteList2),
{ok, _P4a, GUID4a} =
aae_exchange:start([{exchange_vnodesendfun(VNNa), IndexNs}],
[{exchange_vnodesendfun(VNPa), IndexNs}],
NullRepairFun,
ReturnFun),
io:format("Exchange id ~s~n", [GUID4a]),
{ExchangeState4a, 32} = testutil:start_receiver(),
true = ExchangeState4a == clock_compare,
ok = lists:foreach(DeleteFun([VNNa]), DeleteList2),
{ok, _P4b, GUID4b} =
aae_exchange:start([{exchange_vnodesendfun(VNNa), IndexNs}],
[{exchange_vnodesendfun(VNPa), IndexNs}],
NullRepairFun,
ReturnFun),
io:format("Exchange id ~s~n", [GUID4b]),
{ExchangeState4b, 2} = testutil:start_receiver(),
true = ExchangeState4b == clock_compare,
io:format("Same exchange - now using tree compare~n"),
CheckBucketList = [Bucket1, Bucket2],
CheckBucketFun =
fun(CheckBucket, Acc) ->
CBFilters = {filter, CheckBucket, all, large, all, all, pre_hash},
{ok, _TCCB_P, TCCB_GUID} =
aae_exchange:start(partial,
[{exchange_vnodesendfun(VNNa), IndexNs}],
[{exchange_vnodesendfun(VNPa), IndexNs}],
NullRepairFun,
ReturnFun,
CBFilters,
[]),
io:format("Exchange id for tree compare ~s~n", [TCCB_GUID]),
{ExchangeStateTCCB, CBN} = testutil:start_receiver(),
true = ExchangeStateTCCB == clock_compare,
io:format("~w differences found in bucket ~w~n",
[CBN, CheckBucket]),
Acc + CBN
end,
true = 2 == lists:foldl(CheckBucketFun, 0, CheckBucketList),
LogProgress("T5.1"),
io:format("Tree compare section - with large deltas~n"),
Next section is going to test tree_compare with large deltas , and
should happen in stages as the mismatched segment list will at first
be too large - so there will be a down selection in select_ids
A delta between buckets is created both ways , with bucket3 out of
sync one way , and bucket 4 out of sync the other way
true = InitialKeyCount > 2000,
RplObjListTC = testutil:gen_riakobjects(2000, [], TupleBuckets),
FilterBucket3Fun = fun(RObj) -> RObj#r_object.bucket == Bucket3 end,
FilterBucket4Fun = fun(RObj) -> RObj#r_object.bucket == Bucket4 end,
RplObjListTC3 = lists:filter(FilterBucket3Fun, RplObjListTC),
Only have changes in Bucket 3
RplObjListTC4 = lists:filter(FilterBucket4Fun, RplObjListTC),
Only have changes in Bucket 4
SingleSidedPutFun =
fun(MVN) ->
fun(RObj) ->
PL = PreflistFun(null, RObj#r_object.key),
mock_kv_vnode:put(MVN, RObj, PL, [])
end
end,
lists:foreach(SingleSidedPutFun(VNNa), RplObjListTC3),
lists:foreach(SingleSidedPutFun(VNPa), RplObjListTC4),
LogProgress("T5.2"),
NoRepairCheckB3 = CheckBucketFun(Bucket3, 0),
NoRepairCheckB4 = CheckBucketFun(Bucket4, 0),
true = length(RplObjListTC3) > NoRepairCheckB3,
true = length(RplObjListTC4) > NoRepairCheckB4,
true = 0 < NoRepairCheckB3,
true = 0 < NoRepairCheckB4,
LogProgress("T5.3"),
RepairListMapFun = fun(RObj) -> {RObj#r_object.key, RObj} end,
RepairListTC3 = lists:map(RepairListMapFun, RplObjListTC3),
RepairListTC4 = lists:map(RepairListMapFun, RplObjListTC4),
GenuineRepairFun =
fun(SourceVnode, TargetVnode, RepairList) ->
fun(KL) ->
SubRepairFun =
fun({{RepB, K}, _VCCompare}, Acc) ->
case lists:keyfind(K, 1, RepairList) of
{K, RObj} ->
PL = PreflistFun(null, K),
ok = mock_kv_vnode:read_repair(SourceVnode,
RObj,
PL,
[TargetVnode]),
Acc + 1;
false ->
io:format("Missing from repair list ~w ~w~n",
[RepB, K]),
Acc
end
end,
Repaired = lists:foldl(SubRepairFun, 0, KL),
io:format("~w keys repaired to vnode ~w~n",
[Repaired, TargetVnode])
end
end,
RepairFunTC3 = GenuineRepairFun(VNNa, VNPa, RepairListTC3),
RepairFunTC4 = GenuineRepairFun(VNPa, VNNa, RepairListTC4),
LogProgress("T5.4"),
RepairBucketFun =
fun(CheckBucket, TargettedRepairFun, KR, MR, Hash) ->
CBFilters = {filter, CheckBucket, KR, large, all, MR, Hash},
{ok, _TCCB_P, TCCB_GUID} =
aae_exchange:start(partial,
[{exchange_vnodesendfun(VNNa), IndexNs}],
[{exchange_vnodesendfun(VNPa), IndexNs}],
TargettedRepairFun,
ReturnFun,
CBFilters,
[]),
io:format("Exchange id for tree compare ~s~n", [TCCB_GUID]),
testutil:start_receiver()
end,
FoldRepair3Fun =
fun(_I, Acc) ->
case RepairBucketFun(Bucket3, RepairFunTC3, all, all, pre_hash) of
{clock_compare, Count3} ->
Acc + Count3;
{tree_compare, 0} ->
Acc
end
end,
TotalRepairs3 = lists:foldl(FoldRepair3Fun, 0, lists:seq(1, 6)),
io:format("Repaired ~w from list of length ~w~n",
[TotalRepairs3, length(RepairListTC3)]),
true = length(RepairListTC3) == TotalRepairs3,
LogProgress("T5.5"),
FoldRepair4Fun =
fun(_I, Acc) ->
case RepairBucketFun(Bucket4, RepairFunTC4,
all, all, {rehash, 5000}) of
{clock_compare, Count4} ->
Acc + Count4;
{tree_compare, 0} ->
Acc
end
end,
TotalRepairs4 = lists:foldl(FoldRepair4Fun, 0, lists:seq(1, 6)),
io:format("Repaired ~w from list of length ~w~n",
[TotalRepairs4, length(RepairListTC4)]),
true = length(RepairListTC4) == TotalRepairs4,
LogProgress("T6"),
io:format("Check with a key range~n"),
LimiterCheckBucketFun =
fun(LimiterFilters) ->
{ok, _TCCB_P, TCCB_GUID} =
aae_exchange:start(partial,
[{exchange_vnodesendfun(VNNa), IndexNs}],
[{exchange_vnodesendfun(VNPa), IndexNs}],
NullRepairFun,
ReturnFun,
LimiterFilters,
[{transition_pause_ms, 100}]),
io:format("Exchange id for tree compare ~s~n", [TCCB_GUID]),
testutil:start_receiver()
end,
CheckFiltersB =
{filter, Bucket3, all, small, all, all, pre_hash},
true = {tree_compare, 0} == LimiterCheckBucketFun(CheckFiltersB),
RplObjListKR = testutil:gen_riakobjects(1000, [], TupleBuckets),
RplObjListKR3 = lists:filter(FilterBucket3Fun, RplObjListKR),
lists:foreach(SingleSidedPutFun(VNNa), RplObjListKR3),
RepairListKR3 = lists:map(RepairListMapFun, RplObjListKR3),
RLKR3_SL = lists:sublist(lists:ukeysort(1, RepairListKR3), 50, 50),
[{SK, _SObj}|_RestKRSL] = RLKR3_SL,
{EK, _EObj} = lists:last(RLKR3_SL),
io:format("StartKey ~s EndKey ~s in Range test~n",
[binary_to_list(SK), binary_to_list(EK)]),
CheckFiltersKR =
{filter, Bucket3, {SK, EK}, medium, all, all, pre_hash},
true = {clock_compare, 50} == LimiterCheckBucketFun(CheckFiltersKR),
RepairFunKR3 = GenuineRepairFun(VNNa, VNPa, RepairListKR3),
FoldRepair3FunKR =
fun(_I, Acc) ->
case RepairBucketFun(Bucket3, RepairFunKR3,
{SK, EK},
all, pre_hash) of
{clock_compare, Count3} ->
Acc + Count3;
{tree_compare, 0} ->
Acc
end
end,
    %% Now repair those deltas - do the key range deltas first then the rest
TotalRepairsKR3 = lists:foldl(FoldRepair3FunKR, 0, lists:seq(1, 3)),
io:format("Total range repairs after key range test ~w~n",
[TotalRepairsKR3]),
true = 50 == TotalRepairsKR3,
AllRepairsKR3 = lists:foldl(FoldRepair3Fun, 0, lists:seq(1, 5)),
io:format("Total repairs after key range test ~w~n", [AllRepairsKR3]),
true = length(RplObjListKR3) - 50 == AllRepairsKR3,
LogProgress("T7"),
io:format("Tests with a modified range~n"),
    %% Some tests with a modified range.  Split a bunch of changes into two
    %% lots.  Apply those two lots in two distinct time ranges.  Find the
    %% changes within each modified time range separately.
MDR_TS1 = os:timestamp(),
timer:sleep(1000),
RplObjListMRa = testutil:gen_riakobjects(500, [], TupleBuckets),
RplObjListMRa3 = lists:filter(FilterBucket3Fun, RplObjListMRa),
MDR_TS2 = os:timestamp(),
timer:sleep(1000),
MDR_TS3 = os:timestamp(),
timer:sleep(1000),
RplObjListMRb = testutil:gen_riakobjects(100, [], TupleBuckets),
RplObjListMRb3 = lists:filter(FilterBucket3Fun, RplObjListMRb),
MDR_TS4 = os:timestamp(),
lists:foreach(SingleSidedPutFun(VNNa), RplObjListMRa3),
lists:foreach(SingleSidedPutFun(VNPa), RplObjListMRb3),
TS3_4_Range = {convert_ts(MDR_TS3), convert_ts(MDR_TS4)},
CheckFiltersMRb =
{filter, Bucket3, all, large, all, TS3_4_Range, pre_hash},
TS3_4_Result = LimiterCheckBucketFun(CheckFiltersMRb),
io:format("Exchange in second modified range resulted in ~w~n",
[TS3_4_Result]),
true = {clock_compare, length(RplObjListMRb3)} == TS3_4_Result,
TS1_2_Range = {convert_ts(MDR_TS1), convert_ts(MDR_TS2)},
CheckFiltersMRa =
{filter, Bucket3, all, large, all, TS1_2_Range, pre_hash},
TS1_2_Result = LimiterCheckBucketFun(CheckFiltersMRa),
io:format("Exchange in first modified range resulted in ~w~n",
[TS1_2_Result]),
true = {clock_compare, length(RplObjListMRa3)} == TS1_2_Result,
    %% Important to realise that the second amendments were made to the same
    %% objects, and as VNNa saw none of the second round of updates it
    %% will still show deltas for the first range.
    %% Conflicting updates to (b) - but to be applied to VNN not VNP.
RplObjListMRc = testutil:gen_riakobjects(100, [], TupleBuckets),
RplObjListMRc3 = lists:filter(FilterBucket3Fun, RplObjListMRc),
lists:foreach(SingleSidedPutFun(VNNa), RplObjListMRc3),
TS1_2_Result_ii = LimiterCheckBucketFun(CheckFiltersMRa),
io:format("Exchange in first modified range resulted in ~w~n",
[TS1_2_Result_ii]),
true =
{clock_compare, length(RplObjListMRa3) - length(RplObjListMRc3)}
== TS1_2_Result_ii,
RepairListMR3 = lists:map(RepairListMapFun, RplObjListMRa3),
RepairFunMR3 = GenuineRepairFun(VNNa, VNPa, RepairListMR3),
FoldRepair3FunMR =
fun(_I, Acc) ->
case RepairBucketFun(Bucket3, RepairFunMR3,
all,
{convert_ts(MDR_TS1),
convert_ts(os:timestamp())},
pre_hash) of
{clock_compare, Count3} ->
Acc + Count3;
{tree_compare, 0} ->
Acc
end
end,
TotalRepairsMR3 = lists:foldl(FoldRepair3FunMR, 0, lists:seq(1, 6)),
io:format("Total range repairs after modified range test ~w~n",
[TotalRepairsMR3]),
true = length(RplObjListMRa3) == TotalRepairsMR3,
LogProgress("T8"),
ok = mock_kv_vnode:close(VNNa),
ok = mock_kv_vnode:close(VNPa),
RootPath = testutil:reset_filestructure().
%% Poll the vnode with a back-off schedule (roughly Fibonacci waits)
%% until mock_kv_vnode:rebuild/2 reports that no rebuild is in progress.
%% Crashes (badmatch) if the rebuild has not completed after all waits.
wait_for_rebuild(Vnode) ->
    Waits = [1000, 2000, 3000, 5000, 8000, 13000, 21000],
    CheckFun =
        fun(Pause, Done) ->
            case Done of
                true ->
                    %% Already observed completion - skip remaining waits
                    true;
                false ->
                    timer:sleep(Pause),
                    %% Second element of the result is the rebuilding flag
                    {_NextRebuildTS, Rebuilding} =
                        mock_kv_vnode:rebuild(Vnode, false),
                    not Rebuilding
            end
        end,
    Outcome = lists:foldl(CheckFun, false, Waits),
    true = Outcome == true.
%% Coverage fold test - native store, 50K keys, tuple bucket names
coveragefold_nativemedium(_Config) ->
    mock_vnode_coveragefolder(native, 50000, true).
%% Coverage fold test - native store, 5K keys, binary bucket names
coveragefold_nativesmall(_Config) ->
    mock_vnode_coveragefolder(native, 5000, false).
%% Coverage fold test - parallel (segment-ordered) store, 5K keys,
%% tuple bucket names
coveragefold_parallelsmall(_Config) ->
    mock_vnode_coveragefolder(parallel_so, 5000, true).
%% Coverage fold test - parallel (segment-ordered) store, 50K keys,
%% binary bucket names
coveragefold_parallelmedium(_Config) ->
    mock_vnode_coveragefolder(parallel_so, 50000, false).
%% Coverage fold test - parallel (key-ordered) store, 50K keys,
%% binary bucket names
coveragefold_parallelmediumko(_Config) ->
    mock_vnode_coveragefolder(parallel_ko, 50000, false).
%% Load a set of 4 mock vnodes, with the data partitioned across the
%% vnodes n=2, to provide for 2 different coverage plans.  Coverage
%% folds are then run over the AAE stores to check sibling counts,
%% {Bucket, Key, Hash, Size} listings, metadata access, and bucket
%% listings, before closing the vnodes and resetting the file structure.
%% Type is the AAE keystore type (native | parallel_so | parallel_ko),
%% InitialKeyCount the number of objects generated, and TupleBuckets
%% controls whether bucket names are {Type, Bucket} tuples.
%% (Fix in this revision: comment markers had been stripped from the
%% in-body comment lines, leaving bare prose that would not compile.)
mock_vnode_coveragefolder(Type, InitialKeyCount, TupleBuckets) ->
    RootPath = testutil:reset_filestructure(),
    MockPathN1 = filename:join(RootPath, "mock_native1/"),
    MockPathN2 = filename:join(RootPath, "mock_native2/"),
    MockPathN3 = filename:join(RootPath, "mock_native3/"),
    MockPathN4 = filename:join(RootPath, "mock_native4/"),
    IndexNs =
        [{1, 2}, {2, 2}, {3, 2}, {0, 2}],
    PreflistFun =
        fun(_B, K) ->
            Idx = erlang:phash2(K) rem length(IndexNs),
            lists:nth(Idx + 1, IndexNs)
        end,
    %% Open four vnodes to take two of the preflists each
    {ok, VNN1} =
        mock_kv_vnode:open(MockPathN1, Type, [{1, 2}, {0, 2}], PreflistFun),
    {ok, VNN2} =
        mock_kv_vnode:open(MockPathN2, Type, [{2, 2}, {1, 2}], PreflistFun),
    {ok, VNN3} =
        mock_kv_vnode:open(MockPathN3, Type, [{3, 2}, {2, 2}], PreflistFun),
    {ok, VNN4} =
        mock_kv_vnode:open(MockPathN4, Type, [{0, 2}, {3, 2}], PreflistFun),
    %% Each preflist has a primary vnode and a secondary vnode
    RingN =
        [{{1, 2}, [VNN1, VNN2]}, {{2, 2}, [VNN2, VNN3]},
            {{3, 2}, [VNN3, VNN4]}, {{0, 2}, [VNN4, VNN1]}],
    PutFun =
        fun(Ring) ->
            fun(Object) ->
                PL = PreflistFun(null, Object#r_object.key),
                {PL, [Primary, Secondary]} = lists:keyfind(PL, 1, Ring),
                mock_kv_vnode:put(Primary, Object, PL, [Secondary])
            end
        end,
    ObjList = testutil:gen_riakobjects(InitialKeyCount, [], TupleBuckets),
    ok = lists:foreach(PutFun(RingN), ObjList),
    %% Provide two coverage plans, equivalent to normal ring coverage
    %% plans - one over all primaries, one over all secondaries
    AllPrimariesMap =
        fun({IndexN, [Pri, _FB]}) ->
            {exchange_vnodesendfun(Pri), [IndexN]}
        end,
    AllSecondariesMap =
        fun({IndexN, [_Pri, FB]}) ->
            {exchange_vnodesendfun(FB), [IndexN]}
        end,
    AllPrimaries = lists:map(AllPrimariesMap, RingN),
    AllSecondaries = lists:map(AllSecondariesMap, RingN),
    RPid = self(),
    RepairFun = fun(_KL) -> null end,
    ReturnFun = fun(R) -> RPid ! {result, R} end,
    %% The two coverage plans should hold identical data, so the
    %% exchange should complete at root_compare with 0 deltas
    {ok, _P1, GUID1} =
        aae_exchange:start(AllPrimaries, AllSecondaries, RepairFun, ReturnFun),
    io:format("Exchange id ~s~n", [GUID1]),
    {ExchangeState1, 0} = testutil:start_receiver(),
    true = ExchangeState1 == root_compare,
    %% Fold over a valid coverage plan to find siblings (there are none)
    SibCountFoldFun =
        fun(B, K, V, {NoSibAcc, SibAcc}) ->
            {sibcount, SC} = lists:keyfind(sibcount, 1, V),
            case SC of
                1 -> {NoSibAcc + 1, SibAcc};
                _ -> {NoSibAcc, [{B, K}|SibAcc]}
            end
        end,
    SWF1 = os:timestamp(),
    {async, Folder1} =
        mock_kv_vnode:fold_aae(VNN1, all, all, SibCountFoldFun,
                                {0, []}, [{sibcount, null}]),
    {async, Folder3} =
        mock_kv_vnode:fold_aae(VNN3, all, all, SibCountFoldFun,
                                Folder1(), [{sibcount, null}]),
    {SC1, SibL1} = Folder3(),
    io:format("Coverage fold took ~w with output ~w for store ~w~n",
                [timer:now_diff(os:timestamp(), SWF1),SC1, Type]),
    true = SC1 == InitialKeyCount,
    true = [] == SibL1,
    %% Repeat the sibling-count fold, restricted to 2 of the buckets
    SWF2 = os:timestamp(),
    BucketListA =
        case TupleBuckets of
            true ->
                [{?BUCKET_TYPE, integer_to_binary(0)},
                    {?BUCKET_TYPE, integer_to_binary(1)}];
            false ->
                [integer_to_binary(0), integer_to_binary(1)]
        end,
    {async, Folder2} =
        mock_kv_vnode:fold_aae(VNN2,
                                {buckets, BucketListA}, all,
                                SibCountFoldFun, {0, []},
                                [{sibcount, null}]),
    {async, Folder4} =
        mock_kv_vnode:fold_aae(VNN4,
                                {buckets, BucketListA}, all,
                                SibCountFoldFun, Folder2(),
                                [{sibcount, null}]),
    {SC2, SibL2} = Folder4(),
    io:format("Coverage fold took ~w with output ~w for store ~w~n",
                [timer:now_diff(os:timestamp(), SWF2),SC2, Type]),
    %% 2 of the 5 generated buckets are included in this fold
    true = SC2 == 2 * (InitialKeyCount div 5),
    true = [] == SibL2,
    %% A fold over two coverage plans to compare the list of {B, K, H, Sz}
    %% tuples from each plan - both plans must yield the same sorted set
    HashSizeFoldFun =
        fun(B, K, V, Acc) ->
            {hash, H} = lists:keyfind(hash, 1, V),
            {size, Sz} = lists:keyfind(size, 1, V),
            [{B, K, H, Sz}|Acc]
        end,
    {async, Folder1HS} =
        mock_kv_vnode:fold_aae(VNN1, all, all, HashSizeFoldFun,
                                [], [{hash, null}, {size, null}]),
    {async, Folder3HS} =
        mock_kv_vnode:fold_aae(VNN3, all, all, HashSizeFoldFun,
                                Folder1HS(), [{hash, null}, {size, null}]),
    BKHSzL1 = Folder3HS(),
    true = length(BKHSzL1) == InitialKeyCount,
    {async, Folder2HS} =
        mock_kv_vnode:fold_aae(VNN2, all, all, HashSizeFoldFun,
                                [], [{hash, null}, {size, null}]),
    {async, Folder4HS} =
        mock_kv_vnode:fold_aae(VNN4, all, all, HashSizeFoldFun,
                                Folder2HS(), [{hash, null}, {size, null}]),
    BKHSzL2 = Folder4HS(),
    true = length(BKHSzL2) == InitialKeyCount,
    true = lists:usort(BKHSzL2) == lists:usort(BKHSzL1),
    %% Fold over a coverage plan to check the random metadata field
    RandMetadataFoldFun =
        fun(_B, _K, V, RandAcc) ->
            {md, MD} = lists:keyfind(md, 1, V),
            [MD_Dict] = fold_metabin(MD, []),
            %% NOTE(review): the fold needs to cope with the MD being in
            %% different formats between parallel and native stores -
            %% preference given to how ObjectSplitFun was initially
            %% implemented in Riak (assumption to confirm)
            {random, X} = lists:keyfind(random, 1, MD_Dict),
            [X|RandAcc]
        end,
    {async, Folder2MDR} =
        mock_kv_vnode:fold_aae(VNN2, all, all, RandMetadataFoldFun,
                                [], [{md, null}]),
    {async, Folder4MDR} =
        mock_kv_vnode:fold_aae(VNN4, all, all, RandMetadataFoldFun,
                                Folder2MDR(), [{md, null}]),
    %% The random field is used as a 1-based index into a 3-tuple, so it
    %% must take values 1..3; check counts sum to the key count and are
    %% roughly evenly spread (each above ~1/6 of the total)
    CountFun = fun(X, Acc) -> setelement(X, Acc, element(X, Acc) + 1) end,
    MDRAcc = lists:foldl(CountFun, {0, 0, 0}, Folder4MDR()),
    MinVal = InitialKeyCount div 3 - InitialKeyCount div 6,
    {A, B, C} = MDRAcc,
    true = InitialKeyCount == A + B + C,
    true = (A > MinVal) and (B > MinVal) and (C > MinVal),
    %% Bucket lists from all four vnodes should dedupe to the 5 buckets
    {async, BucketListF1} = mock_kv_vnode:bucketlist_aae(VNN1),
    {async, BucketListF2} = mock_kv_vnode:bucketlist_aae(VNN2),
    {async, BucketListF3} = mock_kv_vnode:bucketlist_aae(VNN3),
    {async, BucketListF4} = mock_kv_vnode:bucketlist_aae(VNN4),
    DedupedBL = lists:usort(BucketListF1() ++ BucketListF2()
                            ++ BucketListF3() ++ BucketListF4()),
    true = 5 == length(DedupedBL),
    ok = mock_kv_vnode:close(VNN1),
    ok = mock_kv_vnode:close(VNN2),
    ok = mock_kv_vnode:close(VNN3),
    ok = mock_kv_vnode:close(VNN4),
    RootPath = testutil:reset_filestructure().
%% Unpack a Riak object metadata binary into a list of metadata dicts
%% (one per sibling), each decoded via binary_to_term/1.
%% NOTE(review): the two non-empty clauses appear to handle two layouts -
%% one with a leading 0:32 word before each {MetaLen, MetaBin} pair and
%% one without - presumably differing between the native and parallel
%% store formats; confirm against the ObjectSplitFun implementation.
fold_metabin(<<>>, MDAcc) ->
    %% All metadata consumed - return entries in original order
    lists:reverse(MDAcc);
fold_metabin(<<0:32/integer,
                MetaLen:32/integer, MetaBin:MetaLen/binary,
                Rest/binary>>, MDAcc) ->
    %% Skip the 12-byte last-modified field, the variable-length vtag
    %% and the 1-byte deleted flag to reach the encoded metadata dict
    <<_LastModBin:12/binary, VTagLen:8/integer, _VTagBin:VTagLen/binary,
        _Deleted:1/binary-unit:8, MetaDictBin/binary>> = MetaBin,
    fold_metabin(Rest, [binary_to_term(MetaDictBin)|MDAcc]);
fold_metabin(<<MetaLen:32/integer, MetaBin:MetaLen/binary,
                Rest/binary>>, MDAcc) ->
    %% Same layout as above, without the leading 0:32 word
    <<_LastModBin:12/binary, VTagLen:8/integer, _VTagBin:VTagLen/binary,
        _Deleted:1/binary-unit:8, MetaDictBin/binary>> = MetaBin,
    fold_metabin(Rest, [binary_to_term(MetaDictBin)|MDAcc]).
exchange_vnodesendfun(MVN) -> testutil:exchange_vnodesendfun(MVN).
convert_ts({Tmeg, Tsec, _Tmcr}) -> Tmeg * 1000000 + Tsec. |
d7f70489096fdfc7adcad9cb887336a5b7dd41b297f21095ca02328411fb4ab0 | MinaProtocol/mina | call_stack_digest_intf.ml | open Core_kernel
(* Interface for the digest of a call stack: a stack of frame digests
   is represented by the hash built by repeatedly [cons]ing
   Stack_frame digests onto [empty]. *)
module type Full = sig
  include Digest_intf.S

  (* Push a stack frame's digest onto a call-stack digest *)
  val cons : Stack_frame.Digest.t -> t -> t

  (* Digest of the empty call stack *)
  val empty : t

  (* Generator for property-based tests *)
  val gen : t Quickcheck.Generator.t

  module Checked : sig
    include Digest_intf.S_checked

    (* In-circuit analogue of the out-of-circuit [cons] above *)
    val cons : Stack_frame.Digest.Checked.t -> t -> t
  end

  include Digest_intf.S_aux with type t := t and type checked := Checked.t
end
| null | https://raw.githubusercontent.com/MinaProtocol/mina/7a380064e215dc6aa152b76a7c3254949e383b1f/src/lib/mina_base/call_stack_digest_intf.ml | ocaml | open Core_kernel
module type Full = sig
include Digest_intf.S
val cons : Stack_frame.Digest.t -> t -> t
val empty : t
val gen : t Quickcheck.Generator.t
module Checked : sig
include Digest_intf.S_checked
val cons : Stack_frame.Digest.Checked.t -> t -> t
end
include Digest_intf.S_aux with type t := t and type checked := Checked.t
end
|
|
df6d9733b9e5aa32542931875875fc4e8ec7df1885d16a37d9728e78f1fb996c | xapi-project/xen-api | get_vhd_vsize.ml | module Impl = Vhd_format.F.From_file (Vhd_format_lwt.IO)
open Vhd_format.F
open Vhd_format_lwt.IO
module In = From_input (Input)
open In
(* Stream the VHD at [filename] and print the virtual disk size: the
   [current_size] field of the first footer fragment encountered, after
   which the process exits with status 0.  If no footer is found the
   stream is consumed to the end and the file descriptor is closed.
   NOTE(review): [exit 0] on success means the trailing [close fd] is
   never reached on that path - presumably acceptable since the process
   terminates, but confirm. *)
let get_vhd_vsize filename =
  Vhd_format_lwt.IO.openfile filename false >>= fun fd ->
  (* Walk the fragment stream until a Footer fragment is seen *)
  let rec loop = function
    | End ->
        return ()
    | Cons (hd, tl) ->
        ( match hd with
        | Fragment.Footer x ->
            let size = x.Footer.current_size in
            Printf.printf "%Ld\n" size ; exit 0
        | _ ->
            ()
        ) ;
        tl () >>= fun x -> loop x
  in
  Vhd_format_lwt.IO.get_file_size filename >>= fun file_size ->
  openstream (Some file_size) (Input.of_fd (Vhd_format_lwt.IO.to_file_descr fd))
  >>= fun stream ->
  loop stream >>= fun () -> Vhd_format_lwt.IO.close fd
(* Entry point: takes the VHD path as the first command-line argument.
   NOTE(review): raises Invalid_argument if no argument is supplied -
   confirm whether a usage message is wanted instead. *)
let _ =
  let t = get_vhd_vsize Sys.argv.(1) in
  Lwt_main.run t
| null | https://raw.githubusercontent.com/xapi-project/xen-api/4bec5fac844f5b1e030dd0ec19f35395bef85f00/ocaml/vhd-tool/cli/get_vhd_vsize.ml | ocaml | module Impl = Vhd_format.F.From_file (Vhd_format_lwt.IO)
open Vhd_format.F
open Vhd_format_lwt.IO
module In = From_input (Input)
open In
let get_vhd_vsize filename =
Vhd_format_lwt.IO.openfile filename false >>= fun fd ->
let rec loop = function
| End ->
return ()
| Cons (hd, tl) ->
( match hd with
| Fragment.Footer x ->
let size = x.Footer.current_size in
Printf.printf "%Ld\n" size ; exit 0
| _ ->
()
) ;
tl () >>= fun x -> loop x
in
Vhd_format_lwt.IO.get_file_size filename >>= fun file_size ->
openstream (Some file_size) (Input.of_fd (Vhd_format_lwt.IO.to_file_descr fd))
>>= fun stream ->
loop stream >>= fun () -> Vhd_format_lwt.IO.close fd
let _ =
let t = get_vhd_vsize Sys.argv.(1) in
Lwt_main.run t
|
|
253b887f8ce4152ae6be4bc44685e212e681909cd21468da85af147ad816cc57 | PEZ/rich4clojure | problem_039.clj | (ns rich4clojure.easy.problem-039
(:require [hyperfiddle.rcf :refer [tests]]))
;; = Interleave Two Seqs =
;; By 4Clojure user:
;; Difficulty: Easy
;; Tags: [seqs core-functions]
;;
;; Write a function which takes two sequences and returns
;; the first item from each, then the second item from
;; each, then the third, etc.
(def restricted [interleave])
(def __ :tests-will-fail)
(comment
)
;; Exercise test cases: __ must interleave the two input sequences,
;; stopping when the shorter one is exhausted.
(tests
 (__ [1 2 3] [:a :b :c]) := '(1 :a 2 :b 3 :c)
 (__ [1 2] [3 4 5 6]) := '(1 3 2 4)
 (__ [1 2 3 4] [5]) := [1 5]
 (__ [30 20] [25 15]) := [30 25 20 15])
;; Share your solution, and/or check how others did it:
;; | null | https://raw.githubusercontent.com/PEZ/rich4clojure/28ea575ede8677f3a97437a646cdb3376a28ebc9/src/rich4clojure/easy/problem_039.clj | clojure | Difficulty: Easy
Share your solution, and/or check how others did it:
| (ns rich4clojure.easy.problem-039
(:require [hyperfiddle.rcf :refer [tests]]))
= Interleave Two Seqs =
By 4Clojure user :
Tags : [ seqs core - functions ]
Write a function which takes two sequences and returns
the first item from each , then the second item from
each , then the third , etc .
(def restricted [interleave])
(def __ :tests-will-fail)
(comment
)
(tests
(__ [1 2 3] [:a :b :c]) := '(1 :a 2 :b 3 :c)
(__ [1 2] [3 4 5 6]) := '(1 3 2 4)
(__ [1 2 3 4] [5]) := [1 5]
(__ [30 20] [25 15]) := [30 25 20 15])
|
24e37d06723ddbd40a8f39d334a39e82c2b239de4612272c0ce61a459e59c594 | mbenke/zpf2012 | DotTime.hs | import System.CPUTime (getCPUTime)
import System.Random (newStdGen)
import Control.Exception (evaluate)
import Data.Array.Parallel.PArray (PArray, randomRs, nf)
import DotP (dotp_wrapper) -- import vectorised code
-- | Benchmark driver: build two random input vectors, force them to
-- normal form so construction cost is excluded, then time the
-- vectorised dot product imported from 'DotP' and print the result.
main :: IO ()
main
  = do
      -- generate random input vectors
      gen1 <- newStdGen
      gen2 <- newStdGen
      let v = randomRs n range gen1
          w = randomRs n range gen2
      -- force the evaluation of the input vectors before timing starts
      -- (laziness would otherwise fold construction into the timed region)
      evaluate $ nf v
      evaluate $ nf w
      -- timed computations
      start <- getCPUTime
      let result = dotp_wrapper v w
      evaluate result
      end <- getCPUTime
      -- print the result
      -- (getCPUTime is in picoseconds; `div` 10^6 yields microseconds)
      putStrLn $ show result ++ " in " ++ show ((end - start) `div` 1000000) ++ "us"
  where
    n = 1000000 -- vector length
    range = (-100, 100) -- range of vector elements
| null | https://raw.githubusercontent.com/mbenke/zpf2012/faad6468f9400059de1c0735e12a84a2fdf24bb4/Code/dph/DotTime.hs | haskell | import vectorised code
generate random input vectors
force the evaluation of the input vectors
timed computations
print the result
vector length
range of vector elements | import System.CPUTime (getCPUTime)
import System.Random (newStdGen)
import Control.Exception (evaluate)
import Data.Array.Parallel.PArray (PArray, randomRs, nf)
main :: IO ()
main
= do
gen1 <- newStdGen
gen2 <- newStdGen
let v = randomRs n range gen1
w = randomRs n range gen2
evaluate $ nf v
evaluate $ nf w
start <- getCPUTime
let result = dotp_wrapper v w
evaluate result
end <- getCPUTime
putStrLn $ show result ++ " in " ++ show ((end - start) `div` 1000000) ++ "us"
where
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.