d189b143d70422725c2ded79e8ba44e4d2b3c74977d78c0ae6166d185133815e | spechub/Hets | State.hs | |
{- |
Module      :  ./Common/Lib/State.hs
Description :  State type from Control.Monad.State but no class MonadState
Copyright   :  C. Maeder and Uni Bremen 2002-2005
License     :  GPLv2 or higher, see LICENSE.txt
Maintainer  :
Stability   :  experimental
Portability :  portable

State type from Control.Monad.State but no class MonadState

This module was a replacement of the (non-haskell98) module
Control.Monad.State, but now Control.Monad.Trans.State can be used instead.
-}
module Common.Lib.State where
import Control.Applicative ()
import Control.Monad
import qualified Control.Monad.Fail as Fail
-- | Our fixed state monad
newtype State s a = State { runState :: s -> (a, s) }
state :: (s -> (a, s)) -> State s a
state = State
instance Functor (State s) where
  fmap = liftM

instance Applicative (State s) where
  pure = return
  (<*>) = ap

instance Monad (State s) where
  return a = State $ \ s -> (a, s)
  State f >>= k = State $ \ s ->
    let (a, s') = f s in runState (k a) s'

instance Fail.MonadFail (State s) where
  fail str = State $ \_ -> error str
-- put and get are non-overloaded here!
get :: State s s
get = State $ \ s -> (s, s)
put :: s -> State s ()
put s = State $ const ((), s)
modify :: (s -> s) -> State s ()
modify f = get >>= put . f
gets :: (s -> a) -> State s a
gets f = liftM f get
evalState :: State s a -> s -> a
evalState m = fst . runState m
execState :: State s a -> s -> s
execState m = snd . runState m
mapState :: ((a, s) -> (b, s)) -> State s a -> State s b
mapState f m = State $ f . runState m
withState :: (s -> s) -> State s a -> State s a
withState f m = State $ runState m . f
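Since the module above is small and self-contained, a short usage sketch may help. The snippet below is not part of the original Hets source; the names tick and demo are made up for illustration, and it only uses definitions exported by the module.

import Common.Lib.State

-- Return the current counter and bump the state by one.
tick :: State Int Int
tick = do
  n <- get
  put (n + 1)
  return n

-- runState threads the state: two ticks leave the counter at 2,
-- and gets (* 2) then returns 4, so demo evaluates to (4, 2).
demo :: (Int, Int)
demo = runState (tick >> tick >> gets (* 2)) 0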
| null | https://raw.githubusercontent.com/spechub/Hets/f582640a174df08d4c965d7c0a1ab24d1a31000d/Common/Lib/State.hs | haskell | | Our fixed state monad
|
7a11dfcb3e26cb76abe38c951eae90b2763f576b18fa02dfc12cc143e4735507 | bennn/dissertation | make-history.rkt | #lang racket/base
(require (only-in racket/string string-join))
(define binop* '(+ - *))
(define other* '(dup drop over swap))
(define new* (box '()))
(define (random-ref xs)
(list-ref xs (random (length xs))))
(define (random-def)
(define cmd* (for/list ([_i (in-range (add1 (random 10)))])
(random-ref other*)))
(define name (gensym (apply string-append (map symbol->string cmd*))))
(set-box! new* (cons name (unbox new*)))
(string-join (list* "define" (map symbol->string (cons name cmd*))) " "))
(define (print-random-command n)
(if (< n 2)
(begin (printf "push ~a\n" (random 9001))
(+ 1 n))
(case (random 5)
[(0) (displayln (random-ref binop*))
(- n 1)]
[(1 2) (printf "push ~a\n" (random 9001))
(+ 1 n)]
[(2) (displayln (random-ref other*))
n]
[(3) (displayln (if (null? (unbox new*))
(random-def)
(random-ref (unbox new*))))
n]
[(4) (displayln (random-def))
n])))
(module+ main
(require racket/cmdline)
(command-line
#:args (N-str out-file)
(define N (string->number N-str))
(with-output-to-file out-file #:exists 'replace
(lambda ()
(for/fold ([size 0])
([i (in-range N)])
(print-random-command size))))
(void)))
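A brief orientation for the script above, inferred from the code itself since the file carries no comments: it generates a random command history for a Forth-like stack calculator, mixing push, arithmetic, stack-manipulation words and freshly defined words, while print-random-command tracks a lower bound on the stack size so arithmetic is only emitted when two operands are available. A typical invocation, with made-up file names, would be:

;; racket make-history.rkt 1000 history.txt   ; writes 1000 commands, one per line, to history.txt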
| null | https://raw.githubusercontent.com/bennn/dissertation/779bfe6f8fee19092849b7e2cfc476df33e9357b/dissertation/scrbl/jfp-2019/benchmarks/forth/base/make-history.rkt | racket | #lang racket/base
(require (only-in racket/string string-join))
(define binop* '(+ - *))
(define other* '(dup drop over swap))
(define new* (box '()))
(define (random-ref xs)
(list-ref xs (random (length xs))))
(define (random-def)
(define cmd* (for/list ([_i (in-range (add1 (random 10)))])
(random-ref other*)))
(define name (gensym (apply string-append (map symbol->string cmd*))))
(set-box! new* (cons name (unbox new*)))
(string-join (list* "define" (map symbol->string (cons name cmd*))) " "))
(define (print-random-command n)
(if (< n 2)
(begin (printf "push ~a\n" (random 9001))
(+ 1 n))
(case (random 5)
[(0) (displayln (random-ref binop*))
(- n 1)]
[(1 2) (printf "push ~a\n" (random 9001))
(+ 1 n)]
[(2) (displayln (random-ref other*))
n]
[(3) (displayln (if (null? (unbox new*))
(random-def)
(random-ref (unbox new*))))
n]
[(4) (displayln (random-def))
n])))
(module+ main
(require racket/cmdline)
(command-line
#:args (N-str out-file)
(define N (string->number N-str))
(with-output-to-file out-file #:exists 'replace
(lambda ()
(for/fold ([size 0])
([i (in-range N)])
(print-random-command size))))
(void)))
|
|
20b16cfc380f3bc3afb9eb7597cb30c9900dc2b08d40c6396e11ece56bd40046 | byorgey/AoC | 11.hs |
import Data.List
import Data.List.Split
step "n" = [1,0,-1]
step "s" = [-1,0,1]
step "ne" = [1,-1,0]
step "se" = [0,-1,1]
step "sw" = [-1,1,0]
step "nw" = [0,1,-1]
main = do
steps <- (splitOn "," . init) <$> getContents
let end = foldl' (zipWith (+)) [0,0,0] $ map step steps
print (dist end)
let locs = scanl (zipWith (+)) [0,0,0] $ map step steps
print (maximum (map dist locs))
dist loc = (sum . map abs $ loc) `div` 2
| null | https://raw.githubusercontent.com/byorgey/AoC/30eb51eb41af9ca86b05de598a3a96d25bd428e3/2017/11/11.hs | haskell |
import Data.List
import Data.List.Split
step "n" = [1,0,-1]
step "s" = [-1,0,1]
step "ne" = [1,-1,0]
step "se" = [0,-1,1]
step "sw" = [-1,1,0]
step "nw" = [0,1,-1]
main = do
steps <- (splitOn "," . init) <$> getContents
let end = foldl' (zipWith (+)) [0,0,0] $ map step steps
print (dist end)
let locs = scanl (zipWith (+)) [0,0,0] $ map step steps
print (maximum (map dist locs))
dist loc = (sum . map abs $ loc) `div` 2
|
|
1d288853da7770fa8d4c5279b28d78db09a6d5f2019bc58843e36e34dd491c9b | ygrek/mldonkey | guiUtf8.mli | Copyright 2004 b8_bavard ,
This file is part of mldonkey.

mldonkey is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.

mldonkey is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with mldonkey; if not, write to the Free Software
Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
*)
(* Internationalization functions *)
type lang =
AR
| HY
| LT
| LV
| MI
| CY
| BS
| CS
| HR
| HU
| PL
| SK
| SL
| SH
| SR
| ZH
| BE
| BG
| MK
| RU
| UK
| KA
| EL
| HE
| IW
| JA
| KO
| RO
| MT
| TG
| TH
| TR
| VI
| AF
| AN
| BR
| CA
| DA
| DE
| EN
| ES
| ET
| EU
| FI
| FO
| FR
| GA
| GL
| GV
| ID
| IS
| IT
| KL
| KW
| MS
| NL
| NN
| NO
| OC
| PT
| SQ
| SV
| TL
| UZ
| WA
| YI
| UnknownLang
type codeset =
WINDOWS_1256
| ISO_8859_6
| IBM864
| ARMSCII_8
| WINDOWS_1257
| ISO_8859_13
| ISO_8859_4
| ISO_8859_14
| WINDOWS_1250
| ISO_8859_2
| IBM852
| GB18030
| GBK
| GB2312
| BIG5_HKSCS
| BIG5
| EUC_TW
| WINDOWS_1251
| CP1251
| CP866
| KOI8_R
| ISO_8859_5
| IBM855
| ISO_IR_111
| KOI8R
| KOI8U
| GEORGIAN_ACADEMYE
| WINDOWS_1253
| ISO_8859_7
| WINDOWS_1255
| ISO_8859_8
| IBM862
| EUC_JP
| ISO_2022_JP
| SHIFT_JIS
| UHC
| JOHAB
| EUC_KR
| ISO_2022_KR
| ISO_8859_10
| ISO_8859_16
| ISO_8859_3
| KOI8_T
| TIS_620
| WINDOWS_1254
| ISO_8859_9
| IBM857
| UCS_2
| UTF_8
| UTF_7
| UTF_16
| UTF_32
| UCS_4
| WINDOWS_1258
| VISCII
| TCVN
| WINDOWS_1252
| ISO_8859_15
| IBM850
| ISO_8859_1
type region =
Default
| Arabic
| Armenian
| Baltic
| Celtic
| CentralEuropean
| ChineseSimplified
| ChineseTraditional
| Cyrillic
| Georgian
| Greek
| Hebrew
| Japanese
| Korean
| Nordic
| Romanian
| SouthEuropean
| Tajik
| Thai
| Turkish
| Vietnamese
| WesternEuropean
| Unicode
(**
  [lang_to_string lang] returns a string, as a 2-letter ISO 639 language code,
  corresponding to [lang].
  If [lang] is not known, returns "EN".
*)
val lang_to_string : lang -> string
(**
  [string_to_lang s] returns a lang corresponding to the string [s].
  If [s] is not a 2-letter ISO 639 language code known, returns UnknownLang.
*)
val string_to_lang : string -> lang
(**
[codeset_to_string codeset] returns the string, as defined by glib,
corresponding to [codeset].
*)
val codeset_to_string : codeset -> string
(**
[string_to_codeset s] returns the codeset corresponding to the string [s].
If [s] is unknown, returns ISO_8859_1.
*)
val string_to_codeset : string -> codeset
(**
[codeset_list_from_region region] returns a predefined codeset list.
It is provided as a convenience.
*)
val codeset_list_from_region : region -> codeset list
(**
[codeset_list_from_lang lang] returns a predefined codeset list.
It is provided as a convenience.
*)
val codeset_list_from_lang : lang -> codeset list
(**
  [set_default_codeset_list lang] sets the current codeset used by the
  functions utf8_of and simple_utf8_of.
*)
val set_default_codeset_list : lang -> unit
(**
[simple_utf8_of s] converts the string [s] into a valid utf8 string.
If it fails to convert [s] using the current codeset list, returns
a string of the same length filled with [Glib.Utf8.from_unichar 2].
*)
val simple_utf8_of : string -> string
(**
[utf8_of s] converts the string [s] into a valid utf8 string.
The process may be longer than with [simple_utf8_of], as it
tries to convert the string character by character.
*)
val utf8_of : string -> string
(**
[private_utf8_of s enc_list] converts the string [s] into a valid utf8 string.
It is similar to [utf8_of], except that it uses a private codeset list to parse
the string [s], instead of using the default codeset list.
*)
val private_utf8_of : string -> codeset list -> string
(**
  [all_regions] returns the list of regions, as predefined codesets, that
  can be used according to the language or country returned by Glib.Convert.get_charset ().
  It is provided as a convenience.
*)
val all_regions : region list
val default_codeset_list : unit -> string list
(** Options *)
val languages : string list
val language_to_string : lang -> string
val language_option : lang Options.option_class
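Because guiUtf8.mli only declares the conversion API, a short usage sketch may be helpful. It is illustrative only: it assumes the matching guiUtf8.ml implementation is linked and that the module is referred to as GuiUtf8, following OCaml's capitalization of the file name.

(* Pick codesets for the user's language once, then sanitize legacy
   8-bit strings before handing them to GTK widgets. *)
let sanitize (s : string) : string = GuiUtf8.utf8_of s

let () =
  GuiUtf8.set_default_codeset_list GuiUtf8.FR;
  print_endline (sanitize "re\xe7u")  (* ISO-8859-1 bytes become valid UTF-8 *)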
| null | https://raw.githubusercontent.com/ygrek/mldonkey/333868a12bb6cd25fed49391dd2c3a767741cb51/src/gtk2/gui/guiUtf8.mli | ocaml | Internationalization functions
*
[codeset_to_string codeset] returns the string, as defined by glib,
corresponding to [codeset].
*
[string_to_codeset s] returns the codeset corresponding to the string [s].
If [s] is unknown, returns ISO_8859_1.
*
[codeset_list_from_region region] returns a predefined codeset list.
It is provided as a convenience.
*
[codeset_list_from_lang lang] returns a predefined codeset list.
It is provided as a convenience.
*
[simple_utf8_of s] converts the string [s] into a valid utf8 string.
If it fails to convert [s] using the current codeset list, returns
a string of the same length filled with [Glib.Utf8.from_unichar 2].
*
[utf8_of s] converts the string [s] into a valid utf8 string.
The process may be longer than with [simple_utf8_of], as it
tries to convert the string character by character.
*
[private_utf8_of s enc_list] converts the string [s] into a valid utf8 string.
It is similar to [utf8_of], except that it uses a private codeset list to parse
the string [s], instead of using the default codeset list.
* Options | Copyright 2004 b8_bavard ,
This file is part of mldonkey .
mldonkey is free software ; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation ; either version 2 of the License , or
( at your option ) any later version .
mldonkey is distributed in the hope that it will be useful ,
but WITHOUT ANY WARRANTY ; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE . See the
GNU General Public License for more details .
You should have received a copy of the GNU General Public License
along with mldonkey ; if not , write to the Free Software
Foundation , Inc. , 59 Temple Place , Suite 330 , Boston , MA 02111 - 1307 USA
This file is part of mldonkey.
mldonkey is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
mldonkey is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with mldonkey; if not, write to the Free Software
Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
*)
type lang =
AR
| HY
| LT
| LV
| MI
| CY
| BS
| CS
| HR
| HU
| PL
| SK
| SL
| SH
| SR
| ZH
| BE
| BG
| MK
| RU
| UK
| KA
| EL
| HE
| IW
| JA
| KO
| RO
| MT
| TG
| TH
| TR
| VI
| AF
| AN
| BR
| CA
| DA
| DE
| EN
| ES
| ET
| EU
| FI
| FO
| FR
| GA
| GL
| GV
| ID
| IS
| IT
| KL
| KW
| MS
| NL
| NN
| NO
| OC
| PT
| SQ
| SV
| TL
| UZ
| WA
| YI
| UnknownLang
type codeset =
WINDOWS_1256
| ISO_8859_6
| IBM864
| ARMSCII_8
| WINDOWS_1257
| ISO_8859_13
| ISO_8859_4
| ISO_8859_14
| WINDOWS_1250
| ISO_8859_2
| IBM852
| GB18030
| GBK
| GB2312
| BIG5_HKSCS
| BIG5
| EUC_TW
| WINDOWS_1251
| CP1251
| CP866
| KOI8_R
| ISO_8859_5
| IBM855
| ISO_IR_111
| KOI8R
| KOI8U
| GEORGIAN_ACADEMYE
| WINDOWS_1253
| ISO_8859_7
| WINDOWS_1255
| ISO_8859_8
| IBM862
| EUC_JP
| ISO_2022_JP
| SHIFT_JIS
| UHC
| JOHAB
| EUC_KR
| ISO_2022_KR
| ISO_8859_10
| ISO_8859_16
| ISO_8859_3
| KOI8_T
| TIS_620
| WINDOWS_1254
| ISO_8859_9
| IBM857
| UCS_2
| UTF_8
| UTF_7
| UTF_16
| UTF_32
| UCS_4
| WINDOWS_1258
| VISCII
| TCVN
| WINDOWS_1252
| ISO_8859_15
| IBM850
| ISO_8859_1
type region =
Default
| Arabic
| Armenian
| Baltic
| Celtic
| CentralEuropean
| ChineseSimplified
| ChineseTraditional
| Cyrillic
| Georgian
| Greek
| Hebrew
| Japanese
| Korean
| Nordic
| Romanian
| SouthEuropean
| Tajik
| Thai
| Turkish
| Vietnamese
| WesternEuropean
| Unicode
*
[ lang_to_string lang ] returns a string , as a 2 - letter ISO 639 language code ,
corresponding to [ lang ] .
If [ lang ] is not known , returns " EN " .
[lang_to_string lang] returns a string, as a 2-letter ISO 639 language code,
corresponding to [lang].
If [lang] is not known, returns "EN".
*)
val lang_to_string : lang -> string
*
[ string_to_lang s ] returns a lang corresponding to the string [ s ] .
If [ s ] is not a 2 - letter ISO 639 language code known , returns UnknownLang .
[string_to_lang s] returns a lang corresponding to the string [s].
If [s] is not a 2-letter ISO 639 language code known, returns UnknownLang.
*)
val string_to_lang : string -> lang
val codeset_to_string : codeset -> string
val string_to_codeset : string -> codeset
val codeset_list_from_region : region -> codeset list
val codeset_list_from_lang : lang -> codeset list
*
[ set_default_codeset_list lang ] sets the current codeset used by the
functions utf8_of and simple_utf8_of .
[set_default_codeset_list lang] sets the current codeset used by the
functions utf8_of and simple_utf8_of.
*)
val set_default_codeset_list : lang -> unit
val simple_utf8_of : string -> string
val utf8_of : string -> string
val private_utf8_of : string -> codeset list -> string
*
[ all_regions ] returns the list of regions , as predefined codesets , that
can be used according to the language or country returns by Glib . Convert.get_charset ( ) .
It is provided as a convenience .
[all_regions] returns the list of regions, as predefined codesets, that
can be used according to the language or country returns by Glib.Convert.get_charset ().
It is provided as a convenience.
*)
val all_regions : region list
val default_codeset_list : unit -> string list
val languages : string list
val language_to_string : lang -> string
val language_option : lang Options.option_class
|
ae1e9f8f7700e29288d21e480bbb960a6e93f55bc97526da973c1b21a40c1380 | skanev/playground | 01.scm | EOPL exercise 3.01
;
In figure 3.3 , list all the places where we used the fact that
; ⎣ ⎡ n⎤ ⎦ = n.
I will use { } for ⎣ and ⎦ and [ ] for ⎡ and I will annotate the code below :
;
Let p = [ i = 1 , v = 5 , x = 10 ] .
;
; (value-of
< < -(-(x , 3 ) , -(v , i ) ) > >
; p)
;
; = [(-
{ ( value - of < < -(x , 3 ) > > p ) }
; {(value-of <<-(v, i)>> p)})]
;
; = [(-
; (-
; {(value-of <<x>> p)}
; {(value-of <<3>> p)})
; {(value-of <<-(v, i)>>)})]
;
; = [(-
; (-
10 ; HERE
; {(value-of <<3>> p)})
; {(value-of <<-(v, i)>>)})]
;
; = [(-
; (-
10
; 3) ; HERE
; {(value-of <<-(v, i)>>)})]
;
; = [(-
7
; {(value-of <<-(v, i)>>)})]
;
; = [(-
7
; (-
; {(value-of <<x>> p)}
; {(value-of <<i>> p)}))]
;
; = [(-
7
; (-
5 ; HERE
; {(value-of <<i>> p)}))]
;
; = [(-
7
; (-
5
; 1))] ; HERE
; = [(-
7
; 4)]
;
= [ 4 ]
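; For reference, the interpreter clause this derivation steps through looks
; roughly like the sketch below; it is paraphrased from EOPL's figure 3.3
; rather than copied, so the exact datatype and accessor names are assumptions.
; expval->num plays the role of ⌊.⌋ and num-val the role of ⌈.⌉, so each
; arithmetic step uses ⌊⌈n⌉⌋ = n exactly at the places annotated HERE above.
;
; (diff-exp (exp1 exp2)
;   (let ((val1 (value-of exp1 env))
;         (val2 (value-of exp2 env)))
;     (num-val
;       (- (expval->num val1)
;          (expval->num val2)))))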
| null | https://raw.githubusercontent.com/skanev/playground/d88e53a7f277b35041c2f709771a0b96f993b310/scheme/eopl/03/01.scm | scheme |
⎣ ⎡ n⎤ ⎦ = n.
(value-of
p)
= [(-
{(value-of <<-(v, i)>> p)})]
= [(-
(-
{(value-of <<x>> p)}
{(value-of <<3>> p)})
{(value-of <<-(v, i)>>)})]
= [(-
(-
HERE
{(value-of <<3>> p)})
{(value-of <<-(v, i)>>)})]
= [(-
(-
3) ; HERE
{(value-of <<-(v, i)>>)})]
= [(-
{(value-of <<-(v, i)>>)})]
= [(-
(-
{(value-of <<x>> p)}
{(value-of <<i>> p)}))]
= [(-
(-
HERE
{(value-of <<i>> p)}))]
= [(-
(-
1))] ; HERE
= [(-
4)]
| EOPL exercise 3.01
In figure 3.3 , list all the places where we used the fact that
I will use { } for ⎣ and ⎦ and [ ] for ⎡ and I will annotate the code below :
Let p = [ i = 1 , v = 5 , x = 10 ] .
< < -(-(x , 3 ) , -(v , i ) ) > >
{ ( value - of < < -(x , 3 ) > > p ) }
10
7
7
7
7
5
7
= [ 4 ]
|
750c7ac044d7b91996c7aef640fed22a8df5b25c47c5fb56a15393c4b15eeda1 | fourmolu/fourmolu | SpanStream.hs | {-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}

-- | Build span stream from AST.
module Ormolu.Printer.SpanStream
( SpanStream (..),
mkSpanStream,
)
where
import Data.DList (DList)
import qualified Data.DList as D
import Data.Data (Data)
import Data.Generics (everything, ext1Q, ext2Q)
import Data.List (sortOn)
import Data.Maybe (maybeToList)
import Data.Typeable (cast)
import GHC.Parser.Annotation
import GHC.Types.SrcLoc
-- | A stream of 'RealSrcSpan's in ascending order. This allows us to tell
e.g. whether there is another \"located\ " element of AST between current
-- element and comment we're considering for printing.
newtype SpanStream = SpanStream [RealSrcSpan]
  deriving (Eq, Show, Data, Semigroup, Monoid)

-- | Create 'SpanStream' from a data structure containing \"located\"
-- elements.
mkSpanStream ::
  (Data a) =>
  -- | Data structure to inspect (AST)
  a ->
  SpanStream
mkSpanStream a =
  SpanStream
    . sortOn realSrcSpanStart
    . D.toList
    $ everything mappend (const mempty `ext2Q` queryLocated `ext1Q` querySrcSpanAnn) a
  where
    queryLocated ::
      (Data e0) =>
      GenLocated e0 e1 ->
      DList RealSrcSpan
    queryLocated (L mspn _) =
      maybe mempty srcSpanToRealSrcSpanDList (cast mspn :: Maybe SrcSpan)
    querySrcSpanAnn :: SrcSpanAnn' a -> DList RealSrcSpan
    querySrcSpanAnn = srcSpanToRealSrcSpanDList . locA
    srcSpanToRealSrcSpanDList =
      D.fromList . maybeToList . srcSpanToRealSrcSpan
| null | https://raw.githubusercontent.com/fourmolu/fourmolu/f47860f01cb3cac3b973c5df6ecbae48bbb4c295/src/Ormolu/Printer/SpanStream.hs | haskell | # LANGUAGE DeriveDataTypeable #
| A stream of 'RealSrcSpan's in ascending order. This allows us to tell
element and comment we're considering for printing.
| Create 'SpanStream' from a data structure containing \"located\"
elements.
| Data structure to inspect (AST) | # LANGUAGE GeneralizedNewtypeDeriving #
| Build span stream from AST .
module Ormolu.Printer.SpanStream
( SpanStream (..),
mkSpanStream,
)
where
import Data.DList (DList)
import qualified Data.DList as D
import Data.Data (Data)
import Data.Generics (everything, ext1Q, ext2Q)
import Data.List (sortOn)
import Data.Maybe (maybeToList)
import Data.Typeable (cast)
import GHC.Parser.Annotation
import GHC.Types.SrcLoc
e.g. whether there is another \"located\ " element of AST between current
newtype SpanStream = SpanStream [RealSrcSpan]
deriving (Eq, Show, Data, Semigroup, Monoid)
mkSpanStream ::
(Data a) =>
a ->
SpanStream
mkSpanStream a =
SpanStream
. sortOn realSrcSpanStart
. D.toList
$ everything mappend (const mempty `ext2Q` queryLocated `ext1Q` querySrcSpanAnn) a
where
queryLocated ::
(Data e0) =>
GenLocated e0 e1 ->
DList RealSrcSpan
queryLocated (L mspn _) =
maybe mempty srcSpanToRealSrcSpanDList (cast mspn :: Maybe SrcSpan)
querySrcSpanAnn :: SrcSpanAnn' a -> DList RealSrcSpan
querySrcSpanAnn = srcSpanToRealSrcSpanDList . locA
srcSpanToRealSrcSpanDList =
D.fromList . maybeToList . srcSpanToRealSrcSpan
|
af1cc0cab6bca60402bcb708bb757cd198fa94adae334f6f54f4c35018a183f6 | dfinity/motoko | languageServer.mli | val start :
string (** The entry point *) ->
* Log debug messages to ?
int option
* Listen on the given port rather than communicating via stdin / out ?
'a
| null | https://raw.githubusercontent.com/dfinity/motoko/399b8e8b0b47890388cd38ee0ace7638d9092b1a/src/languageServer/languageServer.mli | ocaml | * The entry point | val start :
* Log debug messages to ?
int option
* Listen on the given port rather than communicating via stdin / out ?
'a
|
45eff8ee2f56e2a654e50971d0c2f847c28c3ed71439496781f6853f0ce7eb5f | granule-project/granule | CheckerSpec.hs | # LANGUAGE ScopedTypeVariables #
{-# LANGUAGE ImplicitParams #-}
{-# LANGUAGE DataKinds #-}
module Language.Granule.Checker.CheckerSpec where
import Test.Hspec
import Language.Granule.Checker.Checker
import Language.Granule.Checker.Predicates
import Language.Granule.Checker.Monad
import Language.Granule.Syntax.Parser
import Language.Granule.Syntax.Expr
import Language.Granule.Syntax.Def
import Language.Granule.Syntax.Type
import Language.Granule.Syntax.Span
import Language.Granule.Syntax.Identifiers
import Language.Granule.Syntax.Annotated
import Language.Granule.Utils
spec :: Spec
spec = let ?globals = mempty in do
let tyVarK = TyVar $ mkId "k"
let varA = mkId "a"
-- Unit tests
describe "joinCtxts" $ do
it "join ctxts with discharged assumption in both" $ do
((c, tyVars), pred) <- runCtxts joinCtxts
[(varA, Discharged tyVarK (TySig (TyInt 5) natInterval))]
[(varA, Discharged tyVarK (cNatOrdered 10))]
c `shouldBe` [(varA, Discharged tyVarK (TyVar (mkId "a.0")))]
tyVars `shouldBe` [(mkId "a.0", natInterval)]
pred `shouldBe`
[Conj [Con (Lub nullSpan (cNatOrdered 5) (cNatOrdered 10) (TyVar (mkId "a.0")) natInterval)]]
[ [ Con ( ( cNatOrdered 10 ) ( TyVar ( mkId " a.0 " ) ) natInterval )
, Con ( ( cNatOrdered 5 ) ( TyVar ( mkId " a.0 " ) ) natInterval ) ] ]
it "join ctxts with discharged assumption in one" $ do
((c, _), pred) <- runCtxts joinCtxts
[(varA, Discharged (tyVarK) (cNatOrdered 5))]
[]
c `shouldBe` [(varA, Discharged (tyVarK) (TyVar (mkId "a.0")))]
pred `shouldBe`
[Conj [Con (Lub (Span {startPos = (0,0), endPos = (0,0), filename = ""})
(TySig (TyInt 5)
(TyApp (TyCon (Id "Interval" "Interval")) (TyCon (Id "Nat" "Nat"))))
(TyGrade (Just (TyApp (TyCon (Id "Interval" "Interval")) (TyCon (Id "Nat" "Nat")))) 0)
(TyVar (Id "a.0" "a.0")) (TyApp (TyCon (Id "Interval" "Interval")) (TyCon (Id "Nat" "Nat"))))]]
[ [ Con ( ( CZero natInterval ) ( TyVar ( mkId " a.0 " ) ) natInterval )
, Con ( ( cNatOrdered 5 ) ( TyVar ( mkId " a.0 " ) ) natInterval ) ] ]
describe "intersectCtxtsWithWeaken" $ do
it "contexts with matching discharged variables" $ do
(c, _) <- (runCtxts intersectCtxtsWithWeaken)
[(varA, Discharged (tyVarK) (cNatOrdered 5))]
[(varA, Discharged (tyVarK) (cNatOrdered 10))]
c `shouldBe`
[(varA, Discharged (tyVarK) (cNatOrdered 5))]
it "contexts with matching discharged variables" $ do
(c, _) <- (runCtxts intersectCtxtsWithWeaken)
[(varA, Discharged (tyVarK) (cNatOrdered 10))]
[(varA, Discharged (tyVarK) (cNatOrdered 5))]
c `shouldBe`
[(varA, Discharged (tyVarK) (cNatOrdered 10))]
it "contexts with matching discharged variables" $ do
(c, preds) <- (runCtxts intersectCtxtsWithWeaken)
[(varA, Discharged (tyVarK) (cNatOrdered 5))]
[]
c `shouldBe`
[(varA, Discharged (tyVarK) (cNatOrdered 5))]
it "contexts with matching discharged variables (symm)" $ do
(c, _) <- (runCtxts intersectCtxtsWithWeaken)
[]
[(varA, Discharged (tyVarK) (cNatOrdered 5))]
c `shouldBe`
[(varA, Discharged (tyVarK) (TyGrade (Just (TyApp (TyCon (Id "Interval" "Interval")) (TyCon (Id "Nat" "Nat")))) 0))]
describe "elaborator tests" $
it "simple elaborator tests" $ do
-- Simple definitions
\x - > x + 1
(AST _ (def1:_) _ _ _, _) <- parseAndDoImportsAndFreshenDefs "foo : Int -> Int\nfoo x = x + 1"
(Right defElab, _) <- runChecker initState (checkDef [] def1)
annotation (extractMainExpr defElab) `shouldBe` ((TyCon $ mkId "Int") :: Type)
extractMainExpr :: Def v a -> Expr v a
extractMainExpr (Def _ _ _ (EquationList _ _ _ [Equation _ _ _ _ _ e]) _) = e
extractMainExpr _ = undefined
runCtxts
:: (?globals::Globals)
=> (Span -> a -> a -> Checker b)
-> a
-> a
-> IO (b, [Pred])
runCtxts f a b = do
(Right res, state) <- runChecker initState (f nullSpan a b)
pure (res, predicateStack state)
cNatOrdered :: Int -> Type
cNatOrdered x = TySig (TyInt x) natInterval
natInterval :: Type
natInterval = TyApp (TyCon $ mkId "Interval") (TyCon $ mkId "Nat")
| null | https://raw.githubusercontent.com/granule-project/granule/aa869e0522ad961f6627e827055700c5fcabfc75/frontend/tests/hspec/Language/Granule/Checker/CheckerSpec.hs | haskell | Unit tests
Simple definitions | # LANGUAGE ScopedTypeVariables #
# LANGUAGE ImplicitParams #
# LANGUAGE DataKinds #
module Language.Granule.Checker.CheckerSpec where
import Test.Hspec
import Language.Granule.Checker.Checker
import Language.Granule.Checker.Predicates
import Language.Granule.Checker.Monad
import Language.Granule.Syntax.Parser
import Language.Granule.Syntax.Expr
import Language.Granule.Syntax.Def
import Language.Granule.Syntax.Type
import Language.Granule.Syntax.Span
import Language.Granule.Syntax.Identifiers
import Language.Granule.Syntax.Annotated
import Language.Granule.Utils
spec :: Spec
spec = let ?globals = mempty in do
let tyVarK = TyVar $ mkId "k"
let varA = mkId "a"
describe "joinCtxts" $ do
it "join ctxts with discharged assumption in both" $ do
((c, tyVars), pred) <- runCtxts joinCtxts
[(varA, Discharged tyVarK (TySig (TyInt 5) natInterval))]
[(varA, Discharged tyVarK (cNatOrdered 10))]
c `shouldBe` [(varA, Discharged tyVarK (TyVar (mkId "a.0")))]
tyVars `shouldBe` [(mkId "a.0", natInterval)]
pred `shouldBe`
[Conj [Con (Lub nullSpan (cNatOrdered 5) (cNatOrdered 10) (TyVar (mkId "a.0")) natInterval)]]
[ [ Con ( ( cNatOrdered 10 ) ( TyVar ( mkId " a.0 " ) ) natInterval )
, Con ( ( cNatOrdered 5 ) ( TyVar ( mkId " a.0 " ) ) natInterval ) ] ]
it "join ctxts with discharged assumption in one" $ do
((c, _), pred) <- runCtxts joinCtxts
[(varA, Discharged (tyVarK) (cNatOrdered 5))]
[]
c `shouldBe` [(varA, Discharged (tyVarK) (TyVar (mkId "a.0")))]
pred `shouldBe`
[Conj [Con (Lub (Span {startPos = (0,0), endPos = (0,0), filename = ""})
(TySig (TyInt 5)
(TyApp (TyCon (Id "Interval" "Interval")) (TyCon (Id "Nat" "Nat"))))
(TyGrade (Just (TyApp (TyCon (Id "Interval" "Interval")) (TyCon (Id "Nat" "Nat")))) 0)
(TyVar (Id "a.0" "a.0")) (TyApp (TyCon (Id "Interval" "Interval")) (TyCon (Id "Nat" "Nat"))))]]
[ [ Con ( ( CZero natInterval ) ( TyVar ( mkId " a.0 " ) ) natInterval )
, Con ( ( cNatOrdered 5 ) ( TyVar ( mkId " a.0 " ) ) natInterval ) ] ]
describe "intersectCtxtsWithWeaken" $ do
it "contexts with matching discharged variables" $ do
(c, _) <- (runCtxts intersectCtxtsWithWeaken)
[(varA, Discharged (tyVarK) (cNatOrdered 5))]
[(varA, Discharged (tyVarK) (cNatOrdered 10))]
c `shouldBe`
[(varA, Discharged (tyVarK) (cNatOrdered 5))]
it "contexts with matching discharged variables" $ do
(c, _) <- (runCtxts intersectCtxtsWithWeaken)
[(varA, Discharged (tyVarK) (cNatOrdered 10))]
[(varA, Discharged (tyVarK) (cNatOrdered 5))]
c `shouldBe`
[(varA, Discharged (tyVarK) (cNatOrdered 10))]
it "contexts with matching discharged variables" $ do
(c, preds) <- (runCtxts intersectCtxtsWithWeaken)
[(varA, Discharged (tyVarK) (cNatOrdered 5))]
[]
c `shouldBe`
[(varA, Discharged (tyVarK) (cNatOrdered 5))]
it "contexts with matching discharged variables (symm)" $ do
(c, _) <- (runCtxts intersectCtxtsWithWeaken)
[]
[(varA, Discharged (tyVarK) (cNatOrdered 5))]
c `shouldBe`
[(varA, Discharged (tyVarK) (TyGrade (Just (TyApp (TyCon (Id "Interval" "Interval")) (TyCon (Id "Nat" "Nat")))) 0))]
describe "elaborator tests" $
it "simple elaborator tests" $ do
\x - > x + 1
(AST _ (def1:_) _ _ _, _) <- parseAndDoImportsAndFreshenDefs "foo : Int -> Int\nfoo x = x + 1"
(Right defElab, _) <- runChecker initState (checkDef [] def1)
annotation (extractMainExpr defElab) `shouldBe` ((TyCon $ mkId "Int") :: Type)
extractMainExpr :: Def v a -> Expr v a
extractMainExpr (Def _ _ _ (EquationList _ _ _ [Equation _ _ _ _ _ e]) _) = e
extractMainExpr _ = undefined
runCtxts
:: (?globals::Globals)
=> (Span -> a -> a -> Checker b)
-> a
-> a
-> IO (b, [Pred])
runCtxts f a b = do
(Right res, state) <- runChecker initState (f nullSpan a b)
pure (res, predicateStack state)
cNatOrdered :: Int -> Type
cNatOrdered x = TySig (TyInt x) natInterval
natInterval :: Type
natInterval = TyApp (TyCon $ mkId "Interval") (TyCon $ mkId "Nat")
|
a92c233c938f8bcaee159357d1a492f1fca47200d914c694715f3eb4a0649004 | con-kitty/categorifier-c | Libm.hs | # LANGUAGE ForeignFunctionInterface #
module Categorifier.C.KTypes.Libm
( -- * Direct FFI bindings
c_fmod,
c_atan2,
c_fmin,
c_fmax,
c_log,
c_exp,
c_pow,
c_sin,
c_cos,
c_tan,
c_asin,
c_acos,
c_atan,
c_sinh,
c_cosh,
c_tanh,
c_fmodf,
c_atan2f,
c_fminf,
c_fmaxf,
c_logf,
c_expf,
c_powf,
c_sinf,
c_cosf,
c_tanf,
c_asinf,
c_acosf,
c_atanf,
c_sinhf,
c_coshf,
c_tanhf,
-- * Haskell type-converted bindings
libmFMod,
libmAtan2,
libmFmin,
libmFmax,
libmLog,
libmExp,
libmPow,
libmSin,
libmCos,
libmTan,
libmAsin,
libmAcos,
libmAtan,
libmSinh,
libmCosh,
libmTanh,
libmFModf,
libmAtan2f,
libmFminf,
libmFmaxf,
libmLogf,
libmExpf,
libmPowf,
libmSinf,
libmCosf,
libmTanf,
libmAsinf,
libmAcosf,
libmAtanf,
libmSinhf,
libmCoshf,
libmTanhf,
)
where
import Foreign.C.Types (CDouble (..), CFloat (..))
foreign import ccall unsafe "math.h fmod" c_fmod :: CDouble -> CDouble -> CDouble
foreign import ccall unsafe "math.h atan2" c_atan2 :: CDouble -> CDouble -> CDouble
foreign import ccall unsafe "math.h fmin" c_fmin :: CDouble -> CDouble -> CDouble
foreign import ccall unsafe "math.h fmax" c_fmax :: CDouble -> CDouble -> CDouble
foreign import ccall unsafe "math.h log" c_log :: CDouble -> CDouble
foreign import ccall unsafe "math.h exp" c_exp :: CDouble -> CDouble
foreign import ccall unsafe "math.h pow" c_pow :: CDouble -> CDouble -> CDouble
foreign import ccall unsafe "math.h sin" c_sin :: CDouble -> CDouble
foreign import ccall unsafe "math.h cos" c_cos :: CDouble -> CDouble
foreign import ccall unsafe "math.h tan" c_tan :: CDouble -> CDouble
foreign import ccall unsafe "math.h asin" c_asin :: CDouble -> CDouble
foreign import ccall unsafe "math.h acos" c_acos :: CDouble -> CDouble
foreign import ccall unsafe "math.h atan" c_atan :: CDouble -> CDouble
foreign import ccall unsafe "math.h sinh" c_sinh :: CDouble -> CDouble
foreign import ccall unsafe "math.h cosh" c_cosh :: CDouble -> CDouble
foreign import ccall unsafe "math.h tanh" c_tanh :: CDouble -> CDouble
foreign import ccall unsafe "math.h fmodf" c_fmodf :: CFloat -> CFloat -> CFloat
foreign import ccall unsafe "math.h atan2f" c_atan2f :: CFloat -> CFloat -> CFloat
foreign import ccall unsafe "math.h fminf" c_fminf :: CFloat -> CFloat -> CFloat
foreign import ccall unsafe "math.h fmaxf" c_fmaxf :: CFloat -> CFloat -> CFloat
foreign import ccall unsafe "math.h logf" c_logf :: CFloat -> CFloat
foreign import ccall unsafe "math.h expf" c_expf :: CFloat -> CFloat
foreign import ccall unsafe "math.h powf" c_powf :: CFloat -> CFloat -> CFloat
foreign import ccall unsafe "math.h sinf" c_sinf :: CFloat -> CFloat
foreign import ccall unsafe "math.h cosf" c_cosf :: CFloat -> CFloat
foreign import ccall unsafe "math.h tanf" c_tanf :: CFloat -> CFloat
foreign import ccall unsafe "math.h asinf" c_asinf :: CFloat -> CFloat
foreign import ccall unsafe "math.h acosf" c_acosf :: CFloat -> CFloat
foreign import ccall unsafe "math.h atanf" c_atanf :: CFloat -> CFloat
foreign import ccall unsafe "math.h sinhf" c_sinhf :: CFloat -> CFloat
foreign import ccall unsafe "math.h coshf" c_coshf :: CFloat -> CFloat
foreign import ccall unsafe "math.h tanhf" c_tanhf :: CFloat -> CFloat
onCDouble :: (CDouble -> CDouble) -> Double -> Double
onCDouble f x = ret
  where
    CDouble ret = f (CDouble x)

onTwoCDoubles :: (CDouble -> CDouble -> CDouble) -> Double -> Double -> Double
onTwoCDoubles f x y = ret
  where
    CDouble ret = f (CDouble x) (CDouble y)

onCFloat :: (CFloat -> CFloat) -> Float -> Float
onCFloat f x = ret
  where
    CFloat ret = f (CFloat x)

onTwoCFloats :: (CFloat -> CFloat -> CFloat) -> Float -> Float -> Float
onTwoCFloats f x y = ret
  where
    CFloat ret = f (CFloat x) (CFloat y)
libmAtan2 :: Double -> Double -> Double
libmAtan2 = onTwoCDoubles c_atan2
libmFmin :: Double -> Double -> Double
libmFmin = onTwoCDoubles c_fmin
libmFmax :: Double -> Double -> Double
libmFmax = onTwoCDoubles c_fmax
libmPow :: Double -> Double -> Double
libmPow = onTwoCDoubles c_pow
libmFMod :: Double -> Double -> Double
libmFMod = onTwoCDoubles c_fmod
libmFModf :: Float -> Float -> Float
libmFModf = onTwoCFloats c_fmodf
libmAtan2f :: Float -> Float -> Float
libmAtan2f = onTwoCFloats c_atan2f
libmFminf :: Float -> Float -> Float
libmFminf = onTwoCFloats c_fminf
libmFmaxf :: Float -> Float -> Float
libmFmaxf = onTwoCFloats c_fmaxf
libmPowf :: Float -> Float -> Float
libmPowf = onTwoCFloats c_powf
libmLog :: Double -> Double
libmLog = onCDouble c_log
libmExp :: Double -> Double
libmExp = onCDouble c_exp
libmSin :: Double -> Double
libmSin = onCDouble c_sin
libmCos :: Double -> Double
libmCos = onCDouble c_cos
libmTan :: Double -> Double
libmTan = onCDouble c_tan
libmAsin :: Double -> Double
libmAsin = onCDouble c_asin
libmAcos :: Double -> Double
libmAcos = onCDouble c_acos
libmAtan :: Double -> Double
libmAtan = onCDouble c_atan
libmSinh :: Double -> Double
libmSinh = onCDouble c_sinh
libmCosh :: Double -> Double
libmCosh = onCDouble c_cosh
libmTanh :: Double -> Double
libmTanh = onCDouble c_tanh
libmLogf :: Float -> Float
libmLogf = onCFloat c_logf
libmExpf :: Float -> Float
libmExpf = onCFloat c_expf
libmSinf :: Float -> Float
libmSinf = onCFloat c_sinf
libmCosf :: Float -> Float
libmCosf = onCFloat c_cosf
libmTanf :: Float -> Float
libmTanf = onCFloat c_tanf
libmAsinf :: Float -> Float
libmAsinf = onCFloat c_asinf
libmAcosf :: Float -> Float
libmAcosf = onCFloat c_acosf
libmAtanf :: Float -> Float
libmAtanf = onCFloat c_atanf
libmSinhf :: Float -> Float
libmSinhf = onCFloat c_sinhf
libmCoshf :: Float -> Float
libmCoshf = onCFloat c_coshf
libmTanhf :: Float -> Float
libmTanhf = onCFloat c_tanhf
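Because these are thin wrappers over libm, a quick illustrative check is easy to state; the name demoLibm is invented, and the expected values are simply the standard libm results up to floating-point rounding.

-- atan2 1 1 is pi/4, fmod 7.5 2 is 1.5, and powf 2 10 is 1024.
demoLibm :: (Double, Double, Float)
demoLibm = (libmAtan2 1 1, libmFMod 7.5 2, libmPowf 2 10)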
| null | https://raw.githubusercontent.com/con-kitty/categorifier-c/a34ff2603529b4da7ad6ffe681dad095f102d1b9/Categorifier/C/KTypes/Libm.hs | haskell | * Direct FFI bindings
* Haskell type-converted bindings | # LANGUAGE ForeignFunctionInterface #
module Categorifier.C.KTypes.Libm
c_fmod,
c_atan2,
c_fmin,
c_fmax,
c_log,
c_exp,
c_pow,
c_sin,
c_cos,
c_tan,
c_asin,
c_acos,
c_atan,
c_sinh,
c_cosh,
c_tanh,
c_fmodf,
c_atan2f,
c_fminf,
c_fmaxf,
c_logf,
c_expf,
c_powf,
c_sinf,
c_cosf,
c_tanf,
c_asinf,
c_acosf,
c_atanf,
c_sinhf,
c_coshf,
c_tanhf,
libmFMod,
libmAtan2,
libmFmin,
libmFmax,
libmLog,
libmExp,
libmPow,
libmSin,
libmCos,
libmTan,
libmAsin,
libmAcos,
libmAtan,
libmSinh,
libmCosh,
libmTanh,
libmFModf,
libmAtan2f,
libmFminf,
libmFmaxf,
libmLogf,
libmExpf,
libmPowf,
libmSinf,
libmCosf,
libmTanf,
libmAsinf,
libmAcosf,
libmAtanf,
libmSinhf,
libmCoshf,
libmTanhf,
)
where
import Foreign.C.Types (CDouble (..), CFloat (..))
foreign import ccall unsafe "math.h fmod" c_fmod :: CDouble -> CDouble -> CDouble
foreign import ccall unsafe "math.h atan2" c_atan2 :: CDouble -> CDouble -> CDouble
foreign import ccall unsafe "math.h fmin" c_fmin :: CDouble -> CDouble -> CDouble
foreign import ccall unsafe "math.h fmax" c_fmax :: CDouble -> CDouble -> CDouble
foreign import ccall unsafe "math.h log" c_log :: CDouble -> CDouble
foreign import ccall unsafe "math.h exp" c_exp :: CDouble -> CDouble
foreign import ccall unsafe "math.h pow" c_pow :: CDouble -> CDouble -> CDouble
foreign import ccall unsafe "math.h sin" c_sin :: CDouble -> CDouble
foreign import ccall unsafe "math.h cos" c_cos :: CDouble -> CDouble
foreign import ccall unsafe "math.h tan" c_tan :: CDouble -> CDouble
foreign import ccall unsafe "math.h asin" c_asin :: CDouble -> CDouble
foreign import ccall unsafe "math.h acos" c_acos :: CDouble -> CDouble
foreign import ccall unsafe "math.h atan" c_atan :: CDouble -> CDouble
foreign import ccall unsafe "math.h sinh" c_sinh :: CDouble -> CDouble
foreign import ccall unsafe "math.h cosh" c_cosh :: CDouble -> CDouble
foreign import ccall unsafe "math.h tanh" c_tanh :: CDouble -> CDouble
foreign import ccall unsafe "math.h fmodf" c_fmodf :: CFloat -> CFloat -> CFloat
foreign import ccall unsafe "math.h atan2f" c_atan2f :: CFloat -> CFloat -> CFloat
foreign import ccall unsafe "math.h fminf" c_fminf :: CFloat -> CFloat -> CFloat
foreign import ccall unsafe "math.h fmaxf" c_fmaxf :: CFloat -> CFloat -> CFloat
foreign import ccall unsafe "math.h logf" c_logf :: CFloat -> CFloat
foreign import ccall unsafe "math.h expf" c_expf :: CFloat -> CFloat
foreign import ccall unsafe "math.h powf" c_powf :: CFloat -> CFloat -> CFloat
foreign import ccall unsafe "math.h sinf" c_sinf :: CFloat -> CFloat
foreign import ccall unsafe "math.h cosf" c_cosf :: CFloat -> CFloat
foreign import ccall unsafe "math.h tanf" c_tanf :: CFloat -> CFloat
foreign import ccall unsafe "math.h asinf" c_asinf :: CFloat -> CFloat
foreign import ccall unsafe "math.h acosf" c_acosf :: CFloat -> CFloat
foreign import ccall unsafe "math.h atanf" c_atanf :: CFloat -> CFloat
foreign import ccall unsafe "math.h sinhf" c_sinhf :: CFloat -> CFloat
foreign import ccall unsafe "math.h coshf" c_coshf :: CFloat -> CFloat
foreign import ccall unsafe "math.h tanhf" c_tanhf :: CFloat -> CFloat
onCDouble :: (CDouble -> CDouble) -> Double -> Double
onCDouble f x = ret
where
CDouble ret = f (CDouble x)
onTwoCDoubles :: (CDouble -> CDouble -> CDouble) -> Double -> Double -> Double
onTwoCDoubles f x y = ret
where
CDouble ret = f (CDouble x) (CDouble y)
onCFloat :: (CFloat -> CFloat) -> Float -> Float
onCFloat f x = ret
where
CFloat ret = f (CFloat x)
onTwoCFloats :: (CFloat -> CFloat -> CFloat) -> Float -> Float -> Float
onTwoCFloats f x y = ret
where
CFloat ret = f (CFloat x) (CFloat y)
libmAtan2 :: Double -> Double -> Double
libmAtan2 = onTwoCDoubles c_atan2
libmFmin :: Double -> Double -> Double
libmFmin = onTwoCDoubles c_fmin
libmFmax :: Double -> Double -> Double
libmFmax = onTwoCDoubles c_fmax
libmPow :: Double -> Double -> Double
libmPow = onTwoCDoubles c_pow
libmFMod :: Double -> Double -> Double
libmFMod = onTwoCDoubles c_fmod
libmFModf :: Float -> Float -> Float
libmFModf = onTwoCFloats c_fmodf
libmAtan2f :: Float -> Float -> Float
libmAtan2f = onTwoCFloats c_atan2f
libmFminf :: Float -> Float -> Float
libmFminf = onTwoCFloats c_fminf
libmFmaxf :: Float -> Float -> Float
libmFmaxf = onTwoCFloats c_fmaxf
libmPowf :: Float -> Float -> Float
libmPowf = onTwoCFloats c_powf
libmLog :: Double -> Double
libmLog = onCDouble c_log
libmExp :: Double -> Double
libmExp = onCDouble c_exp
libmSin :: Double -> Double
libmSin = onCDouble c_sin
libmCos :: Double -> Double
libmCos = onCDouble c_cos
libmTan :: Double -> Double
libmTan = onCDouble c_tan
libmAsin :: Double -> Double
libmAsin = onCDouble c_asin
libmAcos :: Double -> Double
libmAcos = onCDouble c_acos
libmAtan :: Double -> Double
libmAtan = onCDouble c_atan
libmSinh :: Double -> Double
libmSinh = onCDouble c_sinh
libmCosh :: Double -> Double
libmCosh = onCDouble c_cosh
libmTanh :: Double -> Double
libmTanh = onCDouble c_tanh
libmLogf :: Float -> Float
libmLogf = onCFloat c_logf
libmExpf :: Float -> Float
libmExpf = onCFloat c_expf
libmSinf :: Float -> Float
libmSinf = onCFloat c_sinf
libmCosf :: Float -> Float
libmCosf = onCFloat c_cosf
libmTanf :: Float -> Float
libmTanf = onCFloat c_tanf
libmAsinf :: Float -> Float
libmAsinf = onCFloat c_asinf
libmAcosf :: Float -> Float
libmAcosf = onCFloat c_acosf
libmAtanf :: Float -> Float
libmAtanf = onCFloat c_atanf
libmSinhf :: Float -> Float
libmSinhf = onCFloat c_sinhf
libmCoshf :: Float -> Float
libmCoshf = onCFloat c_coshf
libmTanhf :: Float -> Float
libmTanhf = onCFloat c_tanhf
|
1d653d633cb0beb5a483976ae6f2500580e2a6326716accb7fec9a84fa965630 | mauny/the-functional-approach-to-programming | xCour-B.ml | (* *)
(* Projet Formel *)
(* *)
CAML - light :
(* *)
(*************************************************************************)
(* *)
(* LIENS *)
45 rue d'Ulm
75005 PARIS
France
(* *)
(*************************************************************************)
#open "MLgraph";;
let fn =
(*Begin Font Description*)
Courier_Bold,{font_descr_filename="preloaded"; font_descr_name="Courier_Bold";
font_descr_width=7.2;
font_descr_height=12.0;
font_descr_descr= vect_of_list [
]; font_descr_descr_bbox= vect_of_list [
( (0.0,0.0),(0.0,0.0));
( (0.0,0.0),(0.0,0.0));
( (0.0,0.0),(0.0,0.0));
( (0.0,0.0),(0.0,0.0));
( (0.0,0.0),(0.0,0.0));
( (0.0,0.0),(0.0,0.0));
( (0.0,0.0),(0.0,0.0));
( (0.0,0.0),(0.0,0.0));
( (0.0,0.0),(0.0,0.0));
( (0.0,0.0),(0.0,0.0));
( (0.0,0.0),(0.0,0.0));
( (0.0,0.0),(0.0,0.0));
( (0.0,0.0),(0.0,0.0));
( (0.0,0.0),(0.0,0.0));
( (0.0,0.0),(0.0,0.0));
( (0.0,0.0),(0.0,0.0));
( (0.0,0.0),(0.0,0.0));
( (0.0,0.0),(0.0,0.0));
( (0.0,0.0),(0.0,0.0));
( (0.0,0.0),(0.0,0.0));
( (0.0,0.0),(0.0,0.0));
( (0.0,0.0),(0.0,0.0));
( (0.0,0.0),(0.0,0.0));
( (0.0,0.0),(0.0,0.0));
( (0.0,0.0),(0.0,0.0));
( (0.0,0.0),(0.0,0.0));
( (0.0,0.0),(0.0,0.0));
( (0.0,0.0),(0.0,0.0));
( (0.0,0.0),(0.0,0.0));
( (0.0,0.0),(0.0,0.0));
( (0.0,0.0),(0.0,0.0));
( (0.0,0.0),(0.0,0.0));
( (0.0,0.0),(0.0,0.0));
( (2.54401,-0.179993),(4.65601,6.864));
( (1.74001,3.564),(5.46001,6.744));
( (0.792007,-0.539993),(6.408,7.812));
( (1.104,-1.51199),(6.108,7.992));
( (0.0599976,-0.179993),(7.14,7.392));
( (0.552002,-0.179993),(6.43201,6.51601));
( (2.256,3.564),(4.944,6.744));
( (2.748,-1.224),(5.412,7.392));
( (1.78799,-1.224),(4.452,7.392));
( (1.092,2.62801),(6.108,7.21201));
( (0.972,0.0),(6.228,5.26801));
( (1.67999,-1.21201),(4.48801,1.968));
( (1.32001,2.556),(5.88,3.636));
( (2.472,-0.179993),(4.728,1.812));
( (1.25999,-0.923996),(5.952,7.51199));
( (1.164,-0.179993),(6.036,7.392));
( (1.092,0.0),(6.34801,7.392));
( (0.852005,0.0),(5.868,7.392));
( (0.876007,-0.179993),(5.892,7.392));
( (0.755997,0.0),(5.964,7.392));
( (0.960007,-0.179993),(6.132,7.21201));
( (1.2,-0.179993),(6.132,7.392));
( (0.779999,0.0),(5.808,7.21201));
( (1.116,-0.179993),(6.084,7.392));
( (1.06799,-0.179993),(6.0,7.392));
( (2.472,-0.179993),(4.728,4.86));
( (1.67999,-1.21201),(4.728,4.86));
( (0.792007,-0.179993),(6.036,5.436));
( (0.972,1.06799),(6.228,4.188));
( (1.164,-0.179993),(6.408,5.436));
( (1.29601,-0.179993),(5.892,6.96001));
( (0.311996,-0.179993),(6.888,7.392));
( (0.0119934,0.0),(7.188,6.744));
( (0.479996,0.0),(6.756,6.744));
( (0.384003,-0.216003),(6.64799,6.96001));
( (0.479996,0.0),(7.008,6.744));
( (0.479996,0.0),(6.53999,6.744));
( (0.647995,0.0),(6.66,6.744));
( (0.384003,-0.216003),(7.008,6.96001));
( (0.360001,0.0),(6.84,6.744));
( (1.04401,0.0),(6.15601,6.744));
( (0.563995,-0.216003),(7.092,6.744));
( (0.432007,0.0),(7.008,6.744));
( (0.587997,0.0),(6.81599,6.744));
( (0.0359955,0.0),(7.164,6.744));
( (0.216003,-0.143997),(7.2,6.744));
( (0.384003,-0.216003),(6.81599,6.96001));
( (0.695999,0.0),(6.588,6.744));
( (0.384003,-1.476),(6.81599,6.96001));
( (0.408005,0.0),(7.06799,6.744));
( (0.684006,-0.264008),(6.51601,6.98399));
( (0.371994,0.0),(6.828,6.744));
( (0.167999,-0.216003),(7.032,6.744));
( (-0.0359955,0.0),(7.23601,6.744));
( (-0.095993,0.0),(7.29601,6.744));
( (0.264008,0.0),(6.936,6.744));
( (0.264008,0.0),(6.948,6.744));
( (0.863998,0.0),(6.34801,6.744));
( (3.06,-1.224),(5.58,7.392));
( (1.25999,-0.923996),(5.952,7.51199));
( (1.62,-1.224),(4.14,7.392));
( (1.29601,2.98801),(5.90401,7.392));
( (0.0,-1.5),(7.2,-0.899994));
( (2.256,3.564),(4.944,6.744));
( (0.539993,-0.179993),(6.72,5.448));
( (0.119995,-0.179993),(6.888,7.51199));
( (0.600006,-0.179993),(6.468,5.508));
( (0.360001,-0.179993),(6.972,7.51199));
( (0.600006,-0.179993),(6.636,5.448));
( (1.116,0.0),(6.444,7.51199));
( (0.479996,-1.752),(6.84,5.448));
( (0.179993,0.0),(6.98399,7.51199));
( (1.04401,0.0),(6.15601,7.776));
( (0.876007,-1.752),(5.16,7.776));
( (0.360001,0.0),(6.89999,7.51199));
( (1.04401,0.0),(6.15601,7.51199));
( (-0.143997,0.0),(7.392,5.448));
( (0.335999,0.0),(6.98399,5.448));
( (0.479996,-0.179993),(6.72,5.448));
( (0.108002,-1.70399),(6.72,5.448));
( (0.360001,-1.70399),(6.972,5.448));
( (0.684006,0.0),(6.84,5.448));
( (0.912003,-0.203995),(6.3,5.508));
( (0.684006,-0.179993),(6.26401,6.744));
( (0.108002,-0.179993),(6.70799,5.26801));
( (0.108002,0.0),(7.092,5.26801));
( (-0.095993,0.0),(7.29601,5.26801));
( (0.192001,0.0),(7.008,5.26801));
( (0.0720062,-1.70399),(7.092,5.26801));
( (1.092,0.0),(6.12,5.26801));
( (2.03999,-1.224),(5.448,7.392));
( (3.06,-0.923996),(4.14,7.51199));
( (1.752,-1.224),(5.16,7.392));
( (0.972,1.48801),(6.24001,3.68401));
( (0.0,0.0),(0.0,0.0));
( (0.0,0.0),(0.0,0.0));
( (0.0119934,0.0),(7.188,8.93201));
( (0.0119934,0.0),(7.188,8.93201));
( (0.0119934,0.0),(7.188,8.884));
( (0.0119934,0.0),(7.188,8.632));
( (0.0119934,0.0),(7.188,8.5));
( (0.0119934,0.0),(7.188,9.136));
( (0.384003,-2.472),(6.64799,6.96001));
( (0.479996,0.0),(6.53999,8.93201));
( (0.479996,0.0),(6.53999,8.93201));
( (0.479996,0.0),(6.53999,8.884));
( (0.479996,0.0),(6.53999,8.5));
( (1.04401,0.0),(6.15601,8.93201));
( (1.04401,0.0),(6.15601,8.93201));
( (1.04401,0.0),(6.15601,8.884));
( (1.04401,0.0),(6.15601,8.5));
( (0.479996,0.0),(7.008,6.744));
( (0.216003,-0.143997),(7.2,8.632));
( (0.384003,-0.216003),(6.81599,8.93201));
( (0.384003,-0.216003),(6.81599,8.93201));
( (0.384003,-0.216003),(6.81599,8.884));
( (0.384003,-0.216003),(6.81599,8.632));
( (0.384003,-0.216003),(6.81599,8.5));
( (0.167999,-0.216003),(7.032,8.93201));
( (0.167999,-0.216003),(7.032,8.93201));
( (0.167999,-0.216003),(7.032,8.884));
( (0.167999,-0.216003),(7.032,8.5));
( (0.264008,0.0),(6.948,8.93201));
( (0.695999,0.0),(6.564,6.744));
( (0.108002,-1.70399),(6.70799,5.26801));
( (0.972,0.0),(6.24001,5.26801));
( (0.972,-0.179993),(6.228,5.436));
( (0.0,-0.216003),(7.2,6.96001));
( (2.54401,-1.752),(4.65601,5.388));
( (0.912003,-0.587997),(6.09599,7.368));
( (0.983994,-0.335999),(6.576,7.332));
( (0.360001,-0.720001),(6.85201,7.93201));
( (0.240005,0.0),(6.96001,7.092));
( (-0.240005,-1.57201),(6.744,7.392));
( (1.23599,-0.839996),(5.964,6.96001));
( (0.647995,0.587997),(6.552,6.20399));
( (2.84399,3.564),(4.356,6.744));
( (0.972,3.564),(6.228,6.744));
( (0.095993,0.839996),(6.636,5.35201));
( (1.692,0.839996),(5.508,5.35201));
( (1.692,0.839996),(5.508,5.35201));
( (0.264008,0.0),(6.996,7.51199));
( (0.264008,0.0),(6.996,7.51199));
( (0.0,-0.216003),(7.2,6.96001));
( (0.899994,2.556),(6.3,3.636));
( (1.392,-0.839996),(5.808,6.96001));
( (1.392,-0.839996),(5.808,6.96001));
( (2.472,1.632),(4.728,3.62399));
( (3.06,-0.923996),(4.14,7.51199));
( (0.192001,-0.839996),(6.79201,6.96001));
( (1.8,1.70399),(5.39999,5.03999));
( (2.256,-1.67999),(4.944,1.5));
( (0.972,-1.67999),(6.228,1.5));
( (0.972,3.564),(6.228,6.744));
( (0.563995,0.839996),(7.104,5.35201));
( (0.432007,-0.179993),(6.76801,1.15199));
( (-0.479996,-0.179993),(9.43201,7.392));
( (0.972,0.707993),(6.228,4.188));
( (1.308,-1.752),(5.90401,5.388));
( (1.896,2.75999),(5.304,7.392));
( (1.584,6.09599),(4.74001,7.93201));
( (2.46001,6.09599),(5.616,7.93201));
( (1.23599,5.79601),(5.964,7.884));
( (1.06799,5.916),(6.144,7.632));
( (1.056,6.06),(6.144,7.02));
( (0.996002,5.616),(6.20399,7.57201));
( (2.88,6.06),(4.32001,7.5));
( (1.776,6.06),(5.424,7.5));
( (1.836,2.75999),(5.112,7.392));
( (2.37601,5.772),(4.82401,8.136));
( (2.46001,-2.472),(4.644,0.0));
( (1.776,2.664),(5.076,7.392));
( (1.14,6.09599),(6.936,7.93201));
( (2.028,-2.388),(4.40401,0.0));
( (1.23599,5.916),(5.964,8.004));
( (0.0,2.556),(7.2,3.636));
( (0.972,0.0),(6.228,5.532));
( (-0.240005,-0.720001),(8.304,7.93201));
( (-0.240005,-0.720001),(8.10001,7.93201));
( (-0.240005,-0.720001),(8.10001,7.93201));
( (0.539993,-0.179993),(6.72,7.93201));
( (0.539993,-0.179993),(6.72,7.93201));
( (0.539993,-0.179993),(6.72,7.884));
( (0.539993,-0.179993),(6.72,7.632));
( (0.539993,-0.179993),(6.72,7.5));
( (0.539993,-0.179993),(6.72,8.136));
( (0.600006,-2.472),(6.468,5.508));
( (0.600006,-0.179993),(6.636,7.93201));
( (0.600006,-0.179993),(6.636,7.93201));
( (0.600006,-0.179993),(6.636,7.884));
( (0.600006,-0.179993),(6.636,7.5));
( (1.04401,0.0),(6.15601,7.93201));
( (-0.348007,0.0),(6.744,6.744));
( (1.04401,0.0),(6.15601,7.93201));
( (1.76401,2.35201),(5.436,6.96001));
( (1.04401,0.0),(6.15601,7.884));
( (1.04401,0.0),(6.15601,7.5));
( (0.815994,-0.324005),(6.396,7.51199));
( (0.335999,0.0),(6.98399,7.632));
( (0.587997,0.0),(6.81599,6.744));
( (0.384003,-0.264008),(6.81599,7.008));
( (0.0,0.0),(6.84,6.744));
( (1.76401,2.35201),(5.436,6.96001));
( (0.479996,-0.179993),(6.72,7.93201));
( (0.479996,-0.179993),(6.72,7.93201));
( (0.479996,-0.179993),(6.72,7.884));
( (0.479996,-0.179993),(6.72,7.632));
( (0.479996,-0.179993),(6.72,7.5));
( (0.0720062,-0.179993),(7.092,5.448));
( (0.108002,-0.179993),(6.70799,7.93201));
( (0.108002,-0.179993),(6.70799,7.93201));
( (0.108002,-0.179993),(6.70799,7.884));
( (1.04401,0.0),(6.15601,5.26801));
( (0.108002,-0.179993),(6.70799,7.5));
( (0.0720062,-1.70399),(7.092,7.93201));
( (1.04401,0.0),(6.15601,7.51199));
( (0.479996,-0.287994),(6.72,5.556));
( (-0.095993,-0.179993),(7.21201,5.448));
( (0.384003,-0.179993),(7.032,7.51199));
( (-0.0480042,-1.70399),(6.72,7.51199));
( (0.0720062,-1.70399),(7.092,7.5));
( (0.0,0.0),(0.0,0.0)) ]};;
(* End Font Description*)
try remove_font fn; add_font fn
with Failure ("remove_font : font unknown") -> add_font fn;;
| null | https://raw.githubusercontent.com/mauny/the-functional-approach-to-programming/1ec8bed5d33d3a67bbd67d09afb3f5c3c8978838/cl-75/MLGRAPH.DIR/xCour-B.ml | ocaml |
Projet Formel
***********************************************************************
LIENS
***********************************************************************
Begin Font Description
End Font Description | CAML - light :
45 rue d'Ulm
75005 PARIS
France
#open "MLgraph";;
let fn =
Courier_Bold,{font_descr_filename="preloaded"; font_descr_name="Courier_Bold";
font_descr_width=7.2;
font_descr_height=12.0;
font_descr_descr= vect_of_list [
]; font_descr_descr_bbox= vect_of_list [
( (0.0,0.0),(0.0,0.0));
( (0.0,0.0),(0.0,0.0));
( (0.0,0.0),(0.0,0.0));
( (0.0,0.0),(0.0,0.0));
( (0.0,0.0),(0.0,0.0));
( (0.0,0.0),(0.0,0.0));
( (0.0,0.0),(0.0,0.0));
( (0.0,0.0),(0.0,0.0));
( (0.0,0.0),(0.0,0.0));
( (0.0,0.0),(0.0,0.0));
( (0.0,0.0),(0.0,0.0));
( (0.0,0.0),(0.0,0.0));
( (0.0,0.0),(0.0,0.0));
( (0.0,0.0),(0.0,0.0));
( (0.0,0.0),(0.0,0.0));
( (0.0,0.0),(0.0,0.0));
( (0.0,0.0),(0.0,0.0));
( (0.0,0.0),(0.0,0.0));
( (0.0,0.0),(0.0,0.0));
( (0.0,0.0),(0.0,0.0));
( (0.0,0.0),(0.0,0.0));
( (0.0,0.0),(0.0,0.0));
( (0.0,0.0),(0.0,0.0));
( (0.0,0.0),(0.0,0.0));
( (0.0,0.0),(0.0,0.0));
( (0.0,0.0),(0.0,0.0));
( (0.0,0.0),(0.0,0.0));
( (0.0,0.0),(0.0,0.0));
( (0.0,0.0),(0.0,0.0));
( (0.0,0.0),(0.0,0.0));
( (0.0,0.0),(0.0,0.0));
( (0.0,0.0),(0.0,0.0));
( (0.0,0.0),(0.0,0.0));
( (2.54401,-0.179993),(4.65601,6.864));
( (1.74001,3.564),(5.46001,6.744));
( (0.792007,-0.539993),(6.408,7.812));
( (1.104,-1.51199),(6.108,7.992));
( (0.0599976,-0.179993),(7.14,7.392));
( (0.552002,-0.179993),(6.43201,6.51601));
( (2.256,3.564),(4.944,6.744));
( (2.748,-1.224),(5.412,7.392));
( (1.78799,-1.224),(4.452,7.392));
( (1.092,2.62801),(6.108,7.21201));
( (0.972,0.0),(6.228,5.26801));
( (1.67999,-1.21201),(4.48801,1.968));
( (1.32001,2.556),(5.88,3.636));
( (2.472,-0.179993),(4.728,1.812));
( (1.25999,-0.923996),(5.952,7.51199));
( (1.164,-0.179993),(6.036,7.392));
( (1.092,0.0),(6.34801,7.392));
( (0.852005,0.0),(5.868,7.392));
( (0.876007,-0.179993),(5.892,7.392));
( (0.755997,0.0),(5.964,7.392));
( (0.960007,-0.179993),(6.132,7.21201));
( (1.2,-0.179993),(6.132,7.392));
( (0.779999,0.0),(5.808,7.21201));
( (1.116,-0.179993),(6.084,7.392));
( (1.06799,-0.179993),(6.0,7.392));
( (2.472,-0.179993),(4.728,4.86));
( (1.67999,-1.21201),(4.728,4.86));
( (0.792007,-0.179993),(6.036,5.436));
( (0.972,1.06799),(6.228,4.188));
( (1.164,-0.179993),(6.408,5.436));
( (1.29601,-0.179993),(5.892,6.96001));
( (0.311996,-0.179993),(6.888,7.392));
( (0.0119934,0.0),(7.188,6.744));
( (0.479996,0.0),(6.756,6.744));
( (0.384003,-0.216003),(6.64799,6.96001));
( (0.479996,0.0),(7.008,6.744));
( (0.479996,0.0),(6.53999,6.744));
( (0.647995,0.0),(6.66,6.744));
( (0.384003,-0.216003),(7.008,6.96001));
( (0.360001,0.0),(6.84,6.744));
( (1.04401,0.0),(6.15601,6.744));
( (0.563995,-0.216003),(7.092,6.744));
( (0.432007,0.0),(7.008,6.744));
( (0.587997,0.0),(6.81599,6.744));
( (0.0359955,0.0),(7.164,6.744));
( (0.216003,-0.143997),(7.2,6.744));
( (0.384003,-0.216003),(6.81599,6.96001));
( (0.695999,0.0),(6.588,6.744));
( (0.384003,-1.476),(6.81599,6.96001));
( (0.408005,0.0),(7.06799,6.744));
( (0.684006,-0.264008),(6.51601,6.98399));
( (0.371994,0.0),(6.828,6.744));
( (0.167999,-0.216003),(7.032,6.744));
( (-0.0359955,0.0),(7.23601,6.744));
( (-0.095993,0.0),(7.29601,6.744));
( (0.264008,0.0),(6.936,6.744));
( (0.264008,0.0),(6.948,6.744));
( (0.863998,0.0),(6.34801,6.744));
( (3.06,-1.224),(5.58,7.392));
( (1.25999,-0.923996),(5.952,7.51199));
( (1.62,-1.224),(4.14,7.392));
( (1.29601,2.98801),(5.90401,7.392));
( (0.0,-1.5),(7.2,-0.899994));
( (2.256,3.564),(4.944,6.744));
( (0.539993,-0.179993),(6.72,5.448));
( (0.119995,-0.179993),(6.888,7.51199));
( (0.600006,-0.179993),(6.468,5.508));
( (0.360001,-0.179993),(6.972,7.51199));
( (0.600006,-0.179993),(6.636,5.448));
( (1.116,0.0),(6.444,7.51199));
( (0.479996,-1.752),(6.84,5.448));
( (0.179993,0.0),(6.98399,7.51199));
( (1.04401,0.0),(6.15601,7.776));
( (0.876007,-1.752),(5.16,7.776));
( (0.360001,0.0),(6.89999,7.51199));
( (1.04401,0.0),(6.15601,7.51199));
( (-0.143997,0.0),(7.392,5.448));
( (0.335999,0.0),(6.98399,5.448));
( (0.479996,-0.179993),(6.72,5.448));
( (0.108002,-1.70399),(6.72,5.448));
( (0.360001,-1.70399),(6.972,5.448));
( (0.684006,0.0),(6.84,5.448));
( (0.912003,-0.203995),(6.3,5.508));
( (0.684006,-0.179993),(6.26401,6.744));
( (0.108002,-0.179993),(6.70799,5.26801));
( (0.108002,0.0),(7.092,5.26801));
( (-0.095993,0.0),(7.29601,5.26801));
( (0.192001,0.0),(7.008,5.26801));
( (0.0720062,-1.70399),(7.092,5.26801));
( (1.092,0.0),(6.12,5.26801));
( (2.03999,-1.224),(5.448,7.392));
( (3.06,-0.923996),(4.14,7.51199));
( (1.752,-1.224),(5.16,7.392));
( (0.972,1.48801),(6.24001,3.68401));
( (0.0,0.0),(0.0,0.0));
( (0.0,0.0),(0.0,0.0));
( (0.0119934,0.0),(7.188,8.93201));
( (0.0119934,0.0),(7.188,8.93201));
( (0.0119934,0.0),(7.188,8.884));
( (0.0119934,0.0),(7.188,8.632));
( (0.0119934,0.0),(7.188,8.5));
( (0.0119934,0.0),(7.188,9.136));
( (0.384003,-2.472),(6.64799,6.96001));
( (0.479996,0.0),(6.53999,8.93201));
( (0.479996,0.0),(6.53999,8.93201));
( (0.479996,0.0),(6.53999,8.884));
( (0.479996,0.0),(6.53999,8.5));
( (1.04401,0.0),(6.15601,8.93201));
( (1.04401,0.0),(6.15601,8.93201));
( (1.04401,0.0),(6.15601,8.884));
( (1.04401,0.0),(6.15601,8.5));
( (0.479996,0.0),(7.008,6.744));
( (0.216003,-0.143997),(7.2,8.632));
( (0.384003,-0.216003),(6.81599,8.93201));
( (0.384003,-0.216003),(6.81599,8.93201));
( (0.384003,-0.216003),(6.81599,8.884));
( (0.384003,-0.216003),(6.81599,8.632));
( (0.384003,-0.216003),(6.81599,8.5));
( (0.167999,-0.216003),(7.032,8.93201));
( (0.167999,-0.216003),(7.032,8.93201));
( (0.167999,-0.216003),(7.032,8.884));
( (0.167999,-0.216003),(7.032,8.5));
( (0.264008,0.0),(6.948,8.93201));
( (0.695999,0.0),(6.564,6.744));
( (0.108002,-1.70399),(6.70799,5.26801));
( (0.972,0.0),(6.24001,5.26801));
( (0.972,-0.179993),(6.228,5.436));
( (0.0,-0.216003),(7.2,6.96001));
( (2.54401,-1.752),(4.65601,5.388));
( (0.912003,-0.587997),(6.09599,7.368));
( (0.983994,-0.335999),(6.576,7.332));
( (0.360001,-0.720001),(6.85201,7.93201));
( (0.240005,0.0),(6.96001,7.092));
( (-0.240005,-1.57201),(6.744,7.392));
( (1.23599,-0.839996),(5.964,6.96001));
( (0.647995,0.587997),(6.552,6.20399));
( (2.84399,3.564),(4.356,6.744));
( (0.972,3.564),(6.228,6.744));
( (0.095993,0.839996),(6.636,5.35201));
( (1.692,0.839996),(5.508,5.35201));
( (1.692,0.839996),(5.508,5.35201));
( (0.264008,0.0),(6.996,7.51199));
( (0.264008,0.0),(6.996,7.51199));
( (0.0,-0.216003),(7.2,6.96001));
( (0.899994,2.556),(6.3,3.636));
( (1.392,-0.839996),(5.808,6.96001));
( (1.392,-0.839996),(5.808,6.96001));
( (2.472,1.632),(4.728,3.62399));
( (3.06,-0.923996),(4.14,7.51199));
( (0.192001,-0.839996),(6.79201,6.96001));
( (1.8,1.70399),(5.39999,5.03999));
( (2.256,-1.67999),(4.944,1.5));
( (0.972,-1.67999),(6.228,1.5));
( (0.972,3.564),(6.228,6.744));
( (0.563995,0.839996),(7.104,5.35201));
( (0.432007,-0.179993),(6.76801,1.15199));
( (-0.479996,-0.179993),(9.43201,7.392));
( (0.972,0.707993),(6.228,4.188));
( (1.308,-1.752),(5.90401,5.388));
( (1.896,2.75999),(5.304,7.392));
( (1.584,6.09599),(4.74001,7.93201));
( (2.46001,6.09599),(5.616,7.93201));
( (1.23599,5.79601),(5.964,7.884));
( (1.06799,5.916),(6.144,7.632));
( (1.056,6.06),(6.144,7.02));
( (0.996002,5.616),(6.20399,7.57201));
( (2.88,6.06),(4.32001,7.5));
( (1.776,6.06),(5.424,7.5));
( (1.836,2.75999),(5.112,7.392));
( (2.37601,5.772),(4.82401,8.136));
( (2.46001,-2.472),(4.644,0.0));
( (1.776,2.664),(5.076,7.392));
( (1.14,6.09599),(6.936,7.93201));
( (2.028,-2.388),(4.40401,0.0));
( (1.23599,5.916),(5.964,8.004));
( (0.0,2.556),(7.2,3.636));
( (0.972,0.0),(6.228,5.532));
( (-0.240005,-0.720001),(8.304,7.93201));
( (-0.240005,-0.720001),(8.10001,7.93201));
( (-0.240005,-0.720001),(8.10001,7.93201));
( (0.539993,-0.179993),(6.72,7.93201));
( (0.539993,-0.179993),(6.72,7.93201));
( (0.539993,-0.179993),(6.72,7.884));
( (0.539993,-0.179993),(6.72,7.632));
( (0.539993,-0.179993),(6.72,7.5));
( (0.539993,-0.179993),(6.72,8.136));
( (0.600006,-2.472),(6.468,5.508));
( (0.600006,-0.179993),(6.636,7.93201));
( (0.600006,-0.179993),(6.636,7.93201));
( (0.600006,-0.179993),(6.636,7.884));
( (0.600006,-0.179993),(6.636,7.5));
( (1.04401,0.0),(6.15601,7.93201));
( (-0.348007,0.0),(6.744,6.744));
( (1.04401,0.0),(6.15601,7.93201));
( (1.76401,2.35201),(5.436,6.96001));
( (1.04401,0.0),(6.15601,7.884));
( (1.04401,0.0),(6.15601,7.5));
( (0.815994,-0.324005),(6.396,7.51199));
( (0.335999,0.0),(6.98399,7.632));
( (0.587997,0.0),(6.81599,6.744));
( (0.384003,-0.264008),(6.81599,7.008));
( (0.0,0.0),(6.84,6.744));
( (1.76401,2.35201),(5.436,6.96001));
( (0.479996,-0.179993),(6.72,7.93201));
( (0.479996,-0.179993),(6.72,7.93201));
( (0.479996,-0.179993),(6.72,7.884));
( (0.479996,-0.179993),(6.72,7.632));
( (0.479996,-0.179993),(6.72,7.5));
( (0.0720062,-0.179993),(7.092,5.448));
( (0.108002,-0.179993),(6.70799,7.93201));
( (0.108002,-0.179993),(6.70799,7.93201));
( (0.108002,-0.179993),(6.70799,7.884));
( (1.04401,0.0),(6.15601,5.26801));
( (0.108002,-0.179993),(6.70799,7.5));
( (0.0720062,-1.70399),(7.092,7.93201));
( (1.04401,0.0),(6.15601,7.51199));
( (0.479996,-0.287994),(6.72,5.556));
( (-0.095993,-0.179993),(7.21201,5.448));
( (0.384003,-0.179993),(7.032,7.51199));
( (-0.0480042,-1.70399),(6.72,7.51199));
( (0.0720062,-1.70399),(7.092,7.5));
( (0.0,0.0),(0.0,0.0)) ]};;
try remove_font fn; add_font fn
with Failure ("remove_font : font unknown") -> add_font fn;;
|
7caa441c936d0cdaed72f4b8fd3fdba3697bfb0010080eb2ac225616801b82b2 | smuenzel/opile | sexpify.ml | open! Core
let mknoloc = Location.mknoloc
let loc = Location.none
open Ppxlib
module Longident = struct
module T = struct
type t = longident =
| Lident of string
| Ldot of t * string
| Lapply of t * t
[@@deriving sexp]
include (Ppxlib.Longident : module type of struct include Ppxlib.Longident end with type t := t)
end
include T
include Comparable.Make(T)
let rec of_list_rev = function
| [] -> assert false
| [ x ] -> Lident x
| x :: xs -> Ldot (of_list_rev xs, x)
let of_list l = of_list_rev (List.rev l)
let dot (a : t) (b : t) =
(flatten_exn a) @ (flatten_exn b)
|> of_list
end
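(* Illustrative sketch, not part of the original source: [of_list] and [dot]
   build left-nested [Ldot] chains, e.g.
     of_list ["A"; "B"; "C"]                   = Ldot (Ldot (Lident "A", "B"), "C")
     dot (Lident "A") (Ldot (Lident "B", "C")) = Ldot (Ldot (Lident "A", "B"), "C")
   i.e. [dot a b] denotes the path A.B.C. *)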
let cleanup =
let replace =
Longident.Map.of_alist_exn
[ Longident.parse "Digest.t", Longident.parse "Caml_digest.t"
; Longident.parse "Type_immediacy.t", Longident.parse "Caml_type_immediacy.t"
; Longident.parse "Type_immediacy.Violation.t", Longident.parse "Caml_type_immediacy.Violation.t"
]
in
let filter_attr attrs =
List.filter attrs
~f:(fun attr ->
match attr.attr_name.txt with
| "ocaml.doc" -> false
| _ -> true
)
in
object
inherit Ast_traverse.map as super
method! longident lident =
Map.find replace lident
|> Option.value ~default:lident
method! attributes a =
let result = super#attributes a in
filter_attr result
end
type t =
{ types : core_type list String.Map.t
; modules : Longident.Set.t
; has_values : bool
}
let empty =
{ types = String.Map.empty
; modules = Longident.Set.empty
; has_values = false
}
let typename : type_declaration -> _ = function
| { ptype_name = { txt; _}; ptype_params; _} ->
let params = List.map ptype_params ~f:fst in
txt, params
let rec map_last ~f = function
| [] -> []
| [ x ] -> [ f x ]
| x :: xs -> x :: (map_last ~f xs)
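(* For example (sketch, not in the original): [map_last ~f:(fun s -> s ^ "!") ["a"; "b"; "c"]]
   evaluates to ["a"; "b"; "c!"] -- only the last element is transformed. Below it is
   used to attach [@@deriving sexp_of] to the final declaration of a [type ... and ...]
   group, which is how a deriver is conventionally applied to the whole group. *)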
let create_ocamlcommon_ident =
let global_modules = Longident.Set.of_list [ ] in
fun ~module_name ~typename ->
if Set.mem global_modules module_name
then
mknoloc
(Longident.Ldot (module_name, typename))
else
mknoloc
(Longident.Ldot ((Longident.dot (Lident "Compiler_without_sexp") module_name), typename))
let add_manifest ~module_name (td : type_declaration) =
match td.ptype_kind, td.ptype_manifest with
| (Ptype_variant _ | Ptype_record _), _ ->
let params = List.map td.ptype_params ~f:fst in
let ptype_manifest =
Ast_helper.Typ.constr
(create_ocamlcommon_ident ~module_name ~typename:td.ptype_name.txt)
params
|> Some
in
{ td with
ptype_manifest
; ptype_attributes = []
}
| Ptype_abstract, None ->
let params = List.map td.ptype_params ~f:fst in
let ptype_manifest =
Ast_helper.Typ.constr
~attrs:[Ast_helper.Attr.mk (mknoloc "sexp.opaque") (PStr [])]
(create_ocamlcommon_ident ~module_name ~typename:td.ptype_name.txt)
params
|> Some
in
{ td with
ptype_manifest
; ptype_attributes = []
}
| Ptype_abstract, _ ->
{ td with
ptype_attributes = []
}
| Ptype_open, _ ->
{ td with
ptype_attributes = []
}
let add_deriving_sexp (td : type_declaration) =
let attribute =
Ast_helper.Attr.mk (mknoloc "deriving") (PStr [%str sexp_of]);
in
{ td with ptype_attributes = [ attribute ] }
let typesubst ~module_name name params : with_constraint =
let manifest =
Ast_helper.Typ.constr
(create_ocamlcommon_ident ~module_name ~typename:name)
params
in
Pwith_typesubst
(mknoloc (Lident name), Ast_helper.Type.mk
~params:(List.map params ~f:(fun p -> p, Invariant))
~manifest (mknoloc "x"))
let modsubst ~module_name:_ subst_module_name : with_constraint =
Pwith_modsubst
( (mknoloc (Lident (Longident.last_exn subst_module_name)))
, (mknoloc subst_module_name)
)
let mk_inc ~module_name types modules =
let open Ast_helper in
let modname = mknoloc (Longident.dot (Lident "Compiler_without_sexp") module_name) in
Str.include_
(Incl.mk
(Mod.constraint_
(Mod.ident modname)
(Mty.with_
(Mty.typeof_
(Mod.structure [ Str.include_ (Incl.mk (Mod.ident modname)) ]))
((Map.fold ~init:[] ~f:(fun ~key ~data acc -> typesubst ~module_name key data :: acc) types |> List.rev)
@ (List.map ~f:(modsubst ~module_name) (Set.to_list modules))
)
)
)
)
let rec convert_sig ~module_name signature : structure_item list =
let {types; modules; has_values} , as_struct =
List.fold_map signature ~init:empty ~f:(traverse_sig ~module_name)
in
let as_struct =
List.concat as_struct
|> cleanup#structure
in
if not has_values
then as_struct
else begin
as_struct
@ [ mk_inc ~module_name types modules ]
end
and traverse_sig ~module_name ({ types; modules; has_values = _ } as acc) sig_element =
let { psig_desc; _ } = sig_element in
match psig_desc with
| Psig_type (rec_flag,sig_types) ->
let typenames = List.map sig_types ~f:typename in
let types = List.fold typenames ~init:types ~f:(fun map (key, data) -> Map.add_exn map ~key ~data) in
let str_types =
List.map sig_types ~f:(add_manifest ~module_name)
|> map_last ~f:add_deriving_sexp
|> Ast_helper.Str.type_ rec_flag
|> List.return
in
{ acc with types}, str_types
| Psig_module {pmd_name = { txt = pmd_name; _}; pmd_type = { pmty_desc = Pmty_alias alias; _} ; _ }->
let converted =
let open Ast_helper in
Str.module_ (Mb.mk (mknoloc pmd_name) (Mod.ident alias))
in
let modules =
let module_name = Longident.Ldot (module_name, pmd_name) in
let modname = Longident.dot (Lident "Compiler_without_sexp") module_name in
Set.add modules modname
in
{ acc with modules }, [ converted ]
| Psig_module {pmd_name = { txt = pmd_name; _}; pmd_type = { pmty_desc = Pmty_signature msig ; _} ; _ }->
let module_name = Longident.Ldot (module_name, pmd_name) in
let converted =
convert_sig ~module_name msig
in
let converted =
let open Ast_helper in
Str.module_ (Mb.mk (mknoloc pmd_name) (Mod.structure converted))
in
let modules =
let modname = Longident.dot (Lident "Compiler_without_sexp") module_name in
Set.add modules modname
in
{ acc with modules}, [ converted ]
| Psig_module psm ->
module_special_case ~module_name ~acc psm
| Psig_open { popen_expr = { loc = _; txt = Lident mod_ }
; popen_override = _
; popen_loc = _
; popen_attributes = _
} ->
let result =
let open Ast_helper in
Str.open_ (Opn.mk ~override:Override (Mod.ident (mknoloc (Lident mod_))))
in
acc, [ result ]
| Psig_open _
|Psig_typesubst _|Psig_typext _|Psig_exception _
|Psig_modsubst _|Psig_recmodule _|Psig_modtype _
|Psig_include _|Psig_class _|Psig_class_type _|Psig_attribute _
|Psig_extension (_, _) ->
acc, []
| Psig_value _ ->
{ acc with has_values = true }, []
and module_special_case ~module_name ~acc psm =
let p =
let open Ast_pattern in
pmty_with
(pmty_ident __)
(pwith_type
(lident __)
(type_declaration
~name:__
~params:nil
~cstrs:nil
~kind:ptype_abstract
~private_:public
~manifest:(some __)
)
^:: nil)
in
match psm with
| {pmd_name = { txt = pmd_name; _}; pmd_type; pmd_loc; _ }->
Ast_pattern.parse p pmd_loc pmd_type
~on_error:(fun () ->
(acc, []))
(fun left right _ keytype ->
match left, right with
| Ldot (Lident ("Map"), "S"), _ ->
let open Ast_helper in
let modname =
Longident.dot (Lident "Compiler_without_sexp") (Ldot (module_name, pmd_name))
in
let expr =
Str.module_
(Mb.mk
(mknoloc pmd_name)
(Mod.structure
([ Str.include_ (Ast_helper.Incl.mk (Mod.ident (mknoloc modname)))
]
@ [%str
let sexp_of_t sexp_of_a t =
fold (fun key data acc ->
(key,data) :: acc) t []
|> [%sexp_of: ([%t keytype] * a) list]
]
)
)
)
in
let modules = Set.add acc.modules modname in
{ acc with modules}
, [ expr ]
| Ldot (Lident ("Set"), "S"), _ ->
let open Ast_helper in
let modname =
Longident.dot (Lident "Compiler_without_sexp") (Ldot (module_name, pmd_name))
in
let expr =
Str.module_
(Mb.mk
(mknoloc pmd_name)
(Mod.structure
([ Str.include_ (Ast_helper.Incl.mk (Mod.ident (mknoloc modname)))
]
@ [%str
let sexp_of_t t =
fold (fun key acc ->
key :: acc) t []
|> [%sexp_of: [%t keytype] list]
]
)
)
)
in
let modules = Set.add acc.modules modname in
{ acc with modules}
, [ expr ]
| _ ->
acc, [])
let sexpify_file filename =
let module_name =
(String.chop_suffix_exn ~suffix:".mli" (Filename.basename filename)
|> String.capitalize
|> Longident.Lident
)
in
let interface =
In_channel.with_file filename
~f:(fun channel ->
let lexbuf = Lexing.from_channel channel in
Parse.interface lexbuf
)
in
let result =
[%str open! Core ]
@ (convert_sig ~module_name interface)
in
Format.fprintf
Format.str_formatter "(* This file is generated from %s using sexpify *)\n" filename;
result
|> Pprintast.structure Format.str_formatter;
Format.flush_str_formatter ()
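(* Rough usage sketch (assumed, not from the original): [sexpify_file "asttypes.mli"]
   would parse the interface, emit a structure that re-exports each type from
   Compiler_without_sexp with [@@deriving sexp_of], and return it as a string.
   The filename here is only an illustrative assumption. *)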
let ocamlformat string =
let process = Unix.create_process ~prog:"ocamlformat" ~args:[ "-p"; "janestreet"; "--impl"; "-" ] in
let c = Unix.out_channel_of_descr process.stdin in
Out_channel.output_string c string;
Out_channel.close c;
In_channel.input_all (Unix.in_channel_of_descr process.stdout)
let command : Command.t =
let open Command.Let_syntax in
Command.basic
~summary:""
[%map_open
let filename = anon ("FILENAME" %: string)
in
fun () ->
sexpify_file filename
|> ocamlformat
|> print_endline
]
let () =
Command.run command
| null | https://raw.githubusercontent.com/smuenzel/opile/190ca86df6440550d0fddf0222e16ba2e52bca64/sexpify/sexpify.ml | ocaml | open! Core
|
|
dc32822c8dd9d9c8889f1ad2a3b20cd19a3277d6fa1cda5c33181dc436da1a93 | mstksg/inCode | TagIndex.hs | {-# LANGUAGE ImplicitParams #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
module Blog.View.TagIndex where
import Blog.Types
import Blog.Util
import Blog.Util.Tag
import Blog.View
import Blog.View.Archive
import Control.Monad
import Data.String
import Text.Blaze.Html5 ((!))
import qualified Data.Text as T
import qualified Text.Blaze.Html5 as H
import qualified Text.Blaze.Html5.Attributes as A
data TagIndexInfo = TII
{ tiiType :: TagType
, tiiTags :: [(Tag, Maybe Entry)]
, tiiRecents :: [Entry]
}
viewTagIndex :: (?config :: Config) => TagIndexInfo -> H.Html
viewTagIndex TII{..} = do
H.div ! A.class_ "archive-sidebar unit one-of-four" $
viewArchiveSidebar tiiRecents (Just (AIndTagged tiiType))
H.section ! A.class_ "archive-section unit three-of-four" ! mainSection $ do
H.header ! A.class_ "tile" $
H.h1 $
case tiiType of
GeneralTag -> "Tags"
CategoryTag -> "Categories"
SeriesTag -> "Series"
H.ul ! A.class_ ulClass $
if null tiiTags
then
"No entries yet for this tag!"
else
mapM_ (uncurry (tagIndexLi tiiType)) tiiTags
where
ulClass = case tiiType of
GeneralTag -> "tag-index tile tag-list"
CategoryTag -> "category-index"
SeriesTag -> "series-index tile"
tagIndexLi
:: (?config :: Config)
=> TagType
-> Tag
-> Maybe Entry
-> H.Html
tagIndexLi tt t@Tag{..} recent =
H.li ! A.class_ liClass $
case tt of
GeneralTag ->
H.a ! A.href (H.textValue $ renderUrl (T.pack (tagUrl t)))
! A.class_ "tag-a-tag"
$ do
H.toHtml (tagPrettyLabel t)
H.preEscapedToHtml (" " :: T.Text)
"(" :: H.Html
H.toHtml (show (length tagEntries))
")" :: H.Html
_ -> do
H.header $ do
H.h2 $
H.a ! A.href (H.textValue $ renderUrl (T.pack (tagUrl t)))
$ H.toHtml (tagPrettyLabel t)
H.div ! A.class_ "tag-entry-count" $
H.toHtml $
case tt of
CategoryTag -> "> " ++ show (length tagEntries) ++ " entries"
SeriesTag -> "(" ++ show (length tagEntries) ++ " entries)"
_ -> ""
H.div ! A.class_ "tag-description" $
sequence_ (htmlDescription t)
H.footer $
forM_ recent $ \Entry{..} -> do
H.div $ do
H.span ! A.class_ "recent-link" $ do
H.preEscapedToHtml ("Most recent — " :: T.Text)
H.a ! A.href (fromString $ renderUrl' entryCanonical) $
H.toHtml entryTitle
forM_ entryPostTime $ \posted ->
H.span ! A.class_ "recent-time" $ do
"(" :: H.Html
H.time
! A.datetime (H.textValue $ T.pack $ renderDatetimeTime posted)
! A.pubdate ""
! A.class_ "pubdate"
$ H.toHtml (renderShortFriendlyTime posted)
")" :: H.Html
where
liClass = case tt of
CategoryTag -> "tile"
_ -> ""
| null | https://raw.githubusercontent.com/mstksg/inCode/e1f80a3dfd83eaa2b817dc922fd7f331cd1ece8a/src/Blog/View/TagIndex.hs | haskell | # LANGUAGE ImplicitParams #
# LANGUAGE OverloadedStrings # | # LANGUAGE RecordWildCards #
|
ddc5df331693e483bb38e86ae8e508ad9d18269ab7ae896776c9afc29f5810ea | billstclair/trubanc-lisp | packages.lisp | ;;;; -*- Mode: LISP; Syntax: COMMON-LISP; Package: CL-USER; Base: 10 -*-
;;;; $Header: /usr/local/cvsrep/hunchentoot/packages.lisp,v 1.34 2008/02/13 16:02:18 edi Exp $
;;;; Copyright (c) 2004-2009, Dr. Edmund Weitz. All rights reserved.
;;; Redistribution and use in source and binary forms, with or without
;;; modification, are permitted provided that the following conditions
;;; are met:
;;; * Redistributions of source code must retain the above copyright
;;; notice, this list of conditions and the following disclaimer.
;;; * Redistributions in binary form must reproduce the above
;;; copyright notice, this list of conditions and the following
;;; disclaimer in the documentation and/or other materials
;;; provided with the distribution.
;;; THIS SOFTWARE IS PROVIDED BY THE AUTHOR 'AS IS' AND ANY EXPRESSED
;;; OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
;;; WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
;;; ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY
;;; DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
;;; DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE
;;; GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
;;; INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
;;; WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
;;; NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
;;; SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
(in-package :cl-user)
(defpackage "HUNCHENTOOT"
(:nicknames "TBNL")
(:use :cl :cl-ppcre :chunga :flexi-streams :url-rewrite)
(:shadow "DEFCONSTANT"
"URL-ENCODE")
;; see ASDF system definition
(:import-from :hunchentoot-asd :*hunchentoot-version*)
#+:lispworks
(:import-from :lw "WITH-UNIQUE-NAMES" "WHEN-LET")
(:export "*ACCEPTOR*"
"*ACCESS-LOG-PATHNAME*"
"*APPROVED-RETURN-CODES*"
#+:lispworks
"*CLEANUP-FUNCTION*"
#+:lispworks
"*CLEANUP-INTERVAL*"
"*CONTENT-TYPES-FOR-URL-REWRITE*"
"*DEFAULT-CONNECTION-TIMEOUT*"
"*DEFAULT-CONTENT-TYPE*"
"*DEFAULT-HANDLER*"
"*DISPATCH-TABLE*"
"*FILE-UPLOAD-HOOK*"
"*HANDLE-HTTP-ERRORS-P*"
"*HEADER-STREAM*"
"*HTTP-ERROR-HANDLER*"
"*HUNCHENTOOT-DEFAULT-EXTERNAL-FORMAT*"
"*LISP-ERRORS-LOG-LEVEL*"
"*LISP-WARNINGS-LOG-LEVEL*"
"*LISTENER*"
"*LOG-LISP-ERRORS-P*"
"*LOG-LISP-WARNINGS-P*"
"*MESSAGE-LOG-PATHNAME*"
"*METHODS-FOR-POST-PARAMETERS*"
"*REPLY*"
"*REQUEST*"
"*REWRITE-FOR-SESSION-URLS*"
"*SESSION*"
"*SESSION-GC-FREQUENCY*"
"*SESSION-MAX-TIME*"
"*SESSION-REMOVAL-HOOK*"
"*SESSION-SECRET*"
"*SHOW-LISP-ERRORS-P*"
"*TMP-DIRECTORY*"
"*USE-REMOTE-ADDR-FOR-SESSIONS*"
"*USE-USER-AGENT-FOR-SESSIONS*"
"+HTTP-ACCEPTED+"
"+HTTP-AUTHORIZATION-REQUIRED+"
"+HTTP-BAD-GATEWAY+"
"+HTTP-BAD-REQUEST+"
"+HTTP-CONFLICT+"
"+HTTP-CONTINUE+"
"+HTTP-CREATED+"
"+HTTP-EXPECTATION-FAILED+"
"+HTTP-FAILED-DEPENDENCY+"
"+HTTP-FORBIDDEN+"
"+HTTP-GATEWAY-TIME-OUT+"
"+HTTP-GONE+"
"+HTTP-INTERNAL-SERVER-ERROR+"
"+HTTP-LENGTH-REQUIRED+"
"+HTTP-METHOD-NOT-ALLOWED+"
"+HTTP-MOVED-PERMANENTLY+"
"+HTTP-MOVED-TEMPORARILY+"
"+HTTP-MULTI-STATUS+"
"+HTTP-MULTIPLE-CHOICES+"
"+HTTP-NO-CONTENT+"
"+HTTP-NON-AUTHORITATIVE-INFORMATION+"
"+HTTP-NOT-ACCEPTABLE+"
"+HTTP-NOT-FOUND+"
"+HTTP-NOT-IMPLEMENTED+"
"+HTTP-NOT-MODIFIED+"
"+HTTP-OK+"
"+HTTP-PARTIAL-CONTENT+"
"+HTTP-PAYMENT-REQUIRED+"
"+HTTP-PRECONDITION-FAILED+"
"+HTTP-PROXY-AUTHENTICATION-REQUIRED+"
"+HTTP-REQUEST-ENTITY-TOO-LARGE+"
"+HTTP-REQUEST-TIME-OUT+"
"+HTTP-REQUEST-URI-TOO-LARGE+"
"+HTTP-REQUESTED-RANGE-NOT-SATISFIABLE+"
"+HTTP-RESET-CONTENT+"
"+HTTP-SEE-OTHER+"
"+HTTP-SERVICE-UNAVAILABLE+"
"+HTTP-SWITCHING-PROTOCOLS+"
"+HTTP-TEMPORARY-REDIRECT+"
"+HTTP-UNSUPPORTED-MEDIA-TYPE+"
"+HTTP-USE-PROXY+"
"+HTTP-VERSION-NOT-SUPPORTED+"
"ABORT-REQUEST-HANDLER"
"ACCEPTOR"
"ACCEPTOR-ACCESS-LOGGER"
"ACCEPTOR-ADDRESS"
"ACCEPT-CONNECTIONS"
"ACCEPTOR-REQUEST-DISPATCHER"
"ACCEPTOR-INPUT-CHUNKING-P"
"ACCEPTOR-MESSAGE-LOGGER"
"ACCEPTOR-NAME"
"ACCEPTOR-OUTPUT-CHUNKING-P"
"ACCEPTOR-PERSISTENT-CONNECTIONS-P"
"ACCEPTOR-PORT"
"ACCEPTOR-READ-TIMEOUT"
"ACCEPTOR-REPLY-CLASS"
"ACCEPTOR-REQUEST-CLASS"
"ACCEPTOR-SSL-P"
#-:hunchentoot-no-ssl "ACCEPTOR-SSL-CERTIFICATE-FILE"
#-:hunchentoot-no-ssl "ACCEPTOR-SSL-PRIVATEKEY-FILE"
#-:hunchentoot-no-ssl "ACCEPTOR-SSL-PRIVATEKEY-PASSWORD"
"ACCEPTOR-WRITE-TIMEOUT"
"AUTHORIZATION"
"AUX-REQUEST-VALUE"
"CONTENT-LENGTH"
"CONTENT-LENGTH*"
"CONTENT-TYPE"
"CONTENT-TYPE*"
"COOKIE-DOMAIN"
"COOKIE-EXPIRES"
"COOKIE-HTTP-ONLY"
"COOKIE-IN"
"COOKIE-NAME"
"COOKIE-OUT"
"COOKIE-PATH"
"COOKIE-SECURE"
"COOKIE-VALUE"
"COOKIES-IN"
"COOKIES-IN*"
"COOKIES-OUT"
"COOKIES-OUT*"
"CREATE-FOLDER-DISPATCHER-AND-HANDLER"
"CREATE-PREFIX-DISPATCHER"
"CREATE-REGEX-DISPATCHER"
"CREATE-STATIC-FILE-DISPATCHER-AND-HANDLER"
"DEFAULT-DISPATCHER"
"DEFINE-EASY-HANDLER"
"DELETE-AUX-REQUEST-VALUE"
"DELETE-SESSION-VALUE"
"DISPATCH-EASY-HANDLERS"
"ESCAPE-FOR-HTML"
"EXECUTE-ACCEPTOR"
"GET-PARAMETER"
"GET-PARAMETERS"
"GET-PARAMETERS*"
"HANDLE-INCOMING-CONNECTION"
"HANDLE-IF-MODIFIED-SINCE"
"HANDLE-STATIC-FILE"
"HEADER-IN"
"HEADER-IN*"
"HEADER-OUT"
"HEADERS-IN"
"HEADERS-IN*"
"HEADERS-OUT"
"HEADERS-OUT*"
"HOST"
"HTTP-TOKEN-P"
"HUNCHENTOOT-CONDITION"
"HUNCHENTOOT-ERROR"
"HUNCHENTOOT-WARNING"
"INITIALIZE-CONNECTION-STREAM"
"LOG-MESSAGE"
"MIME-TYPE"
"NEXT-SESSION-ID"
"NO-CACHE"
"ONE-THREAD-PER-CONNECTION-TASKMASTER"
"PARAMETER"
"PARAMETER-ERROR"
"POST-PARAMETER"
"POST-PARAMETERS"
"POST-PARAMETERS*"
"PROCESS-CONNECTION"
"PROCESS-REQUEST"
"QUERY-STRING"
"QUERY-STRING*"
"RAW-POST-DATA"
"REAL-REMOTE-ADDR"
"REASON-PHRASE"
"RECOMPUTE-REQUEST-PARAMETERS"
"REDIRECT"
"REFERER"
"REMOTE-ADDR"
"REMOTE-ADDR*"
"REMOTE-PORT"
"REMOTE-PORT*"
"REMOVE-SESSION"
"REPLY"
"REPLY-EXTERNAL-FORMAT"
"REPLY-EXTERNAL-FORMAT*"
"REQUEST"
"REQUEST-ACCEPTOR"
"REQUEST-METHOD"
"REQUEST-METHOD*"
"REQUEST-URI"
"REQUEST-URI*"
"REQUIRE-AUTHORIZATION"
"RESET-CONNECTION-STREAM"
"RESET-SESSIONS"
"RESET-SESSION-SECRET"
"RETURN-CODE"
"RETURN-CODE*"
"RFC-1123-DATE"
"SCRIPT-NAME"
"SCRIPT-NAME*"
"SEND-HEADERS"
"SERVER-PROTOCOL"
"SERVER-PROTOCOL*"
"SESSION"
"SESSION-COOKIE-NAME"
"SESSION-COOKIE-VALUE"
"SESSION-CREATED"
"SESSION-DB"
"SESSION-DB-LOCK"
"SESSION-GC"
"SESSION-MAX-TIME"
"SESSION-REMOTE-ADDR"
"SESSION-TOO-OLD-P"
"SESSION-USER-AGENT"
"SESSION-VALUE"
"SESSION-VERIFY"
"SET-COOKIE"
"SET-COOKIE*"
"SHUTDOWN"
"SINGLE-THREADED-TASKMASTER"
#-:hunchentoot-no-ssl "SSL-ACCEPTOR"
"SSL-P"
"START"
"START-LISTENING"
"START-SESSION"
"STOP"
"TASKMASTER"
"TASKMASTER-ACCEPTOR"
"URL-DECODE"
"URL-ENCODE"
"USER-AGENT"))
| null | https://raw.githubusercontent.com/billstclair/trubanc-lisp/5436d2eca5b1ed10bc47eec7080f6cb90f98ca65/systems/hunchentoot-1.0.0/packages.lisp | lisp | Syntax : COMMON - LISP ; Package : CL - USER ; Base : 10 -*-
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions
are met:
* Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above
copyright notice, this list of conditions and the following
disclaimer in the documentation and/or other materials
provided with the distribution.
THIS SOFTWARE IS PROVIDED BY THE AUTHOR 'AS IS' AND ANY EXPRESSED
OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY
DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE
GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
see ASDF system definition | $ Header : /usr / local / cvsrep / hunchentoot / packages.lisp , v 1.34 2008/02/13 16:02:18 edi Exp $
Copyright ( c ) 2004 - 2009 , Dr. . All rights reserved .
DIRECT , INDIRECT , INCIDENTAL , SPECIAL , EXEMPLARY , OR CONSEQUENTIAL
INTERRUPTION ) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY ,
(in-package :cl-user)
(defpackage "HUNCHENTOOT"
(:nicknames "TBNL")
(:use :cl :cl-ppcre :chunga :flexi-streams :url-rewrite)
(:shadow "DEFCONSTANT"
"URL-ENCODE")
(:import-from :hunchentoot-asd :*hunchentoot-version*)
#+:lispworks
(:import-from :lw "WITH-UNIQUE-NAMES" "WHEN-LET")
(:export "*ACCEPTOR*"
"*ACCESS-LOG-PATHNAME*"
"*APPROVED-RETURN-CODES*"
#+:lispworks
"*CLEANUP-FUNCTION*"
#+:lispworks
"*CLEANUP-INTERVAL*"
"*CONTENT-TYPES-FOR-URL-REWRITE*"
"*DEFAULT-CONNECTION-TIMEOUT*"
"*DEFAULT-CONTENT-TYPE*"
"*DEFAULT-HANDLER*"
"*DISPATCH-TABLE*"
"*FILE-UPLOAD-HOOK*"
"*HANDLE-HTTP-ERRORS-P*"
"*HEADER-STREAM*"
"*HTTP-ERROR-HANDLER*"
"*HUNCHENTOOT-DEFAULT-EXTERNAL-FORMAT*"
"*LISP-ERRORS-LOG-LEVEL*"
"*LISP-WARNINGS-LOG-LEVEL*"
"*LISTENER*"
"*LOG-LISP-ERRORS-P*"
"*LOG-LISP-WARNINGS-P*"
"*MESSAGE-LOG-PATHNAME*"
"*METHODS-FOR-POST-PARAMETERS*"
"*REPLY*"
"*REQUEST*"
"*REWRITE-FOR-SESSION-URLS*"
"*SESSION*"
"*SESSION-GC-FREQUENCY*"
"*SESSION-MAX-TIME*"
"*SESSION-REMOVAL-HOOK*"
"*SESSION-SECRET*"
"*SHOW-LISP-ERRORS-P*"
"*TMP-DIRECTORY*"
"*USE-REMOTE-ADDR-FOR-SESSIONS*"
"*USE-USER-AGENT-FOR-SESSIONS*"
"+HTTP-ACCEPTED+"
"+HTTP-AUTHORIZATION-REQUIRED+"
"+HTTP-BAD-GATEWAY+"
"+HTTP-BAD-REQUEST+"
"+HTTP-CONFLICT+"
"+HTTP-CONTINUE+"
"+HTTP-CREATED+"
"+HTTP-EXPECTATION-FAILED+"
"+HTTP-FAILED-DEPENDENCY+"
"+HTTP-FORBIDDEN+"
"+HTTP-GATEWAY-TIME-OUT+"
"+HTTP-GONE+"
"+HTTP-INTERNAL-SERVER-ERROR+"
"+HTTP-LENGTH-REQUIRED+"
"+HTTP-METHOD-NOT-ALLOWED+"
"+HTTP-MOVED-PERMANENTLY+"
"+HTTP-MOVED-TEMPORARILY+"
"+HTTP-MULTI-STATUS+"
"+HTTP-MULTIPLE-CHOICES+"
"+HTTP-NO-CONTENT+"
"+HTTP-NON-AUTHORITATIVE-INFORMATION+"
"+HTTP-NOT-ACCEPTABLE+"
"+HTTP-NOT-FOUND+"
"+HTTP-NOT-IMPLEMENTED+"
"+HTTP-NOT-MODIFIED+"
"+HTTP-OK+"
"+HTTP-PARTIAL-CONTENT+"
"+HTTP-PAYMENT-REQUIRED+"
"+HTTP-PRECONDITION-FAILED+"
"+HTTP-PROXY-AUTHENTICATION-REQUIRED+"
"+HTTP-REQUEST-ENTITY-TOO-LARGE+"
"+HTTP-REQUEST-TIME-OUT+"
"+HTTP-REQUEST-URI-TOO-LARGE+"
"+HTTP-REQUESTED-RANGE-NOT-SATISFIABLE+"
"+HTTP-RESET-CONTENT+"
"+HTTP-SEE-OTHER+"
"+HTTP-SERVICE-UNAVAILABLE+"
"+HTTP-SWITCHING-PROTOCOLS+"
"+HTTP-TEMPORARY-REDIRECT+"
"+HTTP-UNSUPPORTED-MEDIA-TYPE+"
"+HTTP-USE-PROXY+"
"+HTTP-VERSION-NOT-SUPPORTED+"
"ABORT-REQUEST-HANDLER"
"ACCEPTOR"
"ACCEPTOR-ACCESS-LOGGER"
"ACCEPTOR-ADDRESS"
"ACCEPT-CONNECTIONS"
"ACCEPTOR-REQUEST-DISPATCHER"
"ACCEPTOR-INPUT-CHUNKING-P"
"ACCEPTOR-MESSAGE-LOGGER"
"ACCEPTOR-NAME"
"ACCEPTOR-OUTPUT-CHUNKING-P"
"ACCEPTOR-PERSISTENT-CONNECTIONS-P"
"ACCEPTOR-PORT"
"ACCEPTOR-READ-TIMEOUT"
"ACCEPTOR-REPLY-CLASS"
"ACCEPTOR-REQUEST-CLASS"
"ACCEPTOR-SSL-P"
#-:hunchentoot-no-ssl "ACCEPTOR-SSL-CERTIFICATE-FILE"
#-:hunchentoot-no-ssl "ACCEPTOR-SSL-PRIVATEKEY-FILE"
#-:hunchentoot-no-ssl "ACCEPTOR-SSL-PRIVATEKEY-PASSWORD"
"ACCEPTOR-WRITE-TIMEOUT"
"AUTHORIZATION"
"AUX-REQUEST-VALUE"
"CONTENT-LENGTH"
"CONTENT-LENGTH*"
"CONTENT-TYPE"
"CONTENT-TYPE*"
"COOKIE-DOMAIN"
"COOKIE-EXPIRES"
"COOKIE-HTTP-ONLY"
"COOKIE-IN"
"COOKIE-NAME"
"COOKIE-OUT"
"COOKIE-PATH"
"COOKIE-SECURE"
"COOKIE-VALUE"
"COOKIES-IN"
"COOKIES-IN*"
"COOKIES-OUT"
"COOKIES-OUT*"
"CREATE-FOLDER-DISPATCHER-AND-HANDLER"
"CREATE-PREFIX-DISPATCHER"
"CREATE-REGEX-DISPATCHER"
"CREATE-STATIC-FILE-DISPATCHER-AND-HANDLER"
"DEFAULT-DISPATCHER"
"DEFINE-EASY-HANDLER"
"DELETE-AUX-REQUEST-VALUE"
"DELETE-SESSION-VALUE"
"DISPATCH-EASY-HANDLERS"
"ESCAPE-FOR-HTML"
"EXECUTE-ACCEPTOR"
"GET-PARAMETER"
"GET-PARAMETERS"
"GET-PARAMETERS*"
"HANDLE-INCOMING-CONNECTION"
"HANDLE-IF-MODIFIED-SINCE"
"HANDLE-STATIC-FILE"
"HEADER-IN"
"HEADER-IN*"
"HEADER-OUT"
"HEADERS-IN"
"HEADERS-IN*"
"HEADERS-OUT"
"HEADERS-OUT*"
"HOST"
"HTTP-TOKEN-P"
"HUNCHENTOOT-CONDITION"
"HUNCHENTOOT-ERROR"
"HUNCHENTOOT-WARNING"
"INITIALIZE-CONNECTION-STREAM"
"LOG-MESSAGE"
"MIME-TYPE"
"NEXT-SESSION-ID"
"NO-CACHE"
"ONE-THREAD-PER-CONNECTION-TASKMASTER"
"PARAMETER"
"PARAMETER-ERROR"
"POST-PARAMETER"
"POST-PARAMETERS"
"POST-PARAMETERS*"
"PROCESS-CONNECTION"
"PROCESS-REQUEST"
"QUERY-STRING"
"QUERY-STRING*"
"RAW-POST-DATA"
"REAL-REMOTE-ADDR"
"REASON-PHRASE"
"RECOMPUTE-REQUEST-PARAMETERS"
"REDIRECT"
"REFERER"
"REMOTE-ADDR"
"REMOTE-ADDR*"
"REMOTE-PORT"
"REMOTE-PORT*"
"REMOVE-SESSION"
"REPLY"
"REPLY-EXTERNAL-FORMAT"
"REPLY-EXTERNAL-FORMAT*"
"REQUEST"
"REQUEST-ACCEPTOR"
"REQUEST-METHOD"
"REQUEST-METHOD*"
"REQUEST-URI"
"REQUEST-URI*"
"REQUIRE-AUTHORIZATION"
"RESET-CONNECTION-STREAM"
"RESET-SESSIONS"
"RESET-SESSION-SECRET"
"RETURN-CODE"
"RETURN-CODE*"
"RFC-1123-DATE"
"SCRIPT-NAME"
"SCRIPT-NAME*"
"SEND-HEADERS"
"SERVER-PROTOCOL"
"SERVER-PROTOCOL*"
"SESSION"
"SESSION-COOKIE-NAME"
"SESSION-COOKIE-VALUE"
"SESSION-CREATED"
"SESSION-DB"
"SESSION-DB-LOCK"
"SESSION-GC"
"SESSION-MAX-TIME"
"SESSION-REMOTE-ADDR"
"SESSION-TOO-OLD-P"
"SESSION-USER-AGENT"
"SESSION-VALUE"
"SESSION-VERIFY"
"SET-COOKIE"
"SET-COOKIE*"
"SHUTDOWN"
"SINGLE-THREADED-TASKMASTER"
#-:hunchentoot-no-ssl "SSL-ACCEPTOR"
"SSL-P"
"START"
"START-LISTENING"
"START-SESSION"
"STOP"
"TASKMASTER"
"TASKMASTER-ACCEPTOR"
"URL-DECODE"
"URL-ENCODE"
"USER-AGENT"))
|
d8e3b8ca656fbebd610392519d4ce50aa025476accd9545bce89508964150dc7 | dmitryvk/sbcl-win32-threads | low.lisp | ;;;; This file contains portable versions of low-level functions and macros
;;;; which are ripe for implementation specific customization. None of the code
;;;; in this file *has* to be customized for a particular Common Lisp
;;;; implementation. Moreover, in some implementations it may not make any
;;;; sense to customize some of this code.
;;;;
;;;; The original version was intended to support portable customization to
;;;; lotso different Lisp implementations. This functionality is gone in the
;;;; current version, and it now runs only under SBCL. (Now that ANSI Common
;;;; Lisp has mixed CLOS into the insides of the system (e.g. error handling
;;;; and printing) so deeply that it's not very meaningful to bootstrap Common
;;;; Lisp without CLOS, the old functionality is of dubious use. -- WHN
;;;; 19981108)
;;;; This software is part of the SBCL system. See the README file for more
;;;; information.
;;;; This software is derived from software originally released by Xerox
;;;; Corporation. Copyright and release statements follow. Later modifications
;;;; to the software are in the public domain and are provided with
;;;; absolutely no warranty. See the COPYING and CREDITS files for more
;;;; information.
;;;; copyright information from original PCL sources:
;;;;
;;;; Copyright (c) 1985, 1986, 1987, 1988, 1989, 1990 Xerox Corporation.
;;;; All rights reserved.
;;;;
;;;; Use and copying of this software and preparation of derivative works based
;;;; upon this software are permitted. Any distribution of this software or
;;;; derivative works must comply with all applicable United States export
;;;; control laws.
;;;;
;;;; This software is made available AS IS, and Xerox Corporation makes no
;;;; warranty about the software, its performance or its conformity to any
;;;; specification.
(in-package "SB-PCL")
(eval-when (:compile-toplevel :load-toplevel :execute)
(defvar *optimize-speed*
'(optimize (speed 3) (safety 0)))
) ; EVAL-WHEN
(defmacro dotimes-fixnum ((var count &optional (result nil)) &body body)
`(dotimes (,var (the fixnum ,count) ,result)
(declare (fixnum ,var))
,@body))
(declaim (inline random-fixnum))
(defun random-fixnum ()
(random (1+ most-positive-fixnum)))
(defconstant n-fixnum-bits #.(integer-length most-positive-fixnum))
;;; Lambda which executes its body (or not) randomly. Used to drop
;;; random cache entries.
(defmacro randomly-punting-lambda (lambda-list &body body)
(with-unique-names (drops drop-pos)
`(let ((,drops (random-fixnum))
(,drop-pos n-fixnum-bits))
(declare (fixnum ,drops)
(type (integer 0 #.n-fixnum-bits) ,drop-pos))
(lambda ,lambda-list
(when (logbitp (the unsigned-byte (decf ,drop-pos)) ,drops)
(locally ,@body))
(when (zerop ,drop-pos)
(setf ,drops (random-fixnum)
,drop-pos n-fixnum-bits))))))
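;;; Hypothetical usage sketch (added illustration, not part of the original
;;; source): the closure below would drop a cache entry on roughly half of its
;;; calls, consuming one bit of a cached random fixnum per call and refreshing
;;; the fixnum when the bits run out.
;;;
;;;   (defvar *maybe-drop-entry*
;;;     (randomly-punting-lambda (key cache)
;;;       (remhash key cache)))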
;;;; early definition of WRAPPER
;;;;
;;;; Most WRAPPER stuff is defined later, but the DEFSTRUCT itself
;;;; is here early so that things like (TYPEP .. 'WRAPPER) can be
;;;; compiled efficiently.
;;; Note that for SBCL, as for CMU CL, the WRAPPER of a built-in or
;;; structure class will be some other kind of SB-KERNEL:LAYOUT, but
;;; this shouldn't matter, since the only two slots that WRAPPER adds
;;; are meaningless in those cases.
(defstruct (wrapper
(:include layout
;; KLUDGE: In CMU CL, the initialization default
;; for LAYOUT-INVALID was NIL. In SBCL, that has
;; changed to :UNINITIALIZED, but PCL code might
;; still expect NIL for the initialization
;; default of WRAPPER-INVALID. Instead of trying
;; to find out, I just overrode the LAYOUT
;; default here. -- WHN 19991204
(invalid nil)
;; This allows quick testing of wrapperness.
(for-std-class-p t))
(:constructor make-wrapper-internal)
(:copier nil))
(instance-slots-layout nil :type list)
(class-slots nil :type list))
#-sb-fluid (declaim (sb-ext:freeze-type wrapper))
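;;; Illustrative note (added, not original): WRAPPER is PCL's view of
;;; SB-KERNEL:LAYOUT, so for a standard instance X the test
;;;   (typep (sb-kernel:layout-of x) 'wrapper)
;;; is true, while built-in and structure classes keep plain LAYOUTs;
;;; PCL-INSTANCE-P further below relies on exactly this test.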
;;;; PCL's view of funcallable instances
(!defstruct-with-alternate-metaclass standard-funcallable-instance
;; KLUDGE: Note that neither of these slots is ever accessed by its
;; accessor name as of sbcl-0.pre7.63. Presumably everything works
;; by puns based on absolute locations. Fun fun fun.. -- WHN 2001-10-30
:slot-names (clos-slots name hash-code)
:boa-constructor %make-standard-funcallable-instance
:superclass-name function
:metaclass-name standard-classoid
:metaclass-constructor make-standard-classoid
:dd-type funcallable-structure
;; Only internal implementation code will access these, and these
;; accesses (slot readers in particular) could easily be a
;; bottleneck, so it seems reasonable to suppress runtime type
;; checks.
;;
;; (Except note above that these accessors aren't used at all
;; (!) as of sbcl-0.pre7.63, so for now it's academic.)
:runtime-type-checks-p nil)
(import 'sb-kernel:funcallable-instance-p)
(defun set-funcallable-instance-function (fin new-value)
(declare (type function new-value))
(aver (funcallable-instance-p fin))
(setf (funcallable-instance-fun fin) new-value))
;;; FIXME: these macros should just go away. It's not clear whether
;;; the inline functions defined by
;;; !DEFSTRUCT-WITH-ALTERNATE-METACLASS are as efficient as they could
;;; be; ordinary defstruct accessors are defined as source transforms.
(defun fsc-instance-p (fin)
(funcallable-instance-p fin))
(define-compiler-macro fsc-instance-p (fin)
`(funcallable-instance-p ,fin))
(defmacro fsc-instance-wrapper (fin)
`(%funcallable-instance-layout ,fin))
(defmacro fsc-instance-slots (fin)
`(%funcallable-instance-info ,fin 1))
(defmacro fsc-instance-hash (fin)
`(%funcallable-instance-info ,fin 3))
(declaim (inline clos-slots-ref (setf clos-slots-ref)))
(declaim (ftype (function (simple-vector index) t) clos-slots-ref))
(defun clos-slots-ref (slots index)
(svref slots index))
(declaim (ftype (function (t simple-vector index) t) (setf clos-slots-ref)))
(defun (setf clos-slots-ref) (new-value slots index)
(setf (svref slots index) new-value))
;;; Note on implementation under CMU CL >= 17 and SBCL: STD-INSTANCE-P
;;; is only used to discriminate between functions (including FINs)
;;; and normal instances, so we can return true on structures also. A
;;; few uses of (OR STD-INSTANCE-P FSC-INSTANCE-P) are changed to
;;; PCL-INSTANCE-P.
(defun std-instance-p (x)
(%instancep x))
(define-compiler-macro std-instance-p (x)
`(%instancep ,x))
;; a temporary definition used for debugging the bootstrap
#+sb-show
(defun print-std-instance (instance stream depth)
(declare (ignore depth))
(print-unreadable-object (instance stream :type t :identity t)
(let ((class (class-of instance)))
(when (or (eq class (find-class 'standard-class nil))
(eq class (find-class 'funcallable-standard-class nil))
(eq class (find-class 'built-in-class nil)))
(princ (early-class-name instance) stream)))))
;;; This is the value that we stick into a slot to tell us that it is
;;; unbound. It may seem gross, but for performance reasons, we make
;;; this an interned symbol. That means that the fast check to see
;;; whether a slot is unbound is to say (EQ <val> '..SLOT-UNBOUND..).
;;; That is considerably faster than looking at the value of a special
;;; variable. Be careful, there are places in the code which actually
;;; use ..SLOT-UNBOUND.. rather than this variable. So much for
;;; modularity..
;;;
;;; FIXME: Now that we're tightly integrated into SBCL, we could use
;;; the SBCL built-in unbound value token instead. Perhaps if we did
;;; so it would be a good idea to define collections of CLOS slots as
;;; a new type of heap object, instead of using bare SIMPLE-VECTOR, in
;;; order to avoid problems (in the debugger if nowhere else) with
;;; SIMPLE-VECTORs some of whose elements are unbound tokens.
(defconstant +slot-unbound+ '..slot-unbound..)
(defmacro %allocate-static-slot-storage--class (no-of-slots)
`(make-array ,no-of-slots :initial-element +slot-unbound+))
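;;; Sketch of the intended unbound check (an illustration added here, not
;;; original code): a slot read conceptually compiles to
;;;   (let ((val (clos-slots-ref slots index)))
;;;     (if (eq val +slot-unbound+)
;;;         (slot-unbound class instance slot-name)
;;;         val))
;;; i.e. a single EQ test against the interned marker symbol.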
(defmacro std-instance-class (instance)
`(wrapper-class* (std-instance-wrapper ,instance)))
;;; When given a funcallable instance, SET-FUN-NAME *must* side-effect
;;; that FIN to give it the name. When given any other kind of
;;; function SET-FUN-NAME is allowed to return a new function which is
;;; "the same" except that it has the name.
;;;
;;; In all cases, SET-FUN-NAME must return the new (or same)
;;; function. (Unlike other functions to set stuff, it does not return
;;; the new value.)
(defun set-fun-name (fun new-name)
#+sb-doc
"Set the name of a compiled function object. Return the function."
(when (valid-function-name-p fun)
(setq fun (fdefinition fun)))
(typecase fun
(%method-function (setf (%method-function-name fun) new-name))
#+sb-eval
(sb-eval:interpreted-function
(setf (sb-eval:interpreted-function-name fun) new-name))
(funcallable-instance ;; KLUDGE: probably a generic function...
(cond ((if (eq **boot-state** 'complete)
(typep fun 'generic-function)
(eq (class-of fun) *the-class-standard-generic-function*))
(setf (%funcallable-instance-info fun 2) new-name))
(t
(bug "unanticipated function type")))))
;; Fixup name-to-function mappings in cases where the function
;; hasn't been defined by DEFUN. (FIXME: is this right? This logic
;; comes from CMUCL). -- CSR, 2004-12-31
(when (and (consp new-name)
(member (car new-name) '(slow-method fast-method slot-accessor)))
(setf (fdefinition new-name) fun))
fun)
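;;; For illustration only (not in the original source): renaming an anonymous
;;; function as a method function might look like
;;;   (set-fun-name (lambda (&rest args) ...)
;;;                 '(fast-method print-object (my-class t)))
;;; which, per the fixup above, also installs the function under that name via
;;; FDEFINITION.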
;;; FIXME: probably no longer needed after init
(defmacro precompile-random-code-segments (&optional system)
`(progn
(eval-when (:compile-toplevel)
(update-dispatch-dfuns))
(precompile-function-generators ,system)
(precompile-dfun-constructors ,system)
(precompile-ctors)))
;;; This definition is for interpreted code.
(defun pcl-instance-p (x)
(typep (layout-of x) 'wrapper))
;;; CMU CL comment:
;;; We define this as STANDARD-INSTANCE, since we're going to
;;; clobber the layout with some standard-instance layout as soon as
;;; we make it, and we want the accessor to still be type-correct.
#|
(defstruct (standard-instance
(:predicate nil)
(:constructor %%allocate-instance--class ())
(:copier nil)
(:alternate-metaclass instance
cl:standard-class
make-standard-class))
(slots nil))
|#
(!defstruct-with-alternate-metaclass standard-instance
:slot-names (slots hash-code)
:boa-constructor %make-standard-instance
:superclass-name t
:metaclass-name standard-classoid
:metaclass-constructor make-standard-classoid
:dd-type structure
:runtime-type-checks-p nil)
;;; Both of these operations "work" on structures, which allows the above
;;; weakening of STD-INSTANCE-P.
(defmacro std-instance-slots (x) `(%instance-ref ,x 1))
(defmacro std-instance-wrapper (x) `(%instance-layout ,x))
;;; KLUDGE: This one doesn't "work" on structures. However, we
;;; ensure, in SXHASH and friends, never to call it on structures.
(defmacro std-instance-hash (x) `(%instance-ref ,x 2))
;;; FIXME: These functions are called every place we do a
;;; CALL-NEXT-METHOD, and probably other places too. It's likely worth
;;; selectively optimizing them with DEFTRANSFORMs and stuff, rather
;;; than just indiscriminately expanding them inline everywhere.
(declaim (inline get-slots get-slots-or-nil))
(declaim (ftype (function (t) simple-vector) get-slots))
(declaim (ftype (function (t) (or simple-vector null)) get-slots-or-nil))
(defun get-slots (instance)
(if (std-instance-p instance)
(std-instance-slots instance)
(fsc-instance-slots instance)))
(defun get-slots-or-nil (instance)
;; Suppress a code-deletion note. FIXME: doing the FIXME above,
;; integrating PCL more with the compiler, would remove the need for
;; this icky stuff.
(declare (optimize (inhibit-warnings 3)))
(when (pcl-instance-p instance)
(get-slots instance)))
(defmacro get-wrapper (inst)
(once-only ((wrapper `(wrapper-of ,inst)))
`(progn
(aver (typep ,wrapper 'wrapper))
,wrapper)))
;;; FIXME: could be an inline function or ordinary function (like many
;;; other things around here)
(defmacro get-instance-wrapper-or-nil (inst)
(once-only ((wrapper `(wrapper-of ,inst)))
`(if (typep ,wrapper 'wrapper)
,wrapper
nil)))
;;;; support for useful hashing of PCL instances
(defvar *instance-hash-code-random-state* (make-random-state))
(defun get-instance-hash-code ()
;; ANSI SXHASH wants us to make a good-faith effort to produce
;; hash-codes that are well distributed within the range of
;; non-negative fixnums, and this operation does that, unlike
;; the sbcl<=0.8.16 implementation of this operation as
;; (INCF COUNTER).
;;
;; Hopefully there was no virtue to the old counter implementation
;; that I am insufficiently insightful to insee. -- WHN 2004-10-28
(random most-positive-fixnum
*instance-hash-code-random-state*))
(defun sb-impl::sxhash-instance (x)
(cond
((std-instance-p x) (std-instance-hash x))
((fsc-instance-p x) (fsc-instance-hash x))
(t (bug "SXHASH-INSTANCE called on some weird thing: ~S" x))))
;;;; structure-instance stuff
;;;;
;;;; FIXME: Now that the code is SBCL-only, this extra layer of
;;;; abstraction around our native structure representation doesn't
;;;; seem to add anything useful, and could probably go away.
;;; The definition of STRUCTURE-TYPE-P was moved to early-low.lisp.
(defun structure-type-slot-description-list (type)
(let* ((dd (find-defstruct-description type))
(include (dd-include dd))
(all-slots (dd-slots dd)))
(multiple-value-bind (super slot-overrides)
(if (consp include)
(values (car include) (mapcar #'car (cdr include)))
(values include nil))
(let ((included-slots
(when super
(dd-slots (find-defstruct-description super)))))
(loop for slot = (pop all-slots)
for included-slot = (pop included-slots)
while slot
when (or (not included-slot)
(member (dsd-name included-slot) slot-overrides :test #'eq))
collect slot)))))
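;;; Hypothetical example (added for illustration): given (defstruct point x y),
;;; (structure-type-slot-description-list 'point) returns the two slot
;;; descriptions for X and Y, and the helpers below map each one to its name,
;;; accessor symbol, reader/writer functions, declared type, and default.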
(defun structure-slotd-name (slotd)
(dsd-name slotd))
(defun structure-slotd-accessor-symbol (slotd)
(dsd-accessor-name slotd))
(defun structure-slotd-reader-function (slotd)
(fdefinition (dsd-accessor-name slotd)))
(defun structure-slotd-writer-function (type slotd)
(if (dsd-read-only slotd)
(let ((dd (find-defstruct-description type)))
(coerce (slot-setter-lambda-form dd slotd) 'function))
(fdefinition `(setf ,(dsd-accessor-name slotd)))))
(defun structure-slotd-type (slotd)
(dsd-type slotd))
(defun structure-slotd-init-form (slotd)
(dsd-default slotd))
;;; method function stuff.
;;;
;;; PCL historically included a so-called method-fast-function, which
;;; is essentially a method function but with (a) a precomputed
;;; continuation for CALL-NEXT-METHOD and (b) a permutation vector for
;;; slot access.  [FIXME: see if we can understand these two
;;; optimizations before commit. ] However, the presence of the
;;; fast-function meant that we violated AMOP and the effect of the
;;; :FUNCTION initarg, and furthermore got to potentially confusing
;;; situations where the function and the fast-function got out of
;;; sync, so that calling (method-function method) with the defined
;;; protocol would do different things from (call-method method) in
;;; method combination.
;;;
;;; So we define this internal method function structure, which we use
;;; when we create a method function ourselves. This means that we
;;; can hang the various bits of information that we want off the
;;; method function itself, and also that if a user overrides method
;;; function creation there is no danger of having the system get
;;; confused.
(!defstruct-with-alternate-metaclass %method-function
:slot-names (fast-function name)
:boa-constructor %make-method-function
:superclass-name function
:metaclass-name static-classoid
:metaclass-constructor make-static-classoid
:dd-type funcallable-structure)
| null | https://raw.githubusercontent.com/dmitryvk/sbcl-win32-threads/5abfd64b00a0937ba2df2919f177697d1d91bde4/src/pcl/low.lisp | lisp | This file contains portable versions of low-level functions and macros
which are ripe for implementation specific customization. None of the code
implementation. Moreover, in some implementations it may not make any
sense to customize some of this code.
The original version was intended to support portable customization to
lotso different Lisp implementations. This functionality is gone in the
and printing) so deeply that it's not very meaningful to bootstrap Common
information.
Corporation. Copyright and release statements follow. Later modifications
to the software are in the public domain and are provided with
absolutely no warranty. See the COPYING and CREDITS files for more
information.
All rights reserved.
Use and copying of this software and preparation of derivative works based
upon this software are permitted. Any distribution of this software or
control laws.
warranty about the software, its performance or its conformity to any
specification.
Lambda which executes its body (or not) randomly. Used to drop
random cache entries.
compiled efficiently.
are meaningless in those cases.
still expect NIL for the initialization
This allows quick testing of wrapperness.
accessor name as of sbcl-0.pre7.63. Presumably everything works
Only internal implementation code will access these, and these
accesses (slot readers in particular) could easily be a
bottleneck, so it seems reasonable to suppress runtime type
checks.
(!) as of sbcl-0.pre7.63, so for now it's academic.)
FIXME: these macros should just go away. It's not clear whether
the inline functions defined by
be; ordinary defstruct accessors are defined as source transforms.
is only used to discriminate between functions (including FINs)
and normal instances, so we can return true on structures also. A
few uses of (OR STD-INSTANCE-P FSC-INSTANCE-P) are changed to
a temporary definition used for debugging the bootstrap
This is the value that we stick into a slot to tell us that it is
unbound. It may seem gross, but for performance reasons, we make
this an interned symbol. That means that the fast check to see
That is considerably faster than looking at the value of a special
variable. Be careful, there are places in the code which actually
modularity..
a new type of heap object, instead of using bare SIMPLE-VECTOR, in
order to avoid problems (in the debugger if nowhere else) with
SIMPLE-VECTORs some of whose elements are unbound tokens.
When given a funcallable instance, SET-FUN-NAME *must* side-effect
that FIN to give it the name. When given any other kind of
function SET-FUN-NAME is allowed to return a new function which is
"the same" except that it has the name.
In all cases, SET-FUN-NAME must return the new (or same)
function. (Unlike other functions to set stuff, it does not return
the new value.)
FIXME: probably no longer needed after init
This definition is for interpreted code.
We define this as STANDARD-INSTANCE, since we're going to
clobber the layout with some standard-instance layout as soon as
we make it, and we want the accessor to still be type-correct.
(defstruct (standard-instance
(:predicate nil)
(:constructor %%allocate-instance--class ())
(:copier nil)
(:alternate-metaclass instance
cl:standard-class
make-standard-class))
(slots nil))
Both of these operations "work" on structures, which allows the above
weakening of STD-INSTANCE-P.
FIXME: These functions are called every place we do a
CALL-NEXT-METHOD, and probably other places too. It's likely worth
selectively optimizing them with DEFTRANSFORMs and stuff, rather
than just indiscriminately expanding them inline everywhere.
this icky stuff.
FIXME: could be an inline function or ordinary function (like many
other things around here)
ANSI SXHASH wants us to make a good-faith effort to produce
hash-codes that are well distributed within the range of
the sbcl<=0.8.16 implementation of this operation as
Hopefully there was no virtue to the old counter implementation
structure-instance stuff
abstraction around our native structure representation doesn't
seem to add anything useful, and could probably go away.
The definition of STRUCTURE-TYPE-P was moved to early-low.lisp.
method function stuff.
is essentially a method function but with (a) a precomputed
continuation for CALL-NEXT-METHOD and (b) a permutation vector for
optimizations before commit. ] However, the presence of the
fast-function meant that we violated AMOP and the effect of the
:FUNCTION initarg, and furthermore got to potentially confusing
situations where the function and the fast-function got out of
sync, so that calling (method-function method) with the defined
protocol would do different things from (call-method method) in
method combination.
So we define this internal method function structure, which we use
when we create a method function ourselves. This means that we
can hang the various bits of information that we want off the
method function itself, and also that if a user overrides method
function creation there is no danger of having the system get
confused. | in this file * has * to be customized for a particular Common Lisp
current version , and it now runs only under SBCL . ( Now that ANSI Common
Lisp has mixed CLOS into the insides of the system ( e.g. error handling
Lisp without CLOS , the old functionality is of dubious use . -- WHN
19981108 )
This software is part of the SBCL system . See the README file for more
This software is derived from software originally released by Xerox
copyright information from original PCL sources :
Copyright ( c ) 1985 , 1986 , 1987 , 1988 , 1989 , 1990 Xerox Corporation .
derivative works must comply with all applicable United States export
This software is made available AS IS , and Xerox Corporation makes no
(in-package "SB-PCL")
(eval-when (:compile-toplevel :load-toplevel :execute)
(defvar *optimize-speed*
'(optimize (speed 3) (safety 0)))
EVAL - WHEN
(defmacro dotimes-fixnum ((var count &optional (result nil)) &body body)
`(dotimes (,var (the fixnum ,count) ,result)
(declare (fixnum ,var))
,@body))
(declaim (inline random-fixnum))
(defun random-fixnum ()
(random (1+ most-positive-fixnum)))
(defconstant n-fixnum-bits #.(integer-length most-positive-fixnum))
(defmacro randomly-punting-lambda (lambda-list &body body)
(with-unique-names (drops drop-pos)
`(let ((,drops (random-fixnum))
(,drop-pos n-fixnum-bits))
(declare (fixnum ,drops)
(type (integer 0 #.n-fixnum-bits) ,drop-pos))
(lambda ,lambda-list
(when (logbitp (the unsigned-byte (decf ,drop-pos)) ,drops)
(locally ,@body))
(when (zerop ,drop-pos)
(setf ,drops (random-fixnum)
,drop-pos n-fixnum-bits))))))
early definition of WRAPPER
Most WRAPPER stuff is defined later , but the DEFSTRUCT itself
is here early so that things like ( TYPEP .. ' WRAPPER ) can be
Note that for SBCL , as for CMU CL , the WRAPPER of a built - in or
structure class will be some other kind of SB - KERNEL : LAYOUT , but
this should n't matter , since the only two slots that WRAPPER adds
(defstruct (wrapper
(:include layout
: In CMU CL , the initialization default
for LAYOUT - INVALID was NIL . In , that has
changed to : UNINITIALIZED , but PCL code might
default of WRAPPER - INVALID . Instead of trying
to find out , I just overrode the LAYOUT
default here . -- WHN 19991204
(invalid nil)
(for-std-class-p t))
(:constructor make-wrapper-internal)
(:copier nil))
(instance-slots-layout nil :type list)
(class-slots nil :type list))
#-sb-fluid (declaim (sb-ext:freeze-type wrapper))
PCL 's view of funcallable instances
(!defstruct-with-alternate-metaclass standard-funcallable-instance
: Note that neither of these slots is ever accessed by its
by puns based on absolute locations . Fun fun fun .. -- WHN 2001 - 10 - 30
:slot-names (clos-slots name hash-code)
:boa-constructor %make-standard-funcallable-instance
:superclass-name function
:metaclass-name standard-classoid
:metaclass-constructor make-standard-classoid
:dd-type funcallable-structure
( Except note above that these accessors are n't used at all
:runtime-type-checks-p nil)
(import 'sb-kernel:funcallable-instance-p)
(defun set-funcallable-instance-function (fin new-value)
(declare (type function new-value))
(aver (funcallable-instance-p fin))
(setf (funcallable-instance-fun fin) new-value))
! - WITH - ALTERNATE - METACLASS are as efficient as they could
(defun fsc-instance-p (fin)
(funcallable-instance-p fin))
(define-compiler-macro fsc-instance-p (fin)
`(funcallable-instance-p ,fin))
(defmacro fsc-instance-wrapper (fin)
`(%funcallable-instance-layout ,fin))
(defmacro fsc-instance-slots (fin)
`(%funcallable-instance-info ,fin 1))
(defmacro fsc-instance-hash (fin)
`(%funcallable-instance-info ,fin 3))
(declaim (inline clos-slots-ref (setf clos-slots-ref)))
(declaim (ftype (function (simple-vector index) t) clos-slots-ref))
(defun clos-slots-ref (slots index)
(svref slots index))
(declaim (ftype (function (t simple-vector index) t) (setf clos-slots-ref)))
(defun (setf clos-slots-ref) (new-value slots index)
(setf (svref slots index) new-value))
Note on implementation under CMU CL > = 17 and SBCL : STD - INSTANCE - P
PCL - INSTANCE - P.
(defun std-instance-p (x)
(%instancep x))
(define-compiler-macro std-instance-p (x)
`(%instancep ,x))
#+sb-show
(defun print-std-instance (instance stream depth)
(declare (ignore depth))
(print-unreadable-object (instance stream :type t :identity t)
(let ((class (class-of instance)))
(when (or (eq class (find-class 'standard-class nil))
(eq class (find-class 'funcallable-standard-class nil))
(eq class (find-class 'built-in-class nil)))
(princ (early-class-name instance) stream)))))
whether a slot is unbound is to say ( EQ < val > ' .. SLOT - UNBOUND .. ) .
use .. SLOT - UNBOUND .. rather than this variable . So much for
FIXME : Now that we 're tightly integrated into SBCL , we could use
the SBCL built - in unbound value token instead . Perhaps if we did
so it would be a good idea to define collections of CLOS slots as
(defconstant +slot-unbound+ '..slot-unbound..)
(defmacro %allocate-static-slot-storage--class (no-of-slots)
`(make-array ,no-of-slots :initial-element +slot-unbound+))
(defmacro std-instance-class (instance)
`(wrapper-class* (std-instance-wrapper ,instance)))
(defun set-fun-name (fun new-name)
#+sb-doc
"Set the name of a compiled function object. Return the function."
(when (valid-function-name-p fun)
(setq fun (fdefinition fun)))
(typecase fun
(%method-function (setf (%method-function-name fun) new-name))
#+sb-eval
(sb-eval:interpreted-function
(setf (sb-eval:interpreted-function-name fun) new-name))
: probably a generic function ...
(cond ((if (eq **boot-state** 'complete)
(typep fun 'generic-function)
(eq (class-of fun) *the-class-standard-generic-function*))
(setf (%funcallable-instance-info fun 2) new-name))
(t
(bug "unanticipated function type")))))
Fixup name - to - function mappings in cases where the function
has n't been defined by DEFUN . ( FIXME : is this right ? This logic
comes from CMUCL ) . -- CSR , 2004 - 12 - 31
(when (and (consp new-name)
(member (car new-name) '(slow-method fast-method slot-accessor)))
(setf (fdefinition new-name) fun))
fun)
(defmacro precompile-random-code-segments (&optional system)
`(progn
(eval-when (:compile-toplevel)
(update-dispatch-dfuns))
(precompile-function-generators ,system)
(precompile-dfun-constructors ,system)
(precompile-ctors)))
(defun pcl-instance-p (x)
(typep (layout-of x) 'wrapper))
CMU CL comment :
(!defstruct-with-alternate-metaclass standard-instance
:slot-names (slots hash-code)
:boa-constructor %make-standard-instance
:superclass-name t
:metaclass-name standard-classoid
:metaclass-constructor make-standard-classoid
:dd-type structure
:runtime-type-checks-p nil)
(defmacro std-instance-slots (x) `(%instance-ref ,x 1))
(defmacro std-instance-wrapper (x) `(%instance-layout ,x))
: This one does n't " work " on structures . However , we
ensure , in SXHASH and friends , never to call it on structures .
(defmacro std-instance-hash (x) `(%instance-ref ,x 2))
(declaim (inline get-slots get-slots-or-nil))
(declaim (ftype (function (t) simple-vector) get-slots))
(declaim (ftype (function (t) (or simple-vector null)) get-slots-or-nil))
(defun get-slots (instance)
(if (std-instance-p instance)
(std-instance-slots instance)
(fsc-instance-slots instance)))
(defun get-slots-or-nil (instance)
Suppress a code - deletion note . FIXME : doing the FIXME above ,
integrating PCL more with the compiler , would remove the need for
(declare (optimize (inhibit-warnings 3)))
(when (pcl-instance-p instance)
(get-slots instance)))
(defmacro get-wrapper (inst)
(once-only ((wrapper `(wrapper-of ,inst)))
`(progn
(aver (typep ,wrapper 'wrapper))
,wrapper)))
(defmacro get-instance-wrapper-or-nil (inst)
(once-only ((wrapper `(wrapper-of ,inst)))
`(if (typep ,wrapper 'wrapper)
,wrapper
nil)))
support for useful hashing of PCL instances
(defvar *instance-hash-code-random-state* (make-random-state))
(defun get-instance-hash-code ()
non - negative fixnums , and this operation does that , unlike
( INCF COUNTER ) .
that I am insufficiently insightful to insee . -- WHN 2004 - 10 - 28
(random most-positive-fixnum
*instance-hash-code-random-state*))
(defun sb-impl::sxhash-instance (x)
(cond
((std-instance-p x) (std-instance-hash x))
((fsc-instance-p x) (fsc-instance-hash x))
(t (bug "SXHASH-INSTANCE called on some weird thing: ~S" x))))
FIXME : Now that the code is SBCL - only , this extra layer of
(defun structure-type-slot-description-list (type)
(let* ((dd (find-defstruct-description type))
(include (dd-include dd))
(all-slots (dd-slots dd)))
(multiple-value-bind (super slot-overrides)
(if (consp include)
(values (car include) (mapcar #'car (cdr include)))
(values include nil))
(let ((included-slots
(when super
(dd-slots (find-defstruct-description super)))))
(loop for slot = (pop all-slots)
for included-slot = (pop included-slots)
while slot
when (or (not included-slot)
(member (dsd-name included-slot) slot-overrides :test #'eq))
collect slot)))))
(defun structure-slotd-name (slotd)
(dsd-name slotd))
(defun structure-slotd-accessor-symbol (slotd)
(dsd-accessor-name slotd))
(defun structure-slotd-reader-function (slotd)
(fdefinition (dsd-accessor-name slotd)))
(defun structure-slotd-writer-function (type slotd)
(if (dsd-read-only slotd)
(let ((dd (find-defstruct-description type)))
(coerce (slot-setter-lambda-form dd slotd) 'function))
(fdefinition `(setf ,(dsd-accessor-name slotd)))))
(defun structure-slotd-type (slotd)
(dsd-type slotd))
(defun structure-slotd-init-form (slotd)
(dsd-default slotd))
PCL historically included a so - called method - fast - function , which
slot access . [ FIXME : see if we can understand these two
(!defstruct-with-alternate-metaclass %method-function
:slot-names (fast-function name)
:boa-constructor %make-method-function
:superclass-name function
:metaclass-name static-classoid
:metaclass-constructor make-static-classoid
:dd-type funcallable-structure)
|
62d23f877b1d9dfb41796d1a7ec18db3968395ff8a2b8ede973da32183e6dddf | rowangithub/DOrder | nested.ml | let rec loopb i j n =
if j < n then
(assert (0<= j && j <=n && 0 <= i && i <= n);
loopb i (j+1) n)
else ()
let rec loopa i n =
if i < n then
(loopb i 0 n;
loopa (i+1) n)
else ()
let main n =
if n <= 0 then ()
else
loopa 0 n
let _ = main 1
let _ = main (-1) | null | https://raw.githubusercontent.com/rowangithub/DOrder/e0d5efeb8853d2a51cc4796d7db0f8be3185d7df/tests/folprograms/mcmc/nested.ml | ocaml | let rec loopb i j n =
if j < n then
(assert (0<= j && j <=n && 0 <= i && i <= n);
loopb i (j+1) n)
else ()
let rec loopa i n =
if i < n then
(loopb i 0 n;
loopa (i+1) n)
else ()
let main n =
if n <= 0 then ()
else
loopa 0 n
let _ = main 1
let _ = main (-1) |
|
318eb65f2ccb390b95b23c9b62fe3439dc71efc3e01fc9f367225bb1dede43a2 | montyly/gueb | gueb_type.ml | type addr = (int*int)
type call_id = int
type call_name = string
type call_string = ((addr) * call_name * call_id)
type call_stack = call_string list
type basic_block = (int * (int list))
type edge = (int * int)
let compare_call_string ((addr,it),name,id) ((addr2,it2),name2,id2) =
match Pervasives.compare addr addr2 with
| 0 -> begin match Pervasives.compare it it2 with
| 0 ->
begin
match Pervasives.compare name name2
with 0 -> Pervasives.compare id id2
| l -> l
end
| l -> l
end
| l -> l
let rec compare_call_stack s1 s2 =
match s1,s2 with
| [] , [] -> 0
| [] , _ -> 1
| _ , [] -> (-1)
| hd::tl, hd2::tl2 ->
match compare_call_string hd hd2 with
| 0 -> compare_call_stack tl tl2
| l -> l
let pp_call_string ((addr,it),f,_) = Printf.sprintf "0x%x:%d:%s" addr it f
let pp_call_stack cs =
String.concat " " (List.map (fun x -> pp_call_string x) cs)
| null | https://raw.githubusercontent.com/montyly/gueb/45f496a5a1e8e908e562928762ece304c2408c3a/src/gueb_type.ml | ocaml | type addr = (int*int)
type call_id = int
type call_name = string
type call_string = ((addr) * call_name * call_id)
type call_stack = call_string list
type basic_block = (int * (int list))
type edge = (int * int)
let compare_call_string ((addr,it),name,id) ((addr2,it2),name2,id2) =
match Pervasives.compare addr addr2 with
| 0 -> begin match Pervasives.compare it it2 with
| 0 ->
begin
match Pervasives.compare name name2
with 0 -> Pervasives.compare id id2
| l -> l
end
| l -> l
end
| l -> l
let rec compare_call_stack s1 s2 =
match s1,s2 with
| [] , [] -> 0
| [] , _ -> 1
| _ , [] -> (-1)
| hd::tl, hd2::tl2 ->
match compare_call_string hd hd2 with
| 0 -> compare_call_stack tl tl2
| l -> l
let pp_call_string ((addr,it),f,_) = Printf.sprintf "0x%x:%d:%s" addr it f
let pp_call_stack cs =
String.concat " " (List.map (fun x -> pp_call_string x) cs)
|
|
6e6c8100aada4e90004db1f35dbba9a2a44b5fd406917a521d17fb5faba800a9 | haskell/cabal | Compiler.hs | # LANGUAGE DeriveGeneric #
{-# LANGUAGE DeriveDataTypeable #-}
-----------------------------------------------------------------------------
-- |
-- Module : Distribution.Simple.Compiler
Copyright : 2003 - 2004
-- License : BSD3
--
-- Maintainer :
-- Portability : portable
--
-- This should be a much more sophisticated abstraction than it is. Currently
-- it's just a bit of data about the compiler, like its flavour and name and
-- version. The reason it's just data is because currently it has to be in
-- 'Read' and 'Show' so it can be saved along with the 'LocalBuildInfo'. The
-- only interesting bit of info it contains is a mapping between language
-- extensions and compiler command line flags. This module also defines a
-- 'PackageDB' type which is used to refer to package databases. Most compilers
-- only know about a single global package collection but GHC has a global and
-- per-user one and it lets you create arbitrary other package databases. We do
-- not yet fully support this latter feature.
module Distribution.Simple.Compiler (
    -- * Haskell implementations
module Distribution.Compiler,
Compiler(..),
showCompilerId, showCompilerIdWithAbi,
compilerFlavor, compilerVersion,
compilerCompatFlavor,
compilerCompatVersion,
compilerInfo,
-- * Support for package databases
PackageDB(..),
PackageDBStack,
registrationPackageDB,
absolutePackageDBPaths,
absolutePackageDBPath,
-- * Support for optimisation levels
OptimisationLevel(..),
flagToOptimisationLevel,
-- * Support for debug info levels
DebugInfoLevel(..),
flagToDebugInfoLevel,
-- * Support for language extensions
CompilerFlag,
languageToFlags,
unsupportedLanguages,
extensionsToFlags,
unsupportedExtensions,
parmakeSupported,
reexportedModulesSupported,
renamingPackageFlagsSupported,
unifiedIPIDRequired,
packageKeySupported,
unitIdSupported,
coverageSupported,
profilingSupported,
backpackSupported,
arResponseFilesSupported,
arDashLSupported,
libraryDynDirSupported,
libraryVisibilitySupported,
-- * Support for profiling detail levels
ProfDetailLevel(..),
knownProfDetailLevels,
flagToProfDetailLevel,
showProfDetailLevel,
) where
import Prelude ()
import Distribution.Compat.Prelude
import Distribution.Pretty
import Distribution.Compiler
import Distribution.Version
import Language.Haskell.Extension
import Distribution.Simple.Utils
import qualified Data.Map as Map (lookup)
import System.Directory (canonicalizePath)
data Compiler = Compiler {
compilerId :: CompilerId,
-- ^ Compiler flavour and version.
compilerAbiTag :: AbiTag,
        -- ^ Tag for distinguishing incompatible ABI's on the same
-- architecture/os.
compilerCompat :: [CompilerId],
-- ^ Other implementations that this compiler claims to be
-- compatible with.
compilerLanguages :: [(Language, CompilerFlag)],
-- ^ Supported language standards.
compilerExtensions :: [(Extension, Maybe CompilerFlag)],
-- ^ Supported extensions.
compilerProperties :: Map String String
-- ^ A key-value map for properties not covered by the above fields.
}
deriving (Eq, Generic, Typeable, Show, Read)
instance Binary Compiler
instance Structured Compiler
showCompilerId :: Compiler -> String
showCompilerId = prettyShow . compilerId
showCompilerIdWithAbi :: Compiler -> String
showCompilerIdWithAbi comp =
prettyShow (compilerId comp) ++
case compilerAbiTag comp of
NoAbiTag -> []
AbiTag xs -> '-':xs
compilerFlavor :: Compiler -> CompilerFlavor
compilerFlavor = (\(CompilerId f _) -> f) . compilerId
compilerVersion :: Compiler -> Version
compilerVersion = (\(CompilerId _ v) -> v) . compilerId
-- | Is this compiler compatible with the compiler flavour we're interested in?
--
-- For example this checks if the compiler is actually GHC or is another
-- compiler that claims to be compatible with some version of GHC, e.g. GHCJS.
--
-- > if compilerCompatFlavor GHC compiler then ... else ...
--
compilerCompatFlavor :: CompilerFlavor -> Compiler -> Bool
compilerCompatFlavor flavor comp =
flavor == compilerFlavor comp
|| flavor `elem` [ flavor' | CompilerId flavor' _ <- compilerCompat comp ]
-- | Is this compiler compatible with the compiler flavour we're interested in,
-- and if so what version does it claim to be compatible with.
--
-- For example this checks if the compiler is actually GHC-7.x or is another
-- compiler that claims to be compatible with some GHC-7.x version.
--
-- > case compilerCompatVersion GHC compiler of
-- >   Just (Version (7:_)) -> ...
-- > _ -> ...
--
compilerCompatVersion :: CompilerFlavor -> Compiler -> Maybe Version
compilerCompatVersion flavor comp
| compilerFlavor comp == flavor = Just (compilerVersion comp)
| otherwise =
listToMaybe [ v | CompilerId fl v <- compilerCompat comp, fl == flavor ]
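-- NOTE: editorial sketch, not part of the original module. A minimal,
-- hypothetical helper showing how the compatibility query above is typically
-- used; the name 'supportsGhc8' and the version cut-off are assumptions for
-- illustration only.
supportsGhc8 :: Compiler -> Bool
supportsGhc8 comp =
  case compilerCompatVersion GHC comp of
    Just v  -> v >= mkVersion [8,0]
    Nothing -> False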
compilerInfo :: Compiler -> CompilerInfo
compilerInfo c = CompilerInfo (compilerId c)
(compilerAbiTag c)
(Just . compilerCompat $ c)
(Just . map fst . compilerLanguages $ c)
(Just . map fst . compilerExtensions $ c)
-- ------------------------------------------------------------
-- * Package databases
-- ------------------------------------------------------------
-- |Some compilers have a notion of a database of available packages.
-- For some there is just one global db of packages, other compilers
-- support a per-user or an arbitrary db specified at some location in
-- the file system. This can be used to build isolated environments of
-- packages, for example to build a collection of related packages
-- without installing them globally.
--
data PackageDB = GlobalPackageDB
| UserPackageDB
| SpecificPackageDB FilePath
deriving (Eq, Generic, Ord, Show, Read, Typeable)
instance Binary PackageDB
instance Structured PackageDB
-- | We typically get packages from several databases, and stack them
-- together. This type lets us be explicit about that stacking. For example
-- typical stacks include:
--
-- > [GlobalPackageDB]
-- > [GlobalPackageDB, UserPackageDB]
-- > [GlobalPackageDB, SpecificPackageDB "package.conf.inplace"]
--
-- Note that the 'GlobalPackageDB' is invariably at the bottom since it
-- contains the rts, base and other special compiler-specific packages.
--
-- We are not restricted to using just the above combinations. In particular
-- we can use several custom package dbs and the user package db together.
--
-- When it comes to writing, the top most (last) package is used.
--
type PackageDBStack = [PackageDB]
-- | Return the package that we should register into. This is the package db at
-- the top of the stack.
--
registrationPackageDB :: PackageDBStack -> PackageDB
registrationPackageDB dbs = case safeLast dbs of
Nothing -> error "internal error: empty package db set"
Just p -> p
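-- NOTE: editorial sketch, not part of the original module. An assumed example
-- of a typical package-db stack; 'registrationPackageDB' picks the top-most
-- (last) entry, here the SpecificPackageDB. The path is purely illustrative.
examplePackageDBStack :: PackageDBStack
examplePackageDBStack =
  [ GlobalPackageDB
  , UserPackageDB
  , SpecificPackageDB "dist/package.conf.inplace"
  ]
-- registrationPackageDB examplePackageDBStack == SpecificPackageDB "dist/package.conf.inplace"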
-- | Make package paths absolute
absolutePackageDBPaths :: PackageDBStack -> IO PackageDBStack
absolutePackageDBPaths = traverse absolutePackageDBPath
absolutePackageDBPath :: PackageDB -> IO PackageDB
absolutePackageDBPath GlobalPackageDB = return GlobalPackageDB
absolutePackageDBPath UserPackageDB = return UserPackageDB
absolutePackageDBPath (SpecificPackageDB db) =
SpecificPackageDB `liftM` canonicalizePath db
-- ------------------------------------------------------------
-- * Optimisation levels
-- ------------------------------------------------------------
-- | Some compilers support optimising. Some have different levels.
-- For compilers that do not the level is just capped to the level
-- they do support.
--
data OptimisationLevel = NoOptimisation
| NormalOptimisation
| MaximumOptimisation
deriving (Bounded, Enum, Eq, Generic, Read, Show, Typeable)
instance Binary OptimisationLevel
instance Structured OptimisationLevel
flagToOptimisationLevel :: Maybe String -> OptimisationLevel
flagToOptimisationLevel Nothing = NormalOptimisation
flagToOptimisationLevel (Just s) = case reads s of
[(i, "")]
| i >= fromEnum (minBound :: OptimisationLevel)
&& i <= fromEnum (maxBound :: OptimisationLevel)
-> toEnum i
| otherwise -> error $ "Bad optimisation level: " ++ show i
++ ". Valid values are 0..2"
_ -> error $ "Can't parse optimisation level " ++ s
-- ------------------------------------------------------------
-- * Debug info levels
-- ------------------------------------------------------------
-- | Some compilers support emitting debug info. Some have different
-- levels. For compilers that do not the level is just capped to the
-- level they do support.
--
data DebugInfoLevel = NoDebugInfo
| MinimalDebugInfo
| NormalDebugInfo
| MaximalDebugInfo
deriving (Bounded, Enum, Eq, Generic, Read, Show, Typeable)
instance Binary DebugInfoLevel
instance Structured DebugInfoLevel
flagToDebugInfoLevel :: Maybe String -> DebugInfoLevel
flagToDebugInfoLevel Nothing = NormalDebugInfo
flagToDebugInfoLevel (Just s) = case reads s of
[(i, "")]
| i >= fromEnum (minBound :: DebugInfoLevel)
&& i <= fromEnum (maxBound :: DebugInfoLevel)
-> toEnum i
| otherwise -> error $ "Bad debug info level: " ++ show i
++ ". Valid values are 0..3"
_ -> error $ "Can't parse debug info level " ++ s
-- ------------------------------------------------------------
-- * Languages and Extensions
-- ------------------------------------------------------------
unsupportedLanguages :: Compiler -> [Language] -> [Language]
unsupportedLanguages comp langs =
[ lang | lang <- langs
, isNothing (languageToFlag comp lang) ]
languageToFlags :: Compiler -> Maybe Language -> [CompilerFlag]
languageToFlags comp = filter (not . null)
. catMaybes . map (languageToFlag comp)
. maybe [Haskell98] (\x->[x])
languageToFlag :: Compiler -> Language -> Maybe CompilerFlag
languageToFlag comp ext = lookup ext (compilerLanguages comp)
-- |For the given compiler, return the extensions it does not support.
unsupportedExtensions :: Compiler -> [Extension] -> [Extension]
unsupportedExtensions comp exts =
[ ext | ext <- exts
, isNothing (extensionToFlag' comp ext) ]
type CompilerFlag = String
-- |For the given compiler, return the flags for the supported extensions.
extensionsToFlags :: Compiler -> [Extension] -> [CompilerFlag]
extensionsToFlags comp = nub . filter (not . null)
. catMaybes . map (extensionToFlag comp)
-- | Looks up the flag for a given extension, for a given compiler.
-- Ignores the subtlety of extensions which lack associated flags.
extensionToFlag :: Compiler -> Extension -> Maybe CompilerFlag
extensionToFlag comp ext = join (extensionToFlag' comp ext)
-- | Looks up the flag for a given extension, for a given compiler.
-- However, the extension may be valid for the compiler but not have a flag.
-- For example, NondecreasingIndentation is enabled by default on GHC 7.0.4,
-- hence it is considered a supported extension but not an accepted flag.
--
-- The outer layer of Maybe indicates whether the extensions is supported, while
-- the inner layer indicates whether it has a flag.
-- When building strings, it is often more convenient to use 'extensionToFlag',
-- which ignores the difference.
extensionToFlag' :: Compiler -> Extension -> Maybe (Maybe CompilerFlag)
extensionToFlag' comp ext = lookup ext (compilerExtensions comp)
-- | Does this compiler support parallel --make mode?
parmakeSupported :: Compiler -> Bool
parmakeSupported = ghcSupported "Support parallel --make"
-- | Does this compiler support reexported-modules?
reexportedModulesSupported :: Compiler -> Bool
reexportedModulesSupported = ghcSupported "Support reexported-modules"
-- | Does this compiler support thinning/renaming on package flags?
renamingPackageFlagsSupported :: Compiler -> Bool
renamingPackageFlagsSupported = ghcSupported
"Support thinning and renaming package flags"
-- | Does this compiler have unified IPIDs (so no package keys)
unifiedIPIDRequired :: Compiler -> Bool
unifiedIPIDRequired = ghcSupported "Requires unified installed package IDs"
-- | Does this compiler support package keys?
packageKeySupported :: Compiler -> Bool
packageKeySupported = ghcSupported "Uses package keys"
-- | Does this compiler support unit IDs?
unitIdSupported :: Compiler -> Bool
unitIdSupported = ghcSupported "Uses unit IDs"
-- | Does this compiler support Backpack?
backpackSupported :: Compiler -> Bool
backpackSupported = ghcSupported "Support Backpack"
-- | Does this compiler support a package database entry with:
-- "dynamic-library-dirs"?
libraryDynDirSupported :: Compiler -> Bool
libraryDynDirSupported comp = case compilerFlavor comp of
GHC ->
-- Not just v >= mkVersion [8,0,1,20161022], as there
      -- are many GHC 8.1 nightlies which don't support this.
((v >= mkVersion [8,0,1,20161022] && v < mkVersion [8,1]) ||
v >= mkVersion [8,1,20161021])
_ -> False
where
v = compilerVersion comp
-- | Does this compiler's "ar" command supports response file
-- arguments (i.e. @file-style arguments).
arResponseFilesSupported :: Compiler -> Bool
arResponseFilesSupported = ghcSupported "ar supports at file"
-- | Does this compiler's "ar" command support -L flag,
-- which compels the archiver to add an input archive's members
-- rather than adding the archive itself.
arDashLSupported :: Compiler -> Bool
arDashLSupported = ghcSupported "ar supports -L"
-- | Does this compiler support Haskell program coverage?
coverageSupported :: Compiler -> Bool
coverageSupported comp =
case compilerFlavor comp of
GHC -> True
GHCJS -> True
_ -> False
-- | Does this compiler support profiling?
profilingSupported :: Compiler -> Bool
profilingSupported comp =
case compilerFlavor comp of
GHC -> True
GHCJS -> True
_ -> False
-- | Does this compiler support a package database entry with:
-- "visibility"?
libraryVisibilitySupported :: Compiler -> Bool
libraryVisibilitySupported comp = case compilerFlavor comp of
GHC -> v >= mkVersion [8,8]
_ -> False
where
v = compilerVersion comp
-- | Utility function for GHC only features
ghcSupported :: String -> Compiler -> Bool
ghcSupported key comp =
case compilerFlavor comp of
GHC -> checkProp
GHCJS -> checkProp
_ -> False
where checkProp =
case Map.lookup key (compilerProperties comp) of
Just "YES" -> True
_ -> False
-- ------------------------------------------------------------
-- * Profiling detail level
-- ------------------------------------------------------------
-- | Some compilers (notably GHC) support profiling and can instrument
-- programs so the system can account costs to different functions. There are
-- different levels of detail that can be used for this accounting.
-- For compilers that do not support this notion or the particular detail
-- levels, this is either ignored or just capped to some similar level
-- they do support.
--
data ProfDetailLevel = ProfDetailNone
| ProfDetailDefault
| ProfDetailExportedFunctions
| ProfDetailToplevelFunctions
| ProfDetailAllFunctions
| ProfDetailTopLate
| ProfDetailOther String
deriving (Eq, Generic, Read, Show, Typeable)
instance Binary ProfDetailLevel
instance Structured ProfDetailLevel
flagToProfDetailLevel :: String -> ProfDetailLevel
flagToProfDetailLevel "" = ProfDetailDefault
flagToProfDetailLevel s =
case lookup (lowercase s)
[ (name, value)
| (primary, aliases, value) <- knownProfDetailLevels
, name <- primary : aliases ]
of Just value -> value
Nothing -> ProfDetailOther s
knownProfDetailLevels :: [(String, [String], ProfDetailLevel)]
knownProfDetailLevels =
[ ("default", [], ProfDetailDefault)
, ("none", [], ProfDetailNone)
, ("exported-functions", ["exported"], ProfDetailExportedFunctions)
, ("toplevel-functions", ["toplevel", "top"], ProfDetailToplevelFunctions)
, ("all-functions", ["all"], ProfDetailAllFunctions)
, ("late-toplevel", ["late"], ProfDetailTopLate)
]
showProfDetailLevel :: ProfDetailLevel -> String
showProfDetailLevel dl = case dl of
ProfDetailNone -> "none"
ProfDetailDefault -> "default"
ProfDetailExportedFunctions -> "exported-functions"
ProfDetailToplevelFunctions -> "toplevel-functions"
ProfDetailAllFunctions -> "all-functions"
ProfDetailTopLate -> "late-toplevel"
ProfDetailOther other -> other
| null | https://raw.githubusercontent.com/haskell/cabal/ed314bc2e8f7c96929ff362047b4b22a764e48cd/Cabal/src/Distribution/Simple/Compiler.hs | haskell | # LANGUAGE DeriveDataTypeable #
---------------------------------------------------------------------------
|
Module : Distribution.Simple.Compiler
License : BSD3
Maintainer :
Portability : portable
This should be a much more sophisticated abstraction than it is. Currently
it's just a bit of data about the compiler, like its flavour and name and
version. The reason it's just data is because currently it has to be in
'Read' and 'Show' so it can be saved along with the 'LocalBuildInfo'. The
only interesting bit of info it contains is a mapping between language
extensions and compiler command line flags. This module also defines a
per-user one and it lets you create arbitrary other package databases. We do
not yet fully support this latter feature.
* Support for package databases
* Support for optimisation levels
* Support for debug info levels
* Support for language extensions
* Support for profiling detail levels
^ Compiler flavour and version.
architecture/os.
^ Other implementations that this compiler claims to be
compatible with.
^ Supported language standards.
^ Supported extensions.
^ A key-value map for properties not covered by the above fields.
| Is this compiler compatible with the compiler flavour we're interested in?
| Is this compiler compatible with the compiler flavour we're interested in,
and if so what version does it claim to be compatible with.
> _ -> ...
------------------------------------------------------------
* Package databases
------------------------------------------------------------
|Some compilers have a notion of a database of available packages.
For some there is just one global db of packages, other compilers
support a per-user or an arbitrary db specified at some location in
the file system. This can be used to build isolated environments of
packages, for example to build a collection of related packages
without installing them globally.
| We typically get packages from several databases, and stack them
together. This type lets us be explicit about that stacking. For example
typical stacks include:
> [GlobalPackageDB]
Note that the 'GlobalPackageDB' is invariably at the bottom since it
contains the rts, base and other special compiler-specific packages.
We are not restricted to using just the above combinations. In particular
we can use several custom package dbs and the user package db together.
When it comes to writing, the top most (last) package is used.
| Return the package that we should register into. This is the package db at
the top of the stack.
| Make package paths absolute
------------------------------------------------------------
* Optimisation levels
------------------------------------------------------------
| Some compilers support optimising. Some have different levels.
For compilers that do not the level is just capped to the level
they do support.
------------------------------------------------------------
* Debug info levels
------------------------------------------------------------
| Some compilers support emitting debug info. Some have different
levels. For compilers that do not the level is just capped to the
level they do support.
------------------------------------------------------------
* Languages and Extensions
------------------------------------------------------------
|For the given compiler, return the extensions it does not support.
|For the given compiler, return the flags for the supported extensions.
| Looks up the flag for a given extension, for a given compiler.
Ignores the subtlety of extensions which lack associated flags.
| Looks up the flag for a given extension, for a given compiler.
However, the extension may be valid for the compiler but not have a flag.
hence it is considered a supported extension but not an accepted flag.
The outer layer of Maybe indicates whether the extensions is supported, while
the inner layer indicates whether it has a flag.
which ignores the difference.
| Does this compiler support parallel --make mode?
| Does this compiler support reexported-modules?
| Does this compiler support thinning/renaming on package flags?
| Does this compiler support package keys?
| Does this compiler support unit IDs?
| Does this compiler support a package database entry with:
"dynamic-library-dirs"?
Not just v >= mkVersion [8,0,1,20161022], as there
| Does this compiler's "ar" command supports response file
arguments (i.e. @file-style arguments).
which compels the archiver to add an input archive's members
rather than adding the archive itself.
| Does this compiler support profiling?
| Does this compiler support a package database entry with:
"visibility"?
------------------------------------------------------------
* Profiling detail level
------------------------------------------------------------
programs so the system can account costs to different functions. There are
different levels of detail that can be used for this accounting.
For compilers that do not support this notion or the particular detail
levels, this is either ignored or just capped to some similar level
they do support.
| # LANGUAGE DeriveGeneric #
Copyright : 2003 - 2004
' PackageDB ' type which is used to refer to package databases . Most compilers
only know about a single global package collection but GHC has a global and
module Distribution.Simple.Compiler (
* implementations
module Distribution.Compiler,
Compiler(..),
showCompilerId, showCompilerIdWithAbi,
compilerFlavor, compilerVersion,
compilerCompatFlavor,
compilerCompatVersion,
compilerInfo,
PackageDB(..),
PackageDBStack,
registrationPackageDB,
absolutePackageDBPaths,
absolutePackageDBPath,
OptimisationLevel(..),
flagToOptimisationLevel,
DebugInfoLevel(..),
flagToDebugInfoLevel,
CompilerFlag,
languageToFlags,
unsupportedLanguages,
extensionsToFlags,
unsupportedExtensions,
parmakeSupported,
reexportedModulesSupported,
renamingPackageFlagsSupported,
unifiedIPIDRequired,
packageKeySupported,
unitIdSupported,
coverageSupported,
profilingSupported,
backpackSupported,
arResponseFilesSupported,
arDashLSupported,
libraryDynDirSupported,
libraryVisibilitySupported,
ProfDetailLevel(..),
knownProfDetailLevels,
flagToProfDetailLevel,
showProfDetailLevel,
) where
import Prelude ()
import Distribution.Compat.Prelude
import Distribution.Pretty
import Distribution.Compiler
import Distribution.Version
import Language.Haskell.Extension
import Distribution.Simple.Utils
import qualified Data.Map as Map (lookup)
import System.Directory (canonicalizePath)
data Compiler = Compiler {
compilerId :: CompilerId,
compilerAbiTag :: AbiTag,
^ Tag for distinguishing incompatible ABI 's on the same
compilerCompat :: [CompilerId],
compilerLanguages :: [(Language, CompilerFlag)],
compilerExtensions :: [(Extension, Maybe CompilerFlag)],
compilerProperties :: Map String String
}
deriving (Eq, Generic, Typeable, Show, Read)
instance Binary Compiler
instance Structured Compiler
showCompilerId :: Compiler -> String
showCompilerId = prettyShow . compilerId
showCompilerIdWithAbi :: Compiler -> String
showCompilerIdWithAbi comp =
prettyShow (compilerId comp) ++
case compilerAbiTag comp of
NoAbiTag -> []
AbiTag xs -> '-':xs
compilerFlavor :: Compiler -> CompilerFlavor
compilerFlavor = (\(CompilerId f _) -> f) . compilerId
compilerVersion :: Compiler -> Version
compilerVersion = (\(CompilerId _ v) -> v) . compilerId
For example this checks if the compiler is actually GHC or is another
compiler that claims to be compatible with some version of GHC , e.g. GHCJS .
> if compilerCompatFlavor GHC compiler then ... else ...
compilerCompatFlavor :: CompilerFlavor -> Compiler -> Bool
compilerCompatFlavor flavor comp =
flavor == compilerFlavor comp
|| flavor `elem` [ flavor' | CompilerId flavor' _ <- compilerCompat comp ]
For example this checks if the compiler is actually GHC-7.x or is another
compiler that claims to be compatible with some GHC-7.x version .
> case compilerCompatVersion GHC compiler of
> Just ( Version ( 7 : _ ) ) - > ...
compilerCompatVersion :: CompilerFlavor -> Compiler -> Maybe Version
compilerCompatVersion flavor comp
| compilerFlavor comp == flavor = Just (compilerVersion comp)
| otherwise =
listToMaybe [ v | CompilerId fl v <- compilerCompat comp, fl == flavor ]
compilerInfo :: Compiler -> CompilerInfo
compilerInfo c = CompilerInfo (compilerId c)
(compilerAbiTag c)
(Just . compilerCompat $ c)
(Just . map fst . compilerLanguages $ c)
(Just . map fst . compilerExtensions $ c)
data PackageDB = GlobalPackageDB
| UserPackageDB
| SpecificPackageDB FilePath
deriving (Eq, Generic, Ord, Show, Read, Typeable)
instance Binary PackageDB
instance Structured PackageDB
> [ GlobalPackageDB , UserPackageDB ]
> [ GlobalPackageDB , SpecificPackageDB " package.conf.inplace " ]
type PackageDBStack = [PackageDB]
registrationPackageDB :: PackageDBStack -> PackageDB
registrationPackageDB dbs = case safeLast dbs of
Nothing -> error "internal error: empty package db set"
Just p -> p
absolutePackageDBPaths :: PackageDBStack -> IO PackageDBStack
absolutePackageDBPaths = traverse absolutePackageDBPath
absolutePackageDBPath :: PackageDB -> IO PackageDB
absolutePackageDBPath GlobalPackageDB = return GlobalPackageDB
absolutePackageDBPath UserPackageDB = return UserPackageDB
absolutePackageDBPath (SpecificPackageDB db) =
SpecificPackageDB `liftM` canonicalizePath db
data OptimisationLevel = NoOptimisation
| NormalOptimisation
| MaximumOptimisation
deriving (Bounded, Enum, Eq, Generic, Read, Show, Typeable)
instance Binary OptimisationLevel
instance Structured OptimisationLevel
flagToOptimisationLevel :: Maybe String -> OptimisationLevel
flagToOptimisationLevel Nothing = NormalOptimisation
flagToOptimisationLevel (Just s) = case reads s of
[(i, "")]
| i >= fromEnum (minBound :: OptimisationLevel)
&& i <= fromEnum (maxBound :: OptimisationLevel)
-> toEnum i
| otherwise -> error $ "Bad optimisation level: " ++ show i
++ ". Valid values are 0..2"
_ -> error $ "Can't parse optimisation level " ++ s
data DebugInfoLevel = NoDebugInfo
| MinimalDebugInfo
| NormalDebugInfo
| MaximalDebugInfo
deriving (Bounded, Enum, Eq, Generic, Read, Show, Typeable)
instance Binary DebugInfoLevel
instance Structured DebugInfoLevel
flagToDebugInfoLevel :: Maybe String -> DebugInfoLevel
flagToDebugInfoLevel Nothing = NormalDebugInfo
flagToDebugInfoLevel (Just s) = case reads s of
[(i, "")]
| i >= fromEnum (minBound :: DebugInfoLevel)
&& i <= fromEnum (maxBound :: DebugInfoLevel)
-> toEnum i
| otherwise -> error $ "Bad debug info level: " ++ show i
++ ". Valid values are 0..3"
_ -> error $ "Can't parse debug info level " ++ s
unsupportedLanguages :: Compiler -> [Language] -> [Language]
unsupportedLanguages comp langs =
[ lang | lang <- langs
, isNothing (languageToFlag comp lang) ]
languageToFlags :: Compiler -> Maybe Language -> [CompilerFlag]
languageToFlags comp = filter (not . null)
. catMaybes . map (languageToFlag comp)
. maybe [Haskell98] (\x->[x])
languageToFlag :: Compiler -> Language -> Maybe CompilerFlag
languageToFlag comp ext = lookup ext (compilerLanguages comp)
unsupportedExtensions :: Compiler -> [Extension] -> [Extension]
unsupportedExtensions comp exts =
[ ext | ext <- exts
, isNothing (extensionToFlag' comp ext) ]
type CompilerFlag = String
extensionsToFlags :: Compiler -> [Extension] -> [CompilerFlag]
extensionsToFlags comp = nub . filter (not . null)
. catMaybes . map (extensionToFlag comp)
extensionToFlag :: Compiler -> Extension -> Maybe CompilerFlag
extensionToFlag comp ext = join (extensionToFlag' comp ext)
For example , NondecreasingIndentation is enabled by default on GHC 7.0.4 ,
When building strings , it is often more convenient to use ' extensionToFlag ' ,
extensionToFlag' :: Compiler -> Extension -> Maybe (Maybe CompilerFlag)
extensionToFlag' comp ext = lookup ext (compilerExtensions comp)
parmakeSupported :: Compiler -> Bool
parmakeSupported = ghcSupported "Support parallel --make"
reexportedModulesSupported :: Compiler -> Bool
reexportedModulesSupported = ghcSupported "Support reexported-modules"
renamingPackageFlagsSupported :: Compiler -> Bool
renamingPackageFlagsSupported = ghcSupported
"Support thinning and renaming package flags"
| Does this compiler have unified ( so no package keys )
unifiedIPIDRequired :: Compiler -> Bool
unifiedIPIDRequired = ghcSupported "Requires unified installed package IDs"
packageKeySupported :: Compiler -> Bool
packageKeySupported = ghcSupported "Uses package keys"
unitIdSupported :: Compiler -> Bool
unitIdSupported = ghcSupported "Uses unit IDs"
| Does this compiler support Backpack ?
backpackSupported :: Compiler -> Bool
backpackSupported = ghcSupported "Support Backpack"
libraryDynDirSupported :: Compiler -> Bool
libraryDynDirSupported comp = case compilerFlavor comp of
GHC ->
are many GHC 8.1 nightlies which do n't support this .
((v >= mkVersion [8,0,1,20161022] && v < mkVersion [8,1]) ||
v >= mkVersion [8,1,20161021])
_ -> False
where
v = compilerVersion comp
arResponseFilesSupported :: Compiler -> Bool
arResponseFilesSupported = ghcSupported "ar supports at file"
| Does this compiler 's " ar " command support -L flag ,
arDashLSupported :: Compiler -> Bool
arDashLSupported = ghcSupported "ar supports -L"
| Does this compiler support Haskell program coverage ?
coverageSupported :: Compiler -> Bool
coverageSupported comp =
case compilerFlavor comp of
GHC -> True
GHCJS -> True
_ -> False
profilingSupported :: Compiler -> Bool
profilingSupported comp =
case compilerFlavor comp of
GHC -> True
GHCJS -> True
_ -> False
libraryVisibilitySupported :: Compiler -> Bool
libraryVisibilitySupported comp = case compilerFlavor comp of
GHC -> v >= mkVersion [8,8]
_ -> False
where
v = compilerVersion comp
| Utility function for GHC only features
ghcSupported :: String -> Compiler -> Bool
ghcSupported key comp =
case compilerFlavor comp of
GHC -> checkProp
GHCJS -> checkProp
_ -> False
where checkProp =
case Map.lookup key (compilerProperties comp) of
Just "YES" -> True
_ -> False
| Some compilers ( notably GHC ) support profiling and can instrument
data ProfDetailLevel = ProfDetailNone
| ProfDetailDefault
| ProfDetailExportedFunctions
| ProfDetailToplevelFunctions
| ProfDetailAllFunctions
| ProfDetailTopLate
| ProfDetailOther String
deriving (Eq, Generic, Read, Show, Typeable)
instance Binary ProfDetailLevel
instance Structured ProfDetailLevel
flagToProfDetailLevel :: String -> ProfDetailLevel
flagToProfDetailLevel "" = ProfDetailDefault
flagToProfDetailLevel s =
case lookup (lowercase s)
[ (name, value)
| (primary, aliases, value) <- knownProfDetailLevels
, name <- primary : aliases ]
of Just value -> value
Nothing -> ProfDetailOther s
knownProfDetailLevels :: [(String, [String], ProfDetailLevel)]
knownProfDetailLevels =
[ ("default", [], ProfDetailDefault)
, ("none", [], ProfDetailNone)
, ("exported-functions", ["exported"], ProfDetailExportedFunctions)
, ("toplevel-functions", ["toplevel", "top"], ProfDetailToplevelFunctions)
, ("all-functions", ["all"], ProfDetailAllFunctions)
, ("late-toplevel", ["late"], ProfDetailTopLate)
]
showProfDetailLevel :: ProfDetailLevel -> String
showProfDetailLevel dl = case dl of
ProfDetailNone -> "none"
ProfDetailDefault -> "default"
ProfDetailExportedFunctions -> "exported-functions"
ProfDetailToplevelFunctions -> "toplevel-functions"
ProfDetailAllFunctions -> "all-functions"
ProfDetailTopLate -> "late-toplevel"
ProfDetailOther other -> other
|
0c62e4ccc99c51d8cd7b57459f5362a100b5fcf3bd0a53c438203ad59ebcb169 | eeng/shevek | topn.clj | (ns shevek.engine.druid-native.solver.topn
(:require [shevek.driver.druid :refer [send-query]]
[shevek.engine.druid-native.solver.common :refer [dimension-spec dimension-order add-common-fields]]
[shevek.engine.utils :refer [time-zone defaultLimit]]
[shevek.domain.dimension :refer [sort-by-same?]]))
(defn- generate-metric-field [{:keys [sort-by] :as dim} measures]
(let [descending (or (nil? (:descending sort-by)) (:descending sort-by))
field (if (sort-by-same? dim)
{:type "dimension" :ordering (dimension-order sort-by)}
{:type "numeric" :metric (or (:name sort-by) (-> measures first :name))})]
(if (or (and (sort-by-same? dim) (not descending))
(and (not (sort-by-same? dim)) descending))
field
{:type "inverted" :metric field})))
(defn to-druid-query [{:keys [cube dimension measures filters] :as q}]
(-> {:queryType "topN"
:dataSource cube
:dimension (dimension-spec dimension q)
:metric (generate-metric-field dimension measures)
:threshold (dimension :limit (or defaultLimit))
:granularity "all"}
(add-common-fields q)))
(defn from-druid-results [results]
(-> results first :result))
(defn topn-query [driver q]
(->> (to-druid-query q)
(send-query driver)
(from-druid-results)))
| null | https://raw.githubusercontent.com/eeng/shevek/7783b8037303b8dd5f320f35edee3bfbb2b41c02/src/clj/shevek/engine/druid_native/solver/topn.clj | clojure | (ns shevek.engine.druid-native.solver.topn
(:require [shevek.driver.druid :refer [send-query]]
[shevek.engine.druid-native.solver.common :refer [dimension-spec dimension-order add-common-fields]]
[shevek.engine.utils :refer [time-zone defaultLimit]]
[shevek.domain.dimension :refer [sort-by-same?]]))
(defn- generate-metric-field [{:keys [sort-by] :as dim} measures]
(let [descending (or (nil? (:descending sort-by)) (:descending sort-by))
field (if (sort-by-same? dim)
{:type "dimension" :ordering (dimension-order sort-by)}
{:type "numeric" :metric (or (:name sort-by) (-> measures first :name))})]
(if (or (and (sort-by-same? dim) (not descending))
(and (not (sort-by-same? dim)) descending))
field
{:type "inverted" :metric field})))
(defn to-druid-query [{:keys [cube dimension measures filters] :as q}]
(-> {:queryType "topN"
:dataSource cube
:dimension (dimension-spec dimension q)
:metric (generate-metric-field dimension measures)
:threshold (dimension :limit (or defaultLimit))
:granularity "all"}
(add-common-fields q)))
(defn from-druid-results [results]
(-> results first :result))
(defn topn-query [driver q]
(->> (to-druid-query q)
(send-query driver)
(from-druid-results)))
|
|
9bcbff1d5d7e406e501fcdab7f63184b666891265b93ea4439f63f85fe3fd761 | rill-event-sourcing/rill | generic_test_base.clj | (ns rill.event-store.generic-test-base
(:require [rill.event-store :as store]
[rill.event-stream :as stream]
[rill.temp-store :refer [comparable-message messages=]]
[rill.event-channel :refer [event-channel]]
[clojure.core.async :as async]
[rill.uuid :refer [new-id]]
[clojure.test :refer [is testing]]
[rill.message :as message :refer [defevent]]))
(defevent TestEvent :v s/Int)
(def events (map test-event (range 7)))
(def other-events (map test-event (range 3)))
(defn basic-examples [store]
(is (= (store/retrieve-events store "foo") stream/empty-stream)
"retrieving a non-existing stream returns the empty stream")
(is (store/append-events store "my-stream" stream/empty-stream-version (take 3 events))
"can push onto a non-existing stream using the empty stream")
(is (not (store/append-events store "my-stream" stream/empty-stream-version (drop 3 events)))
"needs the current stream to add events to an existing stream")
(let [s (store/retrieve-events store "my-stream")]
(is (messages= (take 3 events)
s)
"returns successfully appended events in chronological order")
(is (store/append-events store "my-stream" (+ stream/empty-stream-version (count s)) (drop 3 events)))
(is (messages= events
(store/retrieve-events store "my-stream")))
(is (every? (fn [e]
(= "my-stream" (:rill.message/stream-id e)))
(store/retrieve-events store stream/all-events-stream-id))))
(let [s (store/retrieve-events store "my-other-stream")]
(testing "event store handles each stream independently"
(is (= stream/empty-stream s))
(is (store/append-events store "my-other-stream" stream/empty-stream-version other-events))
(is (messages= other-events
(store/retrieve-events store "my-other-stream")))
(is (messages= events
(store/retrieve-events store "my-stream")))))
(is (= (range (count events))
(map message/number (store/retrieve-events store "my-stream"))
(map message/cursor (store/retrieve-events store "my-stream")))
"incremental message numbers from 0 ...")
(let [e (nth (store/retrieve-events store "my-stream") 3)]
(is (messages= (drop 4 events)
(store/retrieve-events-since store "my-stream" e 0))))
(testing "any-version"
(let [old-events (store/retrieve-events store "my-stream")
new-events (map test-event (range 1000 1005))]
(is (store/append-events store "my-stream" stream/any-stream-version new-events))
(is (messages= (concat old-events new-events)
(store/retrieve-events store "my-stream"))))))
(defonce big-blob (repeat 1000 "BLOB"))
(defn sequential-appends [store]
(testing "sequential appends"
(let [stream-ids (repeatedly 4 new-id)
events (map test-event (range 100))]
(mapv (fn [id es]
(is (store/append-events store id -1 es)))
stream-ids (partition-all 25 events))
(is (= (map :v events)
(map :v (store/retrieve-events store stream/all-events-stream-id)))))))
(defn concurrent-small-appends
[store]
(testing "many concurrent small events"
(let [num-streams 20
events-per-stream 1000
total-events (* num-streams events-per-stream)
stream-ids (map #(str "stream-" %) (range num-streams))
insert-chans (map-indexed (fn [stream-num stream-id]
(async/thread
(dotimes [i events-per-stream]
(is (store/append-events store stream-id
(if (even? stream-num) (dec i) stream/any-stream-version)
[(test-event [stream-id i])])))))
stream-ids)
listener-chan (event-channel store stream/all-events-stream-id -1 0)
_ (println "Sending" (* num-streams events-per-stream) "events...")
update-counts (fn [counts e]
(if (vector? (:v e))
(-> counts
(update-in [(first (:v e))] inc)
(assoc (str "last-seen-" (first (:v e))) (second (:v e)))
(update-in [:total] inc))
counts))
update-previous (fn [prev {[stream-id num :as v] :v}]
(if (vector? v)
(do (is (= {:stream stream-id :num (dec num)}
{:stream stream-id :num (prev stream-id)})
"consecutive events in source stream")
(assoc prev stream-id num))
prev))
[out previous] (loop [channels (vec (cons listener-chan insert-chans))
counts (into {:total 0} (map #(vector % 0) stream-ids))
previous (into {} (map #(vector % -1) stream-ids))]
(let [[e c] (async/alts!! channels)]
(if e
(recur channels (update-counts counts e) (update-previous previous e))
(let [new-chans (vec (remove #(= c %) channels))]
(if (= 1 (count new-chans))
[counts previous]
(recur new-chans counts previous))))))
_ (println "Inserted all events, waiting for last" (- total-events (:total out)) "events")
out (loop [channels [(async/timeout (* 60 1000)) listener-chan]
counts out
previous previous]
(let [[e c] (async/alts!! channels)]
(if e
(let [new-counts (update-counts counts e)]
(if (= (:total new-counts) total-events)
new-counts
(recur channels new-counts (update-previous previous e))))
counts)))]
(is (= (into {:total (* events-per-stream num-streams)}
(mapcat #(vector [% events-per-stream]
[(str "last-seen-" %) (dec events-per-stream)])
stream-ids))
out)))))
(defn chunked-appends
[store]
(testing "many concurrent small events in chunks"
(let [num-streams 20
events-per-stream 1000
total-events (* num-streams events-per-stream)
events-per-chunk 8
stream-ids (map #(str "stream-" %) (range num-streams))
insert-chans (map-indexed (fn [stream-num stream-id]
(async/thread
(dotimes [i (/ events-per-stream events-per-chunk)]
(is (store/append-events store stream-id
(if (even? stream-num) (dec (* i events-per-chunk)) stream/any-stream-version)
(mapv (fn [chunk-i]
(test-event [stream-id (+ (* events-per-chunk i) chunk-i)]))
(range events-per-chunk)))))))
stream-ids)
listener-chan (event-channel store stream/all-events-stream-id -1 0)
_ (println "Sending" (* num-streams events-per-stream) "events...")
update-counts (fn [counts e]
(if (vector? (:v e))
(-> counts
(update-in [(first (:v e))] inc)
(assoc (str "last-seen-" (first (:v e))) (second (:v e)))
(update-in [:total] inc))
counts))
update-previous (fn [prev {[stream-id num :as v] :v}]
(if (vector? v)
(do (is (= {:stream stream-id :num (dec num)}
{:stream stream-id :num (prev stream-id)})
"consecutive events in source stream")
(assoc prev stream-id num))
prev))
[out previous] (loop [channels (vec (cons listener-chan insert-chans))
counts (into {:total 0} (map #(vector % 0) stream-ids))
previous (into {} (map #(vector % -1) stream-ids))]
(let [[e c] (async/alts!! channels)]
(if e
(recur channels (update-counts counts e) (update-previous previous e))
(let [new-chans (vec (remove #(= c %) channels))]
(if (= 1 (count new-chans))
[counts previous]
(recur new-chans counts previous))))))
_ (println "Inserted all events, waiting for last" (- total-events (:total out)) "events")
out (loop [channels [(async/timeout (* 30 1000)) listener-chan]
counts out
previous previous]
(let [[e c] (async/alts!! channels)]
(if e
(let [new-counts (update-counts counts e)]
(if (= (:total new-counts) total-events)
new-counts
(recur channels new-counts (update-previous previous e))))
counts)))]
(is (= out (into {:total (* events-per-stream num-streams)}
(mapcat #(vector [% events-per-stream]
[(str "last-seen-" %) (dec events-per-stream)])
stream-ids)))))))
(defn concurrent-mix
[store]
(testing "concurrent mix of large and small events"
(let [big-id (new-id)
small-id (new-id)
big-chan (async/thread
(dorun (map-indexed (fn [i e]
(store/append-events store big-id (dec i) [e]))
(map (fn [i]
(test-event {:big i
:blob big-blob})) (range 100))))
(Thread/sleep 5000))
small-chan (async/thread
(dorun (map-indexed (fn [i e]
(store/append-events store small-id (dec i) [e]))
(map (fn [i]
(test-event {:small i}))
(range 1000))))
(Thread/sleep 5000))
listener-chan (event-channel store stream/all-events-stream-id -1 0)
out (loop [channels [big-chan small-chan listener-chan]
counts {:big 0
:small 0}]
(let [[e c] (async/alts!! channels)]
(if e
(recur channels (cond (:big (:v e))
(update-in counts [:big] inc)
(:small (:v e))
(update-in counts [:small] inc)
:else
counts))
(let [new-chans (vec (remove #(= c %) channels))]
(if (= 1 (count new-chans))
counts
(recur new-chans counts))))))]
(is (= out {:big 100
:small 1000})))))
(defn test-store [create-store-fn]
(doseq [t [basic-examples sequential-appends concurrent-small-appends chunked-appends concurrent-mix]]
(t (create-store-fn))))
| null | https://raw.githubusercontent.com/rill-event-sourcing/rill/711d08e52bd331cdc2255199069b2d0aca69e8b0/test/rill/event_store/generic_test_base.clj | clojure | (ns rill.event-store.generic-test-base
(:require [rill.event-store :as store]
[rill.event-stream :as stream]
[rill.temp-store :refer [comparable-message messages=]]
[rill.event-channel :refer [event-channel]]
[clojure.core.async :as async]
[rill.uuid :refer [new-id]]
[clojure.test :refer [is testing]]
[rill.message :as message :refer [defevent]]))
(defevent TestEvent :v s/Int)
(def events (map test-event (range 7)))
(def other-events (map test-event (range 3)))
(defn basic-examples [store]
(is (= (store/retrieve-events store "foo") stream/empty-stream)
"retrieving a non-existing stream returns the empty stream")
(is (store/append-events store "my-stream" stream/empty-stream-version (take 3 events))
"can push onto a non-existing stream using the empty stream")
(is (not (store/append-events store "my-stream" stream/empty-stream-version (drop 3 events)))
"needs the current stream to add events to an existing stream")
(let [s (store/retrieve-events store "my-stream")]
(is (messages= (take 3 events)
s)
"returns successfully appended events in chronological order")
(is (store/append-events store "my-stream" (+ stream/empty-stream-version (count s)) (drop 3 events)))
(is (messages= events
(store/retrieve-events store "my-stream")))
(is (every? (fn [e]
(= "my-stream" (:rill.message/stream-id e)))
(store/retrieve-events store stream/all-events-stream-id))))
(let [s (store/retrieve-events store "my-other-stream")]
(testing "event store handles each stream independently"
(is (= stream/empty-stream s))
(is (store/append-events store "my-other-stream" stream/empty-stream-version other-events))
(is (messages= other-events
(store/retrieve-events store "my-other-stream")))
(is (messages= events
(store/retrieve-events store "my-stream")))))
(is (= (range (count events))
(map message/number (store/retrieve-events store "my-stream"))
(map message/cursor (store/retrieve-events store "my-stream")))
"incremental message numbers from 0 ...")
(let [e (nth (store/retrieve-events store "my-stream") 3)]
(is (messages= (drop 4 events)
(store/retrieve-events-since store "my-stream" e 0))))
(testing "any-version"
(let [old-events (store/retrieve-events store "my-stream")
new-events (map test-event (range 1000 1005))]
(is (store/append-events store "my-stream" stream/any-stream-version new-events))
(is (messages= (concat old-events new-events)
(store/retrieve-events store "my-stream"))))))
(defonce big-blob (repeat 1000 "BLOB"))
(defn sequential-appends [store]
(testing "sequential appends"
(let [stream-ids (repeatedly 4 new-id)
events (map test-event (range 100))]
(mapv (fn [id es]
(is (store/append-events store id -1 es)))
stream-ids (partition-all 25 events))
(is (= (map :v events)
(map :v (store/retrieve-events store stream/all-events-stream-id)))))))
(defn concurrent-small-appends
[store]
(testing "many concurrent small events"
(let [num-streams 20
events-per-stream 1000
total-events (* num-streams events-per-stream)
stream-ids (map #(str "stream-" %) (range num-streams))
insert-chans (map-indexed (fn [stream-num stream-id]
(async/thread
(dotimes [i events-per-stream]
(is (store/append-events store stream-id
(if (even? stream-num) (dec i) stream/any-stream-version)
[(test-event [stream-id i])])))))
stream-ids)
listener-chan (event-channel store stream/all-events-stream-id -1 0)
_ (println "Sending" (* num-streams events-per-stream) "events...")
update-counts (fn [counts e]
(if (vector? (:v e))
(-> counts
(update-in [(first (:v e))] inc)
(assoc (str "last-seen-" (first (:v e))) (second (:v e)))
(update-in [:total] inc))
counts))
update-previous (fn [prev {[stream-id num :as v] :v}]
(if (vector? v)
(do (is (= {:stream stream-id :num (dec num)}
{:stream stream-id :num (prev stream-id)})
"consecutive events in source stream")
(assoc prev stream-id num))
prev))
[out previous] (loop [channels (vec (cons listener-chan insert-chans))
counts (into {:total 0} (map #(vector % 0) stream-ids))
previous (into {} (map #(vector % -1) stream-ids))]
(let [[e c] (async/alts!! channels)]
(if e
(recur channels (update-counts counts e) (update-previous previous e))
(let [new-chans (vec (remove #(= c %) channels))]
(if (= 1 (count new-chans))
[counts previous]
(recur new-chans counts previous))))))
_ (println "Inserted all events, waiting for last" (- total-events (:total out)) "events")
out (loop [channels [(async/timeout (* 60 1000)) listener-chan]
counts out
previous previous]
(let [[e c] (async/alts!! channels)]
(if e
(let [new-counts (update-counts counts e)]
(if (= (:total new-counts) total-events)
new-counts
(recur channels new-counts (update-previous previous e))))
counts)))]
(is (= (into {:total (* events-per-stream num-streams)}
(mapcat #(vector [% events-per-stream]
[(str "last-seen-" %) (dec events-per-stream)])
stream-ids))
out)))))
(defn chunked-appends
[store]
(testing "many concurrent small events in chunks"
(let [num-streams 20
events-per-stream 1000
total-events (* num-streams events-per-stream)
events-per-chunk 8
stream-ids (map #(str "stream-" %) (range num-streams))
insert-chans (map-indexed (fn [stream-num stream-id]
(async/thread
(dotimes [i (/ events-per-stream events-per-chunk)]
(is (store/append-events store stream-id
(if (even? stream-num) (dec (* i events-per-chunk)) stream/any-stream-version)
(mapv (fn [chunk-i]
(test-event [stream-id (+ (* events-per-chunk i) chunk-i)]))
(range events-per-chunk)))))))
stream-ids)
listener-chan (event-channel store stream/all-events-stream-id -1 0)
_ (println "Sending" (* num-streams events-per-stream) "events...")
update-counts (fn [counts e]
(if (vector? (:v e))
(-> counts
(update-in [(first (:v e))] inc)
(assoc (str "last-seen-" (first (:v e))) (second (:v e)))
(update-in [:total] inc))
counts))
update-previous (fn [prev {[stream-id num :as v] :v}]
(if (vector? v)
(do (is (= {:stream stream-id :num (dec num)}
{:stream stream-id :num (prev stream-id)})
"consecutive events in source stream")
(assoc prev stream-id num))
prev))
[out previous] (loop [channels (vec (cons listener-chan insert-chans))
counts (into {:total 0} (map #(vector % 0) stream-ids))
previous (into {} (map #(vector % -1) stream-ids))]
(let [[e c] (async/alts!! channels)]
(if e
(recur channels (update-counts counts e) (update-previous previous e))
(let [new-chans (vec (remove #(= c %) channels))]
(if (= 1 (count new-chans))
[counts previous]
(recur new-chans counts previous))))))
_ (println "Inserted all events, waiting for last" (- total-events (:total out)) "events")
out (loop [channels [(async/timeout (* 30 1000)) listener-chan]
counts out
previous previous]
(let [[e c] (async/alts!! channels)]
(if e
(let [new-counts (update-counts counts e)]
(if (= (:total new-counts) total-events)
new-counts
(recur channels new-counts (update-previous previous e))))
counts)))]
(is (= out (into {:total (* events-per-stream num-streams)}
(mapcat #(vector [% events-per-stream]
[(str "last-seen-" %) (dec events-per-stream)])
stream-ids)))))))
(defn concurrent-mix
[store]
(testing "concurrent mix of large and small events"
(let [big-id (new-id)
small-id (new-id)
big-chan (async/thread
(dorun (map-indexed (fn [i e]
(store/append-events store big-id (dec i) [e]))
(map (fn [i]
(test-event {:big i
:blob big-blob})) (range 100))))
(Thread/sleep 5000))
small-chan (async/thread
(dorun (map-indexed (fn [i e]
(store/append-events store small-id (dec i) [e]))
(map (fn [i]
(test-event {:small i}))
(range 1000))))
(Thread/sleep 5000))
listener-chan (event-channel store stream/all-events-stream-id -1 0)
out (loop [channels [big-chan small-chan listener-chan]
counts {:big 0
:small 0}]
(let [[e c] (async/alts!! channels)]
(if e
(recur channels (cond (:big (:v e))
(update-in counts [:big] inc)
(:small (:v e))
(update-in counts [:small] inc)
:else
counts))
(let [new-chans (vec (remove #(= c %) channels))]
(if (= 1 (count new-chans))
counts
(recur new-chans counts))))))]
(is (= out {:big 100
:small 1000})))))
(defn test-store [create-store-fn]
(doseq [t [basic-examples sequential-appends concurrent-small-appends chunked-appends concurrent-mix]]
(t (create-store-fn))))
|
|
9b02ca45e8ccb39d93c780749b9d6fe86b0e82e6a3fc900209a0614ada015e52 | freizl/dive-into-haskell | Example.hs | module Example where
import Lib
xs = [2..]
name = "hello haskell"
main = do
print (head xs)
print (foo 8 9)
| null | https://raw.githubusercontent.com/freizl/dive-into-haskell/b18a6bfe212db6c3a5d707b4a640170b8bcf9330/codes/stg-to-js/src/Example.hs | haskell | module Example where
import Lib
xs = [2..]
name = "hello haskell"
main = do
print (head xs)
print (foo 8 9)
|
|
4bdf69c6bc9898e6f4a83668752d02c2bbc94e72eeca4df193991530d22c1070 | bobzhang/fan | epN.mli |
(** Ast Utilities for [Astfn.ep] *)
open Astfn
val tuple_of_number : ep -> int -> ep
val of_vstr_number : string -> int -> ep
(** used by [Derive.exp_of_ctyp] to generate patterns *)
val gen_tuple_n :
?cons_transform:(string -> string) -> arity:int -> string -> int -> ep
val mk_record : ?arity:int -> Ctyp.col list -> ep
val mk_tuple : arity:int -> number:int -> ep
*
A very naive lifting . It does not do any parsing at all
It is applied to both exp and pat
{ [
of_str " ` A " ;
Vrn " A " || Vrn " A "
of_str " A " ;
ExId ( Uid " A " )
of_str " abs " ;
ExId ( Lid " abs " )
of_str " & & " ;
ExId ( Lid " & & " )
] }
A very naive lifting. It does not do any parsing at all
It is applied to both exp and pat
{[
of_str "`A";
Vrn "A" || Vrn "A"
of_str "A";
ExId (Uid "A")
of_str "abs";
ExId (Lid "abs")
of_str "&&";
ExId (Lid "&&")
]}
*)
val of_str: string -> ep
| null | https://raw.githubusercontent.com/bobzhang/fan/7ed527d96c5a006da43d3813f32ad8a5baa31b7f/src/main/epN.mli | ocaml | * Ast Utilities for [Astfn.ep]
* used by [Derive.exp_of_ctyp] to generate patterns |
open Astfn
val tuple_of_number : ep -> int -> ep
val of_vstr_number : string -> int -> ep
val gen_tuple_n :
?cons_transform:(string -> string) -> arity:int -> string -> int -> ep
val mk_record : ?arity:int -> Ctyp.col list -> ep
val mk_tuple : arity:int -> number:int -> ep
*
A very naive lifting . It does not do any parsing at all
It is applied to both exp and pat
{ [
of_str " ` A " ;
Vrn " A " || Vrn " A "
of_str " A " ;
ExId ( Uid " A " )
of_str " abs " ;
ExId ( Lid " abs " )
of_str " & & " ;
ExId ( Lid " & & " )
] }
A very naive lifting. It does not do any parsing at all
It is applied to both exp and pat
{[
of_str "`A";
Vrn "A" || Vrn "A"
of_str "A";
ExId (Uid "A")
of_str "abs";
ExId (Lid "abs")
of_str "&&";
ExId (Lid "&&")
]}
*)
val of_str: string -> ep
|
ab717365ca8c676fc84107e8a47d56bc0fadbaf4569fc7f5ccb47fc4472d891e | juxt/clip | edn_test.clj | (ns juxt.clip.edn-test
(:require
[clojure.test :refer [deftest is are]]
[juxt.clip.edn :as clip.edn]))
(defn foo [])
(deftest analyze-test
(is (= {:components {}}
(clip.edn/analyze
{:components {}})))
(is (= {:components {}
:executor #'foo}
(clip.edn/analyze
{:components {}
:executor `foo}))))
| null | https://raw.githubusercontent.com/juxt/clip/c40c288f2de9295af34fc1e352687ac64ad986a7/test/juxt/clip/edn_test.clj | clojure | (ns juxt.clip.edn-test
(:require
[clojure.test :refer [deftest is are]]
[juxt.clip.edn :as clip.edn]))
(defn foo [])
(deftest analyze-test
(is (= {:components {}}
(clip.edn/analyze
{:components {}})))
(is (= {:components {}
:executor #'foo}
(clip.edn/analyze
{:components {}
:executor `foo}))))
|
|
0403f0528d200ddaa74cafc83d42562c638e0b8e77302a737f25273d21131b71 | haskell/haskell-language-server | DestructAllMany.hs | data ABC = A | B | C
many :: () -> Either a b -> Bool -> Maybe ABC -> ABC -> ()
many u e b mabc abc = _
| null | https://raw.githubusercontent.com/haskell/haskell-language-server/f3ad27ba1634871b2240b8cd7de9f31b91a2e502/plugins/hls-tactics-plugin/new/test/golden/DestructAllMany.hs | haskell | data ABC = A | B | C
many :: () -> Either a b -> Bool -> Maybe ABC -> ABC -> ()
many u e b mabc abc = _
|
|
db0ec036d22d4efaf4d92522bff0505426a59cd51b6b2b24e429291f8f0ea552 | benedekfazekas/mranderson | move_test.clj | (ns mranderson.move-test
(:require [mranderson.move :as sut]
[clojure.test :as t]
[clojure.java.io :as io]
[rewrite-clj.zip :as z])
(:import [java.io File]))
(def ex-a-4
"(comment \"foobar comment here\")
(ns example.a.four)
(defn foo []
(println \"nuf said\"))
(deftype FourType [field])
(deftype FooType [])")
(def ex-5
"(ns example.five
(:import [example.with_dash.six SomeType SomeRecord]))
(defn- use-type-record []
(SomeType. :type)
(SomeRecord. :record))")
(def ex-3
"(ns example.three
(:require [example.five :as five]))
(defn use-ex-six-fully-qualified []
(example.with_dash.six.SomeType. :type)
(example.with_dash.six.SomeRecord. :record))")
(def ex-2
"(ns
^{:added \"0.0.1\"}
example.two
(:require [example.three :as three]
[example.a.four :as four]
[example.a
[foo]
[bar]])
(:import [example.a.four FourType]
example.a.four.FooType))
(defn foo []
(example.a.four/foo))
(defn cljs-foo
\"This is valid in cljs i am told.\"
[]
(example.a.four.foo))
(def delayed-four
(do
(require 'example.a.four)
(resolve 'example.a.four/foo)))
(defn my-four-type
^example.a.four.FourType
[^example.a.four.FourType t]
t)")
(def ex-1
"(ns example.one
(:require [example.two :as two]
[example.three :as three]))
(defn foo [m]
(:example.a.four/bar m)
(example.a.four/foo))")
(def ex-6-with-dash
"(ns example.with-dash.six)
(deftype SomeType [field])
(defrecord SomeRecord [field])")
(def ex-edn
"{:foo \"bar\"}")
(def ex-cljc
"(ns example.cross
#?@(:clj
[(:require [example.seven :as seven-clj])]
:cljs
[(:require [example.seven :as seven-cljs])]))")
(def ex-cljc-expected
"(ns example.cross
#?@(:clj
[(:require [example.clj.seven :as seven-clj])]
:cljs
[(:require [example.cljs.seven :as seven-cljs])]))")
(def ex-seven-clj "(ns example.seven)")
(def ex-seven-cljs "(ns example.seven)")
(def ex-data-readers
"{xml/ns clojure.data.xml.name/uri-symbol
xml/element clojure.data.xml.node/tagged-element}")
(def ex-data-readers-expected
"{xml/ns clojure.data.xml.name/uri-symbol
xml/element clojure.moved.data.xml.node/tagged-element}")
(def medley-user-example
"(ns example.user.medley
(:require [medley.core :as medley]))")
(def medley-stub "(ns medley.core)")
(def medley-stub-moved-expected "(ns ^{:inlined true} moved.medley.core)")
(def medley-user-expected
"(ns example.user.medley
(:require [moved.medley.core :as medley]))")
(def example-eight
"(ns example.eight)
(deftype EightType [])
(deftype TypeEight [])")
(def example-nine
"(ns example.nine
(:import [example.eight EightType]
example.eight.TypeEight)
(:require [example.eight :as eight]))")
(def example-nine-expected
"(ns example.nine
(:import [with_dash.example.eight EightType]
with_dash.example.eight.TypeEight)
(:require [with-dash.example.eight :as eight]))")
(def example-meta-kw1
"(ns ^:some-meta example.metakw1)")
(def example-meta-kw2
"(ns ^:some-meta example.metakw2)")
(def expected-moved-metakw1
"(ns ^:some-meta moved.metakw1)")
(def expected-moved-metakw2-watermark
"(ns ^{:some-meta true :inlined true} moved.metakw2)")
(def example-meta-map1
"(ns ^{:one 1 :zeta 42 :two 2} example.metamap1)")
(def example-meta-map2
"(ns ^{:one 1 :zeta 42 :two 2} example.metamap2)")
(def expected-moved-metamap1
"(ns ^{:one 1 :zeta 42 :two 2} moved.metamap1)")
(def expected-moved-metamap2-watermark
"(ns ^{:one 1 :zeta 42 :two 2 :inlined true} moved.metamap2)")
(defn- create-temp-dir! [dir-name]
(let [temp-file (File/createTempFile dir-name nil)]
(.delete temp-file)
(.mkdirs temp-file)
temp-file))
(defn- create-source-file! ^File [^File file ^String content]
(.delete file)
(.mkdirs (.getParentFile file))
(.createNewFile file)
(spit file content)
file)
;; this test is a slightly rewritten version of the original test for c.t.namespace.move from
(t/deftest move-ns-test
(let [temp-dir (create-temp-dir! "tools-namespace-t-move-ns")
src-dir (io/file temp-dir "src")
example-dir (io/file temp-dir "src" "example")
a-dir (io/file temp-dir "src" "example" "a")
with-dash-dir (io/file temp-dir "src" "example" "with_dash")
file-one (create-source-file! (io/file example-dir "one.clj") ex-1)
file-two (create-source-file! (io/file example-dir "two.clj") ex-2)
file-three (create-source-file! (io/file example-dir "three.clj") ex-3)
old-file-four (create-source-file! (io/file a-dir "four.clj") ex-a-4)
new-file-four (io/file example-dir "b" "four.clj")
file-five (create-source-file! (io/file example-dir "five.clj") ex-5)
old-file-six (create-source-file! (io/file with-dash-dir "six.clj") ex-6-with-dash)
new-file-six (io/file example-dir "prefix" "with_dash" "six.clj")
file-edn (create-source-file! (io/file example-dir "edn.clj") ex-edn)
file-cljc (create-source-file! (io/file example-dir "cross.cljc") ex-cljc)
file-data-readers (create-source-file! (io/file example-dir "data_readers.cljc") ex-data-readers)
medley-dir (io/file src-dir "medley")
file-medley-user (create-source-file! (io/file example-dir "user" "medley.clj") medley-user-example)
file-medley-stub-moved (io/file src-dir "moved" "medley" "core.clj")
file-nine (create-source-file! (io/file example-dir "nine.clj") example-nine)
file-three-last-modified (.lastModified file-three)
file-moved-metakw1 (io/file src-dir "moved" "metakw1.clj")
file-moved-metakw2 (io/file src-dir "moved" "metakw2.clj")
file-moved-metamap1 (io/file src-dir "moved" "metamap1.clj")
file-moved-metamap2 (io/file src-dir "moved" "metamap2.clj")]
(create-source-file! (io/file example-dir "seven.clj") ex-seven-clj)
(create-source-file! (io/file example-dir "seven.cljs") ex-seven-cljs)
(create-source-file! (io/file medley-dir "core.clj") medley-stub)
(create-source-file! (io/file example-dir "eight.clj") example-eight)
(create-source-file! (io/file example-dir "metakw1.clj") example-meta-kw1)
(create-source-file! (io/file example-dir "metakw2.clj") example-meta-kw2)
(create-source-file! (io/file example-dir "metamap1.clj") example-meta-map1)
(create-source-file! (io/file example-dir "metamap2.clj") example-meta-map2)
(Thread/sleep 1500) ;; ensure file timestamps are different
(t/testing "move ns simple case, no dash, no deftype, defrecord"
(sut/move-ns 'example.a.four 'example.b.four src-dir ".clj" [src-dir] nil)
;; (println "affected after move")
( doseq [ a [ file - one file - two new - file - four ] ]
;; (println (.getAbsolutePath a))
;; (prn (slurp a)))
;; (println "unaffected after move")
( doseq [ a [ file - three file - edn ] ]
;; (println (.getAbsolutePath a))
;; (prn (slurp a)))
(t/is (.exists new-file-four)
"new file should exist")
(t/is (not (.exists old-file-four))
"old file should not exist")
(t/is (not (.exists (.getParentFile old-file-four)))
"old empty directory should not exist")
(t/is (= file-three-last-modified (.lastModified file-three))
"unaffected file should not have been modified")
(t/is (not-any? #(.contains (slurp %) "example.a.four")
[file-one file-two file-three new-file-four])
"affected files should not refer to old ns")
(t/is (.contains (slurp file-one) "(example.b.four/foo)")
"file with a reference to ns in body should refer with a symbol")
(t/is (every? #(.contains (slurp %) "example.b.four")
[file-one file-two new-file-four])
"affected files should refer to new ns")
(t/is (= 9 (count (re-seq #"example.b.four" (slurp file-two))))
"all occurances of old ns should be replace with new")
(t/is (re-find #"\(:example.b.four/" (slurp file-one))
"type of occurence is retained if keyword")
(t/is (re-find #"\[example\.b\s*\[foo\]\s*\[bar\]\]" (slurp file-two))
"prefixes should be replaced")
(t/is (= ex-data-readers (slurp file-data-readers))
"cljc file w/o ns macro is unchanged")
(t/is (= ex-edn (slurp file-edn))
"clj file wo/ ns macro is unchanged"))
(t/testing "testing import deftype no dash, dash in the prefix"
(sut/move-ns 'example.eight 'with-dash.example.eight src-dir ".clj" [src-dir] nil)
(t/is (= (slurp file-nine) example-nine-expected)))
(t/testing "move ns with dash, deftype, defrecord, import"
(sut/move-ns 'example.with-dash.six 'example.prefix.with-dash.six src-dir ".clj" [src-dir] :inlined)
(t/is (.contains (slurp new-file-six) ":inlined true")
"file that was moved should have :inlined metadata watermark")
(t/is (not-any? #(.contains (slurp %) ":inlined true")
[file-one file-two file-three file-five file-nine])
"files that were not moved should not have :inlined metadata watermark")
;; (println "affected after move")
( doseq [ a [ file - three file - five new - file - six new - file - four ] ]
;; (println (.getAbsolutePath a))
;; (prn (slurp a)))
(t/is (.exists new-file-six)
"new file should exist")
(t/is (not (.exists old-file-six))
"old file should not exist")
(t/is (not-any? #(.contains (slurp %) "example.with_dash.six")
[file-five file-three])
"affected files should not refer to old ns in imports or body")
(t/is (every? #(.contains (slurp %) "example.prefix.with_dash.six")
[file-five file-three])
"affected files should refer to new ns"))
(t/testing "testing cljc file using :clj/cljs macros in require depending on same ns in clj and cljs"
(sut/move-ns 'example.seven 'example.clj.seven src-dir ".clj" [src-dir] nil)
(sut/move-ns 'example.seven 'example.cljs.seven src-dir ".cljs" [src-dir] nil)
(t/is (= (slurp file-cljc) ex-cljc-expected)))
(t/testing "testing alias is first section of two section namespace"
(sut/move-ns 'medley.core 'moved.medley.core src-dir ".clj" [src-dir] :inlined)
(t/is (= (slurp file-medley-stub-moved) medley-stub-moved-expected))
(t/is (= (slurp file-medley-user) medley-user-expected)))
(t/testing "testing cljc file without ns macro, with a replacement"
(create-source-file! (io/file (io/file temp-dir "src" "clojure" "data" "xml") "node.clj") "(ns clojure.data.xml.node)")
(sut/move-ns 'clojure.data.xml.node 'clojure.moved.data.xml.node src-dir ".clj" [src-dir] nil)
(t/is (= (slurp file-data-readers) ex-data-readers-expected)))
(t/testing "namespace metadata correct on ns move"
(sut/move-ns 'example.metakw1 'moved.metakw1 src-dir ".clj" [src-dir] nil)
(t/is (= (slurp file-moved-metakw1) expected-moved-metakw1))
(sut/move-ns 'example.metakw2 'moved.metakw2 src-dir ".clj" [src-dir] :inlined)
(t/is (= (slurp file-moved-metakw2) expected-moved-metakw2-watermark))
(sut/move-ns 'example.metamap1 'moved.metamap1 src-dir ".clj" [src-dir] nil)
(t/is (= (slurp file-moved-metamap1) expected-moved-metamap1))
(sut/move-ns 'example.metamap2 'moved.metamap2 src-dir ".clj" [src-dir] :inlined)
(t/is (= (slurp file-moved-metamap2) expected-moved-metamap2-watermark)))))
(defn- s-rename-ns
"A litle helper for rename-ns-test"
[s old-ns-name new-ns-name add-meta-kw]
(-> s
z/of-string
(sut/rename-ns old-ns-name new-ns-name add-meta-kw)
z/root-string))
(t/deftest rename-ns-test
(t/is (= (s-rename-ns "(ns ^{:spam true} foo)" 'foo 'bar :mranderson/zing)
"(ns ^{:spam true :mranderson/zing true} bar)")
"adds new meta to existing meta map")
(t/is (= (s-rename-ns "(ns foo)" 'foo 'bar :mranderson/zing)
"(ns ^{:mranderson/zing true} bar)")
"adds new meta when no existing meta")
(t/is (= (s-rename-ns "(ns foo)" 'foo 'bar nil)
"(ns bar)")
"renames when no new or existing meta")
(t/is (= (s-rename-ns "(ns ^:boop foo)" 'foo 'bar nil)
"(ns ^:boop bar)")
"renames when no new meta")
(t/is (= (s-rename-ns "(ns ^:boop foo)" 'foo 'bar :mranderson/zing)
"(ns ^{:boop true :mranderson/zing true} bar)")
"renames when new meta and existing meta is kw form")
(t/is (= (s-rename-ns "(ns ^:boop foo)" 'nope 'bar :mranderson/zing)
"(ns ^:boop foo)")
"does not rename or adjust meta when old-ns-name is does not match cur ns name")
(t/is (= (s-rename-ns "(ns)" 'foo 'bar :mranderson/zing)
"(ns)")
"empty ns is unaffected")
(t/is (= (s-rename-ns "(ns #_ skipped foo)" 'foo 'bar nil)
"(ns #_ skipped bar)")
"uneval node is skipped")
(t/is (= (s-rename-ns "(ns #_#_ skip1 skip2 ^:boop #_ skip3 foo)" 'foo 'bar :mranderson/zing)
"(ns #_#_ skip1 skip2 ^{:boop true :mranderson/zing true} #_ skip3 bar)")
"uneval nodes are skipped"))
| null | https://raw.githubusercontent.com/benedekfazekas/mranderson/5bacf056ebe1feda9112c9df3aba5197fc40150e/test/mranderson/move_test.clj | clojure | this test is a slightly rewritten version of the original test for c.t.namespace.move from
ensure file timestamps are different
(println "affected after move")
(println (.getAbsolutePath a))
(prn (slurp a)))
(println "unaffected after move")
(println (.getAbsolutePath a))
(prn (slurp a)))
(println "affected after move")
(println (.getAbsolutePath a))
(prn (slurp a))) | (ns mranderson.move-test
(:require [mranderson.move :as sut]
[clojure.test :as t]
[clojure.java.io :as io]
[rewrite-clj.zip :as z])
(:import [java.io File]))
(def ex-a-4
"(comment \"foobar comment here\")
(ns example.a.four)
(defn foo []
(println \"nuf said\"))
(deftype FourType [field])
(deftype FooType [])")
(def ex-5
"(ns example.five
(:import [example.with_dash.six SomeType SomeRecord]))
(defn- use-type-record []
(SomeType. :type)
(SomeRecord. :record))")
(def ex-3
"(ns example.three
(:require [example.five :as five]))
(defn use-ex-six-fully-qualified []
(example.with_dash.six.SomeType. :type)
(example.with_dash.six.SomeRecord. :record))")
(def ex-2
"(ns
^{:added \"0.0.1\"}
example.two
(:require [example.three :as three]
[example.a.four :as four]
[example.a
[foo]
[bar]])
(:import [example.a.four FourType]
example.a.four.FooType))
(defn foo []
(example.a.four/foo))
(defn cljs-foo
\"This is valid in cljs i am told.\"
[]
(example.a.four.foo))
(def delayed-four
(do
(require 'example.a.four)
(resolve 'example.a.four/foo)))
(defn my-four-type
^example.a.four.FourType
[^example.a.four.FourType t]
t)")
(def ex-1
"(ns example.one
(:require [example.two :as two]
[example.three :as three]))
(defn foo [m]
(:example.a.four/bar m)
(example.a.four/foo))")
(def ex-6-with-dash
"(ns example.with-dash.six)
(deftype SomeType [field])
(defrecord SomeRecord [field])")
(def ex-edn
"{:foo \"bar\"}")
(def ex-cljc
"(ns example.cross
#?@(:clj
[(:require [example.seven :as seven-clj])]
:cljs
[(:require [example.seven :as seven-cljs])]))")
(def ex-cljc-expected
"(ns example.cross
#?@(:clj
[(:require [example.clj.seven :as seven-clj])]
:cljs
[(:require [example.cljs.seven :as seven-cljs])]))")
(def ex-seven-clj "(ns example.seven)")
(def ex-seven-cljs "(ns example.seven)")
(def ex-data-readers
"{xml/ns clojure.data.xml.name/uri-symbol
xml/element clojure.data.xml.node/tagged-element}")
(def ex-data-readers-expected
"{xml/ns clojure.data.xml.name/uri-symbol
xml/element clojure.moved.data.xml.node/tagged-element}")
(def medley-user-example
"(ns example.user.medley
(:require [medley.core :as medley]))")
(def medley-stub "(ns medley.core)")
(def medley-stub-moved-expected "(ns ^{:inlined true} moved.medley.core)")
(def medley-user-expected
"(ns example.user.medley
(:require [moved.medley.core :as medley]))")
(def example-eight
"(ns example.eight)
(deftype EightType [])
(deftype TypeEight [])")
(def example-nine
"(ns example.nine
(:import [example.eight EightType]
example.eight.TypeEight)
(:require [example.eight :as eight]))")
(def example-nine-expected
"(ns example.nine
(:import [with_dash.example.eight EightType]
with_dash.example.eight.TypeEight)
(:require [with-dash.example.eight :as eight]))")
(def example-meta-kw1
"(ns ^:some-meta example.metakw1)")
(def example-meta-kw2
"(ns ^:some-meta example.metakw2)")
(def expected-moved-metakw1
"(ns ^:some-meta moved.metakw1)")
(def expected-moved-metakw2-watermark
"(ns ^{:some-meta true :inlined true} moved.metakw2)")
(def example-meta-map1
"(ns ^{:one 1 :zeta 42 :two 2} example.metamap1)")
(def example-meta-map2
"(ns ^{:one 1 :zeta 42 :two 2} example.metamap2)")
(def expected-moved-metamap1
"(ns ^{:one 1 :zeta 42 :two 2} moved.metamap1)")
(def expected-moved-metamap2-watermark
"(ns ^{:one 1 :zeta 42 :two 2 :inlined true} moved.metamap2)")
(defn- create-temp-dir! [dir-name]
(let [temp-file (File/createTempFile dir-name nil)]
(.delete temp-file)
(.mkdirs temp-file)
temp-file))
(defn- create-source-file! ^File [^File file ^String content]
(.delete file)
(.mkdirs (.getParentFile file))
(.createNewFile file)
(spit file content)
file)
(t/deftest move-ns-test
(let [temp-dir (create-temp-dir! "tools-namespace-t-move-ns")
src-dir (io/file temp-dir "src")
example-dir (io/file temp-dir "src" "example")
a-dir (io/file temp-dir "src" "example" "a")
with-dash-dir (io/file temp-dir "src" "example" "with_dash")
file-one (create-source-file! (io/file example-dir "one.clj") ex-1)
file-two (create-source-file! (io/file example-dir "two.clj") ex-2)
file-three (create-source-file! (io/file example-dir "three.clj") ex-3)
old-file-four (create-source-file! (io/file a-dir "four.clj") ex-a-4)
new-file-four (io/file example-dir "b" "four.clj")
file-five (create-source-file! (io/file example-dir "five.clj") ex-5)
old-file-six (create-source-file! (io/file with-dash-dir "six.clj") ex-6-with-dash)
new-file-six (io/file example-dir "prefix" "with_dash" "six.clj")
file-edn (create-source-file! (io/file example-dir "edn.clj") ex-edn)
file-cljc (create-source-file! (io/file example-dir "cross.cljc") ex-cljc)
file-data-readers (create-source-file! (io/file example-dir "data_readers.cljc") ex-data-readers)
medley-dir (io/file src-dir "medley")
file-medley-user (create-source-file! (io/file example-dir "user" "medley.clj") medley-user-example)
file-medley-stub-moved (io/file src-dir "moved" "medley" "core.clj")
file-nine (create-source-file! (io/file example-dir "nine.clj") example-nine)
file-three-last-modified (.lastModified file-three)
file-moved-metakw1 (io/file src-dir "moved" "metakw1.clj")
file-moved-metakw2 (io/file src-dir "moved" "metakw2.clj")
file-moved-metamap1 (io/file src-dir "moved" "metamap1.clj")
file-moved-metamap2 (io/file src-dir "moved" "metamap2.clj")]
(create-source-file! (io/file example-dir "seven.clj") ex-seven-clj)
(create-source-file! (io/file example-dir "seven.cljs") ex-seven-cljs)
(create-source-file! (io/file medley-dir "core.clj") medley-stub)
(create-source-file! (io/file example-dir "eight.clj") example-eight)
(create-source-file! (io/file example-dir "metakw1.clj") example-meta-kw1)
(create-source-file! (io/file example-dir "metakw2.clj") example-meta-kw2)
(create-source-file! (io/file example-dir "metamap1.clj") example-meta-map1)
(create-source-file! (io/file example-dir "metamap2.clj") example-meta-map2)
(t/testing "move ns simple case, no dash, no deftype, defrecord"
(sut/move-ns 'example.a.four 'example.b.four src-dir ".clj" [src-dir] nil)
( doseq [ a [ file - one file - two new - file - four ] ]
( doseq [ a [ file - three file - edn ] ]
(t/is (.exists new-file-four)
"new file should exist")
(t/is (not (.exists old-file-four))
"old file should not exist")
(t/is (not (.exists (.getParentFile old-file-four)))
"old empty directory should not exist")
(t/is (= file-three-last-modified (.lastModified file-three))
"unaffected file should not have been modified")
(t/is (not-any? #(.contains (slurp %) "example.a.four")
[file-one file-two file-three new-file-four])
"affected files should not refer to old ns")
(t/is (.contains (slurp file-one) "(example.b.four/foo)")
"file with a reference to ns in body should refer with a symbol")
(t/is (every? #(.contains (slurp %) "example.b.four")
[file-one file-two new-file-four])
"affected files should refer to new ns")
(t/is (= 9 (count (re-seq #"example.b.four" (slurp file-two))))
"all occurances of old ns should be replace with new")
(t/is (re-find #"\(:example.b.four/" (slurp file-one))
"type of occurence is retained if keyword")
(t/is (re-find #"\[example\.b\s*\[foo\]\s*\[bar\]\]" (slurp file-two))
"prefixes should be replaced")
(t/is (= ex-data-readers (slurp file-data-readers))
"cljc file w/o ns macro is unchanged")
(t/is (= ex-edn (slurp file-edn))
"clj file wo/ ns macro is unchanged"))
(t/testing "testing import deftype no dash, dash in the prefix"
(sut/move-ns 'example.eight 'with-dash.example.eight src-dir ".clj" [src-dir] nil)
(t/is (= (slurp file-nine) example-nine-expected)))
(t/testing "move ns with dash, deftype, defrecord, import"
(sut/move-ns 'example.with-dash.six 'example.prefix.with-dash.six src-dir ".clj" [src-dir] :inlined)
(t/is (.contains (slurp new-file-six) ":inlined true")
"file that was moved should have :inlined metadata watermark")
(t/is (not-any? #(.contains (slurp %) ":inlined true")
[file-one file-two file-three file-five file-nine])
"files that were not moved should not have :inlined metadata watermark")
( doseq [ a [ file - three file - five new - file - six new - file - four ] ]
(t/is (.exists new-file-six)
"new file should exist")
(t/is (not (.exists old-file-six))
"old file should not exist")
(t/is (not-any? #(.contains (slurp %) "example.with_dash.six")
[file-five file-three])
"affected files should not refer to old ns in imports or body")
(t/is (every? #(.contains (slurp %) "example.prefix.with_dash.six")
[file-five file-three])
"affected files should refer to new ns"))
(t/testing "testing cljc file using :clj/cljs macros in require depending on same ns in clj and cljs"
(sut/move-ns 'example.seven 'example.clj.seven src-dir ".clj" [src-dir] nil)
(sut/move-ns 'example.seven 'example.cljs.seven src-dir ".cljs" [src-dir] nil)
(t/is (= (slurp file-cljc) ex-cljc-expected)))
(t/testing "testing alias is first section of two section namespace"
(sut/move-ns 'medley.core 'moved.medley.core src-dir ".clj" [src-dir] :inlined)
(t/is (= (slurp file-medley-stub-moved) medley-stub-moved-expected))
(t/is (= (slurp file-medley-user) medley-user-expected)))
(t/testing "testing cljc file without ns macro, with a replacement"
(create-source-file! (io/file (io/file temp-dir "src" "clojure" "data" "xml") "node.clj") "(ns clojure.data.xml.node)")
(sut/move-ns 'clojure.data.xml.node 'clojure.moved.data.xml.node src-dir ".clj" [src-dir] nil)
(t/is (= (slurp file-data-readers) ex-data-readers-expected)))
(t/testing "namespace metadata correct on ns move"
(sut/move-ns 'example.metakw1 'moved.metakw1 src-dir ".clj" [src-dir] nil)
(t/is (= (slurp file-moved-metakw1) expected-moved-metakw1))
(sut/move-ns 'example.metakw2 'moved.metakw2 src-dir ".clj" [src-dir] :inlined)
(t/is (= (slurp file-moved-metakw2) expected-moved-metakw2-watermark))
(sut/move-ns 'example.metamap1 'moved.metamap1 src-dir ".clj" [src-dir] nil)
(t/is (= (slurp file-moved-metamap1) expected-moved-metamap1))
(sut/move-ns 'example.metamap2 'moved.metamap2 src-dir ".clj" [src-dir] :inlined)
(t/is (= (slurp file-moved-metamap2) expected-moved-metamap2-watermark)))))
(defn- s-rename-ns
"A litle helper for rename-ns-test"
[s old-ns-name new-ns-name add-meta-kw]
(-> s
z/of-string
(sut/rename-ns old-ns-name new-ns-name add-meta-kw)
z/root-string))
(t/deftest rename-ns-test
(t/is (= (s-rename-ns "(ns ^{:spam true} foo)" 'foo 'bar :mranderson/zing)
"(ns ^{:spam true :mranderson/zing true} bar)")
"adds new meta to existing meta map")
(t/is (= (s-rename-ns "(ns foo)" 'foo 'bar :mranderson/zing)
"(ns ^{:mranderson/zing true} bar)")
"adds new meta when no existing meta")
(t/is (= (s-rename-ns "(ns foo)" 'foo 'bar nil)
"(ns bar)")
"renames when no new or existing meta")
(t/is (= (s-rename-ns "(ns ^:boop foo)" 'foo 'bar nil)
"(ns ^:boop bar)")
"renames when no new meta")
(t/is (= (s-rename-ns "(ns ^:boop foo)" 'foo 'bar :mranderson/zing)
"(ns ^{:boop true :mranderson/zing true} bar)")
"renames when new meta and existing meta is kw form")
(t/is (= (s-rename-ns "(ns ^:boop foo)" 'nope 'bar :mranderson/zing)
"(ns ^:boop foo)")
"does not rename or adjust meta when old-ns-name is does not match cur ns name")
(t/is (= (s-rename-ns "(ns)" 'foo 'bar :mranderson/zing)
"(ns)")
"empty ns is unaffected")
(t/is (= (s-rename-ns "(ns #_ skipped foo)" 'foo 'bar nil)
"(ns #_ skipped bar)")
"uneval node is skipped")
(t/is (= (s-rename-ns "(ns #_#_ skip1 skip2 ^:boop #_ skip3 foo)" 'foo 'bar :mranderson/zing)
"(ns #_#_ skip1 skip2 ^{:boop true :mranderson/zing true} #_ skip3 bar)")
"uneval nodes are skipped"))
|
231cdc33867df945fe35338f2eec02a4ad7cf80585532a770a4443f565eeb8ed | huangz1990/SICP-answers | test-31-tree-map-using-map.scm | (load "test-manager/load.scm")
(load "31-tree-map-using-map.scm")
(define-each-check
(equal? (tree-map square (list (list 1 2) (list 3 4)))
(list (list (square 1) (square 2))
(list (square 3) (square 4))))
)
(run-registered-tests)
| null | https://raw.githubusercontent.com/huangz1990/SICP-answers/15e3475003ef10eb738cf93c1932277bc56bacbe/chp2/code/test-31-tree-map-using-map.scm | scheme | (load "test-manager/load.scm")
(load "31-tree-map-using-map.scm")
(define-each-check
(equal? (tree-map square (list (list 1 2) (list 3 4)))
(list (list (square 1) (square 2))
(list (square 3) (square 4))))
)
(run-registered-tests)
|
|
facfabc70eaa32b5e8f16e8cf3287a0b53dace0ecb399eee77d297d0e4eaec5c | clojure-quant/infra-guix | sway.scm | (define-module (awb99 home sway)
#:use-module (srfi srfi-1)
#:use-module (guix gexp)
#:use-module (gnu home services)
# : use - module ( gnu home - services wm )
# : use - module ( awb99 home i3blocks )
# : use - module ( home services mako )
;#:use-module (home services swappy)
#:use-module (gnu packages gnupg)
# : use - module ( kreved packages wm )
)
sway : Wayland compositor compatible with i3
the difference is that now GDM supports wayland session although you can run X apps in it using Xwayland
;
; -sway/-/blob/master/guix-sway-config.scm
;
; sway notes
; -config/src/master/sway-service.scm
see also : sway follws i3 config
;
;
; -Sway/desktop-settings/blob/sway/community/sway/etc/sway/modes/default
-Sway/desktop-settings/blob/sway/community/sway/etc/sway/config.d/98-application-defaults.conf
(define ws-bindings
(map (lambda (ws)
`(,(string->symbol (format #f "$mod+~d" ws))
workspace number ,ws))
(iota 9 1)))
(define ws-move-bindings
(map (lambda (ws)
`(,(string->symbol (format #f "$mod+Shift+~d" ws))
move container to workspace number ,ws))
(iota 9 1)))
(define-public sway-services
(list
(service home-sway-service-type
(home-sway-configuration
;; package does not work.
;;(package sway)
;; (package sway-next) ; no substitutes
(config
`((set $mod Mod4)
(set $left Left)
(set $right Right)
(set $up Up)
(set $down Down)
(set $term alacritty)
(set $menu bemenu-run
--prompt "'run:'"
--ignorecase)
(set $wmenu
"wofi --show run")
--no - startup - id " rofi -show window "
exec --no - startup - id " rofi -terminal xfce4 - terminal -show ssh "
(bindsym
--to-code
(($mod+Return exec $term)
($mod+space exec $wmenu)
($mod+Shift+space exec $menu)
($mod+s exec "wofi --show ssh")
($mod+w exec "wofi --show window")
($mod+c kill)
($mod+q reload)
($mod+Shift+q exec swaymsg exit)
($mod+$up focus prev)
($mod+$down focus next)
($mod+Shift+$left move left)
($mod+Shift+$right move right)
($mod+Shift+$up move up)
($mod+Shift+$down move down)
($mod+f fullscreen)
($mod+Tab layout toggle split tabbed)
($mod+Shift+Tab split toggle)
($mod+grave floating toggle)
($mod+Shift+grave focus mode_toggle)
($mod+Shift+s exec "grim -g \"$(slurp)\" - | swappy -f -")
(Print exec "grim - | wl-copy -t image/png")
($mod+g exec makoctl dismiss --all)
($mod+m exec makoctl set-mode dnd)
($mod+Shift+m exec makoctl set-mode default)
($mod+o exec "ykman oath list | bemenu --prompt 'otp:' --ignorecase | xargs -I {} -r ykman oath code -s '{}' | wl-copy")
,@ws-bindings
,@ws-move-bindings))
(bindsym
--locked
((XF86MonBrightnessUp exec light -A 10)
(XF86MonBrightnessDown exec light -U 10)))
This will lock your screen after 300 seconds of inactivity , then turn off
your displays after another 300 seconds , and turn your screens back on when
; resumed. It will also lock your screen before your computer goes to sleep.
(exec swayidle -w
before-sleep "'swaylock -f -c 000000'"
timeout 300 "'swaylock -f -c 000000'"
timeout 600 "'swaymsg \"output * dpms off\"'"
resume "'swaymsg \"output * dpms on\"'")
(exec wlsunset -l 50.6 -L 36.6 -T 6500 -t 3000)
(exec mako)
only enable this if every app you use is compatible with wayland
(xwayland enable)
(workspace_auto_back_and_forth yes)
(focus_follows_mouse no)
(smart_borders on)
(title_align center)
;
output HDMI - A-1 resolution 1920x1080 position 1920,0
; You can get the names of your outputs by running: swaymsg -t get_outputs
; Default wallpaper
; (output * bg ,(local-file "files/wp.jpg") fill)
( output eDP-1 scale 1.33 )
The scale factor can be fractional , but it is usually 2 for HiDPI screens .
(output * scale 1.0)
( output DP-2 scale 1.0 )
; (output eDP-1 scale 2.0)
; You can get the names of your inputs by running: swaymsg -t get_inputs
Read ` man 5 sway - input ` for more information about this section .
; input type:keyboard xkb_layout "us"
( input * xkb_layout " " )
(input "1118:1874:Microsoft_Wired_Keyboard_400"
((xkb_layout at)
; (xkb_options grp:toggle)
grp : toggle , ctrl : swapcaps
(input type:touchpad events disabled)
(input "2:10:TPPS/2_IBM_TrackPoint"
((pointer_accel 0.3)
(scroll_factor 0.8)))
(input "1390:268:ELECOM_TrackBall_Mouse_HUGE_TrackBall"
((scroll_method on_button_down)
(scroll_button BTN_TASK)))
; [instance="lxappearance"]
; [app_id="xsensors"]
; [title="Save File"]
; [app_id="thunderbird" title=".*Reminder"]
; launch some application at startup
(exec "--no-startup-id alacritty")
(assign "[app_id=\"nyxt\"]" 2)
(assign "[instance=\"chromium\"]" 2)
(assign "[app_id=\"chromium\"]" 2)
(assign "[class=\"chromium/Chromium\"]" 2)
(assign "[app_id=\"codium\"]" 3)
(assign "[instance=\"codium\"]" 3)
(assign "[instance=\"emacs\"]" 4)
(assign "[app_id=\"telegramdesktop\"]" 5)
(for_window
"[app_id=\"telegramdesktop\" title=\"Media viewer\"]"
focus)
(for_window
"[app_id=\"^.*\"]"
inhibit_idle fullscreen)
(for_window
"[title=\"^(?:Open|Save) (?:File|Folder|As).*\"]"
floating enable, resize set width 70 ppt height 70 ppt)
(font "Iosevka, Light 14")
(client.focused "#f0f0f0" "#f0f0f0" "#721045" "#721045" "#721045")
(client.unfocused "#ffffff" "#ffffff" "#595959")
(default_border normal 0)
(default_floating_border none)
was 8
(seat * xcursor_theme Adwaita 24)
(bar
((status_command i3blocks)
(position top)
(separator_symbol "|")
(font "Iosevka, Light 18")
(pango_markup enabled)
(colors
((statusline "#000000")
(background "#FFFFFF")
(focused_workspace "#f0f0f0" "#f0f0f0" "#721045")
(inactive_workspace "#ffffff" "#ffffff" "#595959")))))))))
;(service
; home-i3blocks-service-type
; (home-i3blocks-configuration
; (config
; `((battery1
; ((command . ,(local-file "files/battery" #:recursive? #t))
( BAT_NUM . 1 )
( interval . 10 ) ) )
; (battery0
; ((command . ,(local-file "files/battery" #:recursive? #t))
; (BAT_NUM . 0)
( interval . 10 ) ) )
; (date
( ( command . " date ' + % a , % d % b ' " )
( interval . 1 ) ) )
; (time
( ( command . " date + % H:%M:%S " )
( interval . 1 ) ) ) ) ) ) )
))
| null | https://raw.githubusercontent.com/clojure-quant/infra-guix/d0ed79cbf179d3cc9fce882f3b3b2aae2925176f/modules/awb99/home/sway.scm | scheme | #:use-module (home services swappy)
-sway/-/blob/master/guix-sway-config.scm
sway notes
-config/src/master/sway-service.scm
-Sway/desktop-settings/blob/sway/community/sway/etc/sway/modes/default
package does not work.
(package sway)
(package sway-next) ; no substitutes
resumed. It will also lock your screen before your computer goes to sleep.
You can get the names of your outputs by running: swaymsg -t get_outputs
Default wallpaper
(output * bg ,(local-file "files/wp.jpg") fill)
(output eDP-1 scale 2.0)
You can get the names of your inputs by running: swaymsg -t get_inputs
input type:keyboard xkb_layout "us"
(xkb_options grp:toggle)
[instance="lxappearance"]
[app_id="xsensors"]
[title="Save File"]
[app_id="thunderbird" title=".*Reminder"]
launch some application at startup
(service
home-i3blocks-service-type
(home-i3blocks-configuration
(config
`((battery1
((command . ,(local-file "files/battery" #:recursive? #t))
(battery0
((command . ,(local-file "files/battery" #:recursive? #t))
(BAT_NUM . 0)
(date
(time | (define-module (awb99 home sway)
#:use-module (srfi srfi-1)
#:use-module (guix gexp)
#:use-module (gnu home services)
# : use - module ( gnu home - services wm )
# : use - module ( awb99 home i3blocks )
# : use - module ( home services mako )
#:use-module (gnu packages gnupg)
# : use - module ( kreved packages wm )
)
sway : Wayland compositor compatible with i3
the difference is that now GDM supports wayland session although you can run X apps in it using Xwayland
see also : sway follws i3 config
-Sway/desktop-settings/blob/sway/community/sway/etc/sway/config.d/98-application-defaults.conf
(define ws-bindings
(map (lambda (ws)
`(,(string->symbol (format #f "$mod+~d" ws))
workspace number ,ws))
(iota 9 1)))
(define ws-move-bindings
(map (lambda (ws)
`(,(string->symbol (format #f "$mod+Shift+~d" ws))
move container to workspace number ,ws))
(iota 9 1)))
(define-public sway-services
(list
(service home-sway-service-type
(home-sway-configuration
(config
`((set $mod Mod4)
(set $left Left)
(set $right Right)
(set $up Up)
(set $down Down)
(set $term alacritty)
(set $menu bemenu-run
--prompt "'run:'"
--ignorecase)
(set $wmenu
"wofi --show run")
--no - startup - id " rofi -show window "
exec --no - startup - id " rofi -terminal xfce4 - terminal -show ssh "
(bindsym
--to-code
(($mod+Return exec $term)
($mod+space exec $wmenu)
($mod+Shift+space exec $menu)
($mod+s exec "wofi --show ssh")
($mod+w exec "wofi --show window")
($mod+c kill)
($mod+q reload)
($mod+Shift+q exec swaymsg exit)
($mod+$up focus prev)
($mod+$down focus next)
($mod+Shift+$left move left)
($mod+Shift+$right move right)
($mod+Shift+$up move up)
($mod+Shift+$down move down)
($mod+f fullscreen)
($mod+Tab layout toggle split tabbed)
($mod+Shift+Tab split toggle)
($mod+grave floating toggle)
($mod+Shift+grave focus mode_toggle)
($mod+Shift+s exec "grim -g \"$(slurp)\" - | swappy -f -")
(Print exec "grim - | wl-copy -t image/png")
($mod+g exec makoctl dismiss --all)
($mod+m exec makoctl set-mode dnd)
($mod+Shift+m exec makoctl set-mode default)
($mod+o exec "ykman oath list | bemenu --prompt 'otp:' --ignorecase | xargs -I {} -r ykman oath code -s '{}' | wl-copy")
,@ws-bindings
,@ws-move-bindings))
(bindsym
--locked
((XF86MonBrightnessUp exec light -A 10)
(XF86MonBrightnessDown exec light -U 10)))
This will lock your screen after 300 seconds of inactivity , then turn off
your displays after another 300 seconds , and turn your screens back on when
(exec swayidle -w
before-sleep "'swaylock -f -c 000000'"
timeout 300 "'swaylock -f -c 000000'"
timeout 600 "'swaymsg \"output * dpms off\"'"
resume "'swaymsg \"output * dpms on\"'")
(exec wlsunset -l 50.6 -L 36.6 -T 6500 -t 3000)
(exec mako)
only enable this if every app you use is compatible with wayland
(xwayland enable)
(workspace_auto_back_and_forth yes)
(focus_follows_mouse no)
(smart_borders on)
(title_align center)
output HDMI - A-1 resolution 1920x1080 position 1920,0
( output eDP-1 scale 1.33 )
The scale factor can be fractional , but it is usually 2 for HiDPI screens .
(output * scale 1.0)
( output DP-2 scale 1.0 )
Read ` man 5 sway - input ` for more information about this section .
( input * xkb_layout " " )
(input "1118:1874:Microsoft_Wired_Keyboard_400"
((xkb_layout at)
grp : toggle , ctrl : swapcaps
(input type:touchpad events disabled)
(input "2:10:TPPS/2_IBM_TrackPoint"
((pointer_accel 0.3)
(scroll_factor 0.8)))
(input "1390:268:ELECOM_TrackBall_Mouse_HUGE_TrackBall"
((scroll_method on_button_down)
(scroll_button BTN_TASK)))
(exec "--no-startup-id alacritty")
(assign "[app_id=\"nyxt\"]" 2)
(assign "[instance=\"chromium\"]" 2)
(assign "[app_id=\"chromium\"]" 2)
(assign "[class=\"chromium/Chromium\"]" 2)
(assign "[app_id=\"codium\"]" 3)
(assign "[instance=\"codium\"]" 3)
(assign "[instance=\"emacs\"]" 4)
(assign "[app_id=\"telegramdesktop\"]" 5)
(for_window
"[app_id=\"telegramdesktop\" title=\"Media viewer\"]"
focus)
(for_window
"[app_id=\"^.*\"]"
inhibit_idle fullscreen)
(for_window
"[title=\"^(?:Open|Save) (?:File|Folder|As).*\"]"
floating enable, resize set width 70 ppt height 70 ppt)
(font "Iosevka, Light 14")
(client.focused "#f0f0f0" "#f0f0f0" "#721045" "#721045" "#721045")
(client.unfocused "#ffffff" "#ffffff" "#595959")
(default_border normal 0)
(default_floating_border none)
was 8
(seat * xcursor_theme Adwaita 24)
(bar
((status_command i3blocks)
(position top)
(separator_symbol "|")
(font "Iosevka, Light 18")
(pango_markup enabled)
(colors
((statusline "#000000")
(background "#FFFFFF")
(focused_workspace "#f0f0f0" "#f0f0f0" "#721045")
(inactive_workspace "#ffffff" "#ffffff" "#595959")))))))))
( BAT_NUM . 1 )
( interval . 10 ) ) )
( interval . 10 ) ) )
( ( command . " date ' + % a , % d % b ' " )
( interval . 1 ) ) )
( ( command . " date + % H:%M:%S " )
( interval . 1 ) ) ) ) ) ) )
))
|
0aa17e992bece4b5f55b8da1091c61037e19f5dea74eba11278531d8534feeb0 | clj-br/clojuredocs | project.clj | (defproject clojuredocs "0.1.0-SNAPSHOT"
:description "FIXME: write description"
:url ""
:license {:name "Eclipse Public License"
:url "-v10.html"}
:dependencies [[org.clojure/clojure "1.8.0"]
[ring-server "0.4.0"]
[reagent "0.6.0-rc"]
[reagent-forms "0.5.24"]
[reagent-utils "0.1.9"]
[ring "1.5.0"]
[ring/ring-defaults "0.2.1"]
[compojure "1.5.1"]
[hiccup "1.0.5"]
[yogthos/config "0.8"]
[org.clojure/clojurescript "1.9.93"
:scope "provided"]
[secretary "1.2.3"]
[venantius/accountant "0.1.7"
:exclusions [org.clojure/tools.reader]]]
:plugins [[lein-environ "1.0.2"]
[lein-cljsbuild "1.1.1"]
[lein-asset-minifier "0.2.7"
:exclusions [org.clojure/clojure]]
[org.clojure/core.cache "0.6.5"]
[lein-ring "0.9.7"]]
:ring {:handler clojuredocs.handler/app
:uberwar-name "clojuredocs.war"}
:min-lein-version "2.5.0"
:uberjar-name "clojuredocs.jar"
:main clojuredocs.server
:clean-targets ^{:protect false}
[:target-path
[:cljsbuild :builds :app :compiler :output-dir]
[:cljsbuild :builds :app :compiler :output-to]]
:source-paths ["src/clj" "src/cljc"]
:resource-paths ["resources" "target/cljsbuild"]
:minify-assets
{:assets
{"resources/public/css/site.min.css"
"resources/public/css/site.css"
"resources/public/css/bootstrap.min.css"
"resources/public/css/bootstrap-theme.min.css"}}
:cljsbuild
{:builds {:min
{:source-paths ["src/cljs" "src/cljc" "env/prod/cljs"]
:compiler
{:output-to "target/cljsbuild/public/js/app.js"
:output-dir "target/uberjar"
:optimizations :advanced
:pretty-print false}}
:app
{:source-paths ["src/cljs" "src/cljc" "env/dev/cljs"]
:compiler
{:main "clojuredocs.dev"
:asset-path "/js/out"
:output-to "target/cljsbuild/public/js/app.js"
:output-dir "target/cljsbuild/public/js/out"
:source-map true
:optimizations :none
:pretty-print true}}
:test
{:source-paths ["src/cljs" "src/cljc" "test/cljs"]
:compiler {:main clojuredocs.doo-runner
:asset-path "/js/out"
:output-to "target/test.js"
:output-dir "target/cljstest/public/js/out"
:optimizations :whitespace
:pretty-print true}}
}
}
:figwheel
{:http-server-root "public"
:server-port 3449
:nrepl-port 7002
:nrepl-middleware ["cemerick.piggieback/wrap-cljs-repl"
"cider.nrepl/cider-middleware"
"refactor-nrepl.middleware/wrap-refactor"
]
:css-dirs ["resources/public/css"]
:ring-handler clojuredocs.handler/app}
:sass {:src "src/sass"
:dst "resources/public/css"}
:profiles {:dev {:repl-options {:init-ns clojuredocs.repl}
:dependencies [[ring/ring-mock "0.3.0"]
[ring/ring-devel "1.5.0"]
[prone "1.1.1"]
[figwheel-sidecar "0.5.4-5"]
[org.clojure/tools.nrepl "0.2.12"]
[com.cemerick/piggieback "0.2.2-SNAPSHOT"]
[lein-doo "0.1.6"]
[pjstadig/humane-test-output "0.8.0"]
]
:source-paths ["env/dev/clj"]
:plugins [[lein-figwheel "0.5.4-5"]
[lein-doo "0.1.6"]
[cider/cider-nrepl "0.10.0-SNAPSHOT"]
[org.clojure/tools.namespace "0.3.0-alpha2"
:exclusions [org.clojure/tools.reader]]
[refactor-nrepl "2.0.0-SNAPSHOT"
:exclusions [org.clojure/clojure]]
[lein-sassy "1.0.7"]]
:injections [(require 'pjstadig.humane-test-output)
(pjstadig.humane-test-output/activate!)]
:env {:dev true}}
:uberjar {:hooks [minify-assets.plugin/hooks]
:source-paths ["env/prod/clj"]
:prep-tasks ["compile" ["cljsbuild" "once" "min"]]
:env {:production true}
:aot :all
:omit-source true}}) | null | https://raw.githubusercontent.com/clj-br/clojuredocs/4957150d6e97281ecd7d82ec021b96ff99656a51/project.clj | clojure | (defproject clojuredocs "0.1.0-SNAPSHOT"
:description "FIXME: write description"
:url ""
:license {:name "Eclipse Public License"
:url "-v10.html"}
:dependencies [[org.clojure/clojure "1.8.0"]
[ring-server "0.4.0"]
[reagent "0.6.0-rc"]
[reagent-forms "0.5.24"]
[reagent-utils "0.1.9"]
[ring "1.5.0"]
[ring/ring-defaults "0.2.1"]
[compojure "1.5.1"]
[hiccup "1.0.5"]
[yogthos/config "0.8"]
[org.clojure/clojurescript "1.9.93"
:scope "provided"]
[secretary "1.2.3"]
[venantius/accountant "0.1.7"
:exclusions [org.clojure/tools.reader]]]
:plugins [[lein-environ "1.0.2"]
[lein-cljsbuild "1.1.1"]
[lein-asset-minifier "0.2.7"
:exclusions [org.clojure/clojure]]
[org.clojure/core.cache "0.6.5"]
[lein-ring "0.9.7"]]
:ring {:handler clojuredocs.handler/app
:uberwar-name "clojuredocs.war"}
:min-lein-version "2.5.0"
:uberjar-name "clojuredocs.jar"
:main clojuredocs.server
:clean-targets ^{:protect false}
[:target-path
[:cljsbuild :builds :app :compiler :output-dir]
[:cljsbuild :builds :app :compiler :output-to]]
:source-paths ["src/clj" "src/cljc"]
:resource-paths ["resources" "target/cljsbuild"]
:minify-assets
{:assets
{"resources/public/css/site.min.css"
"resources/public/css/site.css"
"resources/public/css/bootstrap.min.css"
"resources/public/css/bootstrap-theme.min.css"}}
:cljsbuild
{:builds {:min
{:source-paths ["src/cljs" "src/cljc" "env/prod/cljs"]
:compiler
{:output-to "target/cljsbuild/public/js/app.js"
:output-dir "target/uberjar"
:optimizations :advanced
:pretty-print false}}
:app
{:source-paths ["src/cljs" "src/cljc" "env/dev/cljs"]
:compiler
{:main "clojuredocs.dev"
:asset-path "/js/out"
:output-to "target/cljsbuild/public/js/app.js"
:output-dir "target/cljsbuild/public/js/out"
:source-map true
:optimizations :none
:pretty-print true}}
:test
{:source-paths ["src/cljs" "src/cljc" "test/cljs"]
:compiler {:main clojuredocs.doo-runner
:asset-path "/js/out"
:output-to "target/test.js"
:output-dir "target/cljstest/public/js/out"
:optimizations :whitespace
:pretty-print true}}
}
}
:figwheel
{:http-server-root "public"
:server-port 3449
:nrepl-port 7002
:nrepl-middleware ["cemerick.piggieback/wrap-cljs-repl"
"cider.nrepl/cider-middleware"
"refactor-nrepl.middleware/wrap-refactor"
]
:css-dirs ["resources/public/css"]
:ring-handler clojuredocs.handler/app}
:sass {:src "src/sass"
:dst "resources/public/css"}
:profiles {:dev {:repl-options {:init-ns clojuredocs.repl}
:dependencies [[ring/ring-mock "0.3.0"]
[ring/ring-devel "1.5.0"]
[prone "1.1.1"]
[figwheel-sidecar "0.5.4-5"]
[org.clojure/tools.nrepl "0.2.12"]
[com.cemerick/piggieback "0.2.2-SNAPSHOT"]
[lein-doo "0.1.6"]
[pjstadig/humane-test-output "0.8.0"]
]
:source-paths ["env/dev/clj"]
:plugins [[lein-figwheel "0.5.4-5"]
[lein-doo "0.1.6"]
[cider/cider-nrepl "0.10.0-SNAPSHOT"]
[org.clojure/tools.namespace "0.3.0-alpha2"
:exclusions [org.clojure/tools.reader]]
[refactor-nrepl "2.0.0-SNAPSHOT"
:exclusions [org.clojure/clojure]]
[lein-sassy "1.0.7"]]
:injections [(require 'pjstadig.humane-test-output)
(pjstadig.humane-test-output/activate!)]
:env {:dev true}}
:uberjar {:hooks [minify-assets.plugin/hooks]
:source-paths ["env/prod/clj"]
:prep-tasks ["compile" ["cljsbuild" "once" "min"]]
:env {:production true}
:aot :all
:omit-source true}}) |
|
25dd17cecb73d5cc189fc3f40eed0d2a82c3e7516383e69b1da48bba11a3e699 | fukamachi/qlot | dist.lisp | (defpackage #:qlot/source/dist
(:nicknames #:qlot.source.dist)
(:use #:cl
#:qlot/source/base)
(:import-from #:qlot/utils/ql
#:make-versioned-distinfo-url)
(:import-from #:qlot/errors
#:invalid-definition)
(:export #:source-dist
#:source-dist-project
#:source-distribution
#:source-distinfo-url))
(in-package #:qlot/source/dist)
(defclass source-dist-project (source)
((%version :initarg :%version)
(distinfo :initarg :distinfo
:initform nil
:accessor source-distinfo-url)))
(defclass source-dist (source-dist-project)
((distribution :initarg :distribution
:accessor source-distribution)))
(defmethod source-distribution ((source source-dist-project))
(error "Must be implemented in subclasses"))
(defmethod make-source ((source (eql :dist)) &rest initargs)
(handler-case
(destructuring-bind (project-name distribution &optional (version :latest))
initargs
(make-instance 'source-dist
:project-name project-name
:distribution distribution
:%version version))
(error ()
(error 'invalid-definition
:source :dist
:usage "dist <project name> <distribution URL> [<version>]"))))
(defmethod defrost-source :after ((source source-dist-project))
(when (slot-boundp source 'qlot/source/base::version)
(setf (slot-value source '%version)
(subseq (source-version source)
(length (source-version-prefix source))))))
(defmethod print-object ((source source-dist-project) stream)
(print-unreadable-object (source stream :type t :identity t)
(format stream "~A ~A ~A"
(source-project-name source)
(source-distribution source)
(if (slot-boundp source 'qlot/source/base::version)
(source-version source)
(slot-value source '%version)))))
(defmethod source= ((source1 source-dist-project) (source2 source-dist-project))
(and (string= (source-project-name source1)
(source-project-name source2))
(string= (source-distribution source1)
(source-distribution source2))
(string= (slot-value source1 '%version)
(slot-value source2 '%version))))
(defmethod source-version-prefix ((source source-dist))
"")
| null | https://raw.githubusercontent.com/fukamachi/qlot/96c40e6e6193f4bcbc61fe23aee98916347e2d94/source/dist.lisp | lisp | (defpackage #:qlot/source/dist
(:nicknames #:qlot.source.dist)
(:use #:cl
#:qlot/source/base)
(:import-from #:qlot/utils/ql
#:make-versioned-distinfo-url)
(:import-from #:qlot/errors
#:invalid-definition)
(:export #:source-dist
#:source-dist-project
#:source-distribution
#:source-distinfo-url))
(in-package #:qlot/source/dist)
(defclass source-dist-project (source)
((%version :initarg :%version)
(distinfo :initarg :distinfo
:initform nil
:accessor source-distinfo-url)))
(defclass source-dist (source-dist-project)
((distribution :initarg :distribution
:accessor source-distribution)))
(defmethod source-distribution ((source source-dist-project))
(error "Must be implemented in subclasses"))
(defmethod make-source ((source (eql :dist)) &rest initargs)
(handler-case
(destructuring-bind (project-name distribution &optional (version :latest))
initargs
(make-instance 'source-dist
:project-name project-name
:distribution distribution
:%version version))
(error ()
(error 'invalid-definition
:source :dist
:usage "dist <project name> <distribution URL> [<version>]"))))
(defmethod defrost-source :after ((source source-dist-project))
(when (slot-boundp source 'qlot/source/base::version)
(setf (slot-value source '%version)
(subseq (source-version source)
(length (source-version-prefix source))))))
(defmethod print-object ((source source-dist-project) stream)
(print-unreadable-object (source stream :type t :identity t)
(format stream "~A ~A ~A"
(source-project-name source)
(source-distribution source)
(if (slot-boundp source 'qlot/source/base::version)
(source-version source)
(slot-value source '%version)))))
(defmethod source= ((source1 source-dist-project) (source2 source-dist-project))
(and (string= (source-project-name source1)
(source-project-name source2))
(string= (source-distribution source1)
(source-distribution source2))
(string= (slot-value source1 '%version)
(slot-value source2 '%version))))
(defmethod source-version-prefix ((source source-dist))
"")
|
|
5a98f82a9d8cabfcd04fb6e5aa2685abd143415294f0ab610c1ba24776638647 | re-ops/re-cipes | nebula.clj | (ns re-cipes.networking.nebula
"Nebula setup"
(:require
[re-cipes.hardening]
[re-cipes.access]
[re-cog.resources.permissions :refer (set-file-acl)]
[re-cog.resources.ufw :refer (add-rule)]
[re-cog.common.recipe :refer (require-recipe)]
[re-cog.facts.config :refer (configuration)]
[re-cog.facts.datalog :refer (hostname)]
[re-cog.resources.download :refer (download)]
[re-cog.resources.systemd :refer (set-service)]
[re-cog.resources.file :refer (symlink directory template)]
[re-cog.resources.archive :refer (untar)]))
(require-recipe)
(def-inline {:depends #'re-cipes.access/permissions} setup
"Installing nebula binary"
[]
(let [{:keys [home]} (configuration)
version "v1.4.0"
archive "nebula-linux-amd64.tar.gz"
tmp (<< "/tmp/~{archive}")
expected "d1ef37ca4d676f00df0ec83911cc2d9f1e70edc70651589210f9e97c68891b9b"
url (<< "/~{version}/~{archive}")]
(download url tmp expected)
(directory "/opt/nebula/" :present)
(untar tmp "/opt/nebula/")
(symlink (<< "/usr/local/bin/nebula") (<< "/opt/nebula/nebula"))))
(def-inline {:depends [#'re-cipes.access/permissions #'re-cipes.hardening/firewall]} config
"Nebula configuration"
[]
(let [{:keys [lighthouse port]} (configuration :nebula)
lighthouse? (= (lighthouse :hostname) (hostname))
host (if lighthouse?
{:port port :tun-disable true}
{:port 0 :tun-disable false})
args {:lighthouse (assoc lighthouse :port port) :host (assoc host :lighthouse? lighthouse?) :hostname (hostname)}]
(when lighthouse?
(add-rule port :allow {}))
(directory "/usr/local/etc/nebula/" :present)
(template "/tmp/resources/templates/nebula/config.yml.mustache" "/usr/local/etc/nebula/config.yml" args)))
(def-inline service
"Setting up nebula service"
[]
(let [opts {:wants "basic.target"
:after "basic.target network.target"
:restart "always"
:stop "/bin/kill -HUP $MAINPID"
:wanted-by "multi-user.target"}]
(set-file-acl "re-ops" "rwx" "/etc/systemd/system/")
(set-service "nebula" "Nebula Mesh VPN" "/usr/local/bin/nebula -config /usr/local/etc/nebula/config.yml" opts)))
| null | https://raw.githubusercontent.com/re-ops/re-cipes/480195e603a9df63aa4d2b34e19343df1f4034ba/src/re_cipes/networking/nebula.clj | clojure | (ns re-cipes.networking.nebula
"Nebula setup"
(:require
[re-cipes.hardening]
[re-cipes.access]
[re-cog.resources.permissions :refer (set-file-acl)]
[re-cog.resources.ufw :refer (add-rule)]
[re-cog.common.recipe :refer (require-recipe)]
[re-cog.facts.config :refer (configuration)]
[re-cog.facts.datalog :refer (hostname)]
[re-cog.resources.download :refer (download)]
[re-cog.resources.systemd :refer (set-service)]
[re-cog.resources.file :refer (symlink directory template)]
[re-cog.resources.archive :refer (untar)]))
(require-recipe)
(def-inline {:depends #'re-cipes.access/permissions} setup
"Installing nebula binary"
[]
(let [{:keys [home]} (configuration)
version "v1.4.0"
archive "nebula-linux-amd64.tar.gz"
tmp (<< "/tmp/~{archive}")
expected "d1ef37ca4d676f00df0ec83911cc2d9f1e70edc70651589210f9e97c68891b9b"
url (<< "/~{version}/~{archive}")]
(download url tmp expected)
(directory "/opt/nebula/" :present)
(untar tmp "/opt/nebula/")
(symlink (<< "/usr/local/bin/nebula") (<< "/opt/nebula/nebula"))))
(def-inline {:depends [#'re-cipes.access/permissions #'re-cipes.hardening/firewall]} config
"Nebula configuration"
[]
(let [{:keys [lighthouse port]} (configuration :nebula)
lighthouse? (= (lighthouse :hostname) (hostname))
host (if lighthouse?
{:port port :tun-disable true}
{:port 0 :tun-disable false})
args {:lighthouse (assoc lighthouse :port port) :host (assoc host :lighthouse? lighthouse?) :hostname (hostname)}]
(when lighthouse?
(add-rule port :allow {}))
(directory "/usr/local/etc/nebula/" :present)
(template "/tmp/resources/templates/nebula/config.yml.mustache" "/usr/local/etc/nebula/config.yml" args)))
(def-inline service
"Setting up nebula service"
[]
(let [opts {:wants "basic.target"
:after "basic.target network.target"
:restart "always"
:stop "/bin/kill -HUP $MAINPID"
:wanted-by "multi-user.target"}]
(set-file-acl "re-ops" "rwx" "/etc/systemd/system/")
(set-service "nebula" "Nebula Mesh VPN" "/usr/local/bin/nebula -config /usr/local/etc/nebula/config.yml" opts)))
|
|
72497f257fc8e840c8618a8a2923639c20be52cab3760c93b82c47479eb67a62 | Azel4231/clojure-quotes | core_test.clj | (ns clojure-quotes.core-test
(:require [clojure.test :refer :all]
[clojure-quotes.core :refer :all]))
(deftest a-test
(testing "FIXME, I fail."
(is (= 0 1))))
| null | https://raw.githubusercontent.com/Azel4231/clojure-quotes/dcac762f5d3901ffd864b2149168f40190c19dac/test/clojure_quotes/core_test.clj | clojure | (ns clojure-quotes.core-test
(:require [clojure.test :refer :all]
[clojure-quotes.core :refer :all]))
(deftest a-test
(testing "FIXME, I fail."
(is (= 0 1))))
|
|
847f80e70df4c5c06b6641fe566e68f0962a59f3b84a77abf22bebb13178f495 | v-kolesnikov/sicp | 3_29_test.clj | (ns sicp.chapter03.3-29-test
(:require [clojure.test :refer :all]
[sicp.chapter03.3-29 :refer [or-gate]]
[sicp.chapter03.wirelang :refer [and-gate-delay current-time
get-signal inverter-delay
make-wire propagate set-signal!
the-agenda]]))
(deftest test-or-gate
(let [input-a (make-wire)
input-b (make-wire)
output (make-wire)
test-signal (fn [signal-a signal-b]
(set-signal! input-a signal-a)
(set-signal! input-b signal-b)
(propagate)
(get-signal output))]
(is (= :ok (or-gate input-a input-b output)))
(let [current-agenda-time (current-time the-agenda)]
(propagate)
(is (= (+ and-gate-delay
inverter-delay
inverter-delay)
(- (current-time the-agenda)
current-agenda-time))))
(is (= 0 (test-signal 0 0)))
(is (= 1 (test-signal 1 0)))
(is (= 1 (test-signal 0 1)))
(is (= 1 (test-signal 1 1)))))
| null | https://raw.githubusercontent.com/v-kolesnikov/sicp/4298de6083440a75898e97aad658025a8cecb631/test/sicp/chapter03/3_29_test.clj | clojure | (ns sicp.chapter03.3-29-test
(:require [clojure.test :refer :all]
[sicp.chapter03.3-29 :refer [or-gate]]
[sicp.chapter03.wirelang :refer [and-gate-delay current-time
get-signal inverter-delay
make-wire propagate set-signal!
the-agenda]]))
(deftest test-or-gate
(let [input-a (make-wire)
input-b (make-wire)
output (make-wire)
test-signal (fn [signal-a signal-b]
(set-signal! input-a signal-a)
(set-signal! input-b signal-b)
(propagate)
(get-signal output))]
(is (= :ok (or-gate input-a input-b output)))
(let [current-agenda-time (current-time the-agenda)]
(propagate)
(is (= (+ and-gate-delay
inverter-delay
inverter-delay)
(- (current-time the-agenda)
current-agenda-time))))
(is (= 0 (test-signal 0 0)))
(is (= 1 (test-signal 1 0)))
(is (= 1 (test-signal 0 1)))
(is (= 1 (test-signal 1 1)))))
|
|
6a2768e89a28e8090c799366563361a7c5e8a497bd33ca33e3e20789acc36df0 | janestreet/memtrace_viewer | flame_graph_panel.mli | open! Core
open Bonsai_web
open Memtrace_viewer_common
module Selection : sig
type t =
| Flame of { fragment : Data.Fragment.t }
| Icicle of { fragment : Data.Fragment.t }
| Focus of
{ callers_fragment : Data.Fragment.t
; callees_fragment : Data.Fragment.t
}
end
type t =
{ view : Vdom.Node.t
; key_handler : Vdom_keyboard.Keyboard_event_handler.t
; selection : Selection.t option
; reset_selection :
Data.Fragment.t
-> default_selection:App_state.Default_selection.t
-> unit Vdom.Effect.t
; scroll_focus_into_view : unit Vdom.Effect.t
}
val component
: trie:Data.Fragment_trie.t Bonsai.Value.t
-> call_sites:Data.Call_sites.t Bonsai.Value.t
-> focus:Data.Fragment.t Bonsai.Value.t
-> set_focus:
(Data.Fragment.t
-> default_selection:App_state.Default_selection.t
-> unit Vdom.Effect.t)
Bonsai.Value.t
-> activate:(Selection.t -> unit Vdom.Effect.t) Bonsai.Value.t
-> t Bonsai.Computation.t
| null | https://raw.githubusercontent.com/janestreet/memtrace_viewer/46439f8bd16e77c5aa38632c9c4aa53175121d4d/client/src/flame_graph_panel.mli | ocaml | open! Core
open Bonsai_web
open Memtrace_viewer_common
module Selection : sig
type t =
| Flame of { fragment : Data.Fragment.t }
| Icicle of { fragment : Data.Fragment.t }
| Focus of
{ callers_fragment : Data.Fragment.t
; callees_fragment : Data.Fragment.t
}
end
type t =
{ view : Vdom.Node.t
; key_handler : Vdom_keyboard.Keyboard_event_handler.t
; selection : Selection.t option
; reset_selection :
Data.Fragment.t
-> default_selection:App_state.Default_selection.t
-> unit Vdom.Effect.t
; scroll_focus_into_view : unit Vdom.Effect.t
}
val component
: trie:Data.Fragment_trie.t Bonsai.Value.t
-> call_sites:Data.Call_sites.t Bonsai.Value.t
-> focus:Data.Fragment.t Bonsai.Value.t
-> set_focus:
(Data.Fragment.t
-> default_selection:App_state.Default_selection.t
-> unit Vdom.Effect.t)
Bonsai.Value.t
-> activate:(Selection.t -> unit Vdom.Effect.t) Bonsai.Value.t
-> t Bonsai.Computation.t
|
|
d19eab9961a78e566a0403acffac31bc0e4bc606ff8a54adc5f9a80680cbb2d2 | ijvcms/chuanqi_dev | world_boss_reward_config.erl | %%%-------------------------------------------------------------------
%%% @author zhengsiying
%%% @doc
%%% Auto-generated file; do not edit manually.
%%% @end
%%% Created : 2016/10/12
%%%-------------------------------------------------------------------
-module(world_boss_reward_config).
-include("common.hrl").
-include("config.hrl").
-compile([export_all]).
get_list_conf() ->
[ world_boss_reward_config:get(X) || X <- get_list() ].
get_list() ->
[1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42].
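%% Hedged usage sketch (not part of the generated config; the values below are
%% read off the clauses in this module):
%%
%%   Conf = world_boss_reward_config:get(1),
%%   17   = Conf#world_boss_reward_conf.mail_id,
%%   42   = length(world_boss_reward_config:get_list()).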
get(1) ->
#world_boss_reward_conf{
key = 1,
min_rank = 1,
max_rank = 1,
min_lv = 1,
max_lv = 50,
mail_id = 17
};
get(2) ->
#world_boss_reward_conf{
key = 2,
min_rank = 2,
max_rank = 2,
min_lv = 1,
max_lv = 50,
mail_id = 18
};
get(3) ->
#world_boss_reward_conf{
key = 3,
min_rank = 3,
max_rank = 3,
min_lv = 1,
max_lv = 50,
mail_id = 19
};
get(4) ->
#world_boss_reward_conf{
key = 4,
min_rank = 4,
max_rank = 5,
min_lv = 1,
max_lv = 50,
mail_id = 20
};
get(5) ->
#world_boss_reward_conf{
key = 5,
min_rank = 6,
max_rank = 10,
min_lv = 1,
max_lv = 50,
mail_id = 21
};
get(6) ->
#world_boss_reward_conf{
key = 6,
min_rank = 11,
max_rank = 50,
min_lv = 1,
max_lv = 50,
mail_id = 22
};
get(7) ->
#world_boss_reward_conf{
key = 7,
min_rank = 51,
max_rank = 99999,
min_lv = 1,
max_lv = 50,
mail_id = 23
};
get(8) ->
#world_boss_reward_conf{
key = 8,
min_rank = 1,
max_rank = 1,
min_lv = 51,
max_lv = 60,
mail_id = 31
};
get(9) ->
#world_boss_reward_conf{
key = 9,
min_rank = 2,
max_rank = 2,
min_lv = 51,
max_lv = 60,
mail_id = 32
};
get(10) ->
#world_boss_reward_conf{
key = 10,
min_rank = 3,
max_rank = 3,
min_lv = 51,
max_lv = 60,
mail_id = 33
};
get(11) ->
#world_boss_reward_conf{
key = 11,
min_rank = 4,
max_rank = 5,
min_lv = 51,
max_lv = 60,
mail_id = 34
};
get(12) ->
#world_boss_reward_conf{
key = 12,
min_rank = 6,
max_rank = 10,
min_lv = 51,
max_lv = 60,
mail_id = 35
};
get(13) ->
#world_boss_reward_conf{
key = 13,
min_rank = 11,
max_rank = 50,
min_lv = 51,
max_lv = 60,
mail_id = 36
};
get(14) ->
#world_boss_reward_conf{
key = 14,
min_rank = 51,
max_rank = 99999,
min_lv = 51,
max_lv = 60,
mail_id = 37
};
get(15) ->
#world_boss_reward_conf{
key = 15,
min_rank = 1,
max_rank = 1,
min_lv = 61,
max_lv = 70,
mail_id = 38
};
get(16) ->
#world_boss_reward_conf{
key = 16,
min_rank = 2,
max_rank = 2,
min_lv = 61,
max_lv = 70,
mail_id = 39
};
get(17) ->
#world_boss_reward_conf{
key = 17,
min_rank = 3,
max_rank = 3,
min_lv = 61,
max_lv = 70,
mail_id = 40
};
get(18) ->
#world_boss_reward_conf{
key = 18,
min_rank = 4,
max_rank = 5,
min_lv = 61,
max_lv = 70,
mail_id = 41
};
get(19) ->
#world_boss_reward_conf{
key = 19,
min_rank = 6,
max_rank = 10,
min_lv = 61,
max_lv = 70,
mail_id = 42
};
get(20) ->
#world_boss_reward_conf{
key = 20,
min_rank = 11,
max_rank = 50,
min_lv = 61,
max_lv = 70,
mail_id = 43
};
get(21) ->
#world_boss_reward_conf{
key = 21,
min_rank = 51,
max_rank = 99999,
min_lv = 61,
max_lv = 70,
mail_id = 44
};
get(22) ->
#world_boss_reward_conf{
key = 22,
min_rank = 1,
max_rank = 1,
min_lv = 71,
max_lv = 80,
mail_id = 45
};
get(23) ->
#world_boss_reward_conf{
key = 23,
min_rank = 2,
max_rank = 2,
min_lv = 71,
max_lv = 80,
mail_id = 46
};
get(24) ->
#world_boss_reward_conf{
key = 24,
min_rank = 3,
max_rank = 3,
min_lv = 71,
max_lv = 80,
mail_id = 47
};
get(25) ->
#world_boss_reward_conf{
key = 25,
min_rank = 4,
max_rank = 5,
min_lv = 71,
max_lv = 80,
mail_id = 48
};
get(26) ->
#world_boss_reward_conf{
key = 26,
min_rank = 6,
max_rank = 10,
min_lv = 71,
max_lv = 80,
mail_id = 49
};
get(27) ->
#world_boss_reward_conf{
key = 27,
min_rank = 11,
max_rank = 50,
min_lv = 71,
max_lv = 80,
mail_id = 50
};
get(28) ->
#world_boss_reward_conf{
key = 28,
min_rank = 51,
max_rank = 99999,
min_lv = 71,
max_lv = 80,
mail_id = 51
};
get(29) ->
#world_boss_reward_conf{
key = 29,
min_rank = 1,
max_rank = 1,
min_lv = 81,
max_lv = 90,
mail_id = 52
};
get(30) ->
#world_boss_reward_conf{
key = 30,
min_rank = 2,
max_rank = 2,
min_lv = 81,
max_lv = 90,
mail_id = 53
};
get(31) ->
#world_boss_reward_conf{
key = 31,
min_rank = 3,
max_rank = 3,
min_lv = 81,
max_lv = 90,
mail_id = 54
};
get(32) ->
#world_boss_reward_conf{
key = 32,
min_rank = 4,
max_rank = 5,
min_lv = 81,
max_lv = 90,
mail_id = 55
};
get(33) ->
#world_boss_reward_conf{
key = 33,
min_rank = 6,
max_rank = 10,
min_lv = 81,
max_lv = 90,
mail_id = 56
};
get(34) ->
#world_boss_reward_conf{
key = 34,
min_rank = 11,
max_rank = 50,
min_lv = 81,
max_lv = 90,
mail_id = 57
};
get(35) ->
#world_boss_reward_conf{
key = 35,
min_rank = 51,
max_rank = 99999,
min_lv = 81,
max_lv = 90,
mail_id = 58
};
get(36) ->
#world_boss_reward_conf{
key = 36,
min_rank = 1,
max_rank = 1,
min_lv = 91,
max_lv = 999,
mail_id = 59
};
get(37) ->
#world_boss_reward_conf{
key = 37,
min_rank = 2,
max_rank = 2,
min_lv = 91,
max_lv = 999,
mail_id = 60
};
get(38) ->
#world_boss_reward_conf{
key = 38,
min_rank = 3,
max_rank = 3,
min_lv = 91,
max_lv = 999,
mail_id = 61
};
get(39) ->
#world_boss_reward_conf{
key = 39,
min_rank = 4,
max_rank = 5,
min_lv = 91,
max_lv = 999,
mail_id = 62
};
get(40) ->
#world_boss_reward_conf{
key = 40,
min_rank = 6,
max_rank = 10,
min_lv = 91,
max_lv = 999,
mail_id = 63
};
get(41) ->
#world_boss_reward_conf{
key = 41,
min_rank = 11,
max_rank = 50,
min_lv = 91,
max_lv = 999,
mail_id = 64
};
get(42) ->
#world_boss_reward_conf{
key = 42,
min_rank = 51,
max_rank = 99999,
min_lv = 91,
max_lv = 999,
mail_id = 65
};
get(_Key) ->
?ERR("undefined key from world_boss_reward_config ~p", [_Key]). | null | https://raw.githubusercontent.com/ijvcms/chuanqi_dev/7742184bded15f25be761c4f2d78834249d78097/server/trunk/server/src/config/world_boss_reward_config.erl | erlang | -------------------------------------------------------------------
@doc
Auto-generated file; do not edit manually.
@end
------------------------------------------------------------------- | @author zhengsiying
Created : 2016/10/12
-module(world_boss_reward_config).
-include("common.hrl").
-include("config.hrl").
-compile([export_all]).
get_list_conf() ->
[ world_boss_reward_config:get(X) || X <- get_list() ].
get_list() ->
[1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42].
get(1) ->
#world_boss_reward_conf{
key = 1,
min_rank = 1,
max_rank = 1,
min_lv = 1,
max_lv = 50,
mail_id = 17
};
get(2) ->
#world_boss_reward_conf{
key = 2,
min_rank = 2,
max_rank = 2,
min_lv = 1,
max_lv = 50,
mail_id = 18
};
get(3) ->
#world_boss_reward_conf{
key = 3,
min_rank = 3,
max_rank = 3,
min_lv = 1,
max_lv = 50,
mail_id = 19
};
get(4) ->
#world_boss_reward_conf{
key = 4,
min_rank = 4,
max_rank = 5,
min_lv = 1,
max_lv = 50,
mail_id = 20
};
get(5) ->
#world_boss_reward_conf{
key = 5,
min_rank = 6,
max_rank = 10,
min_lv = 1,
max_lv = 50,
mail_id = 21
};
get(6) ->
#world_boss_reward_conf{
key = 6,
min_rank = 11,
max_rank = 50,
min_lv = 1,
max_lv = 50,
mail_id = 22
};
get(7) ->
#world_boss_reward_conf{
key = 7,
min_rank = 51,
max_rank = 99999,
min_lv = 1,
max_lv = 50,
mail_id = 23
};
get(8) ->
#world_boss_reward_conf{
key = 8,
min_rank = 1,
max_rank = 1,
min_lv = 51,
max_lv = 60,
mail_id = 31
};
get(9) ->
#world_boss_reward_conf{
key = 9,
min_rank = 2,
max_rank = 2,
min_lv = 51,
max_lv = 60,
mail_id = 32
};
get(10) ->
#world_boss_reward_conf{
key = 10,
min_rank = 3,
max_rank = 3,
min_lv = 51,
max_lv = 60,
mail_id = 33
};
get(11) ->
#world_boss_reward_conf{
key = 11,
min_rank = 4,
max_rank = 5,
min_lv = 51,
max_lv = 60,
mail_id = 34
};
get(12) ->
#world_boss_reward_conf{
key = 12,
min_rank = 6,
max_rank = 10,
min_lv = 51,
max_lv = 60,
mail_id = 35
};
get(13) ->
#world_boss_reward_conf{
key = 13,
min_rank = 11,
max_rank = 50,
min_lv = 51,
max_lv = 60,
mail_id = 36
};
get(14) ->
#world_boss_reward_conf{
key = 14,
min_rank = 51,
max_rank = 99999,
min_lv = 51,
max_lv = 60,
mail_id = 37
};
get(15) ->
#world_boss_reward_conf{
key = 15,
min_rank = 1,
max_rank = 1,
min_lv = 61,
max_lv = 70,
mail_id = 38
};
get(16) ->
#world_boss_reward_conf{
key = 16,
min_rank = 2,
max_rank = 2,
min_lv = 61,
max_lv = 70,
mail_id = 39
};
get(17) ->
#world_boss_reward_conf{
key = 17,
min_rank = 3,
max_rank = 3,
min_lv = 61,
max_lv = 70,
mail_id = 40
};
get(18) ->
#world_boss_reward_conf{
key = 18,
min_rank = 4,
max_rank = 5,
min_lv = 61,
max_lv = 70,
mail_id = 41
};
get(19) ->
#world_boss_reward_conf{
key = 19,
min_rank = 6,
max_rank = 10,
min_lv = 61,
max_lv = 70,
mail_id = 42
};
get(20) ->
#world_boss_reward_conf{
key = 20,
min_rank = 11,
max_rank = 50,
min_lv = 61,
max_lv = 70,
mail_id = 43
};
get(21) ->
#world_boss_reward_conf{
key = 21,
min_rank = 51,
max_rank = 99999,
min_lv = 61,
max_lv = 70,
mail_id = 44
};
get(22) ->
#world_boss_reward_conf{
key = 22,
min_rank = 1,
max_rank = 1,
min_lv = 71,
max_lv = 80,
mail_id = 45
};
get(23) ->
#world_boss_reward_conf{
key = 23,
min_rank = 2,
max_rank = 2,
min_lv = 71,
max_lv = 80,
mail_id = 46
};
get(24) ->
#world_boss_reward_conf{
key = 24,
min_rank = 3,
max_rank = 3,
min_lv = 71,
max_lv = 80,
mail_id = 47
};
get(25) ->
#world_boss_reward_conf{
key = 25,
min_rank = 4,
max_rank = 5,
min_lv = 71,
max_lv = 80,
mail_id = 48
};
get(26) ->
#world_boss_reward_conf{
key = 26,
min_rank = 6,
max_rank = 10,
min_lv = 71,
max_lv = 80,
mail_id = 49
};
get(27) ->
#world_boss_reward_conf{
key = 27,
min_rank = 11,
max_rank = 50,
min_lv = 71,
max_lv = 80,
mail_id = 50
};
get(28) ->
#world_boss_reward_conf{
key = 28,
min_rank = 51,
max_rank = 99999,
min_lv = 71,
max_lv = 80,
mail_id = 51
};
get(29) ->
#world_boss_reward_conf{
key = 29,
min_rank = 1,
max_rank = 1,
min_lv = 81,
max_lv = 90,
mail_id = 52
};
get(30) ->
#world_boss_reward_conf{
key = 30,
min_rank = 2,
max_rank = 2,
min_lv = 81,
max_lv = 90,
mail_id = 53
};
get(31) ->
#world_boss_reward_conf{
key = 31,
min_rank = 3,
max_rank = 3,
min_lv = 81,
max_lv = 90,
mail_id = 54
};
get(32) ->
#world_boss_reward_conf{
key = 32,
min_rank = 4,
max_rank = 5,
min_lv = 81,
max_lv = 90,
mail_id = 55
};
get(33) ->
#world_boss_reward_conf{
key = 33,
min_rank = 6,
max_rank = 10,
min_lv = 81,
max_lv = 90,
mail_id = 56
};
get(34) ->
#world_boss_reward_conf{
key = 34,
min_rank = 11,
max_rank = 50,
min_lv = 81,
max_lv = 90,
mail_id = 57
};
get(35) ->
#world_boss_reward_conf{
key = 35,
min_rank = 51,
max_rank = 99999,
min_lv = 81,
max_lv = 90,
mail_id = 58
};
get(36) ->
#world_boss_reward_conf{
key = 36,
min_rank = 1,
max_rank = 1,
min_lv = 91,
max_lv = 999,
mail_id = 59
};
get(37) ->
#world_boss_reward_conf{
key = 37,
min_rank = 2,
max_rank = 2,
min_lv = 91,
max_lv = 999,
mail_id = 60
};
get(38) ->
#world_boss_reward_conf{
key = 38,
min_rank = 3,
max_rank = 3,
min_lv = 91,
max_lv = 999,
mail_id = 61
};
get(39) ->
#world_boss_reward_conf{
key = 39,
min_rank = 4,
max_rank = 5,
min_lv = 91,
max_lv = 999,
mail_id = 62
};
get(40) ->
#world_boss_reward_conf{
key = 40,
min_rank = 6,
max_rank = 10,
min_lv = 91,
max_lv = 999,
mail_id = 63
};
get(41) ->
#world_boss_reward_conf{
key = 41,
min_rank = 11,
max_rank = 50,
min_lv = 91,
max_lv = 999,
mail_id = 64
};
get(42) ->
#world_boss_reward_conf{
key = 42,
min_rank = 51,
max_rank = 99999,
min_lv = 91,
max_lv = 999,
mail_id = 65
};
get(_Key) ->
?ERR("undefined key from world_boss_reward_config ~p", [_Key]). |
e3bd6d8633d1e4fd38c38f56914c701203a703fb8ef1575cd651c891bb9cad67 | dyzsr/ocaml-selectml | moved_while_blocking.ml | (* TEST
* hassysthreads
include systhreads
** bytecode
** native
*)
let t2_begin = Atomic.make false
let t2_promoting = Atomic.make false
let t2_finish_promote = Atomic.make false
let t2_done = Atomic.make false
let t2_quit = Atomic.make false
let await a =
while not (Atomic.get a) do Thread.yield () done
let set a =
Atomic.set a true
(* no-alloc printing to stdout *)
let say msg =
Unix.write Unix.stdout (Bytes.unsafe_of_string msg) 0 (String.length msg) |> ignore
let static_ref = ref 0
let global = ref static_ref
let thread_fn () =
await t2_begin;
say "T2: alloc\n";
let r = ref 0 in
global := r;
say "T2: minor GC\n";
Gc.minor ();
global := static_ref;
say "T2: done\n";
set t2_done;
await t2_quit
let big = ref [| |]
let fill_big () = big := Array.make 1000 42
(* Prevent flambda from moving the allocated array into a global
root (see #9978). *)
let empty_big () = big := [| |]
[@@inline never]
let () =
let th = Thread.create thread_fn () in
Gc.Memprof.(start ~sampling_rate:1.
{ null_tracker with
alloc_minor = (fun _ ->
say " minor alloc\n";
Some ());
alloc_major = (fun _ ->
say " major alloc\n";
Some "major block\n");
promote = (fun () ->
say " promoting...\n";
set t2_promoting;
await t2_finish_promote;
say " ...done promoting\n";
Some "promoted block\n");
dealloc_major = (fun msg ->
say " major dealloc: "; say msg) });
say "T1: alloc\n";
fill_big ();
set t2_begin;
await t2_promoting;
say "T1: major GC\n";
empty_big ();
Gc.full_major ();
set t2_finish_promote;
await t2_done;
say "T1: major GC\n";
Gc.full_major ();
say "T1: done\n";
Gc.Memprof.stop ();
set t2_quit;
Thread.join th
| null | https://raw.githubusercontent.com/dyzsr/ocaml-selectml/875544110abb3350e9fb5ec9bbadffa332c270d2/testsuite/tests/statmemprof/moved_while_blocking.ml | ocaml | TEST
* hassysthreads
include systhreads
** bytecode
** native
no-alloc printing to stdout |
let t2_begin = Atomic.make false
let t2_promoting = Atomic.make false
let t2_finish_promote = Atomic.make false
let t2_done = Atomic.make false
let t2_quit = Atomic.make false
let await a =
while not (Atomic.get a) do Thread.yield () done
let set a =
Atomic.set a true
let say msg =
Unix.write Unix.stdout (Bytes.unsafe_of_string msg) 0 (String.length msg) |> ignore
let static_ref = ref 0
let global = ref static_ref
let thread_fn () =
await t2_begin;
say "T2: alloc\n";
let r = ref 0 in
global := r;
say "T2: minor GC\n";
Gc.minor ();
global := static_ref;
say "T2: done\n";
set t2_done;
await t2_quit
let big = ref [| |]
let fill_big () = big := Array.make 1000 42
let empty_big () = big := [| |]
[@@inline never]
let () =
let th = Thread.create thread_fn () in
Gc.Memprof.(start ~sampling_rate:1.
{ null_tracker with
alloc_minor = (fun _ ->
say " minor alloc\n";
Some ());
alloc_major = (fun _ ->
say " major alloc\n";
Some "major block\n");
promote = (fun () ->
say " promoting...\n";
set t2_promoting;
await t2_finish_promote;
say " ...done promoting\n";
Some "promoted block\n");
dealloc_major = (fun msg ->
say " major dealloc: "; say msg) });
say "T1: alloc\n";
fill_big ();
set t2_begin;
await t2_promoting;
say "T1: major GC\n";
empty_big ();
Gc.full_major ();
set t2_finish_promote;
await t2_done;
say "T1: major GC\n";
Gc.full_major ();
say "T1: done\n";
Gc.Memprof.stop ();
set t2_quit;
Thread.join th
|
7d20950de1ac85b53f2901f4c3ac97ceb364726239dc2c34c962cbd77d357023 | freizl/dive-into-haskell | type-synonym-families.hs | {-# LANGUAGE UndecidableInstances #-}
{-# LANGUAGE PolyKinds #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE NoStarIsType #-}
module Main where
-- * Open Type synonym family
type family Simplify t
type instance Simplify Integer = Integer
type instance Simplify Double = Integer
type instance Simplify Float = Integer
type instance Simplify Bool = String
-- >>> :kind Simplify
-- >>> :kind (Simplify Double)
-- (Simplify Double) :: *
-- >>> :kind! (Simplify Double)
-- (Simplify Double) :: *
-- = Integer
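-- Because 'Simplify' is an open family, later code (even in another module)
-- may add equations. A hedged example that is not in the original file:
type instance Simplify Char = Integer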
-- * Closed Type synonym family
type family Widen t where
Widen Bool = Int
Widen Int = Integer
Widen Char = String
Widen t = String
class Widener a where
wident :: a -> Widen a
instance Widener Bool where
wident True = 1
wident False = 0
instance Widener Int where
wident = fromIntegral
instance Widener Char where
wident c = [c]
-- >>> :t wident False
-- wident False :: Int
-- >>> :t wident (1::Int)
-- wident (1::Int) :: Integer
-- >>> :t wident 'a'
-- wident 'a' :: String
-- >>> :kind! Widen Char
-- Widen Char :: *
-- = [Char]
-- >>> :kind! Widen Double
-- Widen Double :: *
-- = [Char]
instance Widener Double where
wident = show
-- compile error if not convert to string
-- because @Widen Double = String@ according to
-- @Widen t = String@
-- wident = toInteger
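-- Another hedged example, not in the original file: any argument without a
-- dedicated equation falls through to @Widen t = String@, so 'show' fits.
instance Widener Integer where
  wident = show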
newtype UnescapingChar = UnescapingChar {unescapingChar :: Char}
-- | how to interpret @(a :: k) :: k@?
-- @
-- (ToUnescapingTF (a :: k)) :: k
-- @
-- Why need this? I think the reason being is because
-- @a@ can be both 'Type' and @'Type' -> 'Type'@
-- due to the following implementation
-- @ToUnescapingTF t@
-- @ToUnescapingTF b@
-- where @t@ has kind @Type -> Type@
-- and @b@ has kind @Type@
--
type family ToUnescapingTF (a :: k) :: k where
ToUnescapingTF Char = UnescapingChar
-- cast to @:: k@ seems not required
-- ToUnescapingTF (t b :: k) = (ToUnescapingTF t) (ToUnescapingTF b)
ToUnescapingTF (t b) = (ToUnescapingTF t) (ToUnescapingTF b)
ToUnescapingTF a = a
-- >>> :kind! ToUnescapingTF Int
-- >>> :kind! ToUnescapingTF Char
-- >>> :kind! ToUnescapingTF (Maybe Char)
main :: IO ()
main = putStrLn ""
| null | https://raw.githubusercontent.com/freizl/dive-into-haskell/b18a6bfe212db6c3a5d707b4a640170b8bcf9330/codes/type-level-programming/type-synonym-families.hs | haskell | * Open Type synonym family
>>> :kind Simplify
>>> :kind (Simplify Double)
(Simplify Double) :: *
>>> :kind! (Simplify Double)
(Simplify Double) :: *
= Integer
* Closed Type synonym family
>>> :t wident False
wident False :: Int
>>> :t wident (1::Int)
wident (1::Int) :: Integer
>>> :t wident 'a'
wident 'a' :: String
>>> :kind! Widen Double
Widen Double :: *
compile error if not convert to string
wident = toInteger
| how to interpret @(a :: k) :: k@?
@
@
Why need this? I think the reason being is because
due to the following implementation
where @t@ has kind @Type -> Type@
cast to @:: k@ seems not required
>>> :kind! ToUnescapingTF Int | {-# LANGUAGE UndecidableInstances #-}
{-# LANGUAGE PolyKinds #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE NoStarIsType #-}
module Main where
type family Simplify t
type instance Simplify Integer = Integer
type instance Simplify Double = Integer
type instance Simplify Float = Integer
type instance Simplify Bool = String
type family Widen t where
Widen Bool = Int
Widen Int = Integer
Widen Char = String
Widen t = String
class Widener a where
wident :: a -> Widen a
instance Widener Bool where
wident True = 1
wident False = 0
instance Widener Int where
wident = fromIntegral
instance Widener Char where
wident c = [c]
instance Widener Double where
wident = show
newtype UnescapingChar = UnescapingChar {unescapingChar :: Char}
type family ToUnescapingTF (a :: k) :: k where
ToUnescapingTF Char = UnescapingChar
ToUnescapingTF (t b) = (ToUnescapingTF t) (ToUnescapingTF b)
ToUnescapingTF a = a
main :: IO ()
main = putStrLn ""
|
48a7d664ef8a3605aec7acbd1c33a6b92332e9cf1e67811292a14ded72f020aa | joearms/ezwebframe | interact1.erl | -module(interact1).
-export([start/1]).
start(Browser) -> running(Browser).
running(Browser) ->
receive
{Browser, {struct, [{entry,<<"input">>},{txt, Bin}]}} ->
Time = clock1:current_time(),
Browser ! [{cmd,append_div},{id,scroll},
{txt, list_to_binary([Time, " > ", Bin, "<br>"])}]
end,
running(Browser).
| null | https://raw.githubusercontent.com/joearms/ezwebframe/9eb320ff61d4dc7b0885f700414e56b7554788bf/demos/interact/interact1.erl | erlang | -module(interact1).
-export([start/1]).
start(Browser) -> running(Browser).
running(Browser) ->
receive
{Browser, {struct, [{entry,<<"input">>},{txt, Bin}]}} ->
Time = clock1:current_time(),
Browser ! [{cmd,append_div},{id,scroll},
{txt, list_to_binary([Time, " > ", Bin, "<br>"])}]
end,
running(Browser).
|
|
707a63252e1bb6c628bb18f48f4c598f6989486d888e6ed5746a8d5703441055 | metosin/muuntaja | core.clj | (ns muuntaja.format.core)
(defprotocol Decode
(decode [this data charset]))
(defprotocol EncodeToBytes
(encode-to-bytes [this data charset]))
(defprotocol EncodeToOutputStream
(encode-to-output-stream [this data charset]))
(defrecord Format [name encoder decoder return matches])
| null | https://raw.githubusercontent.com/metosin/muuntaja/83da35f35518aaa02ef4d1ca6bbd007b781ebf84/modules/muuntaja/src/muuntaja/format/core.clj | clojure | (ns muuntaja.format.core)
(defprotocol Decode
(decode [this data charset]))
(defprotocol EncodeToBytes
(encode-to-bytes [this data charset]))
(defprotocol EncodeToOutputStream
(encode-to-output-stream [this data charset]))
(defrecord Format [name encoder decoder return matches])
|
|
fd6c5674a047c557c64d31225e5e3b60632484e2d9aef5bdc459ba2b6c93a415 | eponai/sulolive | query.clj | (ns eponai.server.datomic.query
(:require
[clojure.string :as string]
[clojure.set :as set]
[datomic.api :as d]
[medley.core :as medley]
[taoensso.timbre :as timbre :refer [info debug warn trace]]
[eponai.common.database :as db]))
(defn schema
"Pulls schema from the db. If data is provided includes only the necessary fields for that data.
(type/ref, cardinality/many or unique/identity)."
([db] (schema db nil))
([db db-history]
(let [query (cond-> {:where '[[?e :db/ident ?id]
[:db.part/db :db.install/attribute ?e]
[(namespace ?id) ?ns]
[(.startsWith ^String ?ns "db") ?d]
[(not ?d)]]}
(some? db-history)
(db/merge-query {:where '[[$db-history ?e]]
:symbols {'$db-history db-history}}))]
(mapv #(into {} (d/entity db %))
(db/all-with db query)))))
;; ######### x-historically
(defn sym-seq
"Generates symbols. Is passed around for testability."
[path & [suffix]]
(map #(gensym (str "?" (name %) "_" suffix)) path))
(defn reverse-lookup-attr? [attr]
{:pre [(or (keyword? attr) (= '* attr))]}
(string/starts-with? (name attr) "_"))
(defn normalize-attribute [attr]
{:pre [(or (keyword? attr) (= '* attr))]}
(if (reverse-lookup-attr? attr)
(keyword (namespace attr) (subs (name attr) 1))
attr))
(defn- path->where-clause
[attr [sym next-sym]]
(let [k (normalize-attribute attr)]
(if (reverse-lookup-attr? attr)
[next-sym k sym]
[sym k next-sym])))
(defn vector-swap
"Swaps the values for index i1 and i2 in vector v."
[v i1 i2]
{:pre [(vector? v)]}
(-> v
(assoc i1 (nth v i2))
(assoc i2 (nth v i1))))
(defn- changed-path-queries
([db-history entity-query attr-path]
(changed-path-queries db-history entity-query attr-path (sym-seq (:path attr-path))))
([db-history entity-query {:keys [path attrs]} sym-seq]
{:pre [(some? db-history)]}
(let [path-syms (cons '?e sym-seq)
return-eid (last path-syms)
;; Create a path of where-clauses from entity ?e through
;; the path of the pull-pattern.
path-where-clauses (map path->where-clause
path
(partition 2 1 path-syms))
find-pattern [return-eid
'?datom-attr-keyword
'?datom-value
'?datom-tx
'?datom-added]
attribute-number-to-keyword-clause '[?datom-attr-number
:db/ident
?datom-attr-keyword]
path-query (cond-> entity-query
(seq path-where-clauses)
(db/merge-query {:where (vec path-where-clauses)})
:always
(db/merge-query {:where [attribute-number-to-keyword-clause]
:symbols {'$db-history db-history}}))
db-history-clause (->> find-pattern
(replace {'?datom-attr-keyword '?datom-attr-number})
(cons '$db-history)
(vec))]
(assert (every? #(or (keyword? %) (= '* %)) attrs)
(str "Attributes in path: " path " were not only"
" keywords or '*, were: " attrs))
;; If we've got a star attribute,
;; just return the query matching everything.
(if (some #(= '* %) attrs)
;; For a "star" attribute, just don't specify
;; which values the ?datom-attr-keyword can take.
(vector (db/merge-query {:where [db-history-clause]
:find find-pattern}
path-query))
;; Else, return queries for normal and reverse attributes.
(let [create-query (fn [attrs where-clause find-pattern]
(when (seq attrs)
(-> path-query
(db/merge-query
{:where [where-clause]
:symbols {'[?datom-attr-keyword ...] attrs}
:find find-pattern}))))
keyword-attrs (filter keyword? attrs)
query-attrs (create-query (remove reverse-lookup-attr? attrs)
db-history-clause
find-pattern)
query-reverse-attrs (create-query (->> keyword-attrs
(filter reverse-lookup-attr?)
(map normalize-attribute))
;; Swap the e and v for reverse attrs
(vector-swap db-history-clause 1 3)
(vector-swap find-pattern 0 2))]
(filter some? [query-attrs query-reverse-attrs]))))))
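(comment
  ;; Illustration only, with hypothetical attributes: for a pull path
  ;; [:user/cart :cart/items] the clause chain built above looks like
  ;; [[?e :user/cart ?cart] [?cart :cart/items ?items]]
  ;; and a reverse attribute such as :cart/_owner swaps entity and value.
  )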
;; TODO: TEST THIS :D
(defn attr-path-root? [attr-path]
(empty? (:path attr-path)))
(defn- pattern->attr-paths
"Given a pull-pattern, return maps of {:path [] :attrs []} where:
:path is the path into the pull pattern
:attrs are all the keys at the current path.
Example:
(pattern->attr-paths '[:abc * {:foo [:bar {:baz [:abc]}]}])
Returns:
[{:path [], :attrs (:abc '* :foo)}
{:path [:foo], :attrs (:bar :baz)}
{:path [:foo :baz], :attrs (:abc)}]
Explanation:
At path [], including the join, we've got attrs :abc and :foo."
([pattern] (pattern->attr-paths pattern []))
([pattern path]
(let [ks (into [] (comp (map (fn [x]
{:post [(some? %)]}
(cond (keyword? x) x
(map? x) (ffirst x)
(= '* x) x)))
(remove #(= :db/id %)))
pattern)
joins (filter map? pattern)
attr->pattern (into {}
(map #(medley/map-keys normalize-attribute %))
joins)]
(into [{:path path :attrs ks :normalized-attr->pattern attr->pattern}]
(mapcat (fn [join]
{:pre (= 1 (count join))}
(let [[k v] (first join)]
(pattern->attr-paths v (conj path k)))))
joins))))
;; [function entity attribute value] getters
(def feav-fn #(nth % 0))
(def feav-e #(nth % 1))
(def feav-attr #(nth % 2))
(def feav-val #(nth % 3))
(defn- datoms->feav
"Returning datascript transactions with [function entity attribute value]
Can be passed an additional transducing function xf which will be applied
to each feav."
[datoms & [xf]]
(->> datoms
;; Sort datoms by tx number, so that earlier transactions
;; get applied first.
(sort-by #(nth % 3))
(into [] (cond-> (map (fn [[e a v _ added]]
[(if added :db/add :db/retract) e a v]))
(some? xf)
(comp xf)))))
(defn retracts-only-from-eids [query path path-symbols path->pulled-eids]
{:pre [(vector? path) (vector? path-symbols)]}
(let [pulled-eids (get-in path->pulled-eids (conj path ::eids))
path-sym (peek path-symbols)
exclude-sym (last (sym-seq path "exclude"))
ret (cond-> query
(seq pulled-eids)
(db/merge-query {:where [(list 'or
(list 'and
[(list '= path-sym exclude-sym)]
'[(identity false) ?datom-added])
(list 'and
[(list 'not= path-sym exclude-sym)]
'(or [(identity false) ?datom-added]
[(identity true) ?datom-added])))]
:symbols {[exclude-sym '...] (seq pulled-eids)}})
(seq (rest path))
(retracts-only-from-eids (pop path) (pop path-symbols) pulled-eids))]
ret))
;; ######## History api
(defn all-datoms
"Returns all changed datoms in tx order.
Param: db-history is required.
You'll want to use (all-changes ...) instead of
this function most of the time. Only use this one
when it's too expensive to initially use (pull-many ...)
to get your dataset.
(now inlined (all-changed) function's doc string):
Finds all changed datoms matching the pull-pattern.
For all changed datoms in db-history using an entity-query and the
paths and attrs of a pull-pattern.
By defaults uses a find-pattern that gets datoms [e a v tx added],
but can be customized by passing a find-pattern parameter."
[db db-history pull-pattern entity-query & [path+eavts->txs path->feav-xf-fn immutable-entity-namespaces]]
{:pre [(some? db-history)]}
(let [pulled-eids (atom {})]
(->> pull-pattern
(pattern->attr-paths)
(into []
(comp
(remove (fn [attr-path]
(seq (sequence (comp (map (comp keyword namespace))
(filter (or immutable-entity-namespaces #{}))
(take 1))
(:attrs attr-path)))))
(mapcat (fn [{:keys [path] :as attr-path}]
(let [path-symbols (vec (sym-seq path))]
(->> (changed-path-queries db-history entity-query attr-path path-symbols)
(map (fn [query]
(retracts-only-from-eids query path (vec path-symbols) @pulled-eids)))
(map vector (repeat attr-path))))))
(mapcat (fn [[attr-path query]]
(let [eavts (db/find-with (d/history db) query)]
(when (seq eavts)
(let [feavs (datoms->feav eavts (when path->feav-xf-fn
(path->feav-xf-fn attr-path)))
attr->pattern (:normalized-attr->pattern attr-path)
;; Grouping all :db/add's by attr, so we can use pull to get any data
;; the user is missing that hadn't changed in db/history.
pullable? (set (keys attr->pattern))
adds-by-attr (transduce (comp
(filter (comp pullable? feav-attr))
(filter (comp #{:db/add} feav-fn)))
(completing
(fn [by-attr feav]
(update by-attr
(feav-attr feav)
(fnil conj #{})
(feav-val feav))))
{}
feavs)]
;; Put onto our feavs the "current truth" about all refs, using
;; pull on the latest db.
(-> feavs
;; Add extra transactions that contain only the current truth,
;; no retractions.
(into (when path+eavts->txs (path+eavts->txs attr-path eavts)))
(into (mapcat (fn [[attr vs]]
;; TODO: Now that we've pulled on some eids, we
;; don't have to pull on their children.
;; Need to make sure we only do this filtering
;; for entities that has been pulled using
;; the same pattern.
(let [cache-path (-> []
(into (map normalize-attribute) (:path attr-path))
(conj attr ::eids))]
(swap! pulled-eids update-in cache-path
(fnil set/union #{}) vs))
(d/pull-many db (get attr->pattern attr) (seq vs))))
adds-by-attr))))))))))))
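(comment
  ;; Hedged usage sketch; conn, last-seen-t and the attributes are illustrative.
  (let [db (d/db conn)
        db-history (d/since (d/history db) last-seen-t)]
    (all-datoms db db-history
                [:user/name {:user/cart [:cart/items]}]
                {:where '[[?e :user/email ?email]]
                 :symbols {'?email "someone@example.com"}})))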
(defn one
"Initially gets an entity and uses (pull ...) on it with
the pull-pattern. Gets all changed datoms when db-history is
available."
[db db-history pull-pattern entity-query]
(if (nil? db-history)
(db/pull db pull-pattern (db/one-with db entity-query))
(all-datoms db db-history pull-pattern entity-query)))
(defn all
"Initially gets everything by using (pull-many ...), then
gets all datoms that have been changed, by using the entity
query and the pull-pattern."
[db db-history pull-pattern entity-query]
(if (nil? db-history)
(db/pull-many db pull-pattern (db/all-with db entity-query))
(all-datoms db db-history pull-pattern entity-query)))
(defn- one-changed-entity
"Returns an entity id which has been changed or if something has changed
that can be reached in the pull pattern."
[db db-history pull-pattern entity-query]
{:post [(or (nil? %)
(number? %))]}
(if (nil? db-history)
(db/one-with db entity-query)
(->> pull-pattern
(pattern->attr-paths)
(some (fn [attr-path]
(->> (changed-path-queries db-history entity-query attr-path)
(some (fn [query]
(let [q (db/merge-query query {:find '[?e .]})]
(db/one-with (d/history db) q))))))))))
(defn- adds-and-retracts-for-eid
"returns all adds and retracts for an eid bounded by db-history."
[db db-history eid]
(let [attr-id->keyword (memoize #(:db/ident (d/entity db %)))]
(->> (d/datoms db-history :eavt eid)
(map (fn [[e a v t added]] [e (attr-id->keyword a) v t added]))
(datoms->feav))))
(defn one-external
"Takes the usual db, db-history, pull pattern and entity-query
to make sure we only return what's changed since the last read.
In addition, it takes a 1-arity function that returns transaction
data for datascript on the client. It's only called when something has
changed, which means we'll only call the external service if something
has changed on our end."
[db db-history query entity-query eid->client-txs]
{:pre (fn? eid->client-txs)}
(when-let [eid (one-changed-entity db db-history query entity-query)]
(let [in-datomic (if (nil? db-history)
[(db/pull db query eid)]
(adds-and-retracts-for-eid db db-history eid))
external-data (eid->client-txs eid)]
(assert (or (nil? external-data)
(sequential? external-data))
(str "Return value of eid->client-txs needs to be nil or sequential."
" Was: " external-data
" for eid: " eid))
(-> []
(into in-datomic)
(into external-data)))))
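(comment
  ;; Hedged sketch of the external-service hook; names are hypothetical.
  (one-external db db-history
                [:stripe/id]
                {:where '[[?e :stripe/id]]}
                (fn [eid]
                  ;; return nil or a sequence of client transaction data
                  (fetch-external-txs eid))))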
| null | https://raw.githubusercontent.com/eponai/sulolive/7a70701bbd3df6bbb92682679dcedb53f8822c18/src/eponai/server/datomic/query.clj | clojure | Create a path of where-clauses from entity ?e through
the path of the pull-pattern.
If we've got a star attribute,
just return the query matching everything.
For a "star" attribute, just don't specify
which values the ?datom-attr-keyword can take.
Else, return queries for normal and reverse attributes.
TODO: TEST THIS :D
[function entity attribute value] getters
Grouping all :db/add's by attr, so we can use pull to get any data
the user is missing that hadn't changed in db/history.
Put onto our feavs the "current truth" about all refs, using
pull on the latest db.
Add extra transactions that contain only the current truth,
no retractions.
TODO: Now that we've pulled on some eids, we
don't have to pull on their children.
Need to make sure we only do this filtering
for entities that has been pulled using
the same pattern. | (ns eponai.server.datomic.query
(:require
[clojure.string :as string]
[clojure.set :as set]
[datomic.api :as d]
[medley.core :as medley]
[taoensso.timbre :as timbre :refer [info debug warn trace]]
[eponai.common.database :as db]))
(defn schema
"Pulls schema from the db. If data is provided includes only the necessary fields for that data.
(type/ref, cardinality/many or unique/identity)."
([db] (schema db nil))
([db db-history]
(let [query (cond-> {:where '[[?e :db/ident ?id]
[:db.part/db :db.install/attribute ?e]
[(namespace ?id) ?ns]
[(.startsWith ^String ?ns "db") ?d]
[(not ?d)]]}
(some? db-history)
(db/merge-query {:where '[[$db-history ?e]]
:symbols {'$db-history db-history}}))]
(mapv #(into {} (d/entity db %))
(db/all-with db query)))))
(defn sym-seq
"Generates symbols. Is passed around for testability."
[path & [suffix]]
(map #(gensym (str "?" (name %) "_" suffix)) path))
(defn reverse-lookup-attr? [attr]
{:pre [(or (keyword? attr) (= '* attr))]}
(string/starts-with? (name attr) "_"))
(defn normalize-attribute [attr]
{:pre [(or (keyword? attr) (= '* attr))]}
(if (reverse-lookup-attr? attr)
(keyword (namespace attr) (subs (name attr) 1))
attr))
(defn- path->where-clause
[attr [sym next-sym]]
(let [k (normalize-attribute attr)]
(if (reverse-lookup-attr? attr)
[next-sym k sym]
[sym k next-sym])))
(defn vector-swap
"Swaps the values for index i1 and i2 in vector v."
[v i1 i2]
{:pre [(vector? v)]}
(-> v
(assoc i1 (nth v i2))
(assoc i2 (nth v i1))))
(defn- changed-path-queries
([db-history entity-query attr-path]
(changed-path-queries db-history entity-query attr-path (sym-seq (:path attr-path))))
([db-history entity-query {:keys [path attrs]} sym-seq]
{:pre [(some? db-history)]}
(let [path-syms (cons '?e sym-seq)
return-eid (last path-syms)
path-where-clauses (map path->where-clause
path
(partition 2 1 path-syms))
find-pattern [return-eid
'?datom-attr-keyword
'?datom-value
'?datom-tx
'?datom-added]
attribute-number-to-keyword-clause '[?datom-attr-number
:db/ident
?datom-attr-keyword]
path-query (cond-> entity-query
(seq path-where-clauses)
(db/merge-query {:where (vec path-where-clauses)})
:always
(db/merge-query {:where [attribute-number-to-keyword-clause]
:symbols {'$db-history db-history}}))
db-history-clause (->> find-pattern
(replace {'?datom-attr-keyword '?datom-attr-number})
(cons '$db-history)
(vec))]
(assert (every? #(or (keyword? %) (= '* %)) attrs)
(str "Attributes in path: " path " were not only"
" keywords or '*, were: " attrs))
(if (some #(= '* %) attrs)
(vector (db/merge-query {:where [db-history-clause]
:find find-pattern}
path-query))
(let [create-query (fn [attrs where-clause find-pattern]
(when (seq attrs)
(-> path-query
(db/merge-query
{:where [where-clause]
:symbols {'[?datom-attr-keyword ...] attrs}
:find find-pattern}))))
keyword-attrs (filter keyword? attrs)
query-attrs (create-query (remove reverse-lookup-attr? attrs)
db-history-clause
find-pattern)
query-reverse-attrs (create-query (->> keyword-attrs
(filter reverse-lookup-attr?)
(map normalize-attribute))
(vector-swap db-history-clause 1 3)
(vector-swap find-pattern 0 2))]
(filter some? [query-attrs query-reverse-attrs]))))))
(defn attr-path-root? [attr-path]
(empty? (:path attr-path)))
(defn- pattern->attr-paths
"Given a pull-pattern, return maps of {:path [] :attrs []} where:
:path is the path into the pull pattern
:attrs are all the keys at the current path.
Example:
(pattern->attr-paths '[:abc * {:foo [:bar {:baz [:abc]}]}])
Returns:
[{:path [], :attrs (:abc '* :foo)}
{:path [:foo], :attrs (:bar :baz)}
{:path [:foo :baz], :attrs (:abc)}]
Explanation:
At path [], including the join, we've got attrs :abc and :foo."
([pattern] (pattern->attr-paths pattern []))
([pattern path]
(let [ks (into [] (comp (map (fn [x]
{:post [(some? %)]}
(cond (keyword? x) x
(map? x) (ffirst x)
(= '* x) x)))
(remove #(= :db/id %)))
pattern)
joins (filter map? pattern)
attr->pattern (into {}
(map #(medley/map-keys normalize-attribute %))
joins)]
(into [{:path path :attrs ks :normalized-attr->pattern attr->pattern}]
(mapcat (fn [join]
{:pre (= 1 (count join))}
(let [[k v] (first join)]
(pattern->attr-paths v (conj path k)))))
joins))))
(def feav-fn #(nth % 0))
(def feav-e #(nth % 1))
(def feav-attr #(nth % 2))
(def feav-val #(nth % 3))
(defn- datoms->feav
"Returning datascript transactions with [function entity attribute value]
Can be passed an additional transducing function xf which will be applied
to each feav."
[datoms & [xf]]
(->> datoms
(sort-by #(nth % 3))
(into [] (cond-> (map (fn [[e a v _ added]]
[(if added :db/add :db/retract) e a v]))
(some? xf)
(comp xf)))))
(defn retracts-only-from-eids [query path path-symbols path->pulled-eids]
{:pre [(vector? path) (vector? path-symbols)]}
(let [pulled-eids (get-in path->pulled-eids (conj path ::eids))
path-sym (peek path-symbols)
exclude-sym (last (sym-seq path "exclude"))
ret (cond-> query
(seq pulled-eids)
(db/merge-query {:where [(list 'or
(list 'and
[(list '= path-sym exclude-sym)]
'[(identity false) ?datom-added])
(list 'and
[(list 'not= path-sym exclude-sym)]
'(or [(identity false) ?datom-added]
[(identity true) ?datom-added])))]
:symbols {[exclude-sym '...] (seq pulled-eids)}})
(seq (rest path))
(retracts-only-from-eids (pop path) (pop path-symbols) pulled-eids))]
ret))
(defn all-datoms
"Returns all changed datoms in tx order.
Param: db-history is required.
You'll want to use (all-changes ...) instead of
this function most of the time. Only use this one
when it's too expensive to initially use (pull-many ...)
to get your dataset.
(now inlined (all-changed) function's doc string):
Finds all changed datoms matching the pull-pattern.
For all changed datoms in db-history using an entity-query and the
paths and attrs of a pull-pattern.
By defaults uses a find-pattern that gets datoms [e a v tx added],
but can be customized by passing a find-pattern parameter."
[db db-history pull-pattern entity-query & [path+eavts->txs path->feav-xf-fn immutable-entity-namespaces]]
{:pre [(some? db-history)]}
(let [pulled-eids (atom {})]
(->> pull-pattern
(pattern->attr-paths)
(into []
(comp
(remove (fn [attr-path]
(seq (sequence (comp (map (comp keyword namespace))
(filter (or immutable-entity-namespaces #{}))
(take 1))
(:attrs attr-path)))))
(mapcat (fn [{:keys [path] :as attr-path}]
(let [path-symbols (vec (sym-seq path))]
(->> (changed-path-queries db-history entity-query attr-path path-symbols)
(map (fn [query]
(retracts-only-from-eids query path (vec path-symbols) @pulled-eids)))
(map vector (repeat attr-path))))))
(mapcat (fn [[attr-path query]]
(let [eavts (db/find-with (d/history db) query)]
(when (seq eavts)
(let [feavs (datoms->feav eavts (when path->feav-xf-fn
(path->feav-xf-fn attr-path)))
attr->pattern (:normalized-attr->pattern attr-path)
pullable? (set (keys attr->pattern))
adds-by-attr (transduce (comp
(filter (comp pullable? feav-attr))
(filter (comp #{:db/add} feav-fn)))
(completing
(fn [by-attr feav]
(update by-attr
(feav-attr feav)
(fnil conj #{})
(feav-val feav))))
{}
feavs)]
(-> feavs
(into (when path+eavts->txs (path+eavts->txs attr-path eavts)))
(into (mapcat (fn [[attr vs]]
(let [cache-path (-> []
(into (map normalize-attribute) (:path attr-path))
(conj attr ::eids))]
(swap! pulled-eids update-in cache-path
(fnil set/union #{}) vs))
(d/pull-many db (get attr->pattern attr) (seq vs))))
adds-by-attr))))))))))))
(defn one
"Initially gets an entity and uses (pull ...) on it with
the pull-pattern. Gets all changed datoms when db-history is
available."
[db db-history pull-pattern entity-query]
(if (nil? db-history)
(db/pull db pull-pattern (db/one-with db entity-query))
(all-datoms db db-history pull-pattern entity-query)))
(defn all
"Initially gets everything by using (pull-many ...), then
gets all datoms that have been changed, by using the entity
query and the pull-pattern."
[db db-history pull-pattern entity-query]
(if (nil? db-history)
(db/pull-many db pull-pattern (db/all-with db entity-query))
(all-datoms db db-history pull-pattern entity-query)))
(defn- one-changed-entity
"Returns an entity id which has been changed or if something has changed
that can be reached in the pull pattern."
[db db-history pull-pattern entity-query]
{:post [(or (nil? %)
(number? %))]}
(if (nil? db-history)
(db/one-with db entity-query)
(->> pull-pattern
(pattern->attr-paths)
(some (fn [attr-path]
(->> (changed-path-queries db-history entity-query attr-path)
(some (fn [query]
(let [q (db/merge-query query {:find '[?e .]})]
(db/one-with (d/history db) q))))))))))
(defn- adds-and-retracts-for-eid
"returns all adds and retracts for an eid bounded by db-history."
[db db-history eid]
(let [attr-id->keyword (memoize #(:db/ident (d/entity db %)))]
(->> (d/datoms db-history :eavt eid)
(map (fn [[e a v t added]] [e (attr-id->keyword a) v t added]))
(datoms->feav))))
(defn one-external
"Takes the usual db, db-history, pull pattern and entity-query
to make sure we only return what's changed since the last read.
In addition, it takes a 1-arity function that returns transaction
data for datascript on the client. It's only called when something has
changed, which means we'll only call the external service if something
has changed on our end."
[db db-history query entity-query eid->client-txs]
{:pre (fn? eid->client-txs)}
(when-let [eid (one-changed-entity db db-history query entity-query)]
(let [in-datomic (if (nil? db-history)
[(db/pull db query eid)]
(adds-and-retracts-for-eid db db-history eid))
external-data (eid->client-txs eid)]
(assert (or (nil? external-data)
(sequential? external-data))
(str "Return value of eid->client-txs needs to be nil or sequential."
" Was: " external-data
" for eid: " eid))
(-> []
(into in-datomic)
(into external-data)))))
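;; Editor's sketch: the eid->client-txs contract documented above. The external
;; fetch call is made up; what matters is that the function is only invoked
;; when `eid` actually changed, and must return nil or a sequential collection
;; of client transaction data.
(comment
  (one-external db db-history
                [:store/id :store/items]
                {:where '[[?e :store/id]]}
                (fn [eid]
                  [{:db/id eid
                    :store/stripe-charges (fetch-charges-from-stripe eid)}])))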
|
045abe7006d4f53f2aff40a0ad7800f443aed9b5addda4b31ab913abe9489861 | penpot/penpot | project_menu.cljs | This Source Code Form is subject to the terms of the Mozilla Public
;; License, v. 2.0. If a copy of the MPL was not distributed with this
;; file, You can obtain one at http://mozilla.org/MPL/2.0/.
;;
;; Copyright (c) KALEIDOS INC
(ns app.main.ui.dashboard.project-menu
(:require
[app.common.spec :as us]
[app.main.data.dashboard :as dd]
[app.main.data.messages :as dm]
[app.main.data.modal :as modal]
[app.main.refs :as refs]
[app.main.store :as st]
[app.main.ui.components.context-menu-a11y :refer [context-menu-a11y]]
[app.main.ui.context :as ctx]
[app.main.ui.dashboard.import :as udi]
[app.util.dom :as dom]
[app.util.i18n :as i18n :refer [tr]]
[app.util.router :as rt]
[cljs.spec.alpha :as s]
[rumext.v2 :as mf]))
(s/def ::project some?)
(s/def ::show? boolean?)
(s/def ::on-edit fn?)
(s/def ::on-menu-close fn?)
(s/def ::top (s/nilable ::us/number))
(s/def ::left (s/nilable ::us/number))
(s/def ::on-import fn?)
(s/def ::project-menu
(s/keys :req-un [::project ::show? ::on-edit ::on-menu-close]
:opt-un [::top ::left ::on-import]))
(mf/defc project-menu
[{:keys [project show? on-edit on-menu-close top left on-import] :as props}]
(us/verify ::project-menu props)
(let [top (or top 0)
left (or left 0)
current-team-id (mf/use-ctx ctx/current-team-id)
teams (mf/deref refs/teams)
teams (-> teams (dissoc current-team-id) vals vec)
on-duplicate-success
(fn [new-project]
(st/emit! (dm/success (tr "dashboard.success-duplicate-project"))
(rt/nav :dashboard-files
{:team-id (:team-id new-project)
:project-id (:id new-project)})))
on-duplicate
(fn []
(st/emit! (dd/duplicate-project
(with-meta project {:on-success on-duplicate-success}))))
toggle-pin
#(st/emit! (dd/toggle-project-pin project))
on-move-success
(fn [team-id]
(st/emit! (dd/go-to-projects team-id)))
on-move
(fn [team-id]
(let [data {:id (:id project) :team-id team-id}
mdata {:on-success #(on-move-success team-id)}]
#(st/emit! (dm/success (tr "dashboard.success-move-project"))
(dd/move-project (with-meta data mdata)))))
delete-fn
(fn [_]
(st/emit! (dm/success (tr "dashboard.success-delete-project"))
(dd/delete-project project)
(dd/go-to-projects (:team-id project))))
on-delete
#(st/emit!
(modal/show
{:type :confirm
:title (tr "modals.delete-project-confirm.title")
:message (tr "modals.delete-project-confirm.message")
:accept-label (tr "modals.delete-project-confirm.accept")
:on-accept delete-fn}))
file-input (mf/use-ref nil)
on-import-files
(mf/use-callback
(fn []
(dom/click (mf/ref-val file-input))))
on-finish-import
(mf/use-callback
(fn []
(when (fn? on-import) (on-import))))
options [(when-not (:is-default project)
{:option-name (tr "labels.rename")
:id "project-menu-rename"
:option-handler on-edit
:data-test "project-rename"})
(when-not (:is-default project)
{:option-name (tr "dashboard.duplicate")
:id "project-menu-duplicated"
:option-handler on-duplicate
:data-test "project-duplicate"})
(when-not (:is-default project)
{:option-name (tr "dashboard.pin-unpin")
:id "project-menu-pin"
:option-handler toggle-pin})
(when (and (seq teams) (not (:is-default project)))
{:option-name (tr "dashboard.move-to")
:id "project-menu-move-to"
:sub-options (for [team teams]
{:option-name (:name team)
:id (:name team)
:option-handler (on-move (:id team))})
:data-test "project-move-to"})
(when (some? on-import)
{:option-name (tr "dashboard.import")
:id "project-menu-import"
:option-handler on-import-files
:data-test "file-import"})
(when-not (:is-default project)
{:option-name :separator})
(when-not (:is-default project)
{:option-name (tr "labels.delete")
:id "project-menu-delete"
:option-handler on-delete
:data-test "project-delete"})]]
[:*
[:& udi/import-form {:ref file-input
:project-id (:id project)
:on-finish-import on-finish-import}]
[:& context-menu-a11y
{:on-close on-menu-close
:show show?
:fixed? (or (not= top 0) (not= left 0))
:min-width? true
:top top
:left left
:options options}]]))
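;; Editor's sketch (props are illustrative, not taken from the rest of the
;; penpot codebase): the menu is mounted from a dashboard grid item roughly as
;;   [:& project-menu {:project project
;;                     :show? (:menu-open @local)
;;                     :on-edit start-rename!
;;                     :on-menu-close #(swap! local assoc :menu-open false)
;;                     :top 40 :left 20}]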
| null | https://raw.githubusercontent.com/penpot/penpot/96ce475206d90cccb01c8cd63d684132690e480b/frontend/src/app/main/ui/dashboard/project_menu.cljs | clojure |
Copyright (c) KALEIDOS INC | This Source Code Form is subject to the terms of the Mozilla Public
License, v. 2.0. If a copy of the MPL was not distributed with this
file, You can obtain one at http://mozilla.org/MPL/2.0/.
(ns app.main.ui.dashboard.project-menu
(:require
[app.common.spec :as us]
[app.main.data.dashboard :as dd]
[app.main.data.messages :as dm]
[app.main.data.modal :as modal]
[app.main.refs :as refs]
[app.main.store :as st]
[app.main.ui.components.context-menu-a11y :refer [context-menu-a11y]]
[app.main.ui.context :as ctx]
[app.main.ui.dashboard.import :as udi]
[app.util.dom :as dom]
[app.util.i18n :as i18n :refer [tr]]
[app.util.router :as rt]
[cljs.spec.alpha :as s]
[rumext.v2 :as mf]))
(s/def ::project some?)
(s/def ::show? boolean?)
(s/def ::on-edit fn?)
(s/def ::on-menu-close fn?)
(s/def ::top (s/nilable ::us/number))
(s/def ::left (s/nilable ::us/number))
(s/def ::on-import fn?)
(s/def ::project-menu
(s/keys :req-un [::project ::show? ::on-edit ::on-menu-close]
:opt-un [::top ::left ::on-import]))
(mf/defc project-menu
[{:keys [project show? on-edit on-menu-close top left on-import] :as props}]
(us/verify ::project-menu props)
(let [top (or top 0)
left (or left 0)
current-team-id (mf/use-ctx ctx/current-team-id)
teams (mf/deref refs/teams)
teams (-> teams (dissoc current-team-id) vals vec)
on-duplicate-success
(fn [new-project]
(st/emit! (dm/success (tr "dashboard.success-duplicate-project"))
(rt/nav :dashboard-files
{:team-id (:team-id new-project)
:project-id (:id new-project)})))
on-duplicate
(fn []
(st/emit! (dd/duplicate-project
(with-meta project {:on-success on-duplicate-success}))))
toggle-pin
#(st/emit! (dd/toggle-project-pin project))
on-move-success
(fn [team-id]
(st/emit! (dd/go-to-projects team-id)))
on-move
(fn [team-id]
(let [data {:id (:id project) :team-id team-id}
mdata {:on-success #(on-move-success team-id)}]
#(st/emit! (dm/success (tr "dashboard.success-move-project"))
(dd/move-project (with-meta data mdata)))))
delete-fn
(fn [_]
(st/emit! (dm/success (tr "dashboard.success-delete-project"))
(dd/delete-project project)
(dd/go-to-projects (:team-id project))))
on-delete
#(st/emit!
(modal/show
{:type :confirm
:title (tr "modals.delete-project-confirm.title")
:message (tr "modals.delete-project-confirm.message")
:accept-label (tr "modals.delete-project-confirm.accept")
:on-accept delete-fn}))
file-input (mf/use-ref nil)
on-import-files
(mf/use-callback
(fn []
(dom/click (mf/ref-val file-input))))
on-finish-import
(mf/use-callback
(fn []
(when (fn? on-import) (on-import))))
options [(when-not (:is-default project)
{:option-name (tr "labels.rename")
:id "project-menu-rename"
:option-handler on-edit
:data-test "project-rename"})
(when-not (:is-default project)
{:option-name (tr "dashboard.duplicate")
:id "project-menu-duplicated"
:option-handler on-duplicate
:data-test "project-duplicate"})
(when-not (:is-default project)
{:option-name (tr "dashboard.pin-unpin")
:id "project-menu-pin"
:option-handler toggle-pin})
(when (and (seq teams) (not (:is-default project)))
{:option-name (tr "dashboard.move-to")
:id "project-menu-move-to"
:sub-options (for [team teams]
{:option-name (:name team)
:id (:name team)
:option-handler (on-move (:id team))})
:data-test "project-move-to"})
(when (some? on-import)
{:option-name (tr "dashboard.import")
:id "project-menu-import"
:option-handler on-import-files
:data-test "file-import"})
(when-not (:is-default project)
{:option-name :separator})
(when-not (:is-default project)
{:option-name (tr "labels.delete")
:id "project-menu-delete"
:option-handler on-delete
:data-test "project-delete"})]]
[:*
[:& udi/import-form {:ref file-input
:project-id (:id project)
:on-finish-import on-finish-import}]
[:& context-menu-a11y
{:on-close on-menu-close
:show show?
:fixed? (or (not= top 0) (not= left 0))
:min-width? true
:top top
:left left
:options options}]]))
|
98826d757d23dad099489e57baada3eee283d6950516017dac7322f153e40920 | ekmett/ekmett.github.com | Origami.hs | # LANGUAGE ExistentialQuantification , ViewPatterns , FlexibleInstances , PatternGuards #
module Origami where
import Prelude hiding ((.),id)
import Control.Arrow
import Control.Comonad
import Control.Category
import Control.Comonad.Density
import Control.Functor hiding (first,second)
import Control.Functor.Extras
import Control.Functor.Pointed
import Control.Applicative
import Data.Foldable (Foldable)
import qualified Data.Foldable as Foldable
import Data.Traversable (Traversable)
import qualified Data.Traversable as Traversable
import Data.Monoid.Reducer
class Fold f where
fold :: Foldable g => f a b -> g a -> b
scan :: Traversable g => f a b -> g a -> g b
comp :: f b c -> f a b -> f a c
mapAccum :: Traversable g => f a b -> g a -> (b, g b)
foldf :: Foldable g => f a b -> g a -> f a b
scanf :: Traversable g => f a b -> g a -> (f a b, g b)
reducer :: (c `Reducer` m) => f c m
scan f = snd . mapAccum f
-- horrible definition, avoid it!
mapAccum f g = (fold f g, scan f g)
-- Foldl
newtype Moore a b = Moore { runMoore :: (b, a -> Moore a b) }
instance Functor (Moore a) where
fmap f = Moore . (f *** fmap (fmap f)) . runMoore
stepMoore :: Moore a b -> a -> Moore a b
stepMoore = snd . runMoore
moore :: b -> (a -> Moore a b) -> Moore a b
moore a b = Moore (a,b)
instance Copointed (Moore a) where
extract = fst . runMoore
instance Comonad (Moore a) where
duplicate m = Moore (m, duplicate . stepMoore m)
extend f m = Moore (f m, extend f . stepMoore m)
instance Fold Moore where
fold m = extract . foldf m
foldf = Foldable.foldl stepMoore
m `comp` n = extract m `moore` \(stepMoore n -> n') -> stepMoore m (extract n') `comp` n'
scan m = snd . scanf m
scanf = Traversable.mapAccumL scan' where
scan' n (stepMoore n -> n') = (n', extract n')
mapAccum m = first extract . Traversable.mapAccumL scan' m where
scan' n (stepMoore n -> n') = (n', extract n')
reducer = step mempty where
step e = Moore (e, step . snoc e)
instance Pointed (Moore a) where
point x = p where p = Moore (x, const p)
instance Applicative (Moore a) where
pure = point
Moore (z,f) <*> Moore (z',f') = Moore (z z', liftA2 (<*>) f f')
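-- Editor's sketch (not in the original module): a running-sum machine showing
-- the 'Fold' interface on 'Moore'.  'fold sumMoore [1,2,3]' is 6 and
-- 'scan sumMoore [1,2,3]' is [1,3,6].
sumMoore :: Num a => Moore a a
sumMoore = go 0 where go n = moore n (\a -> go (n + a))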
-- Foldl1
newtype Mealy a b = Mealy { runMealy :: a -> (Mealy a b, b) }
instance Functor (Mealy a) where
fmap f (Mealy g) = Mealy $ (fmap f *** f) . g
instance Fold Mealy where
fold m = snd . Foldable.foldl (runMealy . fst) (m, error "Mealy.fold: empty")
comp = (.)
scan m = snd . Traversable.mapAccumL runMealy m
reducer = Mealy $ \(unit -> e') -> (step e', e') where
step e = Mealy $ \(snoc e -> e') -> (step e', e')
instance Pointed (Mealy a) where
point x = p where p = Mealy (const (p,x))
instance Applicative (Mealy a) where
pure = point
-- pointfree view-patterns!
-- f <*> x = Mealy $ \(runMealy f &&& runMealy x -> ((f',f''),(x',x''))) -> (f' <*> x', f'' x'')
Mealy f <*> Mealy x = Mealy $ \a ->
let (f', f'') = f a
(x', x'') = x a
in (f' <*> x', f'' x'')
instance Category Mealy where
id = Mealy ((,) id)
g . f = Mealy $ \a ->
let (f', b) = runMealy f a
(g', c) = runMealy g b
in (g' . f', c)
instance Monoid (Mealy a a) where
mempty = id
mappend = (.)
instance Arrow Mealy where
arr f = f' where f' = Mealy $ \a -> (f', f a)
first (Mealy f) = Mealy $ \(a,b) ->
let (f', f'') = f a
in (first f', (f'', b))
second (Mealy f) = Mealy $ \(a,b) ->
let (f', f'') = f b
in (second f', (a, f''))
Mealy f &&& Mealy g = Mealy $ \ a ->
let (f', f'') = f a
(g', g'') = g a
in (f' &&& g', (f'',g''))
Mealy f *** Mealy g = Mealy $ \(a,b) ->
let (f', f'') = f a
(g', g'') = g b
in (f' *** g', (f'',g''))
instance ArrowChoice Mealy where
left m = Mealy $ \a -> case a of
Left x -> (left *** Left) $ runMealy m x
Right y -> (left m, Right y)
right m = Mealy $ \a -> case a of
Left x -> (right m, Left x)
Right y -> (right *** Right) $ runMealy m y
f ||| g = Mealy $ \a -> case a of
Left x | (f',x') <- runMealy f x -> (f' ||| g, x')
Right y | (g',y') <- runMealy g y -> (f ||| g', y')
f +++ g = Mealy $ \a -> case a of
Left x | (f',x') <- runMealy f x -> (f' +++ g, Left x')
Right y | (g',y') <- runMealy g y -> (f +++ g', Right y')
-- contravariant Yoneda lemma applied to a non-"Functor" functor F a where F a c = F (a -> c -> c) c
-- an explicit machine
data Foldr a b = forall c. Foldr (c -> b) (a -> c -> c) c
instance Functor (Foldr a) where
fmap g (Foldr m f z) = Foldr (g . m) f z
instance Pointed (Foldr a) where
point x = Foldr (const x) undefined undefined
instance Applicative (Foldr a) where
pure = point
Foldr m f z <*> Foldr m' f' z' = Foldr m'' f'' (z,z') where
m'' (c,c') = m c (m' c')
f'' a (c,c') = (f a c, f' a c')
instance Fold Foldr where
fold (Foldr m f z) = m . Foldable.foldr f z
Foldr m f z `comp` Foldr m' f' z' = Foldr (m . fst) f'' (z,z') where
f'' a (c,d) | d' <- f' a d = (f (m' d') c, d')
mapAccum (Foldr m f z) = first m . Traversable.mapAccumR scan' z where
scan' c a | c' <- f a c = (c',m c')
reducer = Foldr id cons mempty
instance Copointed (Foldr a) where
extract (Foldr m _ z) = m z
instance Comonad (Foldr a) where
extend = flip Foldr stepFoldr
stepFoldr :: a -> Foldr a b -> Foldr a b
stepFoldr a (Foldr m f z) = Foldr m f (f a z)
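-- Editor's sketch: the same running sum as an explicit 'Foldr' machine;
-- 'fold sumFoldr [1,2,3]' also evaluates to 6.
sumFoldr :: Num a => Foldr a a
sumFoldr = Foldr id (+) 0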
-- contravariant Yoneda lemma applied to a non-"Functor" functor F a where F a c = F (a -> c -> c) (a -> c)
data Foldr1 a b = forall c. Foldr1 (c -> b) (a -> c -> c) (a -> c)
instance Functor (Foldr1 a) where
fmap g (Foldr1 m f z) = Foldr1 (g . m) f z
instance Pointed (Foldr1 a) where
point x = Foldr1 (const x) undefined undefined
instance Category Foldr1 where
id = Foldr1 id const id
Foldr1 m f z . Foldr1 m' f' z' = Foldr1 (m . fst) f'' z'' where
z'' (z' -> b) = (z (m' b), b)
f'' a (c,d) | d' <- f' a d = (f (m' d') c, d')
instance Arrow Foldr1 where
arr f = f' where f' = Foldr1 f const id
first (Foldr1 m f z) = Foldr1 (first m) f' (first z) where
f' (a,b) (c,_) = (f a c, b)
second (Foldr1 m f z) = Foldr1 (second m) f' (second z) where
f' (a,b) (_,c) = (a, f b c)
Foldr1 m f z *** Foldr1 m' f' z' = Foldr1 (m *** m') f'' (z *** z') where
f'' (a,b) (c,d) = (f a c, f' b d)
Foldr1 m f z &&& Foldr1 m' f' z' = Foldr1 (m *** m') f'' (z &&& z') where
f'' a (c,d) = (f a c, f' a d)
toFoldr1 :: Foldr a b -> Foldr1 a b
toFoldr1 (Foldr m f z) = Foldr1 m f (flip f z)
runFoldr1 :: Foldr1 a b -> a -> (Foldr1 a b, b)
runFoldr1 (Foldr1 m f z) (z -> c) = (Foldr1 m f (flip f c), m c)
instance Fold Foldr1 where
fold (Foldr1 m f z) = m' . Foldable.foldr f' Nothing where
f' a Nothing = Just (z a)
f' a (Just b) = Just (f a b)
m' Nothing = error "Foldr1.fold: empty"
m' (Just a) = m a
comp = (.)
reducer = Foldr1 id cons unit
scan m = snd . Traversable.mapAccumR runFoldr1 m
-- instance Applicative Foldr1
instance ArrowChoice Foldr1
-- Density comonad based
data DFoldr a b = forall c. DFoldr ((a -> c -> c) -> c -> b) (a -> c -> c) c
stepDFoldr :: a -> DFoldr a b -> DFoldr a b
stepDFoldr a (DFoldr m f z) = DFoldr m f (f a z)
instance Functor (DFoldr a) where
fmap g (DFoldr m f z) = DFoldr (fmap g . m) f z
instance Pointed (DFoldr a) where
point x = DFoldr (\_ _ -> x) undefined undefined
instance Applicative (DFoldr a) where
pure = point
DFoldr m f z <*> DFoldr m' f' z' = DFoldr m'' f'' (z,z') where
m'' _ (c,c') = m f c (m' f' c')
f'' a (c,c') = (f a c, f' a c')
instance Fold DFoldr where
fold (DFoldr m f z) = m f . Foldable.foldr f z
DFoldr m f z `comp` DFoldr m' f' z' = DFoldr (const (m f . fst)) f'' (z,z') where
f'' a (c,d) | d' <- f' a d = (f (m' f' d') c, d')
mapAccum (DFoldr m f z) = first (m f) . Traversable.mapAccumR scan' z where
scan' c a | c' <- f a c = (c',m f c')
reducer = DFoldr (const id) cons mempty
instance Copointed (DFoldr a) where
extract (DFoldr m f z) = m f z
instance Comonad (DFoldr a) where
extend = flip DFoldr stepDFoldr . const
type Algebra f c = f c -> c
-- A density comonad of an arbitrary f-Algebra
data FoldF f b = forall c. FoldF (Algebra f c -> b) (Algebra f c)
instance Functor (FoldF f) where
fmap g (FoldF m f) = FoldF (g . m) f
instance Pointed (FoldF f) where
point x = FoldF (const x) (const x)
instance Copointed (FoldF f) where
extract (FoldF m f) = m f
instance Comonad (FoldF f) where
duplicate (FoldF m f) = FoldF (FoldF m) f
extend g (FoldF m f) = FoldF (g . FoldF m) f
| null | https://raw.githubusercontent.com/ekmett/ekmett.github.com/8d3abab5b66db631e148e1d046d18909bece5893/haskell/Origami.hs | haskell | horrible definition, avoid it!
Foldl
pointfree view-patterns!
f <*> x = Mealy $ \(runMealy f &&& runMealy x -> ((f',f''),(x',x''))) -> (f' <*> x', f'' x'')
instance Applicative Foldr1
A density comonad of an arbitrary f-Algebra | # LANGUAGE ExistentialQuantification , ViewPatterns , FlexibleInstances , PatternGuards #
module Origami where
import Prelude hiding ((.),id)
import Control.Arrow
import Control.Comonad
import Control.Category
import Control.Comonad.Density
import Control.Functor hiding (first,second)
import Control.Functor.Extras
import Control.Functor.Pointed
import Control.Applicative
import Data.Foldable (Foldable)
import qualified Data.Foldable as Foldable
import Data.Traversable (Traversable)
import qualified Data.Traversable as Traversable
import Data.Monoid.Reducer
class Fold f where
fold :: Foldable g => f a b -> g a -> b
scan :: Traversable g => f a b -> g a -> g b
comp :: f b c -> f a b -> f a c
mapAccum :: Traversable g => f a b -> g a -> (b, g b)
foldf :: Foldable g => f a b -> g a -> f a b
scanf :: Traversable g => f a b -> g a -> (f a b, g b)
reducer :: (c `Reducer` m) => f c m
scan f = snd . mapAccum f
mapAccum f g = (fold f g, scan f g)
newtype Moore a b = Moore { runMoore :: (b, a -> Moore a b) }
instance Functor (Moore a) where
fmap f = Moore . (f *** fmap (fmap f)) . runMoore
stepMoore :: Moore a b -> a -> Moore a b
stepMoore = snd . runMoore
moore :: b -> (a -> Moore a b) -> Moore a b
moore a b = Moore (a,b)
instance Copointed (Moore a) where
extract = fst . runMoore
instance Comonad (Moore a) where
duplicate m = Moore (m, duplicate . stepMoore m)
extend f m = Moore (f m, extend f . stepMoore m)
instance Fold Moore where
fold m = extract . foldf m
foldf = Foldable.foldl stepMoore
m `comp` n = extract m `moore` \(stepMoore n -> n') -> stepMoore m (extract n') `comp` n'
scan m = snd . scanf m
scanf = Traversable.mapAccumL scan' where
scan' n (stepMoore n -> n') = (n', extract n')
mapAccum m = first extract . Traversable.mapAccumL scan' m where
scan' n (stepMoore n -> n') = (n', extract n')
reducer = step mempty where
step e = Moore (e, step . snoc e)
instance Pointed (Moore a) where
point x = p where p = Moore (x, const p)
instance Applicative (Moore a) where
pure = point
Moore (z,f) <*> Moore (z',f') = Moore (z z', liftA2 (<*>) f f')
Foldl1
newtype Mealy a b = Mealy { runMealy :: a -> (Mealy a b, b) }
instance Functor (Mealy a) where
fmap f (Mealy g) = Mealy $ (fmap f *** f) . g
instance Fold Mealy where
fold m = snd . Foldable.foldl (runMealy . fst) (m, error "Mealy.fold: empty")
comp = (.)
scan m = snd . Traversable.mapAccumL runMealy m
reducer = Mealy $ \(unit -> e') -> (step e', e') where
step e = Mealy $ \(snoc e -> e') -> (step e', e')
instance Pointed (Mealy a) where
point x = p where p = Mealy (const (p,x))
instance Applicative (Mealy a) where
pure = point
Mealy f <*> Mealy x = Mealy $ \a ->
let (f', f'') = f a
(x', x'') = x a
in (f' <*> x', f'' x'')
instance Category Mealy where
id = Mealy ((,) id)
g . f = Mealy $ \a ->
let (f', b) = runMealy f a
(g', c) = runMealy g b
in (g' . f', c)
instance Monoid (Mealy a a) where
mempty = id
mappend = (.)
instance Arrow Mealy where
arr f = f' where f' = Mealy $ \a -> (f', f a)
first (Mealy f) = Mealy $ \(a,b) ->
let (f', f'') = f a
in (first f', (f'', b))
second (Mealy f) = Mealy $ \(a,b) ->
let (f', f'') = f b
in (second f', (a, f''))
Mealy f &&& Mealy g = Mealy $ \ a ->
let (f', f'') = f a
(g', g'') = g a
in (f' &&& g', (f'',g''))
Mealy f *** Mealy g = Mealy $ \(a,b) ->
let (f', f'') = f a
(g', g'') = g b
in (f' *** g', (f'',g''))
instance ArrowChoice Mealy where
left m = Mealy $ \a -> case a of
Left x -> (left *** Left) $ runMealy m x
Right y -> (left m, Right y)
right m = Mealy $ \a -> case a of
Left x -> (right m, Left x)
Right y -> (right *** Right) $ runMealy m y
f ||| g = Mealy $ \a -> case a of
Left x | (f',x') <- runMealy f x -> (f' ||| g, x')
Right y | (g',y') <- runMealy g y -> (f ||| g', y')
f +++ g = Mealy $ \a -> case a of
Left x | (f',x') <- runMealy f x -> (f' +++ g, Left x')
Right y | (g',y') <- runMealy g y -> (f +++ g', Right y')
contravariant Yoneda lemma applied to a non-"Functor " functor F a where F a c = F ( a - > c - > c ) c
an explicit machine
data Foldr a b = forall c. Foldr (c -> b) (a -> c -> c) c
instance Functor (Foldr a) where
fmap g (Foldr m f z) = Foldr (g . m) f z
instance Pointed (Foldr a) where
point x = Foldr (const x) undefined undefined
instance Applicative (Foldr a) where
pure = point
Foldr m f z <*> Foldr m' f' z' = Foldr m'' f'' (z,z') where
m'' (c,c') = m c (m' c')
f'' a (c,c') = (f a c, f' a c')
instance Fold Foldr where
fold (Foldr m f z) = m . Foldable.foldr f z
Foldr m f z `comp` Foldr m' f' z' = Foldr (m . fst) f'' (z,z') where
f'' a (c,d) | d' <- f' a d = (f (m' d') c, d')
mapAccum (Foldr m f z) = first m . Traversable.mapAccumR scan' z where
scan' c a | c' <- f a c = (c',m c')
reducer = Foldr id cons mempty
instance Copointed (Foldr a) where
extract (Foldr m _ z) = m z
instance Comonad (Foldr a) where
extend = flip Foldr stepFoldr
stepFoldr :: a -> Foldr a b -> Foldr a b
stepFoldr a (Foldr m f z) = Foldr m f (f a z)
contravaraiant Yoneda lemma applied to a non-"Functor " functor F a where F a c = F ( a - > c - > c ) ( a - > c )
data Foldr1 a b = forall c. Foldr1 (c -> b) (a -> c -> c) (a -> c)
instance Functor (Foldr1 a) where
fmap g (Foldr1 m f z) = Foldr1 (g . m) f z
instance Pointed (Foldr1 a) where
point x = Foldr1 (const x) undefined undefined
instance Category Foldr1 where
id = Foldr1 id const id
Foldr1 m f z . Foldr1 m' f' z' = Foldr1 (m . fst) f'' z'' where
z'' (z' -> b) = (z (m' b), b)
f'' a (c,d) | d' <- f' a d = (f (m' d') c, d')
instance Arrow Foldr1 where
arr f = f' where f' = Foldr1 f const id
first (Foldr1 m f z) = Foldr1 (first m) f' (first z) where
f' (a,b) (c,_) = (f a c, b)
second (Foldr1 m f z) = Foldr1 (second m) f' (second z) where
f' (a,b) (_,c) = (a, f b c)
Foldr1 m f z *** Foldr1 m' f' z' = Foldr1 (m *** m') f'' (z *** z') where
f'' (a,b) (c,d) = (f a c, f' b d)
Foldr1 m f z &&& Foldr1 m' f' z' = Foldr1 (m *** m') f'' (z &&& z') where
f'' a (c,d) = (f a c, f' a d)
toFoldr1 :: Foldr a b -> Foldr1 a b
toFoldr1 (Foldr m f z) = Foldr1 m f (flip f z)
runFoldr1 :: Foldr1 a b -> a -> (Foldr1 a b, b)
runFoldr1 (Foldr1 m f z) (z -> c) = (Foldr1 m f (flip f c), m c)
instance Fold Foldr1 where
fold (Foldr1 m f z) = m' . Foldable.foldr f' Nothing where
f' a Nothing = Just (z a)
f' a (Just b) = Just (f a b)
m' Nothing = error "Foldr1.fold: empty"
m' (Just a) = m a
comp = (.)
reducer = Foldr1 id cons unit
scan m = snd . Traversable.mapAccumR runFoldr1 m
instance ArrowChoice Foldr1
Density comonad based
data DFoldr a b = forall c. DFoldr ((a -> c -> c) -> c -> b) (a -> c -> c) c
stepDFoldr :: a -> DFoldr a b -> DFoldr a b
stepDFoldr a (DFoldr m f z) = DFoldr m f (f a z)
instance Functor (DFoldr a) where
fmap g (DFoldr m f z) = DFoldr (fmap g . m) f z
instance Pointed (DFoldr a) where
point x = DFoldr (\_ _ -> x) undefined undefined
instance Applicative (DFoldr a) where
pure = point
DFoldr m f z <*> DFoldr m' f' z' = DFoldr m'' f'' (z,z') where
m'' _ (c,c') = m f c (m' f' c')
f'' a (c,c') = (f a c, f' a c')
instance Fold DFoldr where
fold (DFoldr m f z) = m f . Foldable.foldr f z
DFoldr m f z `comp` DFoldr m' f' z' = DFoldr (const (m f . fst)) f'' (z,z') where
f'' a (c,d) | d' <- f' a d = (f (m' f' d') c, d')
mapAccum (DFoldr m f z) = first (m f) . Traversable.mapAccumR scan' z where
scan' c a | c' <- f a c = (c',m f c')
reducer = DFoldr (const id) cons mempty
instance Copointed (DFoldr a) where
extract (DFoldr m f z) = m f z
instance Comonad (DFoldr a) where
extend = flip DFoldr stepDFoldr . const
type Algebra f c = f c -> c
data FoldF f b = forall c. FoldF (Algebra f c -> b) (Algebra f c)
instance Functor (FoldF f) where
fmap g (FoldF m f) = FoldF (g . m) f
instance Pointed (FoldF f) where
point x = FoldF (const x) (const x)
instance Copointed (FoldF f) where
extract (FoldF m f) = m f
instance Comonad (FoldF f) where
duplicate (FoldF m f) = FoldF (FoldF m) f
extend g (FoldF m f) = FoldF (g . FoldF m) f
|
b714f340d92edd864982f4da05c00720464c373bee6104302dc132b61b50dc6e | avsm/eeww | tests.ml | (**************************************************************************)
(* *)
(* OCaml *)
(* *)
, projet Gallium , INRIA Paris
(* *)
(* Copyright 2016 Institut National de Recherche en Informatique et *)
(* en Automatique. *)
(* *)
(* All rights reserved. This file is distributed under the terms of *)
(* the GNU Lesser General Public License version 2.1, with the *)
(* special exception on linking described in the file LICENSE. *)
(* *)
(**************************************************************************)
(* Definition of tests, built from actions *)
type t = {
test_name : string;
test_run_by_default : bool;
test_actions : Actions.t list;
test_description : string
}
let compare t1 t2 = String.compare t1.test_name t2.test_name
let (tests: (string, t) Hashtbl.t) = Hashtbl.create 20
let register test = Hashtbl.add tests test.test_name test
let get_registered_tests () =
let f _test_name test acc = test::acc in
let unsorted_tests = Hashtbl.fold f tests [] in
List.sort compare unsorted_tests
let default_tests () =
let f _test_name test acc =
if test.test_run_by_default then test::acc else acc in
Hashtbl.fold f tests []
let lookup name =
try Some (Hashtbl.find tests name)
with Not_found -> None
let test_of_action action =
{
test_name = Actions.name action;
test_run_by_default = false;
test_actions = [action];
test_description = Actions.description action;
}
let run_actions log testenv actions =
let total = List.length actions in
let rec run_actions_aux action_number env = function
| [] -> (Result.pass, env)
| action::remaining_actions ->
begin
Printf.fprintf log "\nRunning action %d/%d (%s)\n%!"
action_number total (Actions.name action);
let (result, env') = Actions.run log env action in
Printf.fprintf log "Action %d/%d (%s) %s\n%!"
action_number total (Actions.name action)
(Result.string_of_result result);
if Result.is_pass result
then run_actions_aux (action_number+1) env' remaining_actions
else (result, env')
end in
run_actions_aux 1 testenv actions
let run log env test =
Printf.fprintf log "Running test %s with %d actions\n%!"
test.test_name
(List.length test.test_actions);
run_actions log env test.test_actions
module TestSet = Set.Make
(struct
type nonrec t = t
let compare = compare
end)
| null | https://raw.githubusercontent.com/avsm/eeww/4d65720b5dd51376842ffe5c8c220d5329c1dc10/boot/ocaml/ocamltest/tests.ml | ocaml | ************************************************************************
OCaml
en Automatique.
All rights reserved. This file is distributed under the terms of
special exception on linking described in the file LICENSE.
************************************************************************
Definition of tests, built from actions | , projet Gallium , INRIA Paris
Copyright 2016 Institut National de Recherche en Informatique et
the GNU Lesser General Public License version 2.1 , with the
type t = {
test_name : string;
test_run_by_default : bool;
test_actions : Actions.t list;
test_description : string
}
let compare t1 t2 = String.compare t1.test_name t2.test_name
let (tests: (string, t) Hashtbl.t) = Hashtbl.create 20
let register test = Hashtbl.add tests test.test_name test
let get_registered_tests () =
let f _test_name test acc = test::acc in
let unsorted_tests = Hashtbl.fold f tests [] in
List.sort compare unsorted_tests
let default_tests () =
let f _test_name test acc =
if test.test_run_by_default then test::acc else acc in
Hashtbl.fold f tests []
let lookup name =
try Some (Hashtbl.find tests name)
with Not_found -> None
let test_of_action action =
{
test_name = Actions.name action;
test_run_by_default = false;
test_actions = [action];
test_description = Actions.description action;
}
let run_actions log testenv actions =
let total = List.length actions in
let rec run_actions_aux action_number env = function
| [] -> (Result.pass, env)
| action::remaining_actions ->
begin
Printf.fprintf log "\nRunning action %d/%d (%s)\n%!"
action_number total (Actions.name action);
let (result, env') = Actions.run log env action in
Printf.fprintf log "Action %d/%d (%s) %s\n%!"
action_number total (Actions.name action)
(Result.string_of_result result);
if Result.is_pass result
then run_actions_aux (action_number+1) env' remaining_actions
else (result, env')
end in
run_actions_aux 1 testenv actions
let run log env test =
Printf.fprintf log "Running test %s with %d actions\n%!"
test.test_name
(List.length test.test_actions);
run_actions log env test.test_actions
module TestSet = Set.Make
(struct
type nonrec t = t
let compare = compare
end)
|
71392682cf858d1a13a2f1542dd45befbe432eb7d15efc9d9885ffd33d2c5c4d | neongreen/haskell-ex | spiral.hs | module Spiral where
import Data.Matrix
data Direction = R | D | U | L
main :: IO ()
main = do
putStrLn "Please enter a size for the spiral!"
i <- getInt
putStrLn $ concatMap (\line -> (concatMap (\x -> if x == 1 then "* " else " ") line) ++ "\n") $ toLists $ spiral i
spiral :: Int -> Matrix Int
spiral i = buildSpiral (zero i i) i R (1,1) 0
buildSpiral :: Matrix Int -> Int -> Direction -> (Int, Int) -> Int -> Matrix Int
buildSpiral m i dir curr count | count >=3 = (setElem 1 curr m)
| (available m curr) =
if (atBound dir curr i m)
then buildSpiral m i (nextDir dir) curr (count + 1)
else buildSpiral (setElem 1 curr m) i dir (nextPos dir curr) 0
| otherwise = m
available :: Matrix Int -> (Int, Int) -> Bool
available m pos = (m ! pos) /= 1
atBound :: Direction -> (Int,Int) -> Int -> Matrix Int -> Bool
atBound R c@(x,y) i m = (atEnd i R c) || ((distToEnd i R c) > 1 && m ! (x,y+2) == 1)
atBound D c@(x,y) i m = (atEnd i D c) || ((distToEnd i D c) > 1 && m ! (x+2,y) == 1)
atBound L c@(x,y) i m = (atEnd i L c) || ((distToEnd i L c) > 1 && m ! (x,y-2) == 1)
atBound U c@(x,y) i m = (atEnd i U c) || ((distToEnd i U c) > 1 && m ! (x-2,y) == 1)
atEnd :: Int -> Direction -> (Int,Int) -> Bool
atEnd i R (x,y) = i - y == 0
atEnd i D (x,y) = i - x == 0
atEnd _ L (x,y) = y == 1
atEnd _ U (x,y) = x == 1
distToEnd :: Int -> Direction -> (Int,Int) -> Int
distToEnd i R (x,y) = i - y
distToEnd i D (x,y) = i - x
distToEnd _ L (x,y) = y - 1
distToEnd _ U (x,y) = x - 1
nextPos :: Direction -> (Int,Int) -> (Int,Int)
nextPos R (x,y) = (x,y+1)
nextPos D (x,y) = (x+1,y)
nextPos L (x,y) = (x,y-1)
nextPos U (x,y) = (x-1,y)
nextDir :: Direction -> Direction
nextDir R = D
nextDir D = L
nextDir U = R
nextDir L = U
getInt :: IO Int
getInt = do
str <- getLine
return (read str) | null | https://raw.githubusercontent.com/neongreen/haskell-ex/345115444fdf370a43390fd942e2851b9b1963ad/week4/spiral/aneksteind/spiral.hs | haskell | module Spiral where
import Data.Matrix
data Direction = R | D | U | L
main :: IO ()
main = do
putStrLn "Please enter a size for the spiral!"
i <- getInt
putStrLn $ concatMap (\line -> (concatMap (\x -> if x == 1 then "* " else " ") line) ++ "\n") $ toLists $ spiral i
spiral :: Int -> Matrix Int
spiral i = buildSpiral (zero i i) i R (1,1) 0
buildSpiral :: Matrix Int -> Int -> Direction -> (Int, Int) -> Int -> Matrix Int
buildSpiral m i dir curr count | count >=3 = (setElem 1 curr m)
| (available m curr) =
if (atBound dir curr i m)
then buildSpiral m i (nextDir dir) curr (count + 1)
else buildSpiral (setElem 1 curr m) i dir (nextPos dir curr) 0
| otherwise = m
available :: Matrix Int -> (Int, Int) -> Bool
available m pos = (m ! pos) /= 1
atBound :: Direction -> (Int,Int) -> Int -> Matrix Int -> Bool
atBound R c@(x,y) i m = (atEnd i R c) || ((distToEnd i R c) > 1 && m ! (x,y+2) == 1)
atBound D c@(x,y) i m = (atEnd i D c) || ((distToEnd i D c) > 1 && m ! (x+2,y) == 1)
atBound L c@(x,y) i m = (atEnd i L c) || ((distToEnd i L c) > 1 && m ! (x,y-2) == 1)
atBound U c@(x,y) i m = (atEnd i U c) || ((distToEnd i U c) > 1 && m ! (x-2,y) == 1)
atEnd :: Int -> Direction -> (Int,Int) -> Bool
atEnd i R (x,y) = i - y == 0
atEnd i D (x,y) = i - x == 0
atEnd _ L (x,y) = y == 1
atEnd _ U (x,y) = x == 1
distToEnd :: Int -> Direction -> (Int,Int) -> Int
distToEnd i R (x,y) = i - y
distToEnd i D (x,y) = i - x
distToEnd _ L (x,y) = y - 1
distToEnd _ U (x,y) = x - 1
nextPos :: Direction -> (Int,Int) -> (Int,Int)
nextPos R (x,y) = (x,y+1)
nextPos D (x,y) = (x+1,y)
nextPos L (x,y) = (x,y-1)
nextPos U (x,y) = (x-1,y)
nextDir :: Direction -> Direction
nextDir R = D
nextDir D = L
nextDir U = R
nextDir L = U
getInt :: IO Int
getInt = do
str <- getLine
return (read str) |
|
5f758dc38d79780f445c8481c4628e33588aba2668a09fcb76a76113d1d42c4a | dmjio/scythe | LexerUtils.hs | # LANGUAGE RecordWildCards #
module Data.CSV.LexerUtils where
import Control.Monad.State
import qualified Data.Text.Lazy as LT
import qualified Data.Text.Lazy.Encoding as LT
import Data.ByteString (ByteString)
import qualified Data.ByteString as B
import Data.ByteString.Builder
import qualified Data.ByteString.Internal as B (w2c)
import qualified Data.ByteString.Lazy as L
import Data.Text (Text)
import qualified Data.Text as T
import qualified Data.Text.Encoding as T
import Data.Word
import GHC.Int
appendMode :: Action
appendMode len bs = do
s@LexerState {..} <- get
put s { stringBuffer
= stringBuffer
<> (lazyByteString
$ L.take len (alexStr bs))
}
pure Nothing
endMode :: Action
endMode _ _ = do
mode <- gets lexerMode
case mode of
InNormal -> pure Nothing
InString -> apply
where
apply = do
buf <- toLazyByteString <$> gets stringBuffer
modify $ \s -> s
{ lexerMode = InNormal
, stringBuffer = mempty
}
pure $ Just (Item buf)
data LexerMode
= InNormal
| InString
deriving (Show, Eq)
data Token
= Item !L.ByteString
| Newline
| Comma
| TokenError !Error
deriving (Show, Eq)
type Action = Int64 -> AlexInput -> State LexerState (Maybe Token)
data AlexInput = AlexInput
{ alexChar :: {-# UNPACK #-} !Char
, alexStr :: !L.ByteString
, alexBytePos :: {-# UNPACK #-} !Int
} deriving (Show, Eq)
data LexerState
= LexerState
{ matchedInput :: {-# UNPACK #-} !AlexInput
, lexerMode :: !LexerMode
, stringBuffer :: !Builder
}
item :: Action
item inputLength _ = do
LexerState {..} <- get
pure $ Just $ Item $
L.take inputLength (alexStr matchedInput)
token t = \_ _ ->
pure (Just t)
errorAction :: AlexInput -> State LexerState [Token]
errorAction AlexInput {..} =
pure [TokenError (LexerError (LT.unpack $ LT.decodeUtf8 alexStr))]
data Error
= LexerError String
| UntermString
deriving (Show, Eq)
eofAction :: State LexerState [Token]
eofAction = do
mode <- gets lexerMode
pure $ case mode of
InString -> [TokenError UntermString]
InNormal -> []
alexGetByte :: AlexInput -> Maybe (Word8, AlexInput)
alexGetByte AlexInput {..} =
case L.uncons alexStr of
Nothing -> Nothing
Just (c, rest) ->
Just (c, AlexInput {
alexChar = B.w2c c,
alexStr = rest,
alexBytePos = alexBytePos+1
})
startString :: Action
startString _ _ =
Nothing <$ do
modify $ \s -> s { lexerMode = InString }
alexInputPrevChar :: AlexInput -> Char
alexInputPrevChar = alexChar
| null | https://raw.githubusercontent.com/dmjio/scythe/a5f4e79803e9bf74cb1247a3ba7217a18b529f36/src/Data/CSV/LexerUtils.hs | haskell | # UNPACK #
# UNPACK #
# UNPACK # | # LANGUAGE RecordWildCards #
module Data.CSV.LexerUtils where
import Control.Monad.State
import qualified Data.Text.Lazy as LT
import qualified Data.Text.Lazy.Encoding as LT
import Data.ByteString (ByteString)
import qualified Data.ByteString as B
import Data.ByteString.Builder
import qualified Data.ByteString.Internal as B (w2c)
import qualified Data.ByteString.Lazy as L
import Data.Text (Text)
import qualified Data.Text as T
import qualified Data.Text.Encoding as T
import Data.Word
import GHC.Int
appendMode :: Action
appendMode len bs = do
s@LexerState {..} <- get
put s { stringBuffer
= stringBuffer
<> (lazyByteString
$ L.take len (alexStr bs))
}
pure Nothing
endMode :: Action
endMode _ _ = do
mode <- gets lexerMode
case mode of
InNormal -> pure Nothing
InString -> apply
where
apply = do
buf <- toLazyByteString <$> gets stringBuffer
modify $ \s -> s
{ lexerMode = InNormal
, stringBuffer = mempty
}
pure $ Just (Item buf)
data LexerMode
= InNormal
| InString
deriving (Show, Eq)
data Token
= Item !L.ByteString
| Newline
| Comma
| TokenError !Error
deriving (Show, Eq)
type Action = Int64 -> AlexInput -> State LexerState (Maybe Token)
data AlexInput = AlexInput
  { alexChar :: {-# UNPACK #-} !Char
  , alexStr :: !L.ByteString
  , alexBytePos :: {-# UNPACK #-} !Int
  } deriving (Show, Eq)
data LexerState
  = LexerState
  { matchedInput :: {-# UNPACK #-} !AlexInput
  , lexerMode :: !LexerMode
  , stringBuffer :: !Builder
  }
item :: Action
item inputLength _ = do
LexerState {..} <- get
pure $ Just $ Item $
L.take inputLength (alexStr matchedInput)
token t = \_ _ ->
pure (Just t)
errorAction :: AlexInput -> State LexerState [Token]
errorAction AlexInput {..} =
pure [TokenError (LexerError (LT.unpack $ LT.decodeUtf8 alexStr))]
data Error
= LexerError String
| UntermString
deriving (Show, Eq)
eofAction :: State LexerState [Token]
eofAction = do
mode <- gets lexerMode
pure $ case mode of
InString -> [TokenError UntermString]
InNormal -> []
alexGetByte :: AlexInput -> Maybe (Word8, AlexInput)
alexGetByte AlexInput {..} =
case L.uncons alexStr of
Nothing -> Nothing
Just (c, rest) ->
Just (c, AlexInput {
alexChar = B.w2c c,
alexStr = rest,
alexBytePos = alexBytePos+1
})
startString :: Action
startString _ _ =
Nothing <$ do
modify $ \s -> s { lexerMode = InString }
alexInputPrevChar :: AlexInput -> Char
alexInputPrevChar = alexChar
|
ceb97320547e281d4c39641683e8ec5740aff3f5921b72737f0cfc1fca09eba1 | oriansj/mes-m2 | display_arith.scm | GNU --- Maxwell Equations of Software
Copyright © 2016,2018 Jan ( janneke ) Nieuwenhuizen < >
;;;
;;; This file is part of GNU Mes.
;;;
;;; GNU Mes is free software; you can redistribute it and/or modify it
;;; under the terms of the GNU General Public License as published by
;;; the Free Software Foundation; either version 3 of the License, or (at
;;; your option) any later version.
;;;
;;; GNU Mes is distributed in the hope that it will be useful, but
;;; WITHOUT ANY WARRANTY; without even the implied warranty of
;;; MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
;;; GNU General Public License for more details.
;;;
;;; You should have received a copy of the GNU General Public License
;;; along with GNU Mes.  If not, see <http://www.gnu.org/licenses/>.
;; Setup output file
(set-current-output-port (open-output-file "test/results/test007.answer"))
;; Test -
(display "Test -\n")
(display (- 4 1))
(display #\newline)
(display (- 1 4))
(display #\newline)
(display (- 4 4))
(display #\newline)
;; Test -
(display "Test +\n")
(display (+ 4 1))
(display #\newline)
(display (+ 1 -4))
(display #\newline)
(display (+ 4 4))
(display #\newline)
;; Test quotient
(display "Test quotient\n")
(display (quotient 4 1))
(display #\newline)
(display (quotient 1 4))
(display #\newline)
(display (quotient 4 -1))
(display #\newline)
;; Test Remainder
(display "Test remainder\n")
(display (remainder 13 4))
(display #\newline)
(display (remainder -13 4))
(display #\newline)
;; Test modulo
(display "Test modulo\n")
(display (modulo 13 4))
(display #\newline)
(display (modulo -13 4))
(display #\newline)
(display (modulo 13 -4))
(display #\newline)
(display (modulo -13 -4))
(display #\newline)
;; Test *
(display "Test *\n")
(display (* 2 2 2 2))
(display #\newline)
(display (* 1 4))
(display #\newline)
(display (* 4 -1))
(display #\newline)
;; Test logand
(display "Test logand\nUsing:")
(display #x123456)
(display #\newline)
(display (logand #x123456 #xFF000000))
(display #\newline)
(display (logand #x123456 #xFF0000))
(display #\newline)
(display (logand #x123456 #xFF00))
(display #\newline)
(display (logand #x123456 #xFF))
(display #\newline)
(display (logand #x123456 -1))
(display #\newline)
;; Test logior
(display "Test logior\n")
(display (logior #x0 #x0))
(display #\newline)
(display (logior #x0 #xFF))
(display #\newline)
(display (logior #xFF #x0))
(display #\newline)
(display (logior #xFF #xFF))
(display #\newline)
(display (logior #xFF00 -1))
(display #\newline)
;; Test not
(display "Test lognot\n")
(display (lognot 1))
(display #\newline)
(display (lognot -1))
(display #\newline)
;; Test logxor
(display "Test logxor\n")
(display (logxor #xFF00FF #xFF00))
(display #\newline)
(display (logxor #xFF00FF #xF00F0))
(display #\newline)
(display (logxor #xF0F0F0 #xFF00FF))
(display #\newline)
;; Test ash
(display "Test ash\n")
(display (ash 4 1))
(display #\newline)
(display (ash 1 4))
(display #\newline)
(display (ash 4 -1))
(display #\newline)
(display (ash #xFF00 -8))
(display #\newline)
(exit 0)
| null | https://raw.githubusercontent.com/oriansj/mes-m2/b44fbc976ae334252de4eb82a57c361a195f2194/test/test007/display_arith.scm | scheme |
you can redistribute it and/or modify it
either version 3 of the License , or ( at
your option) any later version.
WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
Setup output file
Test -
Test -
Test quotient
Test Remainder
Test modulo
Test *
Test logand
Test logior
Test not
Test logxor
Test ash | GNU --- Maxwell Equations of Software
Copyright © 2016,2018 Jan ( janneke ) Nieuwenhuizen < >
This file is part of GNU .
under the terms of the GNU General Public License as published by
GNU is distributed in the hope that it will be useful , but
You should have received a copy of the GNU General Public License
along with GNU . If not , see < / >
(set-current-output-port (open-output-file "test/results/test007.answer"))
(display "Test -\n")
(display (- 4 1))
(display #\newline)
(display (- 1 4))
(display #\newline)
(display (- 4 4))
(display #\newline)
(display "Test +\n")
(display (+ 4 1))
(display #\newline)
(display (+ 1 -4))
(display #\newline)
(display (+ 4 4))
(display #\newline)
(display "Test quotient\n")
(display (quotient 4 1))
(display #\newline)
(display (quotient 1 4))
(display #\newline)
(display (quotient 4 -1))
(display #\newline)
(display "Test remainder\n")
(display (remainder 13 4))
(display #\newline)
(display (remainder -13 4))
(display #\newline)
(display "Test modulo\n")
(display (modulo 13 4))
(display #\newline)
(display (modulo -13 4))
(display #\newline)
(display (modulo 13 -4))
(display #\newline)
(display (modulo -13 -4))
(display #\newline)
(display "Test *\n")
(display (* 2 2 2 2))
(display #\newline)
(display (* 1 4))
(display #\newline)
(display (* 4 -1))
(display #\newline)
(display "Test logand\nUsing:")
(display #x123456)
(display #\newline)
(display (logand #x123456 #xFF000000))
(display #\newline)
(display (logand #x123456 #xFF0000))
(display #\newline)
(display (logand #x123456 #xFF00))
(display #\newline)
(display (logand #x123456 #xFF))
(display #\newline)
(display (logand #x123456 -1))
(display #\newline)
(display "Test logior\n")
(display (logior #x0 #x0))
(display #\newline)
(display (logior #x0 #xFF))
(display #\newline)
(display (logior #xFF #x0))
(display #\newline)
(display (logior #xFF #xFF))
(display #\newline)
(display (logior #xFF00 -1))
(display #\newline)
(display "Test lognot\n")
(display (lognot 1))
(display #\newline)
(display (lognot -1))
(display #\newline)
(display "Test logxor\n")
(display (logxor #xFF00FF #xFF00))
(display #\newline)
(display (logxor #xFF00FF #xF00F0))
(display #\newline)
(display (logxor #xF0F0F0 #xFF00FF))
(display #\newline)
(display "Test ash\n")
(display (ash 4 1))
(display #\newline)
(display (ash 1 4))
(display #\newline)
(display (ash 4 -1))
(display #\newline)
(display (ash #xFF00 -8))
(display #\newline)
(exit 0)
|
5310e93546342d9bbd39c5f8df849d1d56bb6f1fc5df964f00a1ae16f8480714 | dimitaruzunov/fp-2018 | cycle.scm | (require rackunit rackunit/text-ui)
(define the-empty-stream '())
(define-syntax cons-stream
(syntax-rules ()
((cons-stream h t)
(cons h (delay t)))))
(define (empty-stream? s)
(equal? s the-empty-stream))
(define head car)
(define (tail s)
(force (cdr s)))
(define (cycle l)
(define (iter remaining)
(if (null? remaining)
(iter l)
(cons-stream (car remaining)
(iter (cdr remaining)))))
(if (null? l)
the-empty-stream
(iter l)))
(define (stream-take n s)
(if (or (= n 0)
(empty-stream? s))
'()
(cons (head s)
(stream-take (- n 1) (tail s)))))
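;; Editor's sketch: cons-stream delays its tail, so (cycle '(1 2 3)) only
;; produces elements as stream-take forces them, e.g.
;;   (stream-take 4 (cycle '(1 2 3))) ; => (1 2 3 1)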
(define cycle-tests
(test-suite
"Tests for cycle"
(check-equal? (stream-take 5 (cycle '())) '())
(check-equal? (stream-take 5 (cycle '(1))) '(1 1 1 1 1))
(check-equal? (stream-take 3 (cycle '(6))) '(6 6 6))
(check-equal? (stream-take 2 (cycle '(1 2 3))) '(1 2))
(check-equal? (stream-take 5 (cycle '(1 2 3))) '(1 2 3 1 2))
(check-equal? (stream-take 7 (cycle '(1 2 3))) '(1 2 3 1 2 3 1))))
(run-tests cycle-tests)
| null | https://raw.githubusercontent.com/dimitaruzunov/fp-2018/f75f0cd009cc7f41ce55a5ec71fb4b8eadafc4eb/exercises/08/cycle.scm | scheme | (require rackunit rackunit/text-ui)
(define the-empty-stream '())
(define-syntax cons-stream
(syntax-rules ()
((cons-stream h t)
(cons h (delay t)))))
(define (empty-stream? s)
(equal? s the-empty-stream))
(define head car)
(define (tail s)
(force (cdr s)))
(define (cycle l)
(define (iter remaining)
(if (null? remaining)
(iter l)
(cons-stream (car remaining)
(iter (cdr remaining)))))
(if (null? l)
the-empty-stream
(iter l)))
(define (stream-take n s)
(if (or (= n 0)
(empty-stream? s))
'()
(cons (head s)
(stream-take (- n 1) (tail s)))))
(define cycle-tests
(test-suite
"Tests for cycle"
(check-equal? (stream-take 5 (cycle '())) '())
(check-equal? (stream-take 5 (cycle '(1))) '(1 1 1 1 1))
(check-equal? (stream-take 3 (cycle '(6))) '(6 6 6))
(check-equal? (stream-take 2 (cycle '(1 2 3))) '(1 2))
(check-equal? (stream-take 5 (cycle '(1 2 3))) '(1 2 3 1 2))
(check-equal? (stream-take 7 (cycle '(1 2 3))) '(1 2 3 1 2 3 1))))
(run-tests cycle-tests)
|
|
b3c829205c0fa859a0e74b1017fa9f18ffbded7f4dd52103ca983420329aec56 | patricoferris/ocaml-multicore-monorepo | check_runtime.ml | Js_of_ocaml compiler
* Copyright (C) 2021 Hugo Heuzard
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published by
* the Free Software Foundation, with linking exception;
* either version 2.1 of the License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with this program; if not, write to the Free Software
* Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
*)
open! Js_of_ocaml_compiler.Stdlib
open Js_of_ocaml_compiler
let group_by_snd l =
l
|> List.sort_uniq ~compare:(fun (n1, l1) (n2, l2) ->
match Poly.compare l1 l2 with
| 0 -> String.compare n1 n2
| c -> c)
|> List.group ~f:(fun (_, g1) (_, g2) -> Poly.equal g1 g2)
let print_groups output l =
List.iter l ~f:(fun group ->
match group with
| [] -> assert false
| (_, loc) :: _ ->
(match loc with
| [] -> ()
| loc ->
output_string
output
(Printf.sprintf "\nFrom %s:\n" (String.concat ~sep:"," loc)));
List.iter group ~f:(fun (name, _) ->
output_string output (Printf.sprintf "%s\n" name)))
let f (runtime_files, bytecode, target_env) =
let runtime_files, builtin =
List.partition_map runtime_files ~f:(fun name ->
match Builtins.find name with
| Some t -> `Snd t
| None -> `Fst name)
in
let builtin =
if false then builtin else Js_of_ocaml_compiler_runtime_files.runtime @ builtin
in
List.iter builtin ~f:(fun t ->
let filename = Builtins.File.name t in
let runtimes = Linker.parse_builtin t in
Linker.load_fragments ~target_env ~filename runtimes);
Linker.load_files ~target_env runtime_files;
let all_prims =
List.concat_map bytecode ~f:(fun f ->
let ic = open_in_bin f in
let prims =
match Parse_bytecode.from_channel ic with
| `Cmo x -> x.Cmo_format.cu_primitives
| `Cma x ->
List.concat_map
~f:(fun x -> x.Cmo_format.cu_primitives)
x.Cmo_format.lib_units
| `Exe ->
let toc = Parse_bytecode.Toc.read ic in
Parse_bytecode.read_primitives toc ic
in
close_in ic;
List.map ~f:(fun p -> p, f) prims)
in
let _percent_prim, needed =
List.partition all_prims ~f:(fun (x, _) -> Char.equal (String.get x 0) '%')
in
let origin =
List.fold_left
~f:(fun acc (x, y) ->
let l = try StringMap.find x acc with Not_found -> [] in
StringMap.add x (y :: l) acc)
~init:StringMap.empty
needed
in
let needed = StringSet.of_list (List.map ~f:fst needed) in
let from_runtime1 = Linker.get_provided () in
let from_runtime2 = Primitive.get_external () in
(* [from_runtime2] is a superset of [from_runtime1].
Extra primitives are registered on the ocaml side (e.g. generate.ml) *)
assert (StringSet.is_empty (StringSet.diff from_runtime1 from_runtime2));
let missing' = StringSet.diff needed from_runtime1 in
let all_used, missing =
let state = Linker.init () in
let state, missing = Linker.resolve_deps state needed in
StringSet.of_list (Linker.all state), missing
in
assert (StringSet.equal missing missing');
let extra =
StringSet.diff from_runtime1 all_used
|> StringSet.elements
|> List.map ~f:(fun name ->
( name
, match Linker.origin ~name with
| None -> []
| Some x -> [ x ] ))
|> group_by_snd
in
let missing_for_real =
StringSet.diff missing from_runtime2
|> StringSet.elements
|> List.map ~f:(fun x -> x, StringMap.find x origin)
|> group_by_snd
in
let output = stdout in
set_binary_mode_out output true;
output_string output "Missing\n";
output_string output "-------\n";
print_groups output missing_for_real;
output_string output "\n";
output_string output "Unused\n";
output_string output "-------\n";
print_groups output extra;
output_string output "\n";
()
let options =
let open Cmdliner in
(* TODO: add flags to only display missing or extra primitives *)
let files =
let doc = "Bytecode and JavaScript files [$(docv)]. " in
Arg.(value & pos_all string [] & info [] ~docv:"FILES" ~doc)
in
let build_t files target_env =
let runtime_files, bc_files =
List.partition files ~f:(fun file -> Filename.check_suffix file ".js")
in
`Ok (runtime_files, bc_files, target_env)
in
let target_env =
let doc = "Runtime compile target." in
let options = List.map ~f:(fun env -> Target_env.to_string env, env) Target_env.all in
let docv = Printf.sprintf "{%s}" (String.concat ~sep:"," (List.map ~f:fst options)) in
Arg.(
value & opt (enum options) Target_env.Isomorphic & info [ "target-env" ] ~docv ~doc)
in
let t = Term.(const build_t $ files $ target_env) in
Term.ret t
let info =
Info.make
~name:"check-runtime"
~doc:"Check runtime"
~description:"js_of_ocaml-check-runtime checks the runtime."
let command =
let t = Cmdliner.Term.(const f $ options) in
Cmdliner.Cmd.v info t
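(* Editor's sketch (file names and driver wiring are assumed, not taken from
   this repository): once registered with the js_of_ocaml command-line driver,
   the subcommand is invoked roughly as
     js_of_ocaml check-runtime runtime.js myprog.cma --target-env isomorphic
   and prints the primitives the bytecode needs but the runtime lacks, followed
   by runtime-provided primitives that nothing uses. *)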
| null | https://raw.githubusercontent.com/patricoferris/ocaml-multicore-monorepo/624b3293ee41e83736fe7ac3a79f810c2b70f68b/duniverse/js_of_ocaml/compiler/bin-js_of_ocaml/check_runtime.ml | ocaml | [from_runtime2] is a superset of [from_runtime1].
Extra primitives are registered on the ocaml side (e.g. generate.ml)
TODO: add flags to only display missing or extra primitives | Js_of_ocaml compiler
* Copyright (C) 2021 Hugo Heuzard
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published by
* the Free Software Foundation, with linking exception;
* either version 2.1 of the License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with this program; if not, write to the Free Software
* Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
*)
open! Js_of_ocaml_compiler.Stdlib
open Js_of_ocaml_compiler
let group_by_snd l =
l
|> List.sort_uniq ~compare:(fun (n1, l1) (n2, l2) ->
match Poly.compare l1 l2 with
| 0 -> String.compare n1 n2
| c -> c)
|> List.group ~f:(fun (_, g1) (_, g2) -> Poly.equal g1 g2)
let print_groups output l =
List.iter l ~f:(fun group ->
match group with
| [] -> assert false
| (_, loc) :: _ ->
(match loc with
| [] -> ()
| loc ->
output_string
output
(Printf.sprintf "\nFrom %s:\n" (String.concat ~sep:"," loc)));
List.iter group ~f:(fun (name, _) ->
output_string output (Printf.sprintf "%s\n" name)))
let f (runtime_files, bytecode, target_env) =
let runtime_files, builtin =
List.partition_map runtime_files ~f:(fun name ->
match Builtins.find name with
| Some t -> `Snd t
| None -> `Fst name)
in
let builtin =
if false then builtin else Js_of_ocaml_compiler_runtime_files.runtime @ builtin
in
List.iter builtin ~f:(fun t ->
let filename = Builtins.File.name t in
let runtimes = Linker.parse_builtin t in
Linker.load_fragments ~target_env ~filename runtimes);
Linker.load_files ~target_env runtime_files;
let all_prims =
List.concat_map bytecode ~f:(fun f ->
let ic = open_in_bin f in
let prims =
match Parse_bytecode.from_channel ic with
| `Cmo x -> x.Cmo_format.cu_primitives
| `Cma x ->
List.concat_map
~f:(fun x -> x.Cmo_format.cu_primitives)
x.Cmo_format.lib_units
| `Exe ->
let toc = Parse_bytecode.Toc.read ic in
Parse_bytecode.read_primitives toc ic
in
close_in ic;
List.map ~f:(fun p -> p, f) prims)
in
let _percent_prim, needed =
List.partition all_prims ~f:(fun (x, _) -> Char.equal (String.get x 0) '%')
in
let origin =
List.fold_left
~f:(fun acc (x, y) ->
let l = try StringMap.find x acc with Not_found -> [] in
StringMap.add x (y :: l) acc)
~init:StringMap.empty
needed
in
let needed = StringSet.of_list (List.map ~f:fst needed) in
let from_runtime1 = Linker.get_provided () in
let from_runtime2 = Primitive.get_external () in
assert (StringSet.is_empty (StringSet.diff from_runtime1 from_runtime2));
let missing' = StringSet.diff needed from_runtime1 in
let all_used, missing =
let state = Linker.init () in
let state, missing = Linker.resolve_deps state needed in
StringSet.of_list (Linker.all state), missing
in
assert (StringSet.equal missing missing');
let extra =
StringSet.diff from_runtime1 all_used
|> StringSet.elements
|> List.map ~f:(fun name ->
( name
, match Linker.origin ~name with
| None -> []
| Some x -> [ x ] ))
|> group_by_snd
in
let missing_for_real =
StringSet.diff missing from_runtime2
|> StringSet.elements
|> List.map ~f:(fun x -> x, StringMap.find x origin)
|> group_by_snd
in
let output = stdout in
set_binary_mode_out output true;
output_string output "Missing\n";
output_string output "-------\n";
print_groups output missing_for_real;
output_string output "\n";
output_string output "Unused\n";
output_string output "-------\n";
print_groups output extra;
output_string output "\n";
()
let options =
let open Cmdliner in
let files =
let doc = "Bytecode and JavaScript files [$(docv)]. " in
Arg.(value & pos_all string [] & info [] ~docv:"FILES" ~doc)
in
let build_t files target_env =
let runtime_files, bc_files =
List.partition files ~f:(fun file -> Filename.check_suffix file ".js")
in
`Ok (runtime_files, bc_files, target_env)
in
let target_env =
let doc = "Runtime compile target." in
let options = List.map ~f:(fun env -> Target_env.to_string env, env) Target_env.all in
let docv = Printf.sprintf "{%s}" (String.concat ~sep:"," (List.map ~f:fst options)) in
Arg.(
value & opt (enum options) Target_env.Isomorphic & info [ "target-env" ] ~docv ~doc)
in
let t = Term.(const build_t $ files $ target_env) in
Term.ret t
let info =
Info.make
~name:"check-runtime"
~doc:"Check runtime"
~description:"js_of_ocaml-check-runtime checks the runtime."
let command =
let t = Cmdliner.Term.(const f $ options) in
Cmdliner.Cmd.v info t
|
ec88c5e9df09d5f86f1abfef449f2628904866ba81c93200fdaf7e37b2c6e8cc | thi-ng/cgg | project.clj | (defproject thi.ng/cgg "0.1.0-SNAPSHOT"
:description "Cosine gradient designer for thi.ng/color"
:url ""
:license {:name "Apache Software License"
:url "-2.0"
:distribution :repo}
:min-lein-version "2.6.1"
:dependencies [[org.clojure/clojure "1.8.0"]
[org.clojure/clojurescript "1.7.228"]
[org.clojure/core.async "0.2.374" :exclusions [org.clojure/tools.reader]]
[thi.ng/geom "0.0.1062"]
[thi.ng/color "1.1.1"]
[cljs-log "0.2.2"]
[reagent "0.5.1"]]
:plugins [[lein-figwheel "0.5.2"]
[lein-cljsbuild "1.1.3" :exclusions [[org.clojure/clojure]]]]
:source-paths ["src"]
:clean-targets ^{:protect false} ["resources/public/js/compiled" "target"]
:cljsbuild {:builds
[{:id "dev"
:source-paths ["src"]
:figwheel true
:compiler {:main thi.ng.cgg.core
:asset-path "js/compiled/out"
:output-to "resources/public/js/compiled/app.js"
:output-dir "resources/public/js/compiled/out"
:source-map-timestamp true}}
{:id "prod"
:source-paths ["src"]
:compiler {:output-to "resources/public/js/compiled/app.js"
:main thi.ng.cgg.core
:optimizations :advanced
:pretty-print false}}]}
:figwheel {:css-dirs ["resources/public/css"]})
| null | https://raw.githubusercontent.com/thi-ng/cgg/8eab52272a9934c2d4af98bfc91d711fca8d5294/project.clj | clojure | (defproject thi.ng/cgg "0.1.0-SNAPSHOT"
:description "Cosine gradient designer for thi.ng/color"
:url ""
:license {:name "Apache Software License"
:url "-2.0"
:distribution :repo}
:min-lein-version "2.6.1"
:dependencies [[org.clojure/clojure "1.8.0"]
[org.clojure/clojurescript "1.7.228"]
[org.clojure/core.async "0.2.374" :exclusions [org.clojure/tools.reader]]
[thi.ng/geom "0.0.1062"]
[thi.ng/color "1.1.1"]
[cljs-log "0.2.2"]
[reagent "0.5.1"]]
:plugins [[lein-figwheel "0.5.2"]
[lein-cljsbuild "1.1.3" :exclusions [[org.clojure/clojure]]]]
:source-paths ["src"]
:clean-targets ^{:protect false} ["resources/public/js/compiled" "target"]
:cljsbuild {:builds
[{:id "dev"
:source-paths ["src"]
:figwheel true
:compiler {:main thi.ng.cgg.core
:asset-path "js/compiled/out"
:output-to "resources/public/js/compiled/app.js"
:output-dir "resources/public/js/compiled/out"
:source-map-timestamp true}}
{:id "prod"
:source-paths ["src"]
:compiler {:output-to "resources/public/js/compiled/app.js"
:main thi.ng.cgg.core
:optimizations :advanced
:pretty-print false}}]}
:figwheel {:css-dirs ["resources/public/css"]})
|
|
866d6313c4ed386e90800190919e83c793752af817e026cb72f302638fdabfde | ertugrulcetin/ClojureNews | entry.cljs | (ns controller.entry
(:require [ajax.core :as ajax :refer [GET POST PUT DELETE]]
[cljc.validation :as validation]
[cljc.error-messages :as error-message]
[reagent.core :as r]
[goog.dom :as dom]
[util.view]
[util.controller]
[view.entry.story-entry]
[view.entry.ask-entry]
[view.entry.job]
[view.entry.event]
[view.list.story]
[view.list.ask]
[view.list.job]
[view.list.event]
[view.list.newest]
[controller.upvote]
[controller.comment-entry]))
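;; Entry controller: fetches stories, asks, jobs and events over AJAX, renders
;; the matching Reagent views into the main container, and registers DOM
;; listeners for comment, edit, delete and upvote actions.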
(declare add-event-listener-to-add-comment-button
add-event-listener-to-upvote-buttons-for-comments
add-event-listener-to-edit-story-button
add-event-listener-to-edit-ask-button
add-event-listener-to-story-button-yes
add-event-listener-to-story-button-no
add-event-listener-to-ask-button-yes
add-event-listener-to-ask-button-no
add-event-listener-to-edit-job-button
add-event-listener-to-delete-job-button-yes
add-event-listener-to-delete-job-button-no
add-event-listener-to-upvote-buttons-for-entries
add-event-listener-to-upvote-buttons-for-newest-entries
add-event-listener-to-edit-event-button
add-event-listener-to-delete-event-button-yes
add-event-listener-to-delete-event-button-no
add-event-listener-to-upvote-button-for-entry)
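;; Forward declarations for the event-listener helpers defined at the bottom
;; of this namespace, so the fetch/render functions above can refer to them.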
(defn get-stories-by-page
[page]
(GET (str "/entry/story/p/" page)
{:handler (fn [response]
(when-not (= "1" page)
(util.view/change-page-title "Story"))
(r/render-component [(fn []
(view.list.story/component-list-story response page))] util.view/main-container)
(add-event-listener-to-upvote-buttons-for-entries response :story))
:error-handler util.controller/error-handler
:format (ajax/json-request-format)
:response-format (ajax/json-response-format {:keywords? true})}))
(defn get-story-by-id
[id]
(GET (str "/entry/story/" id)
{:handler (fn [response]
(util.view/change-page-title (-> response :story-entry :title))
(r/render-component [(fn []
(view.entry.story-entry/component-story response))] util.view/main-container)
(add-event-listener-to-add-comment-button get-story-by-id id)
(add-event-listener-to-upvote-buttons-for-comments response :story)
(add-event-listener-to-upvote-button-for-entry id))
:error-handler util.controller/error-handler
:format (ajax/json-request-format)
:response-format (ajax/json-response-format {:keywords? true})}))
(defn edit-story-by-id
[id]
(GET (str "/entry/story/info/" id)
{:handler (fn [response]
(util.view/change-page-title (-> response :story-entry :title))
(r/render-component [(fn []
(view.entry.story-entry/component-edit response))] util.view/main-container)
(add-event-listener-to-edit-story-button id))
:error-handler util.controller/error-handler
:format (ajax/json-request-format)
:response-format (ajax/json-response-format {:keywords? true})}))
(defn edit-story
[id field-ids]
(let [data (util.view/create-field-val-map field-ids)
title (:title data)]
(cond
(not (validation/submit-title? title))
(util.view/render-error-message error-message/title)
:else
(POST (str "/entry/story/edit/" id)
{:params data
:handler (fn [_]
(util.view/change-url (str "/#!/story/" id)))
:error-handler util.controller/error-handler
:format (ajax/json-request-format)
:response-format (ajax/json-response-format {:keywords? true})}))))
(defn delete-story-by-id
[id]
(GET (str "/entry/story/info/" id)
{:handler (fn [response]
(util.view/change-page-title (-> response :story-entry :title))
(r/render-component [(fn []
(view.entry.story-entry/component-delete response))] util.view/main-container)
(add-event-listener-to-story-button-yes id)
(add-event-listener-to-story-button-no))
:error-handler util.controller/error-handler
:format (ajax/json-request-format)
:response-format (ajax/json-response-format {:keywords? true})}))
(defn delete-story
[id]
(DELETE (str "/entry/story/delete/" id)
{:handler (fn []
(util.view/change-url-to-story))
:error-handler util.controller/error-handler
:format (ajax/json-request-format)
:response-format (ajax/json-response-format {:keywords? true})}))
(defn get-ask-by-id
[id]
(GET (str "/entry/ask/" id)
{:handler (fn [response]
(util.view/change-page-title (str "Ask CN: " (-> response :ask-entry :title)))
(r/render-component [(fn []
(view.entry.ask-entry/component-ask response))] util.view/main-container)
(add-event-listener-to-add-comment-button get-ask-by-id id)
(add-event-listener-to-upvote-buttons-for-comments response :ask)
(add-event-listener-to-upvote-button-for-entry id))
:error-handler util.controller/error-handler
:format (ajax/json-request-format)
:response-format (ajax/json-response-format {:keywords? true})}))
(defn get-ask-by-page
[page]
(GET (str "/entry/ask/p/" page)
{:handler (fn [response]
(util.view/change-page-title "Ask")
(r/render-component [(fn []
(view.list.ask/component-list-ask response page))] util.view/main-container)
(add-event-listener-to-upvote-buttons-for-entries response :ask))
:error-handler util.controller/error-handler
:format (ajax/json-request-format)
:response-format (ajax/json-response-format {:keywords? true})}))
(defn edit-ask-by-id
[id]
(GET (str "/entry/ask/info/" id)
{:handler (fn [response]
(util.view/change-page-title (str "Ask CN: " (-> response :ask-entry :title)))
(r/render-component [(fn []
(view.entry.ask-entry/component-edit response))] util.view/main-container)
(add-event-listener-to-edit-ask-button id))
:error-handler util.controller/error-handler
:format (ajax/json-request-format)
:response-format (ajax/json-response-format {:keywords? true})}))
(defn edit-ask
[id field-ids]
(let [data (util.view/create-field-val-map field-ids)
title (:title data)
text (:text data)]
(cond
(not (validation/submit-title? title))
(util.view/render-error-message error-message/title)
(not (validation/submit-text? text))
(util.view/render-error-message error-message/text)
:else
(POST (str "/entry/ask/edit/" id)
{:params data
:handler (fn [_]
(util.view/change-url (str "/#!/ask/" id)))
:error-handler util.controller/error-handler
:format (ajax/json-request-format)
:response-format (ajax/json-response-format {:keywords? true})}))))
(defn delete-ask-by-id
[id]
(GET (str "/entry/ask/info/" id)
{:handler (fn [response]
(util.view/change-page-title (str "Ask CN: " (-> response :ask-entry :title)))
(r/render-component [(fn []
(view.entry.ask-entry/component-delete response))] util.view/main-container)
(add-event-listener-to-ask-button-yes id)
(add-event-listener-to-ask-button-no))
:error-handler util.controller/error-handler
:format (ajax/json-request-format)
:response-format (ajax/json-response-format {:keywords? true})}))
(defn delete-ask
[id]
(DELETE (str "/entry/ask/delete/" id)
{:handler (fn []
(util.view/change-url-to-ask))
:error-handler util.controller/error-handler
:format (ajax/json-request-format)
:response-format (ajax/json-response-format {:keywords? true})}))
(defn get-newest-by-page
[page]
(GET (str "/entry/newest/p/" page)
{:handler (fn [response]
(util.view/change-page-title "Newest")
(r/render-component [(fn []
(view.list.newest/component-list-newest response page))] util.view/main-container)
(add-event-listener-to-upvote-buttons-for-newest-entries response))
:error-handler util.controller/error-handler
:format (ajax/json-request-format)
:response-format (ajax/json-response-format {:keywords? true})}))
(defn get-jobs-by-page
[page]
(GET (str "/entry/job/p/" page)
{:handler (fn [response]
(util.view/change-page-title "Clojure Jobs")
(r/render-component [(fn []
(view.list.job/component-job response page))] util.view/main-container))
:error-handler util.controller/error-handler
:format (ajax/json-request-format)
:response-format (ajax/json-response-format {:keywords? true})}))
(defn edit-job-by-id
[id]
(GET (str "/entry/job/info/" id)
{:handler (fn [response]
(util.view/change-page-title (-> response :job-entry :title))
(r/render-component [(fn []
(view.entry.job/component-edit response))] util.view/main-container)
(add-event-listener-to-edit-job-button id))
:error-handler util.controller/error-handler
:format (ajax/json-request-format)
:response-format (ajax/json-response-format {:keywords? true})}))
(defn edit-job
[id field-ids]
(let [data (util.view/create-field-val-map field-ids)
title (:title data)
url (:url data)
country (:country data)
city (:city data)]
(cond
(not (validation/submit-title? title))
(util.view/render-error-message error-message/title)
(not (validation/submit-url? url))
(util.view/render-error-message error-message/url)
(not (validation/submit-country? country))
(util.view/render-error-message error-message/country)
(not (validation/submit-city? city))
(util.view/render-error-message error-message/city)
:else
(POST (str "/entry/job/edit/" id)
{:params (assoc data :remote? (.-checked (dom/getElement "remoteId")))
:handler (fn [_]
(util.view/change-url-to-jobs))
:error-handler util.controller/error-handler
:format (ajax/json-request-format)
:response-format (ajax/json-response-format {:keywords? true})}))))
(defn delete-job-by-id
[id]
(GET (str "/entry/job/info/" id)
{:handler (fn [response]
(util.view/change-page-title (-> response :job-entry :title))
(r/render-component [(fn []
(view.entry.job/component-delete response))] util.view/main-container)
(add-event-listener-to-delete-job-button-yes id)
(add-event-listener-to-delete-job-button-no))
:error-handler util.controller/error-handler
:format (ajax/json-request-format)
:response-format (ajax/json-response-format {:keywords? true})}))
(defn delete-job
[id]
(DELETE (str "/entry/job/delete/" id)
{:handler (fn []
(util.view/change-url-to-jobs))
:error-handler util.controller/error-handler
:format (ajax/json-request-format)
:response-format (ajax/json-response-format {:keywords? true})}))
(defn get-events-by-page
[page]
(GET (str "/entry/event/p/" page)
{:handler (fn [response]
(util.view/change-page-title "Events")
(r/render-component [(fn []
(view.list.event/component-event response page))] util.view/main-container))
:error-handler util.controller/error-handler
:format (ajax/json-request-format)
:response-format (ajax/json-response-format {:keywords? true})}))
(defn edit-event-by-id
[id]
(GET (str "/entry/event/info/" id)
{:handler (fn [response]
(util.view/change-page-title (-> response :event-entry :title))
(r/render-component [(fn []
(view.entry.event/component-edit response))] util.view/main-container)
(add-event-listener-to-edit-event-button id))
:error-handler util.controller/error-handler
:format (ajax/json-request-format)
:response-format (ajax/json-response-format {:keywords? true})}))
(defn edit-event
[id field-ids]
(let [data (util.view/create-field-val-map field-ids)
title (:title data)
url (:url data)
country (:country data)
city (:city data)
day (:starting-date-day data)
month (:starting-date-month data)
year (:starting-date-year data)]
(cond
(not (validation/submit-title? title))
(util.view/render-error-message error-message/title)
(not (validation/submit-url? url))
(util.view/render-error-message error-message/url)
(not (validation/submit-country? country))
(util.view/render-error-message error-message/country)
(not (validation/submit-city? city))
(util.view/render-error-message error-message/city)
(not (validation/submit-day? day))
(util.view/render-error-message error-message/day)
(not (validation/submit-month? month))
(util.view/render-error-message error-message/month)
(not (validation/submit-year? year))
(util.view/render-error-message error-message/year)
:else
(POST (str "/entry/event/edit/" id)
{:params data
:handler (fn [_]
(util.view/change-url-to-events))
:error-handler util.controller/error-handler
:format (ajax/json-request-format)
:response-format (ajax/json-response-format {:keywords? true})}))))
(defn delete-event-by-id
[id]
(GET (str "/entry/event/info/" id)
{:handler (fn [response]
(util.view/change-page-title (-> response :event-entry :title))
(r/render-component [(fn []
(view.entry.event/component-delete response))] util.view/main-container)
(add-event-listener-to-delete-event-button-yes id)
(add-event-listener-to-delete-event-button-no))
:error-handler util.controller/error-handler
:format (ajax/json-request-format)
:response-format (ajax/json-response-format {:keywords? true})}))
(defn delete-event
[id]
(DELETE (str "/entry/event/delete/" id)
{:handler (fn []
(util.view/change-url-to-events))
:error-handler util.controller/error-handler
:format (ajax/json-request-format)
:response-format (ajax/json-response-format {:keywords? true})}))
(defn add-event-listener-to-add-comment-button
[entry id]
(.addEventListener (dom/getElement "buttonAddCommentId") "click" (fn [_]
(controller.comment-entry/add-comment entry id ["textId"]))))
(defn add-event-listener-to-upvote-buttons-for-comments
[response type]
(let [comments (if (= type :story) :story-comments :ask-comments)
upvoted-comments (if (= type :story) :story-upvoted-comments :ask-upvoted-comments)]
(doseq [commentt (-> response comments)]
(let [comment-id (:_id commentt)
upvoted-comments (-> response upvoted-comments)]
(when-not (util.view/in? comment-id upvoted-comments)
(when-let [node (dom/getElement (str "id-upvote-" comment-id))]
(.addEventListener node "click" (fn [_]
(controller.upvote/upvote-comment comment-id)))))))))
(defn add-event-listener-to-upvote-buttons-for-entries
[response type]
(let [entries (if (= type :story) :story-entry :ask-entry)
upvoted-entries (if (= type :story) :story-upvoted-entries :ask-upvoted-entries)]
(doseq [entry (-> response entries)]
(let [entry-id (:_id entry)
upvoted-entries-coll (-> response upvoted-entries)]
(when-not (util.view/in? entry-id upvoted-entries-coll)
(when-let [node (dom/getElement (str "id-upvote-" entry-id))]
(.addEventListener node "click" (fn [_]
(controller.upvote/upvote-entry entry-id)))))))))
(defn add-event-listener-to-upvote-buttons-for-newest-entries
[response]
(let [entries :newest-entry
upvoted-entries :newest-upvoted-entries]
(doseq [entry (-> response entries)]
(let [entry-id (:_id entry)
upvoted-entries-coll (-> response upvoted-entries)]
(when-not (util.view/in? entry-id upvoted-entries-coll)
(when-let [node (dom/getElement (str "id-upvote-" entry-id))]
(.addEventListener node "click" (fn [_]
(controller.upvote/upvote-entry entry-id)))))))))
(defn add-event-listener-to-edit-story-button
[id]
(.addEventListener (dom/getElement "buttonStoryEditId") "click" (fn [_]
(edit-story id ["titleId"]))))
(defn add-event-listener-to-edit-ask-button
[id]
(.addEventListener (dom/getElement "buttonAskEditId") "click" (fn [_]
(edit-ask id ["titleId" "textId"]))))
(defn add-event-listener-to-story-button-yes
[id]
(.addEventListener (dom/getElement "buttonDeleteStoryYesId") "click" (fn [_]
(delete-story id))))
(defn add-event-listener-to-story-button-no
[]
(.addEventListener (dom/getElement "buttonDeleteStoryNoId") "click" (fn [_]
(util.view/change-url-to-story))))
(defn add-event-listener-to-ask-button-yes
[id]
(.addEventListener (dom/getElement "buttonDeleteAskYesId") "click" (fn [_]
(delete-ask id))))
(defn add-event-listener-to-ask-button-no
[]
(.addEventListener (dom/getElement "buttonDeleteAskNoId") "click" (fn [_]
(util.view/change-url-to-ask))))
(defn add-event-listener-to-edit-job-button
[id]
(.addEventListener (dom/getElement "jobEditButtonId") "click" (fn [_]
(edit-job id ["titleId" "urlId" "countryId" "cityId" "remoteId"]))))
(defn add-event-listener-to-delete-job-button-yes
[id]
(.addEventListener (dom/getElement "buttonDeleteJobYesId") "click" (fn [_]
(delete-job id))))
(defn add-event-listener-to-delete-job-button-no
[]
(.addEventListener (dom/getElement "buttonDeleteJobNoId") "click" (fn [_]
(util.view/change-url-to-jobs))))
(defn add-event-listener-to-edit-event-button
[id]
(.addEventListener (dom/getElement "eventEditButtonId") "click" (fn [_]
(edit-event id ["titleId" "urlId" "countryId" "cityId" "startingDateDayId" "startingDateMonthId" "startingDateYearId"]))))
(defn add-event-listener-to-delete-event-button-yes
[id]
(.addEventListener (dom/getElement "buttonDeleteEventYesId") "click" (fn [_]
(delete-event id))))
(defn add-event-listener-to-delete-event-button-no
[]
(.addEventListener (dom/getElement "buttonDeleteEventNoId") "click" (fn [_]
(util.view/change-url-to-events))))
(defn add-event-listener-to-upvote-button-for-entry
[entry-id]
(when-let [element (dom/getElement (str "id-upvote-" entry-id))]
(.addEventListener element "click" (fn [_]
(controller.upvote/upvote-entry entry-id))))) | null | https://raw.githubusercontent.com/ertugrulcetin/ClojureNews/28002f6b620fa4977d561b0cfca0c7f6a635057b/src/cljs/controller/entry.cljs | clojure | (ns controller.entry
(:require [ajax.core :as ajax :refer [GET POST PUT DELETE]]
[cljc.validation :as validation]
[cljc.error-messages :as error-message]
[reagent.core :as r]
[goog.dom :as dom]
[util.view]
[util.controller]
[view.entry.story-entry]
[view.entry.ask-entry]
[view.entry.job]
[view.entry.event]
[view.list.story]
[view.list.ask]
[view.list.job]
[view.list.event]
[view.list.newest]
[controller.upvote]
[controller.comment-entry]))
(declare add-event-listener-to-add-comment-button
add-event-listener-to-upvote-buttons-for-comments
add-event-listener-to-edit-story-button
add-event-listener-to-edit-ask-button
add-event-listener-to-story-button-yes
add-event-listener-to-story-button-no
add-event-listener-to-ask-button-yes
add-event-listener-to-ask-button-no
add-event-listener-to-edit-job-button
add-event-listener-to-delete-job-button-yes
add-event-listener-to-delete-job-button-no
add-event-listener-to-upvote-buttons-for-entries
add-event-listener-to-upvote-buttons-for-newest-entries
add-event-listener-to-edit-event-button
add-event-listener-to-delete-event-button-yes
add-event-listener-to-delete-event-button-no
add-event-listener-to-upvote-button-for-entry)
(defn get-stories-by-page
[page]
(GET (str "/entry/story/p/" page)
{:handler (fn [response]
(when-not (= "1" page)
(util.view/change-page-title "Story"))
(r/render-component [(fn []
(view.list.story/component-list-story response page))] util.view/main-container)
(add-event-listener-to-upvote-buttons-for-entries response :story))
:error-handler util.controller/error-handler
:format (ajax/json-request-format)
:response-format (ajax/json-response-format {:keywords? true})}))
(defn get-story-by-id
[id]
(GET (str "/entry/story/" id)
{:handler (fn [response]
(util.view/change-page-title (-> response :story-entry :title))
(r/render-component [(fn []
(view.entry.story-entry/component-story response))] util.view/main-container)
(add-event-listener-to-add-comment-button get-story-by-id id)
(add-event-listener-to-upvote-buttons-for-comments response :story)
(add-event-listener-to-upvote-button-for-entry id))
:error-handler util.controller/error-handler
:format (ajax/json-request-format)
:response-format (ajax/json-response-format {:keywords? true})}))
(defn edit-story-by-id
[id]
(GET (str "/entry/story/info/" id)
{:handler (fn [response]
(util.view/change-page-title (-> response :story-entry :title))
(r/render-component [(fn []
(view.entry.story-entry/component-edit response))] util.view/main-container)
(add-event-listener-to-edit-story-button id))
:error-handler util.controller/error-handler
:format (ajax/json-request-format)
:response-format (ajax/json-response-format {:keywords? true})}))
(defn edit-story
[id field-ids]
(let [data (util.view/create-field-val-map field-ids)
title (:title data)]
(cond
(not (validation/submit-title? title))
(util.view/render-error-message error-message/title)
:else
(POST (str "/entry/story/edit/" id)
{:params data
:handler (fn [_]
(util.view/change-url (str "/#!/story/" id)))
:error-handler util.controller/error-handler
:format (ajax/json-request-format)
:response-format (ajax/json-response-format {:keywords? true})}))))
(defn delete-story-by-id
[id]
(GET (str "/entry/story/info/" id)
{:handler (fn [response]
(util.view/change-page-title (-> response :story-entry :title))
(r/render-component [(fn []
(view.entry.story-entry/component-delete response))] util.view/main-container)
(add-event-listener-to-story-button-yes id)
(add-event-listener-to-story-button-no))
:error-handler util.controller/error-handler
:format (ajax/json-request-format)
:response-format (ajax/json-response-format {:keywords? true})}))
(defn delete-story
[id]
(DELETE (str "/entry/story/delete/" id)
{:handler (fn []
(util.view/change-url-to-story))
:error-handler util.controller/error-handler
:format (ajax/json-request-format)
:response-format (ajax/json-response-format {:keywords? true})}))
(defn get-ask-by-id
[id]
(GET (str "/entry/ask/" id)
{:handler (fn [response]
(util.view/change-page-title (str "Ask CN: " (-> response :ask-entry :title)))
(r/render-component [(fn []
(view.entry.ask-entry/component-ask response))] util.view/main-container)
(add-event-listener-to-add-comment-button get-ask-by-id id)
(add-event-listener-to-upvote-buttons-for-comments response :ask)
(add-event-listener-to-upvote-button-for-entry id))
:error-handler util.controller/error-handler
:format (ajax/json-request-format)
:response-format (ajax/json-response-format {:keywords? true})}))
(defn get-ask-by-page
[page]
(GET (str "/entry/ask/p/" page)
{:handler (fn [response]
(util.view/change-page-title "Ask")
(r/render-component [(fn []
(view.list.ask/component-list-ask response page))] util.view/main-container)
(add-event-listener-to-upvote-buttons-for-entries response :ask))
:error-handler util.controller/error-handler
:format (ajax/json-request-format)
:response-format (ajax/json-response-format {:keywords? true})}))
(defn edit-ask-by-id
[id]
(GET (str "/entry/ask/info/" id)
{:handler (fn [response]
(util.view/change-page-title (str "Ask CN: " (-> response :ask-entry :title)))
(r/render-component [(fn []
(view.entry.ask-entry/component-edit response))] util.view/main-container)
(add-event-listener-to-edit-ask-button id))
:error-handler util.controller/error-handler
:format (ajax/json-request-format)
:response-format (ajax/json-response-format {:keywords? true})}))
(defn edit-ask
[id field-ids]
(let [data (util.view/create-field-val-map field-ids)
title (:title data)
text (:text data)]
(cond
(not (validation/submit-title? title))
(util.view/render-error-message error-message/title)
(not (validation/submit-text? text))
(util.view/render-error-message error-message/text)
:else
(POST (str "/entry/ask/edit/" id)
{:params data
:handler (fn [_]
(util.view/change-url (str "/#!/ask/" id)))
:error-handler util.controller/error-handler
:format (ajax/json-request-format)
:response-format (ajax/json-response-format {:keywords? true})}))))
(defn delete-ask-by-id
[id]
(GET (str "/entry/ask/info/" id)
{:handler (fn [response]
(util.view/change-page-title (str "Ask CN: " (-> response :ask-entry :title)))
(r/render-component [(fn []
(view.entry.ask-entry/component-delete response))] util.view/main-container)
(add-event-listener-to-ask-button-yes id)
(add-event-listener-to-ask-button-no))
:error-handler util.controller/error-handler
:format (ajax/json-request-format)
:response-format (ajax/json-response-format {:keywords? true})}))
(defn delete-ask
[id]
(DELETE (str "/entry/ask/delete/" id)
{:handler (fn []
(util.view/change-url-to-ask))
:error-handler util.controller/error-handler
:format (ajax/json-request-format)
:response-format (ajax/json-response-format {:keywords? true})}))
(defn get-newest-by-page
[page]
(GET (str "/entry/newest/p/" page)
{:handler (fn [response]
(util.view/change-page-title "Newest")
(r/render-component [(fn []
(view.list.newest/component-list-newest response page))] util.view/main-container)
(add-event-listener-to-upvote-buttons-for-newest-entries response))
:error-handler util.controller/error-handler
:format (ajax/json-request-format)
:response-format (ajax/json-response-format {:keywords? true})}))
(defn get-jobs-by-page
[page]
(GET (str "/entry/job/p/" page)
{:handler (fn [response]
(util.view/change-page-title "Clojure Jobs")
(r/render-component [(fn []
(view.list.job/component-job response page))] util.view/main-container))
:error-handler util.controller/error-handler
:format (ajax/json-request-format)
:response-format (ajax/json-response-format {:keywords? true})}))
(defn edit-job-by-id
[id]
(GET (str "/entry/job/info/" id)
{:handler (fn [response]
(util.view/change-page-title (-> response :job-entry :title))
(r/render-component [(fn []
(view.entry.job/component-edit response))] util.view/main-container)
(add-event-listener-to-edit-job-button id))
:error-handler util.controller/error-handler
:format (ajax/json-request-format)
:response-format (ajax/json-response-format {:keywords? true})}))
(defn edit-job
[id field-ids]
(let [data (util.view/create-field-val-map field-ids)
title (:title data)
url (:url data)
country (:country data)
city (:city data)]
(cond
(not (validation/submit-title? title))
(util.view/render-error-message error-message/title)
(not (validation/submit-url? url))
(util.view/render-error-message error-message/url)
(not (validation/submit-country? country))
(util.view/render-error-message error-message/country)
(not (validation/submit-city? city))
(util.view/render-error-message error-message/city)
:else
(POST (str "/entry/job/edit/" id)
{:params (assoc data :remote? (.-checked (dom/getElement "remoteId")))
:handler (fn [_]
(util.view/change-url-to-jobs))
:error-handler util.controller/error-handler
:format (ajax/json-request-format)
:response-format (ajax/json-response-format {:keywords? true})}))))
(defn delete-job-by-id
[id]
(GET (str "/entry/job/info/" id)
{:handler (fn [response]
(util.view/change-page-title (-> response :job-entry :title))
(r/render-component [(fn []
(view.entry.job/component-delete response))] util.view/main-container)
(add-event-listener-to-delete-job-button-yes id)
(add-event-listener-to-delete-job-button-no))
:error-handler util.controller/error-handler
:format (ajax/json-request-format)
:response-format (ajax/json-response-format {:keywords? true})}))
(defn delete-job
[id]
(DELETE (str "/entry/job/delete/" id)
{:handler (fn []
(util.view/change-url-to-jobs))
:error-handler util.controller/error-handler
:format (ajax/json-request-format)
:response-format (ajax/json-response-format {:keywords? true})}))
(defn get-events-by-page
[page]
(GET (str "/entry/event/p/" page)
{:handler (fn [response]
(util.view/change-page-title "Events")
(r/render-component [(fn []
(view.list.event/component-event response page))] util.view/main-container))
:error-handler util.controller/error-handler
:format (ajax/json-request-format)
:response-format (ajax/json-response-format {:keywords? true})}))
(defn edit-event-by-id
[id]
(GET (str "/entry/event/info/" id)
{:handler (fn [response]
(util.view/change-page-title (-> response :event-entry :title))
(r/render-component [(fn []
(view.entry.event/component-edit response))] util.view/main-container)
(add-event-listener-to-edit-event-button id))
:error-handler util.controller/error-handler
:format (ajax/json-request-format)
:response-format (ajax/json-response-format {:keywords? true})}))
(defn edit-event
[id field-ids]
(let [data (util.view/create-field-val-map field-ids)
title (:title data)
url (:url data)
country (:country data)
city (:city data)
day (:starting-date-day data)
month (:starting-date-month data)
year (:starting-date-year data)]
(cond
(not (validation/submit-title? title))
(util.view/render-error-message error-message/title)
(not (validation/submit-url? url))
(util.view/render-error-message error-message/url)
(not (validation/submit-country? country))
(util.view/render-error-message error-message/country)
(not (validation/submit-city? city))
(util.view/render-error-message error-message/city)
(not (validation/submit-day? day))
(util.view/render-error-message error-message/day)
(not (validation/submit-month? month))
(util.view/render-error-message error-message/month)
(not (validation/submit-year? year))
(util.view/render-error-message error-message/year)
:else
(POST (str "/entry/event/edit/" id)
{:params data
:handler (fn [_]
(util.view/change-url-to-events))
:error-handler util.controller/error-handler
:format (ajax/json-request-format)
:response-format (ajax/json-response-format {:keywords? true})}))))
(defn delete-event-by-id
[id]
(GET (str "/entry/event/info/" id)
{:handler (fn [response]
(util.view/change-page-title (-> response :event-entry :title))
(r/render-component [(fn []
(view.entry.event/component-delete response))] util.view/main-container)
(add-event-listener-to-delete-event-button-yes id)
(add-event-listener-to-delete-event-button-no))
:error-handler util.controller/error-handler
:format (ajax/json-request-format)
:response-format (ajax/json-response-format {:keywords? true})}))
(defn delete-event
[id]
(DELETE (str "/entry/event/delete/" id)
{:handler (fn []
(util.view/change-url-to-events))
:error-handler util.controller/error-handler
:format (ajax/json-request-format)
:response-format (ajax/json-response-format {:keywords? true})}))
(defn add-event-listener-to-add-comment-button
[entry id]
(.addEventListener (dom/getElement "buttonAddCommentId") "click" (fn [_]
(controller.comment-entry/add-comment entry id ["textId"]))))
(defn add-event-listener-to-upvote-buttons-for-comments
[response type]
(let [comments (if (= type :story) :story-comments :ask-comments)
upvoted-comments (if (= type :story) :story-upvoted-comments :ask-upvoted-comments)]
(doseq [commentt (-> response comments)]
(let [comment-id (:_id commentt)
upvoted-comments (-> response upvoted-comments)]
(when-not (util.view/in? comment-id upvoted-comments)
(when-let [node (dom/getElement (str "id-upvote-" comment-id))]
(.addEventListener node "click" (fn [_]
(controller.upvote/upvote-comment comment-id)))))))))
(defn add-event-listener-to-upvote-buttons-for-entries
[response type]
(let [entries (if (= type :story) :story-entry :ask-entry)
upvoted-entries (if (= type :story) :story-upvoted-entries :ask-upvoted-entries)]
(doseq [entry (-> response entries)]
(let [entry-id (:_id entry)
upvoted-entries-coll (-> response upvoted-entries)]
(when-not (util.view/in? entry-id upvoted-entries-coll)
(when-let [node (dom/getElement (str "id-upvote-" entry-id))]
(.addEventListener node "click" (fn [_]
(controller.upvote/upvote-entry entry-id)))))))))
(defn add-event-listener-to-upvote-buttons-for-newest-entries
[response]
(let [entries :newest-entry
upvoted-entries :newest-upvoted-entries]
(doseq [entry (-> response entries)]
(let [entry-id (:_id entry)
upvoted-entries-coll (-> response upvoted-entries)]
(when-not (util.view/in? entry-id upvoted-entries-coll)
(when-let [node (dom/getElement (str "id-upvote-" entry-id))]
(.addEventListener node "click" (fn [_]
(controller.upvote/upvote-entry entry-id)))))))))
(defn add-event-listener-to-edit-story-button
[id]
(.addEventListener (dom/getElement "buttonStoryEditId") "click" (fn [_]
(edit-story id ["titleId"]))))
(defn add-event-listener-to-edit-ask-button
[id]
(.addEventListener (dom/getElement "buttonAskEditId") "click" (fn [_]
(edit-ask id ["titleId" "textId"]))))
(defn add-event-listener-to-story-button-yes
[id]
(.addEventListener (dom/getElement "buttonDeleteStoryYesId") "click" (fn [_]
(delete-story id))))
(defn add-event-listener-to-story-button-no
[]
(.addEventListener (dom/getElement "buttonDeleteStoryNoId") "click" (fn [_]
(util.view/change-url-to-story))))
(defn add-event-listener-to-ask-button-yes
[id]
(.addEventListener (dom/getElement "buttonDeleteAskYesId") "click" (fn [_]
(delete-ask id))))
(defn add-event-listener-to-ask-button-no
[]
(.addEventListener (dom/getElement "buttonDeleteAskNoId") "click" (fn [_]
(util.view/change-url-to-ask))))
(defn add-event-listener-to-edit-job-button
[id]
(.addEventListener (dom/getElement "jobEditButtonId") "click" (fn [_]
(edit-job id ["titleId" "urlId" "countryId" "cityId" "remoteId"]))))
(defn add-event-listener-to-delete-job-button-yes
[id]
(.addEventListener (dom/getElement "buttonDeleteJobYesId") "click" (fn [_]
(delete-job id))))
(defn add-event-listener-to-delete-job-button-no
[]
(.addEventListener (dom/getElement "buttonDeleteJobNoId") "click" (fn [_]
(util.view/change-url-to-jobs))))
(defn add-event-listener-to-edit-event-button
[id]
(.addEventListener (dom/getElement "eventEditButtonId") "click" (fn [_]
(edit-event id ["titleId" "urlId" "countryId" "cityId" "startingDateDayId" "startingDateMonthId" "startingDateYearId"]))))
(defn add-event-listener-to-delete-event-button-yes
[id]
(.addEventListener (dom/getElement "buttonDeleteEventYesId") "click" (fn [_]
(delete-event id))))
(defn add-event-listener-to-delete-event-button-no
[]
(.addEventListener (dom/getElement "buttonDeleteEventNoId") "click" (fn [_]
(util.view/change-url-to-events))))
(defn add-event-listener-to-upvote-button-for-entry
[entry-id]
(when-let [element (dom/getElement (str "id-upvote-" entry-id))]
(.addEventListener element "click" (fn [_]
(controller.upvote/upvote-entry entry-id))))) |
|
d6b16aa20f468b36b7df47f0a03a04e424c0b586d39051861e3d53a109ecf6e1 | ranjitjhala/haddock-annot | Command.hs | -----------------------------------------------------------------------------
-- |
-- Module      : Distribution.Simple.Command
-- Copyright   : 2007
--
-- Maintainer  :
-- Portability : portable
--
-- This is to do with command line handling. The Cabal command line is
-- organised into a number of named sub-commands (much like darcs). The
-- 'CommandUI' abstraction represents one of these sub-commands, with a name,
-- description, a set of flags. Commands can be associated with actions and
-- run. It handles some common stuff automatically, like the @--help@ and
-- command line completion flags. It is designed to allow other tools make
-- derived commands. This feature is used heavily in @cabal-install@.
{- All rights reserved.

Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are
met:
* Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above
copyright notice, this list of conditions and the following
disclaimer in the documentation and/or other materials provided
with the distribution.
* Neither the name of Isaac Jones nor the names of other
contributors may be used to endorse or promote products derived
from this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. -}
module Distribution.Simple.Command (
-- * Command interface
CommandUI(..),
commandShowOptions,
CommandParse(..),
commandParseArgs,
-- ** Constructing commands
ShowOrParseArgs(..),
makeCommand,
-- ** Associating actions with commands
Command,
commandAddAction,
noExtraFlags,
-- ** Running commands
commandsRun,
-- * Option Fields
OptionField(..), Name,
-- ** Constructing Option Fields
option, multiOption,
  -- ** Liftings & Projections
liftOption, viewAsFieldDescr,
-- * Option Descriptions
OptDescr(..), Description, SFlags, LFlags, OptFlags, ArgPlaceHolder,
  -- ** OptDescr 'smart' constructors
MkOptDescr,
reqArg, reqArg', optArg, optArg', noArg,
boolOpt, boolOpt', choiceOpt, choiceOptFromEnum
) where
import Control.Monad
import Data.Char (isAlpha, toLower)
import Data.List (sortBy)
import Data.Maybe
import Data.Monoid
import qualified Distribution.GetOpt as GetOpt
import Distribution.Text
( Text(disp, parse) )
import Distribution.ParseUtils
import Distribution.ReadE
import Distribution.Simple.Utils (die, intercalate)
import Text.PrettyPrint.HughesPJ ( punctuate, cat, comma, text, empty)
data CommandUI flags = CommandUI {
-- | The name of the command as it would be entered on the command line.
-- For example @\"build\"@.
commandName :: String,
    -- | A short, one line description of the command to use in help texts.
commandSynopsis :: String,
    -- | The usage line summary for this command
commandUsage :: String -> String,
-- | Additional explanation of the command to use in help texts.
commandDescription :: Maybe (String -> String),
    -- | Initial \/ empty flags
commandDefaultFlags :: flags,
-- | All the Option fields for this command
commandOptions :: ShowOrParseArgs -> [OptionField flags]
}
data ShowOrParseArgs = ShowArgs | ParseArgs
type Name = String
type Description = String
-- | We usually have a datatype for storing configuration values, where
-- every field stores a configuration option, and the user sets
-- the value either via command line flags or a configuration file.
-- An individual OptionField models such a field, and we usually
-- build a list of options associated to a configuration datatype.
data OptionField a = OptionField {
optionName :: Name,
optionDescr :: [OptDescr a] }
-- | An OptionField takes one or more OptDescrs, describing the command line interface for the field.
data OptDescr a = ReqArg Description OptFlags ArgPlaceHolder (ReadE (a->a)) (a -> [String])
| OptArg Description OptFlags ArgPlaceHolder (ReadE (a->a)) (a->a) (a -> [Maybe String])
| ChoiceOpt [(Description, OptFlags, a->a, a -> Bool)]
| BoolOpt Description OptFlags{-True-} OptFlags{-False-} (Bool -> a -> a) (a-> Maybe Bool)
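-- A minimal sketch of a concrete option (illustrative only: the
-- 'ExampleFlags' record and 'outputOption' below are hypothetical names and
-- are not part of this module). It ties a record field to a @--output=FILE@
-- flag using 'option' and 'reqArg'', both defined further down:
--
-- > data ExampleFlags = ExampleFlags { exampleOutput :: [String] }
-- >
-- > outputOption :: OptionField ExampleFlags
-- > outputOption =
-- >   option "o" ["output"] "Write the result to FILE"
-- >     exampleOutput (\files flags -> flags { exampleOutput = files })
-- >     (reqArg' "FILE" (:[]) id)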
-- | Short command line option strings
type SFlags = [Char]
-- | Long command line option strings
type LFlags = [String]
type OptFlags = (SFlags,LFlags)
type ArgPlaceHolder = String
-- | Create an option taking a single OptDescr.
-- No explicit Name is given for the Option, the name is the first LFlag given.
option :: SFlags -> LFlags -> Description -> get -> set -> MkOptDescr get set a -> OptionField a
option sf lf@(n:_) d get set arg = OptionField n [arg sf lf d get set]
option _ _ _ _ _ _ = error "Distribution.command.option: An OptionField must have at least one LFlag"
-- | Create an option taking several OptDescrs.
-- You will have to give the flags and description individually to the OptDescr constructor.
multiOption :: Name -> get -> set
            -> [get -> set -> OptDescr a]  -- ^MkOptDescr constructors partially applied to flags and description.
-> OptionField a
multiOption n get set args = OptionField n [arg get set | arg <- args]
type MkOptDescr get set a = SFlags -> LFlags -> Description -> get -> set -> OptDescr a
-- | Create a string-valued command line interface.
reqArg :: Monoid b => ArgPlaceHolder -> ReadE b -> (b -> [String])
-> MkOptDescr (a -> b) (b -> a -> a) a
reqArg ad mkflag showflag sf lf d get set =
ReqArg d (sf,lf) ad (fmap (\a b -> set (get b `mappend` a) b) mkflag) (showflag . get)
-- | Create a string-valued command line interface with a default value.
optArg :: Monoid b => ArgPlaceHolder -> ReadE b -> b -> (b -> [Maybe String])
-> MkOptDescr (a -> b) (b -> a -> a) a
optArg ad mkflag def showflag sf lf d get set =
OptArg d (sf,lf) ad (fmap (\a b -> set (get b `mappend` a) b) mkflag)
(\b -> set (get b `mappend` def) b)
(showflag . get)
-- | (String -> a) variant of "reqArg"
reqArg' :: Monoid b => ArgPlaceHolder -> (String -> b) -> (b -> [String])
-> MkOptDescr (a -> b) (b -> a -> a) a
reqArg' ad mkflag showflag =
reqArg ad (succeedReadE mkflag) showflag
-- | (String -> a) variant of "optArg"
optArg' :: Monoid b => ArgPlaceHolder -> (Maybe String -> b) -> (b -> [Maybe String])
-> MkOptDescr (a -> b) (b -> a -> a) a
optArg' ad mkflag showflag =
optArg ad (succeedReadE (mkflag . Just)) def showflag
where def = mkflag Nothing
noArg :: (Eq b, Monoid b) => b -> MkOptDescr (a -> b) (b -> a -> a) a
noArg flag sf lf d = choiceOpt [(flag, (sf,lf), d)] sf lf d
boolOpt :: (b -> Maybe Bool) -> (Bool -> b) -> SFlags -> SFlags -> MkOptDescr (a -> b) (b -> a -> a) a
boolOpt g s sfT sfF _sf _lf@(n:_) d get set =
BoolOpt d (sfT, ["enable-"++n]) (sfF, ["disable-"++n]) (set.s) (g.get)
boolOpt _ _ _ _ _ _ _ _ _ = error "Distribution.Simple.Setup.boolOpt: unreachable"
boolOpt' :: (b -> Maybe Bool) -> (Bool -> b) -> OptFlags -> OptFlags -> MkOptDescr (a -> b) (b -> a -> a) a
boolOpt' g s ffT ffF _sf _lf d get set = BoolOpt d ffT ffF (set.s) (g . get)
-- | create a Choice option
choiceOpt :: Eq b => [(b,OptFlags,Description)] -> MkOptDescr (a -> b) (b -> a -> a) a
choiceOpt aa_ff _sf _lf _d get set = ChoiceOpt alts
where alts = [(d,flags, set alt, (==alt) . get) | (alt,flags,d) <- aa_ff]
-- | create a Choice option out of an enumeration type.
-- As long flags, the Show output is used. As short flags, the first character
-- which does not conflict with a previous one is used.
choiceOptFromEnum :: (Bounded b, Enum b, Show b, Eq b) => MkOptDescr (a -> b) (b -> a -> a) a
choiceOptFromEnum _sf _lf d get = choiceOpt [ (x, (sf, [map toLower $ show x]), d')
| (x, sf) <- sflags'
, let d' = d ++ show x]
_sf _lf d get
where sflags' = foldl f [] [firstOne..]
f prev x = let prevflags = concatMap snd prev in
prev ++ take 1 [(x, [toLower sf]) | sf <- show x, isAlpha sf
, toLower sf `notElem` prevflags]
firstOne = minBound `asTypeOf` get undefined
commandGetOpts :: ShowOrParseArgs -> CommandUI flags -> [GetOpt.OptDescr (flags -> flags)]
commandGetOpts showOrParse command =
concatMap viewAsGetOpt (commandOptions command showOrParse)
viewAsGetOpt :: OptionField a -> [GetOpt.OptDescr (a->a)]
viewAsGetOpt (OptionField _n aa) = concatMap optDescrToGetOpt aa
where
optDescrToGetOpt (ReqArg d (cs,ss) arg_desc set _) =
[GetOpt.Option cs ss (GetOpt.ReqArg set' arg_desc) d]
where set' = readEOrFail set
optDescrToGetOpt (OptArg d (cs,ss) arg_desc set def _) =
[GetOpt.Option cs ss (GetOpt.OptArg set' arg_desc) d]
where set' Nothing = def
set' (Just txt) = readEOrFail set txt
optDescrToGetOpt (ChoiceOpt alts) =
[GetOpt.Option sf lf (GetOpt.NoArg set) d | (d,(sf,lf),set,_) <- alts ]
optDescrToGetOpt (BoolOpt d (sfT,lfT) (sfF, lfF) set _) =
[ GetOpt.Option sfT lfT (GetOpt.NoArg (set True)) ("Enable " ++ d)
, GetOpt.Option sfF lfF (GetOpt.NoArg (set False)) ("Disable " ++ d) ]
-- | to view as a FieldDescr, we sort the list of interfaces (Req > Bool > Choice > Opt) and consider only the first one.
viewAsFieldDescr :: OptionField a -> FieldDescr a
viewAsFieldDescr (OptionField _n []) = error "Distribution.command.viewAsFieldDescr: unexpected"
viewAsFieldDescr (OptionField n dd) = FieldDescr n get set
where optDescr = head $ sortBy cmp dd
ReqArg{} `cmp` ReqArg{} = EQ
ReqArg{} `cmp` _ = GT
BoolOpt{} `cmp` ReqArg{} = LT
BoolOpt{} `cmp` BoolOpt{} = EQ
BoolOpt{} `cmp` _ = GT
ChoiceOpt{} `cmp` ReqArg{} = LT
ChoiceOpt{} `cmp` BoolOpt{} = LT
ChoiceOpt{} `cmp` ChoiceOpt{} = EQ
ChoiceOpt{} `cmp` _ = GT
OptArg{} `cmp` OptArg{} = EQ
OptArg{} `cmp` _ = LT
get t = case optDescr of
ReqArg _ _ _ _ ppr ->
(cat . punctuate comma . map text . ppr) t
OptArg _ _ _ _ _ ppr ->
case ppr t of
[] -> empty
(Nothing : _) -> text "True"
(Just a : _) -> text a
ChoiceOpt alts ->
fromMaybe empty $ listToMaybe
[ text lf | (_,(_,lf:_), _,enabled) <- alts, enabled t]
BoolOpt _ _ _ _ enabled -> (maybe empty disp . enabled) t
set line val a =
case optDescr of
ReqArg _ _ _ readE _ -> ($ a) `liftM` runE line n readE val
-- We parse for a single value instead of a list,
                          -- as one can't really implement parseList :: a -> ReadE [a]
                          -- with the current ReadE definition
ChoiceOpt{} -> case getChoiceByLongFlag optDescr val of
Just f -> return (f a)
_ -> syntaxError line val
BoolOpt _ _ _ setV _ -> (`setV` a) `liftM` runP line n parse val
OptArg _ _ _ _readE _ _ -> -- The behaviour in this case is not clear, and it has no use so far,
-- so we avoid future surprises by not implementing it.
error "Command.optionToFieldDescr: feature not implemented"
getChoiceByLongFlag :: OptDescr b -> String -> Maybe (b->b)
getChoiceByLongFlag (ChoiceOpt alts) val = listToMaybe [ set | (_,(_sf,lf:_), set, _) <- alts
, lf == val]
getChoiceByLongFlag _ _ = error "Distribution.command.getChoiceByLongFlag: expected a choice option"
getCurrentChoice :: OptDescr a -> a -> [String]
getCurrentChoice (ChoiceOpt alts) a =
[ lf | (_,(_sf,lf:_), _, currentChoice) <- alts, currentChoice a]
getCurrentChoice _ _ = error "Command.getChoice: expected a Choice OptDescr"
liftOption :: (b -> a) -> (a -> (b -> b)) -> OptionField a -> OptionField b
liftOption get' set' opt = opt { optionDescr = liftOptDescr get' set' `map` optionDescr opt}
liftOptDescr :: (b -> a) -> (a -> (b -> b)) -> OptDescr a -> OptDescr b
liftOptDescr get' set' (ChoiceOpt opts) =
ChoiceOpt [ (d, ff, liftSet get' set' set , (get . get'))
| (d, ff, set, get) <- opts]
liftOptDescr get' set' (OptArg d ff ad set def get) =
OptArg d ff ad (liftSet get' set' `fmap` set) (liftSet get' set' def) (get . get')
liftOptDescr get' set' (ReqArg d ff ad set get) =
ReqArg d ff ad (liftSet get' set' `fmap` set) (get . get')
liftOptDescr get' set' (BoolOpt d ffT ffF set get) =
BoolOpt d ffT ffF (liftSet get' set' . set) (get . get')
liftSet :: (b -> a) -> (a -> (b -> b)) -> (a -> a) -> b -> b
liftSet get' set' set x = set' (set $ get' x) x
-- | Show flags in the standard long option command line format
commandShowOptions :: CommandUI flags -> flags -> [String]
commandShowOptions command v = concat
[ showOptDescr v od | o <- commandOptions command ParseArgs
, od <- optionDescr o]
where
showOptDescr :: a -> OptDescr a -> [String]
showOptDescr x (BoolOpt _ (_,lfT:_) (_,lfF:_) _ enabled)
= case enabled x of
Nothing -> []
Just True -> ["--" ++ lfT]
Just False -> ["--" ++ lfF]
showOptDescr x c@ChoiceOpt{}
= ["--" ++ val | val <- getCurrentChoice c x]
showOptDescr x (ReqArg _ (_ssff,lf:_) _ _ showflag)
= [ "--"++lf++"="++flag
| flag <- showflag x ]
showOptDescr x (OptArg _ (_ssff,lf:_) _ _ _ showflag)
= [ case flag of
Just s -> "--"++lf++"="++s
Nothing -> "--"++lf
| flag <- showflag x ]
showOptDescr _ _
= error "Distribution.Simple.Command.showOptDescr: unreachable"
commandListOptions :: CommandUI flags -> [String]
commandListOptions command =
concatMap listOption $
addCommonFlags ShowArgs $ -- This is a slight hack, we don't want
-- "--list-options" showing up in the
                             -- list options output, so use ShowArgs
commandGetOpts ShowArgs command
where
listOption (GetOpt.Option shortNames longNames _ _) =
[ "-" ++ [name] | name <- shortNames ]
++ [ "--" ++ name | name <- longNames ]
-- | The help text for this command with descriptions of all the options.
commandHelp :: CommandUI flags -> String -> String
commandHelp command pname =
commandUsage command pname
++ (GetOpt.usageInfo ""
. addCommonFlags ShowArgs
$ commandGetOpts ShowArgs command)
++ case commandDescription command of
Nothing -> ""
Just desc -> '\n': desc pname
-- | Make a Command from standard 'GetOpt' options.
makeCommand :: String -- ^ name
-> String -- ^ short description
-> Maybe (String -> String) -- ^ long description
-> flags -- ^ initial\/empty flags
-> (ShowOrParseArgs -> [OptionField flags]) -- ^ options
-> CommandUI flags
makeCommand name shortDesc longDesc defaultFlags options =
CommandUI {
commandName = name,
commandSynopsis = shortDesc,
commandDescription = longDesc,
commandUsage = usage,
commandDefaultFlags = defaultFlags,
commandOptions = options
}
where usage pname = "Usage: " ++ pname ++ " " ++ name ++ " [FLAGS]\n\n"
++ "Flags for " ++ name ++ ":"
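-- A usage sketch for 'makeCommand' (illustrative only; it reuses the
-- hypothetical 'ExampleFlags' and 'outputOption' names from the sketch above
-- and is not part of the original module):
--
-- > exampleCommand :: CommandUI ExampleFlags
-- > exampleCommand =
-- >   makeCommand "example" "Run the example" Nothing
-- >     (ExampleFlags [])
-- >     (\_showOrParseArgs -> [outputOption])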
-- | Common flags that apply to every command
data CommonFlag = HelpFlag | ListOptionsFlag
commonFlags :: ShowOrParseArgs -> [GetOpt.OptDescr CommonFlag]
commonFlags showOrParseArgs = case showOrParseArgs of
ShowArgs -> [help]
ParseArgs -> [help, list]
where
help = GetOpt.Option helpShortFlags ["help"] (GetOpt.NoArg HelpFlag)
"Show this help text"
helpShortFlags = case showOrParseArgs of
ShowArgs -> ['h']
ParseArgs -> ['h', '?']
list = GetOpt.Option [] ["list-options"] (GetOpt.NoArg ListOptionsFlag)
"Print a list of command line flags"
addCommonFlags :: ShowOrParseArgs
-> [GetOpt.OptDescr a]
-> [GetOpt.OptDescr (Either CommonFlag a)]
addCommonFlags showOrParseArgs options =
map (fmapOptDesc Left) (commonFlags showOrParseArgs)
++ map (fmapOptDesc Right) options
where fmapOptDesc f (GetOpt.Option s l d m) =
GetOpt.Option s l (fmapArgDesc f d) m
fmapArgDesc f (GetOpt.NoArg a) = GetOpt.NoArg (f a)
fmapArgDesc f (GetOpt.ReqArg s d) = GetOpt.ReqArg (f . s) d
fmapArgDesc f (GetOpt.OptArg s d) = GetOpt.OptArg (f . s) d
-- | Parse a bunch of command line arguments
--
commandParseArgs :: CommandUI flags
-> Bool -- ^ Is the command a global or subcommand?
-> [String]
-> CommandParse (flags -> flags, [String])
commandParseArgs command global args =
let options = addCommonFlags ParseArgs
$ commandGetOpts ParseArgs command
order | global = GetOpt.RequireOrder
| otherwise = GetOpt.Permute
in case GetOpt.getOpt' order options args of
(flags, _, _, _)
| any listFlag flags -> CommandList (commandListOptions command)
| any helpFlag flags -> CommandHelp (commandHelp command)
where listFlag (Left ListOptionsFlag) = True; listFlag _ = False
helpFlag (Left HelpFlag) = True; helpFlag _ = False
(flags, opts, opts', [])
| global || null opts' -> CommandReadyToGo (accum flags, mix opts opts')
| otherwise -> CommandErrors (unrecognised opts')
(_, _, _, errs) -> CommandErrors errs
where -- Note: It is crucial to use reverse function composition here or to
-- reverse the flags here as we want to process the flags left to right
-- but data flow in function compsition is right to left.
accum flags = foldr (flip (.)) id [ f | Right f <- flags ]
unrecognised opts = [ "unrecognized option `" ++ opt ++ "'\n"
| opt <- opts ]
-- For unrecognised global flags we put them in the position just after
-- the command, if there is one. This gives us a chance to parse them
-- as sub-command rather than global flags.
mix [] ys = ys
mix (x:xs) ys = x:ys++xs
data CommandParse flags = CommandHelp (String -> String)
| CommandList [String]
| CommandErrors [String]
| CommandReadyToGo flags
instance Functor CommandParse where
fmap _ (CommandHelp help) = CommandHelp help
fmap _ (CommandList opts) = CommandList opts
fmap _ (CommandErrors errs) = CommandErrors errs
fmap f (CommandReadyToGo flags) = CommandReadyToGo (f flags)
data Command action = Command String String ([String] -> CommandParse action)
commandAddAction :: CommandUI flags
-> (flags -> [String] -> action)
-> Command action
commandAddAction command action =
Command (commandName command)
(commandSynopsis command)
(fmap (uncurry applyDefaultArgs)
. commandParseArgs command False)
where applyDefaultArgs mkflags args =
let flags = mkflags (commandDefaultFlags command)
in action flags args
commandsRun :: CommandUI a
-> [Command action]
-> [String]
-> CommandParse (a, CommandParse action)
commandsRun globalCommand commands args =
case commandParseArgs globalCommand' True args of
CommandHelp help -> CommandHelp help
CommandList opts -> CommandList (opts ++ commandNames)
CommandErrors errs -> CommandErrors errs
CommandReadyToGo (mkflags, args') -> case args' of
("help":cmdArgs) -> handleHelpCommand cmdArgs
(name:cmdArgs) -> case lookupCommand name of
[Command _ _ action] -> CommandReadyToGo (flags, action cmdArgs)
_ -> CommandReadyToGo (flags, badCommand name)
[] -> CommandReadyToGo (flags, noCommand)
where flags = mkflags (commandDefaultFlags globalCommand)
where
lookupCommand cname = [ cmd | cmd@(Command cname' _ _) <- commands'
, cname'==cname ]
noCommand = CommandErrors ["no command given (try --help)\n"]
badCommand cname = CommandErrors ["unrecognised command: " ++ cname
++ " (try --help)\n"]
commands' = commands ++ [commandAddAction helpCommandUI undefined]
commandNames = [ name | Command name _ _ <- commands' ]
globalCommand' = globalCommand {
commandUsage = \pname ->
(case commandUsage globalCommand pname of
"" -> ""
original -> original ++ "\n")
++ "Usage: " ++ pname ++ " COMMAND [FLAGS]\n"
++ " or: " ++ pname ++ " [GLOBAL FLAGS]\n\n"
++ "Global flags:",
commandDescription = Just $ \pname ->
"Commands:\n"
++ unlines [ " " ++ align name ++ " " ++ description
| Command name description _ <- commands' ]
++ case commandDescription globalCommand of
Nothing -> ""
Just desc -> '\n': desc pname
}
where maxlen = maximum [ length name | Command name _ _ <- commands' ]
align str = str ++ replicate (maxlen - length str) ' '
-- A bit of a hack: support "prog help" as a synonym of "prog --help"
-- furthermore, support "prog help command" as "prog command --help"
handleHelpCommand cmdArgs =
case commandParseArgs helpCommandUI True cmdArgs of
CommandHelp help -> CommandHelp help
CommandList list -> CommandList (list ++ commandNames)
CommandErrors _ -> CommandHelp globalHelp
CommandReadyToGo (_,[]) -> CommandHelp globalHelp
CommandReadyToGo (_,(name:cmdArgs')) ->
case lookupCommand name of
[Command _ _ action] ->
case action ("--help":cmdArgs') of
CommandHelp help -> CommandHelp help
CommandList _ -> CommandList []
_ -> CommandHelp globalHelp
_ -> badCommand name
where globalHelp = commandHelp globalCommand'
helpCommandUI =
(makeCommand "help" "Help about commands" Nothing () (const [])) {
commandUsage = \pname ->
"Usage: " ++ pname ++ " help [FLAGS]\n"
++ " or: " ++ pname ++ " help COMMAND [FLAGS]\n\n"
++ "Flags for help:"
}
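-- A hedged sketch of the dispatch loop usually written around 'commandsRun'
-- ('globalCommand', 'commands', 'args' and 'progName' are assumed to be
-- defined elsewhere):
--
-- > case commandsRun globalCommand commands args of
-- >   CommandHelp help   -> putStr (help progName)
-- >   CommandList opts   -> putStr (unlines opts)
-- >   CommandErrors errs -> mapM_ putStrLn errs
-- >   CommandReadyToGo (_globalFlags, CommandReadyToGo action) -> action
-- >   CommandReadyToGo (_globalFlags, _)                       -> return ()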
-- | Utility function, many commands do not accept additional flags. This
-- action fails with a helpful error message if the user supplies any extra.
--
noExtraFlags :: [String] -> IO ()
noExtraFlags [] = return ()
noExtraFlags extraFlags =
die $ "Unrecognised flags: " ++ intercalate ", " extraFlags
--TODO: eliminate this function and turn it into a variant on commandAddAction
-- instead like commandAddActionNoArgs that doesn't supply the [String]
| null | https://raw.githubusercontent.com/ranjitjhala/haddock-annot/ffaa182b17c3047887ff43dbe358c246011903f6/Cabal-1.10.1.1/Distribution/Simple/Command.hs | haskell | ---------------------------------------------------------------------------
|
Module : Distribution.Simple.Command
Maintainer :
Portability : portable
organised into a number of named sub-commands (much like darcs). The
description, a set of flags. Commands can be associated with actions and
run. It handles some common stuff automatically, like the @--help@ and
command line completion flags. It is designed to allow other tools make
* Command interface
** Constructing commands
** Associating actions with commands
** Running commands
* Option Fields
** Constructing Option Fields
* Option Descriptions
| The name of the command as it would be entered on the command line.
For example @\"build\"@.
| The useage line summary for this command
| Additional explanation of the command to use in help texts.
| All the Option fields for this command
| We usually have a datatype for storing configuration values, where
every field stores a configuration option, and the user sets
the value either via command line flags or a configuration file.
An individual OptionField models such a field, and we usually
build a list of options associated to a configuration datatype.
True
False
| Short command line option strings
| Long command line option strings
| Create a string-valued command line interface.
| Create a string-valued command line interface with a default value.
| (String -> a) variant of "reqArg"
| create a Choice option
which does not conflict with a previous one is used.
We parse for a single value instead of a list,
The behaviour in this case is not clear, and it has no use so far,
so we avoid future surprises by not implementing it.
| Show flags in the standard long option command line format
This is a slight hack, we don't want
"--list-options" showing up in the
| The help text for this command with descriptions of all the options.
^ name
^ short description
^ long description
^ initial\/empty flags
^ options
| Common flags that apply to every command
| Parse a bunch of command line arguments
^ Is the command a global or subcommand?
Note: It is crucial to use reverse function composition here or to
reverse the flags here as we want to process the flags left to right
but data flow in function compsition is right to left.
For unrecognised global flags we put them in the position just after
the command, if there is one. This gives us a chance to parse them
as sub-command rather than global flags.
A bit of a hack: support "prog help" as a synonym of "prog --help"
furthermore, support "prog help command" as "prog command --help"
| Utility function, many commands do not accept additional flags. This
action fails with a helpful error message if the user supplies any extra.
TODO: eliminate this function and turn it into a variant on commandAddAction
instead like commandAddActionNoArgs that doesn't supply the [String] | Copyright : 2007
This is to do with command line handling. The Cabal command line is
'CommandUI' abstraction represents one of these sub-commands, with a name,
derived commands. This feature is used heavily in @cabal-install@.
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are
met:
* Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above
copyright notice, this list of conditions and the following
disclaimer in the documentation and/or other materials provided
with the distribution.
* Neither the name of Isaac Jones nor the names of other
contributors may be used to endorse or promote products derived
from this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. -}
module Distribution.Simple.Command (
CommandUI(..),
commandShowOptions,
CommandParse(..),
commandParseArgs,
ShowOrParseArgs(..),
makeCommand,
Command,
commandAddAction,
noExtraFlags,
commandsRun,
OptionField(..), Name,
option, multiOption,
-- ** Liftings & Projections
liftOption, viewAsFieldDescr,
OptDescr(..), Description, SFlags, LFlags, OptFlags, ArgPlaceHolder,
-- ** OptDescr 'smart' constructors
MkOptDescr,
reqArg, reqArg', optArg, optArg', noArg,
boolOpt, boolOpt', choiceOpt, choiceOptFromEnum
) where
import Control.Monad
import Data.Char (isAlpha, toLower)
import Data.List (sortBy)
import Data.Maybe
import Data.Monoid
import qualified Distribution.GetOpt as GetOpt
import Distribution.Text
( Text(disp, parse) )
import Distribution.ParseUtils
import Distribution.ReadE
import Distribution.Simple.Utils (die, intercalate)
import Text.PrettyPrint.HughesPJ ( punctuate, cat, comma, text, empty)
data CommandUI flags = CommandUI {
commandName :: String,
-- | A short, one line description of the command to use in help texts.
commandSynopsis :: String,
commandUsage :: String -> String,
commandDescription :: Maybe (String -> String),
-- | Initial\/empty flags
commandDefaultFlags :: flags,
commandOptions :: ShowOrParseArgs -> [OptionField flags]
}
data ShowOrParseArgs = ShowArgs | ParseArgs
type Name = String
type Description = String
data OptionField a = OptionField {
optionName :: Name,
optionDescr :: [OptDescr a] }
-- | An OptionField takes one or more OptDescrs, describing the command line interface for the field.
data OptDescr a = ReqArg Description OptFlags ArgPlaceHolder (ReadE (a->a)) (a -> [String])
| OptArg Description OptFlags ArgPlaceHolder (ReadE (a->a)) (a->a) (a -> [Maybe String])
| ChoiceOpt [(Description, OptFlags, a->a, a -> Bool)]
type SFlags = [Char]
type LFlags = [String]
type OptFlags = (SFlags,LFlags)
type ArgPlaceHolder = String
-- | Create an option taking a single OptDescr.
-- No explicit Name is given for the Option, the name is the first LFlag given.
option :: SFlags -> LFlags -> Description -> get -> set -> MkOptDescr get set a -> OptionField a
option sf lf@(n:_) d get set arg = OptionField n [arg sf lf d get set]
option _ _ _ _ _ _ = error "Distribution.command.option: An OptionField must have at least one LFlag"
-- | Create an option taking several OptDescrs.
-- You will have to give the flags and description individually to the OptDescr constructor.
multiOption :: Name -> get -> set
-> [get -> set -> OptDescr a] -- ^ MkOptDescr constructors partially applied to flags and description.
-> OptionField a
multiOption n get set args = OptionField n [arg get set | arg <- args]
type MkOptDescr get set a = SFlags -> LFlags -> Description -> get -> set -> OptDescr a
reqArg :: Monoid b => ArgPlaceHolder -> ReadE b -> (b -> [String])
-> MkOptDescr (a -> b) (b -> a -> a) a
reqArg ad mkflag showflag sf lf d get set =
ReqArg d (sf,lf) ad (fmap (\a b -> set (get b `mappend` a) b) mkflag) (showflag . get)
optArg :: Monoid b => ArgPlaceHolder -> ReadE b -> b -> (b -> [Maybe String])
-> MkOptDescr (a -> b) (b -> a -> a) a
optArg ad mkflag def showflag sf lf d get set =
OptArg d (sf,lf) ad (fmap (\a b -> set (get b `mappend` a) b) mkflag)
(\b -> set (get b `mappend` def) b)
(showflag . get)
reqArg' :: Monoid b => ArgPlaceHolder -> (String -> b) -> (b -> [String])
-> MkOptDescr (a -> b) (b -> a -> a) a
reqArg' ad mkflag showflag =
reqArg ad (succeedReadE mkflag) showflag
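-- A made-up example of a string-valued option built with reqArg' (the
-- 'Flags' record and its 'flagOutputDir' field are hypothetical, used only
-- to illustrate the get/set plumbing):
--
-- > outputDirOpt :: OptionField Flags
-- > outputDirOpt = option "o" ["output-dir"] "Directory for build products"
-- >                  flagOutputDir (\v fs -> fs { flagOutputDir = v })
-- >                  (reqArg' "DIR" (\d -> [d]) id)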
-- | (String -> a) variant of "optArg"
optArg' :: Monoid b => ArgPlaceHolder -> (Maybe String -> b) -> (b -> [Maybe String])
-> MkOptDescr (a -> b) (b -> a -> a) a
optArg' ad mkflag showflag =
optArg ad (succeedReadE (mkflag . Just)) def showflag
where def = mkflag Nothing
noArg :: (Eq b, Monoid b) => b -> MkOptDescr (a -> b) (b -> a -> a) a
noArg flag sf lf d = choiceOpt [(flag, (sf,lf), d)] sf lf d
boolOpt :: (b -> Maybe Bool) -> (Bool -> b) -> SFlags -> SFlags -> MkOptDescr (a -> b) (b -> a -> a) a
boolOpt g s sfT sfF _sf _lf@(n:_) d get set =
BoolOpt d (sfT, ["enable-"++n]) (sfF, ["disable-"++n]) (set.s) (g.get)
boolOpt _ _ _ _ _ _ _ _ _ = error "Distribution.Simple.Setup.boolOpt: unreachable"
boolOpt' :: (b -> Maybe Bool) -> (Bool -> b) -> OptFlags -> OptFlags -> MkOptDescr (a -> b) (b -> a -> a) a
boolOpt' g s ffT ffF _sf _lf d get set = BoolOpt d ffT ffF (set.s) (g . get)
choiceOpt :: Eq b => [(b,OptFlags,Description)] -> MkOptDescr (a -> b) (b -> a -> a) a
choiceOpt aa_ff _sf _lf _d get set = ChoiceOpt alts
where alts = [(d,flags, set alt, (==alt) . get) | (alt,flags,d) <- aa_ff]
-- | create a Choice option out of an enumeration type.
-- As long flags, the Show output is used. As short flags, the first character
-- which does not conflict with a previous one is used.
choiceOptFromEnum :: (Bounded b, Enum b, Show b, Eq b) => MkOptDescr (a -> b) (b -> a -> a) a
choiceOptFromEnum _sf _lf d get = choiceOpt [ (x, (sf, [map toLower $ show x]), d')
| (x, sf) <- sflags'
, let d' = d ++ show x]
_sf _lf d get
where sflags' = foldl f [] [firstOne..]
f prev x = let prevflags = concatMap snd prev in
prev ++ take 1 [(x, [toLower sf]) | sf <- show x, isAlpha sf
, toLower sf `notElem` prevflags]
firstOne = minBound `asTypeOf` get undefined
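-- A sketch of what choiceOptFromEnum generates for a hypothetical
-- enumeration (not defined in this module):
--
-- > data Colour = Red | Green | Blue deriving (Show, Eq, Enum, Bounded)
--
-- the resulting long flags would be --red, --green and --blue, with short
-- flags 'r', 'g' and 'b' (the first characters that do not conflict).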
commandGetOpts :: ShowOrParseArgs -> CommandUI flags -> [GetOpt.OptDescr (flags -> flags)]
commandGetOpts showOrParse command =
concatMap viewAsGetOpt (commandOptions command showOrParse)
viewAsGetOpt :: OptionField a -> [GetOpt.OptDescr (a->a)]
viewAsGetOpt (OptionField _n aa) = concatMap optDescrToGetOpt aa
where
optDescrToGetOpt (ReqArg d (cs,ss) arg_desc set _) =
[GetOpt.Option cs ss (GetOpt.ReqArg set' arg_desc) d]
where set' = readEOrFail set
optDescrToGetOpt (OptArg d (cs,ss) arg_desc set def _) =
[GetOpt.Option cs ss (GetOpt.OptArg set' arg_desc) d]
where set' Nothing = def
set' (Just txt) = readEOrFail set txt
optDescrToGetOpt (ChoiceOpt alts) =
[GetOpt.Option sf lf (GetOpt.NoArg set) d | (d,(sf,lf),set,_) <- alts ]
optDescrToGetOpt (BoolOpt d (sfT,lfT) (sfF, lfF) set _) =
[ GetOpt.Option sfT lfT (GetOpt.NoArg (set True)) ("Enable " ++ d)
, GetOpt.Option sfF lfF (GetOpt.NoArg (set False)) ("Disable " ++ d) ]
-- | to view as a FieldDescr, we sort the list of interfaces (Req > Bool > Choice > Opt) and consider only the first one.
viewAsFieldDescr :: OptionField a -> FieldDescr a
viewAsFieldDescr (OptionField _n []) = error "Distribution.command.viewAsFieldDescr: unexpected"
viewAsFieldDescr (OptionField n dd) = FieldDescr n get set
where optDescr = head $ sortBy cmp dd
ReqArg{} `cmp` ReqArg{} = EQ
ReqArg{} `cmp` _ = GT
BoolOpt{} `cmp` ReqArg{} = LT
BoolOpt{} `cmp` BoolOpt{} = EQ
BoolOpt{} `cmp` _ = GT
ChoiceOpt{} `cmp` ReqArg{} = LT
ChoiceOpt{} `cmp` BoolOpt{} = LT
ChoiceOpt{} `cmp` ChoiceOpt{} = EQ
ChoiceOpt{} `cmp` _ = GT
OptArg{} `cmp` OptArg{} = EQ
OptArg{} `cmp` _ = LT
get t = case optDescr of
ReqArg _ _ _ _ ppr ->
(cat . punctuate comma . map text . ppr) t
OptArg _ _ _ _ _ ppr ->
case ppr t of
[] -> empty
(Nothing : _) -> text "True"
(Just a : _) -> text a
ChoiceOpt alts ->
fromMaybe empty $ listToMaybe
[ text lf | (_,(_,lf:_), _,enabled) <- alts, enabled t]
BoolOpt _ _ _ _ enabled -> (maybe empty disp . enabled) t
set line val a =
case optDescr of
ReqArg _ _ _ readE _ -> ($ a) `liftM` runE line n readE val
-- as one can't really implement parseList :: a -> ReadE [a]
-- with the current ReadE definition
ChoiceOpt{} -> case getChoiceByLongFlag optDescr val of
Just f -> return (f a)
_ -> syntaxError line val
BoolOpt _ _ _ setV _ -> (`setV` a) `liftM` runP line n parse val
error "Command.optionToFieldDescr: feature not implemented"
getChoiceByLongFlag :: OptDescr b -> String -> Maybe (b->b)
getChoiceByLongFlag (ChoiceOpt alts) val = listToMaybe [ set | (_,(_sf,lf:_), set, _) <- alts
, lf == val]
getChoiceByLongFlag _ _ = error "Distribution.command.getChoiceByLongFlag: expected a choice option"
getCurrentChoice :: OptDescr a -> a -> [String]
getCurrentChoice (ChoiceOpt alts) a =
[ lf | (_,(_sf,lf:_), _, currentChoice) <- alts, currentChoice a]
getCurrentChoice _ _ = error "Command.getChoice: expected a Choice OptDescr"
liftOption :: (b -> a) -> (a -> (b -> b)) -> OptionField a -> OptionField b
liftOption get' set' opt = opt { optionDescr = liftOptDescr get' set' `map` optionDescr opt}
liftOptDescr :: (b -> a) -> (a -> (b -> b)) -> OptDescr a -> OptDescr b
liftOptDescr get' set' (ChoiceOpt opts) =
ChoiceOpt [ (d, ff, liftSet get' set' set , (get . get'))
| (d, ff, set, get) <- opts]
liftOptDescr get' set' (OptArg d ff ad set def get) =
OptArg d ff ad (liftSet get' set' `fmap` set) (liftSet get' set' def) (get . get')
liftOptDescr get' set' (ReqArg d ff ad set get) =
ReqArg d ff ad (liftSet get' set' `fmap` set) (get . get')
liftOptDescr get' set' (BoolOpt d ffT ffF set get) =
BoolOpt d ffT ffF (liftSet get' set' . set) (get . get')
liftSet :: (b -> a) -> (a -> (b -> b)) -> (a -> a) -> b -> b
liftSet get' set' set x = set' (set $ get' x) x
commandShowOptions :: CommandUI flags -> flags -> [String]
commandShowOptions command v = concat
[ showOptDescr v od | o <- commandOptions command ParseArgs
, od <- optionDescr o]
where
showOptDescr :: a -> OptDescr a -> [String]
showOptDescr x (BoolOpt _ (_,lfT:_) (_,lfF:_) _ enabled)
= case enabled x of
Nothing -> []
Just True -> ["--" ++ lfT]
Just False -> ["--" ++ lfF]
showOptDescr x c@ChoiceOpt{}
= ["--" ++ val | val <- getCurrentChoice c x]
showOptDescr x (ReqArg _ (_ssff,lf:_) _ _ showflag)
= [ "--"++lf++"="++flag
| flag <- showflag x ]
showOptDescr x (OptArg _ (_ssff,lf:_) _ _ _ showflag)
= [ case flag of
Just s -> "--"++lf++"="++s
Nothing -> "--"++lf
| flag <- showflag x ]
showOptDescr _ _
= error "Distribution.Simple.Command.showOptDescr: unreachable"
commandListOptions :: CommandUI flags -> [String]
commandListOptions command =
concatMap listOption $
addCommonFlags ShowArgs $ -- we don't want "--list-options" showing up in the
                          -- list options output, so use ShowArgs
commandGetOpts ShowArgs command
where
listOption (GetOpt.Option shortNames longNames _ _) =
[ "-" ++ [name] | name <- shortNames ]
++ [ "--" ++ name | name <- longNames ]
commandHelp :: CommandUI flags -> String -> String
commandHelp command pname =
commandUsage command pname
++ (GetOpt.usageInfo ""
. addCommonFlags ShowArgs
$ commandGetOpts ShowArgs command)
++ case commandDescription command of
Nothing -> ""
Just desc -> '\n': desc pname
-- | Make a Command from standard 'GetOpt' options.
makeCommand :: String                         -- ^ name
            -> String                         -- ^ short description
            -> Maybe (String -> String)       -- ^ long description
            -> flags                          -- ^ initial\/empty flags
            -> (ShowOrParseArgs -> [OptionField flags]) -- ^ options
            -> CommandUI flags
makeCommand name shortDesc longDesc defaultFlags options =
CommandUI {
commandName = name,
commandSynopsis = shortDesc,
commandDescription = longDesc,
commandUsage = usage,
commandDefaultFlags = defaultFlags,
commandOptions = options
}
where usage pname = "Usage: " ++ pname ++ " " ++ name ++ " [FLAGS]\n\n"
++ "Flags for " ++ name ++ ":"
data CommonFlag = HelpFlag | ListOptionsFlag
commonFlags :: ShowOrParseArgs -> [GetOpt.OptDescr CommonFlag]
commonFlags showOrParseArgs = case showOrParseArgs of
ShowArgs -> [help]
ParseArgs -> [help, list]
where
help = GetOpt.Option helpShortFlags ["help"] (GetOpt.NoArg HelpFlag)
"Show this help text"
helpShortFlags = case showOrParseArgs of
ShowArgs -> ['h']
ParseArgs -> ['h', '?']
list = GetOpt.Option [] ["list-options"] (GetOpt.NoArg ListOptionsFlag)
"Print a list of command line flags"
addCommonFlags :: ShowOrParseArgs
-> [GetOpt.OptDescr a]
-> [GetOpt.OptDescr (Either CommonFlag a)]
addCommonFlags showOrParseArgs options =
map (fmapOptDesc Left) (commonFlags showOrParseArgs)
++ map (fmapOptDesc Right) options
where fmapOptDesc f (GetOpt.Option s l d m) =
GetOpt.Option s l (fmapArgDesc f d) m
fmapArgDesc f (GetOpt.NoArg a) = GetOpt.NoArg (f a)
fmapArgDesc f (GetOpt.ReqArg s d) = GetOpt.ReqArg (f . s) d
fmapArgDesc f (GetOpt.OptArg s d) = GetOpt.OptArg (f . s) d
commandParseArgs :: CommandUI flags
-> Bool
-> [String]
-> CommandParse (flags -> flags, [String])
commandParseArgs command global args =
let options = addCommonFlags ParseArgs
$ commandGetOpts ParseArgs command
order | global = GetOpt.RequireOrder
| otherwise = GetOpt.Permute
in case GetOpt.getOpt' order options args of
(flags, _, _, _)
| any listFlag flags -> CommandList (commandListOptions command)
| any helpFlag flags -> CommandHelp (commandHelp command)
where listFlag (Left ListOptionsFlag) = True; listFlag _ = False
helpFlag (Left HelpFlag) = True; helpFlag _ = False
(flags, opts, opts', [])
| global || null opts' -> CommandReadyToGo (accum flags, mix opts opts')
| otherwise -> CommandErrors (unrecognised opts')
(_, _, _, errs) -> CommandErrors errs
where accum flags = foldr (flip (.)) id [ f | Right f <- flags ]
unrecognised opts = [ "unrecognized option `" ++ opt ++ "'\n"
| opt <- opts ]
mix [] ys = ys
mix (x:xs) ys = x:ys++xs
data CommandParse flags = CommandHelp (String -> String)
| CommandList [String]
| CommandErrors [String]
| CommandReadyToGo flags
instance Functor CommandParse where
fmap _ (CommandHelp help) = CommandHelp help
fmap _ (CommandList opts) = CommandList opts
fmap _ (CommandErrors errs) = CommandErrors errs
fmap f (CommandReadyToGo flags) = CommandReadyToGo (f flags)
data Command action = Command String String ([String] -> CommandParse action)
commandAddAction :: CommandUI flags
-> (flags -> [String] -> action)
-> Command action
commandAddAction command action =
Command (commandName command)
(commandSynopsis command)
(fmap (uncurry applyDefaultArgs)
. commandParseArgs command False)
where applyDefaultArgs mkflags args =
let flags = mkflags (commandDefaultFlags command)
in action flags args
commandsRun :: CommandUI a
-> [Command action]
-> [String]
-> CommandParse (a, CommandParse action)
commandsRun globalCommand commands args =
case commandParseArgs globalCommand' True args of
CommandHelp help -> CommandHelp help
CommandList opts -> CommandList (opts ++ commandNames)
CommandErrors errs -> CommandErrors errs
CommandReadyToGo (mkflags, args') -> case args' of
("help":cmdArgs) -> handleHelpCommand cmdArgs
(name:cmdArgs) -> case lookupCommand name of
[Command _ _ action] -> CommandReadyToGo (flags, action cmdArgs)
_ -> CommandReadyToGo (flags, badCommand name)
[] -> CommandReadyToGo (flags, noCommand)
where flags = mkflags (commandDefaultFlags globalCommand)
where
lookupCommand cname = [ cmd | cmd@(Command cname' _ _) <- commands'
, cname'==cname ]
noCommand = CommandErrors ["no command given (try --help)\n"]
badCommand cname = CommandErrors ["unrecognised command: " ++ cname
++ " (try --help)\n"]
commands' = commands ++ [commandAddAction helpCommandUI undefined]
commandNames = [ name | Command name _ _ <- commands' ]
globalCommand' = globalCommand {
commandUsage = \pname ->
(case commandUsage globalCommand pname of
"" -> ""
original -> original ++ "\n")
++ "Usage: " ++ pname ++ " COMMAND [FLAGS]\n"
++ " or: " ++ pname ++ " [GLOBAL FLAGS]\n\n"
++ "Global flags:",
commandDescription = Just $ \pname ->
"Commands:\n"
++ unlines [ " " ++ align name ++ " " ++ description
| Command name description _ <- commands' ]
++ case commandDescription globalCommand of
Nothing -> ""
Just desc -> '\n': desc pname
}
where maxlen = maximum [ length name | Command name _ _ <- commands' ]
align str = str ++ replicate (maxlen - length str) ' '
handleHelpCommand cmdArgs =
case commandParseArgs helpCommandUI True cmdArgs of
CommandHelp help -> CommandHelp help
CommandList list -> CommandList (list ++ commandNames)
CommandErrors _ -> CommandHelp globalHelp
CommandReadyToGo (_,[]) -> CommandHelp globalHelp
CommandReadyToGo (_,(name:cmdArgs')) ->
case lookupCommand name of
[Command _ _ action] ->
case action ("--help":cmdArgs') of
CommandHelp help -> CommandHelp help
CommandList _ -> CommandList []
_ -> CommandHelp globalHelp
_ -> badCommand name
where globalHelp = commandHelp globalCommand'
helpCommandUI =
(makeCommand "help" "Help about commands" Nothing () (const [])) {
commandUsage = \pname ->
"Usage: " ++ pname ++ " help [FLAGS]\n"
++ " or: " ++ pname ++ " help COMMAND [FLAGS]\n\n"
++ "Flags for help:"
}
noExtraFlags :: [String] -> IO ()
noExtraFlags [] = return ()
noExtraFlags extraFlags =
die $ "Unrecognised flags: " ++ intercalate ", " extraFlags
|
5c70611cf1542d9820a10090655721de5e61d9da9ccefad31a2afad13e187930 | zotonic/zotonic | m_linkedin.erl | @author < >
%% @copyright 2017
%% @doc Model for mod_linkedin
%% Copyright 2017
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
%% You may obtain a copy of the License at
%%
%%     http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing, software
%% distributed under the License is distributed on an "AS IS" BASIS,
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
-module(m_linkedin).
-behaviour (zotonic_model).
-export([
m_get/3,
is_useauth/1
]).
-include_lib("kernel/include/logger.hrl").
-spec m_get( list(), zotonic_model:opt_msg(), z:context() ) -> zotonic_model:return().
m_get([ <<"useauth">> | Rest ], _Msg, Context) ->
{ok, {is_useauth(Context), Rest}};
m_get(_Vs, _Msg, _Context) ->
{error, unknown_path}.
-spec is_useauth( z:context() ) -> boolean().
is_useauth(Context) ->
case m_config:get_value(mod_linkedin, appid, Context) of
undefined -> false;
<<>> -> false;
_ -> m_config:get_boolean(mod_linkedin, useauth, Context)
end.
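%% Illustrative only (not part of the original module): the model is queried
%% through the model lookup API, roughly as
%%
%%   {ok, {UseAuth, []}} = m_linkedin:m_get([<<"useauth">>], undefined, Context)
%%
%% where UseAuth is a boolean derived from the mod_linkedin appid/useauth
%% configuration keys.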
| null | https://raw.githubusercontent.com/zotonic/zotonic/1bb4aa8a0688d007dd8ec8ba271546f658312da8/apps/zotonic_mod_linkedin/src/models/m_linkedin.erl | erlang | @doc Model for mod_linkedin
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
-2.0
Unless required by applicable law or agreed to in writing, software
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License. | @author < >
-module(m_linkedin).
-behaviour (zotonic_model).
-export([
m_get/3,
is_useauth/1
]).
-include_lib("kernel/include/logger.hrl").
-spec m_get( list(), zotonic_model:opt_msg(), z:context() ) -> zotonic_model:return().
m_get([ <<"useauth">> | Rest ], _Msg, Context) ->
{ok, {is_useauth(Context), Rest}};
m_get(_Vs, _Msg, _Context) ->
{error, unknown_path}.
-spec is_useauth( z:context() ) -> boolean().
is_useauth(Context) ->
case m_config:get_value(mod_linkedin, appid, Context) of
undefined -> false;
<<>> -> false;
_ -> m_config:get_boolean(mod_linkedin, useauth, Context)
end.
|
0fb9f9756bea041342aa0754a5f849e12133b2e6e17680eefdfceefbf1f67c01 | silky/quipper | QFTAdder.hs | This file is part of Quipper. Copyright (C) 2011-2016. Please see the
-- file COPYRIGHT for a list of authors, copyright holders, licensing,
-- and other details. All rights reserved.
--
-- ======================================================================
import Quipper
import QuipperLib.Arith
import QuipperLib.QFTAdd
import QuipperLib.Simulation
import QuipperLib.Unboxing
import System.Environment
---- Functions for testing the QFTAdd circuit --------
-- | Output a preview of the qft_add_in_place circuit for quantum integers of
-- the given size
print_qft_add :: Int -> IO ()
print_qft_add m = print_generic Preview qft_add_in_place (qdint_shape m) (qdint_shape m)
-- | Simulate the running of the qft_add_in_place circuit for the given IntM inputs
run_qft_add :: IntM -> IntM -> IO (IntM,IntM)
run_qft_add = run_generic_io (undefined :: Double) (unbox qft_add_in_place)
-- | A wrapper around 'run_qft_add' so that it can be used with standard Integer
-- arguments.
test_add :: Integer -> Integer -> IO Integer
test_add a b = if (a < 0) || (b < 0) then error "test_add: negative argument" else
do
let m = 2 + ceiling (log (fromInteger (a+b)) / log 2) -- a slight cheat to work out how many qubits to use
let a' = intm m a
let b' = intm m b
(_,ab) <- run_qft_add a' b'
return (fromIntegral ab)
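-- For instance (illustrative, and slow because it runs the simulator):
-- 'test_add 3 4' simulates the adder on 3 and 4 and should return 7.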
-- | A datatype for the possible command line arguments
data Args = Usage | One Int | Two Integer Integer
-- | A function to parse the command line arguments
parseArgs :: [String] -> Args
parseArgs [s1] = case reads s1 of
[(x1,_)] -> One x1
_ -> Usage
parseArgs [s1,s2] = case (reads s1,reads s2) of
([(x1,_)],[(x2,_)]) -> Two x1 x2
_ -> Usage
parseArgs _ = Usage
-- | The main function calls either test_add or print_qft_add depending upon
-- command line arguments.
main :: IO ()
main = do
args <- getArgs
case parseArgs args of
Usage -> usage
One x -> print_qft_add x
Two x1 x2 -> do
res <- test_add x1 x2
putStrLn (show x1 ++ " + " ++ show x2 ++ " = " ++ show res)
usage :: IO ()
usage = do
name <- getProgName
putStrLn ("usage: " ++ name ++ " num1 <num2>")
putStrLn " - one argument:"
putStrLn " preview the circuit for quantum integers of that length"
putStrLn " - two arguments:"
putStrLn " simulate the circuit for adding the two numbers"
putStrLn " (note that quantum simulation is not efficient)"
| null | https://raw.githubusercontent.com/silky/quipper/1ef6d031984923d8b7ded1c14f05db0995791633/tests/QFTAdder.hs | haskell | file COPYRIGHT for a list of authors, copyright holders, licensing,
and other details. All rights reserved.
======================================================================
-- Functions for testing the QFTAdd circuit --------
| Output a preview of the qft_add_in_place circuit for quantum integers of
the given size
| Simulate the running of the qft_add_in_place circuit for the given IntM inputs
arguments.
a slight cheat to work out how many qubits to use
| A datatype for the possible command line arguments
| A function to parse the command line arguments
| The main function calls either test_add or print_qft_add depending upon
command line arguments. | This file is part of Quipper. Copyright (C) 2011-2016. Please see the
import Quipper
import QuipperLib.Arith
import QuipperLib.QFTAdd
import QuipperLib.Simulation
import QuipperLib.Unboxing
import System.Environment
print_qft_add :: Int -> IO ()
print_qft_add m = print_generic Preview qft_add_in_place (qdint_shape m) (qdint_shape m)
run_qft_add :: IntM -> IntM -> IO (IntM,IntM)
run_qft_add = run_generic_io (undefined :: Double) (unbox qft_add_in_place)
-- | A wrapper around 'run_qft_add' so that it can be used with standard Integer arguments.
test_add :: Integer -> Integer -> IO Integer
test_add a b = if (a < 0) || (b < 0) then error "test_add: negative argument" else
do
let m = 2 + ceiling (log (fromInteger (a+b)) / log 2)
let a' = intm m a
let b' = intm m b
(_,ab) <- run_qft_add a' b'
return (fromIntegral ab)
data Args = Usage | One Int | Two Integer Integer
parseArgs :: [String] -> Args
parseArgs [s1] = case reads s1 of
[(x1,_)] -> One x1
_ -> Usage
parseArgs [s1,s2] = case (reads s1,reads s2) of
([(x1,_)],[(x2,_)]) -> Two x1 x2
_ -> Usage
parseArgs _ = Usage
main :: IO ()
main = do
args <- getArgs
case parseArgs args of
Usage -> usage
One x -> print_qft_add x
Two x1 x2 -> do
res <- test_add x1 x2
putStrLn (show x1 ++ " + " ++ show x2 ++ " = " ++ show res)
usage :: IO ()
usage = do
name <- getProgName
putStrLn ("usage: " ++ name ++ " num1 <num2>")
putStrLn " - one argument:"
putStrLn " preview the circuit for quantum integers of that length"
putStrLn " - two arguments:"
putStrLn " simulate the circuit for adding the two numbers"
putStrLn " (note that quantum simulation is not efficient)"
|
b30981be8cc3b5ea23ca892d76bfe6acafd7485cf2c0c054bfe24342476c7b66 | ocsigen/js_of_ocaml | svg.ml | Graph viewer
* Copyright (C) 2010 Jérôme Vouillon
* Laboratoire PPS - CNRS Université Paris Diderot
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 2 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with this program; if not, write to the Free Software
* Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
*)
type command =
| Move_to of float * float
| Curve_to of float * float * float * float * float * float
type color = float * float * float
type element =
| Path of command list * color option * color option
| Ellipse of float * float * float * float * color option * color option
| Polygon of (float * float) list * color option * color option
| Text of float * float * string * string * float * color option * color option
(****)
let width = 16499.
let height = 22807.
let h = 8192
let w = truncate ((width *. float h /. height) +. 0.5)
let s = Cairo.image_surface_create Cairo.FORMAT_ARGB32 w h
let perform_draw ctx fill stroke =
(*
print_extent ctx fill stroke;
*)
(match fill with
| Some (r, g, b) ->
Cairo.set_source_rgb ctx r g b;
if stroke <> None then Cairo.fill_preserve ctx else Cairo.fill ctx
| None -> ());
match stroke with
| Some (r, g, b) ->
Cairo.set_source_rgb ctx r g b;
Cairo.stroke ctx
| None -> ()
let pi = 4. *. atan 1.
let draw_element ctx e =
match e with
| Path (cmd, fill, stroke) ->
List.iter
(fun c ->
match c with
| Move_to (x, y) -> Cairo.move_to ctx x y
| Curve_to (x1, y1, x2, y2, x3, y3) -> Cairo.curve_to ctx x1 y1 x2 y2 x3 y3)
cmd;
perform_draw ctx fill stroke
| Ellipse (cx, cy, rx, ry, fill, stroke) ->
Cairo.save ctx;
Cairo.translate ctx cx cy;
Cairo.scale ctx rx ry;
Cairo.arc ctx 0. 0. 1. 0. (2. *. pi);
Cairo.restore ctx;
perform_draw ctx fill stroke
| Polygon (points, fill, stroke) -> (
match points with
| (x, y) :: rem ->
Cairo.move_to ctx x y;
List.iter (fun (x, y) -> Cairo.line_to ctx x y) rem;
Cairo.close_path ctx;
perform_draw ctx fill stroke
| [] -> ())
| Text (x, y, txt, font, font_size, fill, stroke) ->
let ext = Cairo.text_extents ctx txt in
Cairo.move_to ctx (x -. ext.Cairo.x_bearing -. (ext.Cairo.text_width /. 2.)) y;
Cairo.select_font_face ctx font Cairo.FONT_SLANT_NORMAL Cairo.FONT_WEIGHT_NORMAL;
Cairo.set_font_size ctx font_size;
Cairo.show_text ctx txt;
perform_draw ctx fill stroke
let path_extent ctx fill stroke =
if stroke <> None then Cairo.stroke_extents ctx else Cairo.fill_extents ctx
let compute_extent ctx e =
Cairo.new_path ctx;
match e with
| Path (cmd, fill, stroke) ->
List.iter
(fun c ->
match c with
| Move_to (x, y) -> Cairo.move_to ctx x y
| Curve_to (x1, y1, x2, y2, x3, y3) -> Cairo.curve_to ctx x1 y1 x2 y2 x3 y3)
cmd;
path_extent ctx fill stroke
| Ellipse (cx, cy, rx, ry, fill, stroke) ->
Cairo.save ctx;
Cairo.translate ctx cx cy;
Cairo.scale ctx rx ry;
Cairo.arc ctx 0. 0. 1. 0. (2. *. pi);
Cairo.restore ctx;
path_extent ctx fill stroke
| Polygon (points, fill, stroke) -> (
match points with
| (x, y) :: rem ->
Cairo.move_to ctx x y;
List.iter (fun (x, y) -> Cairo.line_to ctx x y) rem;
Cairo.close_path ctx;
path_extent ctx fill stroke
| [] -> assert false)
| Text (x, y, txt, font, font_size, fill, stroke) ->
let ext = Cairo.text_extents ctx txt in
( x -. (ext.Cairo.text_width /. 2.)
, y +. ext.Cairo.y_bearing
, x +. (ext.Cairo.text_width /. 2.)
, y +. ext.Cairo.y_bearing +. ext.Cairo.text_height )
let ctx = Cairo.create s
let scale = float h /. height
let _ =
Cairo.scale ctx scale scale;
Cairo.translate ctx 364. 22443.
(****)
let convert (r, g, b) =
let c i = float i /. 255.99 in
c r, c g, c b
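(* Illustrative only: convert maps 8-bit colour channels to floats in [0, 1),
   e.g. convert (255, 0, 0) is roughly (0.996, 0., 0.). *)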
let named_colors =
let colors = Hashtbl.create 101 in
List.iter
(fun (nm, v) -> Hashtbl.add colors nm (convert v))
[ "aliceblue", (240, 248, 255)
; "antiquewhite", (250, 235, 215)
; "aqua", (0, 255, 255)
; "aquamarine", (127, 255, 212)
; "azure", (240, 255, 255)
; "beige", (245, 245, 220)
; "bisque", (255, 228, 196)
; "black", (0, 0, 0)
; "blanchedalmond", (255, 235, 205)
; "blue", (0, 0, 255)
; "blueviolet", (138, 43, 226)
; "brown", (165, 42, 42)
; "burlywood", (222, 184, 135)
; "cadetblue", (95, 158, 160)
; "chartreuse", (127, 255, 0)
; "chocolate", (210, 105, 30)
; "coral", (255, 127, 80)
; "cornflowerblue", (100, 149, 237)
; "cornsilk", (255, 248, 220)
; "crimson", (220, 20, 60)
; "cyan", (0, 255, 255)
; "darkblue", (0, 0, 139)
; "darkcyan", (0, 139, 139)
; "darkgoldenrod", (184, 134, 11)
; "darkgray", (169, 169, 169)
; "darkgreen", (0, 100, 0)
; "darkgrey", (169, 169, 169)
; "darkkhaki", (189, 183, 107)
; "darkmagenta", (139, 0, 139)
; "darkolivegreen", (85, 107, 47)
; "darkorange", (255, 140, 0)
; "darkorchid", (153, 50, 204)
; "darkred", (139, 0, 0)
; "darksalmon", (233, 150, 122)
; "darkseagreen", (143, 188, 143)
; "darkslateblue", (72, 61, 139)
; "darkslategray", (47, 79, 79)
; "darkslategrey", (47, 79, 79)
; "darkturquoise", (0, 206, 209)
; "darkviolet", (148, 0, 211)
; "deeppink", (255, 20, 147)
; "deepskyblue", (0, 191, 255)
; "dimgray", (105, 105, 105)
; "dimgrey", (105, 105, 105)
; "dodgerblue", (30, 144, 255)
; "firebrick", (178, 34, 34)
; "floralwhite", (255, 250, 240)
; "forestgreen", (34, 139, 34)
; "fuchsia", (255, 0, 255)
; "gainsboro", (220, 220, 220)
; "ghostwhite", (248, 248, 255)
; "gold", (255, 215, 0)
; "goldenrod", (218, 165, 32)
; "gray", (128, 128, 128)
; "grey", (128, 128, 128)
; "green", (0, 128, 0)
; "greenyellow", (173, 255, 47)
; "honeydew", (240, 255, 240)
; "hotpink", (255, 105, 180)
; "indianred", (205, 92, 92)
; "indigo", (75, 0, 130)
; "ivory", (255, 255, 240)
; "khaki", (240, 230, 140)
; "lavender", (230, 230, 250)
; "lavenderblush", (255, 240, 245)
; "lawngreen", (124, 252, 0)
; "lemonchiffon", (255, 250, 205)
; "lightblue", (173, 216, 230)
; "lightcoral", (240, 128, 128)
; "lightcyan", (224, 255, 255)
; "lightgoldenrodyellow", (250, 250, 210)
; "lightgray", (211, 211, 211)
; "lightgreen", (144, 238, 144)
; "lightgrey", (211, 211, 211)
; "lightpink", (255, 182, 193)
; "lightsalmon", (255, 160, 122)
; "lightseagreen", (32, 178, 170)
; "lightskyblue", (135, 206, 250)
; "lightslategray", (119, 136, 153)
; "lightslategrey", (119, 136, 153)
; "lightsteelblue", (176, 196, 222)
; "lightyellow", (255, 255, 224)
; "lime", (0, 255, 0)
; "limegreen", (50, 205, 50)
; "linen", (250, 240, 230)
; "magenta", (255, 0, 255)
; "maroon", (128, 0, 0)
; "mediumaquamarine", (102, 205, 170)
; "mediumblue", (0, 0, 205)
; "mediumorchid", (186, 85, 211)
; "mediumpurple", (147, 112, 219)
; "mediumseagreen", (60, 179, 113)
; "mediumslateblue", (123, 104, 238)
; "mediumspringgreen", (0, 250, 154)
; "mediumturquoise", (72, 209, 204)
; "mediumvioletred", (199, 21, 133)
; "midnightblue", (25, 25, 112)
; "mintcream", (245, 255, 250)
; "mistyrose", (255, 228, 225)
; "moccasin", (255, 228, 181)
; "navajowhite", (255, 222, 173)
; "navy", (0, 0, 128)
; "oldlace", (253, 245, 230)
; "olive", (128, 128, 0)
; "olivedrab", (107, 142, 35)
; "orange", (255, 165, 0)
; "orangered", (255, 69, 0)
; "orchid", (218, 112, 214)
; "palegoldenrod", (238, 232, 170)
; "palegreen", (152, 251, 152)
; "paleturquoise", (175, 238, 238)
; "palevioletred", (219, 112, 147)
; "papayawhip", (255, 239, 213)
; "peachpuff", (255, 218, 185)
; "peru", (205, 133, 63)
; "pink", (255, 192, 203)
; "plum", (221, 160, 221)
; "powderblue", (176, 224, 230)
; "purple", (128, 0, 128)
; "red", (255, 0, 0)
; "rosybrown", (188, 143, 143)
; "royalblue", (65, 105, 225)
; "saddlebrown", (139, 69, 19)
; "salmon", (250, 128, 114)
; "sandybrown", (244, 164, 96)
; "seagreen", (46, 139, 87)
; "seashell", (255, 245, 238)
; "sienna", (160, 82, 45)
; "silver", (192, 192, 192)
; "skyblue", (135, 206, 235)
; "slateblue", (106, 90, 205)
; "slategray", (112, 128, 144)
; "slategrey", (112, 128, 144)
; "snow", (255, 250, 250)
; "springgreen", (0, 255, 127)
; "steelblue", (70, 130, 180)
; "tan", (210, 180, 140)
; "teal", (0, 128, 128)
; "thistle", (216, 191, 216)
; "tomato", (255, 99, 71)
; "turquoise", (64, 224, 208)
; "violet", (238, 130, 238)
; "wheat", (245, 222, 179)
; "white", (255, 255, 255)
; "whitesmoke", (245, 245, 245)
; "yellow", (255, 255, 0)
; "yellowgreen", (154, 205, 50)
];
colors
let svg_name nm = "", nm
let d_attr = "", "d"
let x_attr = "", "x"
let y_attr = "", "y"
let cx_attr = "", "cx"
let cy_attr = "", "cy"
let rx_attr = "", "rx"
let ry_attr = "", "ry"
let points_attr = "", "points"
let taxt_anchor_attr = "", "text-anchor"
let font_family_attr = "", "font-family"
let font_size_attr = "", "font-size"
let fill_attr = "", "fill"
let stroke_attr = "", "stroke"
let stack = ref []
let push e = stack := e :: !stack
let skip_whitespace i =
(* XXX Check white-space only *)
match Xmlm.peek i with
| `Data s -> ignore (Xmlm.input i)
| _ -> ()
let end_tag i =
let e = Xmlm.input i in
assert (e = `El_end)
let rec empty_tag i =
match Xmlm.input i with
| `Data _ -> (* whitespace-only data, skip it *) empty_tag i
| `El_end -> ()
| _ -> assert false
let rec text_tag i =
match Xmlm.input i with
| `Data s ->
empty_tag i;
s
| `El_end -> ""
| _ -> assert false
let comma_wsp = Str.regexp "[\x20\x09\x0D\x0A,]+"
let cmd = Str.regexp "[a-zA-Z]"
let rec parse_curve_to args rem =
match args with
| [] -> rem
| x1 :: y1 :: x2 :: y2 :: x3 :: z3 :: r ->
Curve_to (x1, y1, x2, y2, x3, z3) :: parse_curve_to r rem
| _ -> assert false
let rec parse_cmds l =
match l with
| Str.Delim cmd :: Str.Text args :: rem -> (
let args = List.map float_of_string (Str.split comma_wsp args) in
let rem = parse_cmds rem in
match cmd, args with
| "M", [ x; y ] -> Move_to (x, y) :: rem
| "C", (_ :: _ as args) -> parse_curve_to args rem
| _ -> assert false)
| [] -> []
| _ -> assert false
let parse_path s =
let l = Str.full_split cmd s in
parse_cmds l
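(* Illustrative only: parse_path "M10,20C1,2 3,4 5,6" would yield
   [Move_to (10., 20.); Curve_to (1., 2., 3., 4., 5., 6.)]; only the M and C
   commands are handled here. *)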
let parse_color c =
if c = "none"
then None
else if String.length c = 7 && c.[0] = '#'
then
let conv s = int_of_string ("0x" ^ s) in
let c = conv (String.sub c 1 2), conv (String.sub c 3 2), conv (String.sub c 5 2) in
Some (convert c)
else
Some
(try Hashtbl.find named_colors c
with Not_found ->
Format.eprintf "%s@." c;
assert false)
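(* Illustrative only: parse_color "none" is None, parse_color "#ff0000" is
   Some (r, g, b) with r close to 1., and named colours such as "steelblue"
   are looked up in named_colors. *)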
let read_path attrs i =
let d = List.assoc d_attr attrs in
(*Format.eprintf "d=%s@." d;*)
let cmd = parse_path d in
let fill = parse_color (List.assoc fill_attr attrs) in
let stroke = parse_color (List.assoc stroke_attr attrs) in
let e = Path (cmd, fill, stroke) in
push e;
empty_tag i
let read_ellipse attrs i =
let cx = float_of_string (List.assoc cx_attr attrs) in
let cy = float_of_string (List.assoc cy_attr attrs) in
let rx = float_of_string (List.assoc rx_attr attrs) in
let ry = float_of_string (List.assoc ry_attr attrs) in
let fill = parse_color (List.assoc fill_attr attrs) in
let stroke = parse_color (List.assoc stroke_attr attrs) in
let e = Ellipse (cx, cy, rx, ry, fill, stroke) in
push e;
empty_tag i
let rec group l =
match l with
| x :: y :: r -> (x, y) :: group r
| [] -> []
| _ -> assert false
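(* For example, group [1.; 2.; 3.; 4.] yields [(1., 2.); (3., 4.)]; a list of
   odd length triggers the assertion. *)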
let read_polygon attrs i =
let points = List.assoc points_attr attrs in
let points = group (List.map float_of_string (Str.split comma_wsp points)) in
let fill = parse_color (List.assoc fill_attr attrs) in
let stroke = parse_color (List.assoc stroke_attr attrs) in
let e = Polygon (points, fill, stroke) in
push e;
empty_tag i
let read_text attrs i =
let fill = parse_color (try List.assoc fill_attr attrs with Not_found -> "black") in
let stroke =
parse_color (try List.assoc stroke_attr attrs with Not_found -> "none")
in
let x = float_of_string (List.assoc x_attr attrs) in
let y = float_of_string (List.assoc y_attr attrs) in
let font = List.assoc font_family_attr attrs in
let font_size = float_of_string (List.assoc font_size_attr attrs) in
let txt = text_tag i in
let e = Text (x, y, txt, font, font_size, fill, stroke) in
push e
let rec read_element nm attrs i =
skip_whitespace i;
match Xmlm.input i with
| `El_end -> ()
| `Data d -> (
match Xmlm.input i with
| `El_end -> ()
| _ -> assert false)
| `El_start ((_, nm'), attrs') ->
Format.eprintf " % s " nm ' ;
List.iter ( fun ( ( _ , ) , _ ) - > Format.eprintf " % s " nm ) attrs ' ;
Format.eprintf " @. " ;
Format.eprintf "%s" nm';
List.iter (fun ((_, nm), _) -> Format.eprintf " %s" nm) attrs';
Format.eprintf "@.";
*)
(match nm' with
| "path" -> ignore (read_path attrs' i)
| "ellipse" -> ignore (read_ellipse attrs' i)
| "polygon" -> ignore (read_polygon attrs' i)
| "text" -> ignore (read_text attrs' i)
| _ -> read_element nm' attrs' i);
read_element nm attrs i
| _ -> assert false
let _ =
let ch = open_in "/tmp/foo.svg" in
let i = Xmlm.make_input (`Channel ch) in
(match Xmlm.input i with
| `Dtd (Some nm) -> ()
| _ -> assert false);
match Xmlm.input i with
| `El_start ((_, nm), attrs) ->
assert (nm = "svg");
read_element nm attrs i
| _ -> assert false
let l = List.rev !stack
let bboxes = ref []
let intersects (x1, y1, x2, y2) (x3, y3, x4, y4) =
x1 <= x4 && y1 <= y4 && x3 <= x2 && y3 <= y2
let redraw w range ev =
(*
let t1 = Unix.gettimeofday () in
*)
let ctx = Cairo_lablgtk.create w#misc#window in
Cairo.save ctx;
if !bboxes = [] then bboxes := List.map (fun e -> compute_extent ctx e) l;
Cairo.new_path ctx;
Cairo_lablgtk.region ctx (GdkEvent.Expose.region ev);
let rect = Gdk.Rectangle.create 0 0 0 0 in
Gdk.Region.get_clipbox (GdkEvent.Expose.region ev) rect;
Cairo.clip ctx;
let scale = scale *. ((1. /. scale) ** range#adjustment#value) in
Cairo.scale ctx scale scale;
Cairo.translate ctx 364. 22443.;
let bbox =
let x = (float (Gdk.Rectangle.x rect) /. scale) -. 364. in
let y = (float (Gdk.Rectangle.y rect) /. scale) -. 22443. in
( x
, y
, x +. (float (Gdk.Rectangle.width rect) /. scale)
, y +. (float (Gdk.Rectangle.height rect) /. scale) )
in
(*
let (x1, y1, x2, y2) = bbox in
Format.eprintf "%f %f %f %f (%f)@." x1 y1 x2 y2 scale;
*)
List.iter2 (fun box e -> if intersects box bbox then draw_element ctx e) !bboxes l;
Cairo.restore ctx;
(*
let t2 = Unix.gettimeofday () in
Format.eprintf "%f@." (t2 -. t1);
*)
true
let slider_changed (area : GMisc.drawing_area) range () =
let scale = scale *. ((1. /. scale) ** range#adjustment#value) in
area#misc#set_size_request
~width:(truncate (width *. scale))
~height:(truncate (height *. scale))
();
GtkBase.Widget.queue_draw area#as_widget
let _ =
ignore (GMain.Main.init ());
let initial_size = 600 in
let w = GWindow.window () in
ignore (w#connect#destroy GMain.quit);
let b = GPack.vbox ~spacing:6 ~border_width:12 ~packing:w#add () in
(*
let f = GBin.frame ~shadow_type:`IN
~packing:(b#pack ~expand:true ~fill:true) () in
*)
let f =
GBin.scrolled_window
~packing:(b#pack ~expand:true)
(* ~hpolicy:`AUTOMATIC ~vpolicy:`AUTOMATIC *) ()
in
let area =
GMisc.drawing_area
~width:initial_size
~height:initial_size
~packing:f#add_with_viewport
()
in
area#misc#set_size_request
~width:(truncate (width *. scale))
~height:(truncate (height *. scale))
();
let slider = GRange.scale `HORIZONTAL ~draw_value:false ~packing:b#pack () in
slider#adjustment#set_bounds ~lower:0. ~upper:1. ~step_incr:0.1 ();
(*
let button = GButton.check_button ~label:"Animate"
~packing:b#pack () in
ignore (area#event#connect#expose
(redraw area slider)) ;
ignore (slider#connect#value_changed
(slider_changed area)) ;
ignore (button#connect#toggled
(animate_toggled button slider)) ;
*)
ignore (area#event#connect#expose (redraw area slider));
ignore (slider#connect#value_changed (slider_changed area slider));
w#show ();
GMain.main ()
(*
let _ =
let l = List.rev !stack in
Format.eprintf "len: %d@." (List.length l);
let t1 = Unix.gettimeofday () in
List.iter (fun e -> draw_element ctx e) l;
let t2 = Unix.gettimeofday () in
Format.eprintf "%f@." (t2 -. t1);
(*
let ch = open_out "/tmp/foo.mar" in
Marshal.to_channel ch l [];
close_out ch;
*)
Cairo_png.surface_write_to_file s "/tmp/foo.png"
*)
| null | https://raw.githubusercontent.com/ocsigen/js_of_ocaml/58210fabc947c4839b6e71ffbbf353a4ede0dbb7/examples/graph_viewer/svg.ml | ocaml | **
print_extent ctx fill stroke;
**
XXX Check white-space only
Format.eprintf "d=%s@." d;
let f = GBin.frame ~shadow_type:`IN
~packing:(b#pack ~expand:true ~fill:true) () in
~hpolicy:`AUTOMATIC ~vpolicy:`AUTOMATIC
let button = GButton.check_button ~label:"Animate"
~packing:b#pack () in
ignore (area#event#connect#expose
(redraw area slider)) ;
ignore (slider#connect#value_changed
(slider_changed area)) ;
ignore (button#connect#toggled
(animate_toggled button slider)) ;
let ch = open_out "/tmp/foo.mar" in
Marshal.to_channel ch l [];
close_out ch;
| Graph viewer
* Copyright ( C ) 2010
* Laboratoire PPS - CNRS Université Paris Diderot
*
* This program is free software ; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation ; either version 2 of the License , or
* ( at your option ) any later version .
*
* This program is distributed in the hope that it will be useful ,
* but WITHOUT ANY WARRANTY ; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE . See the
* GNU Lesser General Public License for more details .
*
* You should have received a copy of the GNU Lesser General Public License
* along with this program ; if not , write to the Free Software
* Foundation , Inc. , 59 Temple Place - Suite 330 , Boston , MA 02111 - 1307 , USA .
* Copyright (C) 2010 Jérôme Vouillon
* Laboratoire PPS - CNRS Université Paris Diderot
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 2 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with this program; if not, write to the Free Software
* Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
*)
type command =
| Move_to of float * float
| Curve_to of float * float * float * float * float * float
type color = float * float * float
type element =
| Path of command list * color option * color option
| Ellipse of float * float * float * float * color option * color option
| Polygon of (float * float) list * color option * color option
| Text of float * float * string * string * float * color option * color option
let width = 16499.
let height = 22807.
let h = 8192
let w = truncate ((width *. float h /. height) +. 0.5)
let s = Cairo.image_surface_create Cairo.FORMAT_ARGB32 w h
let perform_draw ctx fill stroke =
(match fill with
| Some (r, g, b) ->
Cairo.set_source_rgb ctx r g b;
if stroke <> None then Cairo.fill_preserve ctx else Cairo.fill ctx
| None -> ());
match stroke with
| Some (r, g, b) ->
Cairo.set_source_rgb ctx r g b;
Cairo.stroke ctx
| None -> ()
let pi = 4. *. atan 1.
let draw_element ctx e =
match e with
| Path (cmd, fill, stroke) ->
List.iter
(fun c ->
match c with
| Move_to (x, y) -> Cairo.move_to ctx x y
| Curve_to (x1, y1, x2, y2, x3, y3) -> Cairo.curve_to ctx x1 y1 x2 y2 x3 y3)
cmd;
perform_draw ctx fill stroke
| Ellipse (cx, cy, rx, ry, fill, stroke) ->
Cairo.save ctx;
Cairo.translate ctx cx cy;
Cairo.scale ctx rx ry;
Cairo.arc ctx 0. 0. 1. 0. (2. *. pi);
Cairo.restore ctx;
perform_draw ctx fill stroke
| Polygon (points, fill, stroke) -> (
match points with
| (x, y) :: rem ->
Cairo.move_to ctx x y;
List.iter (fun (x, y) -> Cairo.line_to ctx x y) rem;
Cairo.close_path ctx;
perform_draw ctx fill stroke
| [] -> ())
| Text (x, y, txt, font, font_size, fill, stroke) ->
let ext = Cairo.text_extents ctx txt in
Cairo.move_to ctx (x -. ext.Cairo.x_bearing -. (ext.Cairo.text_width /. 2.)) y;
Cairo.select_font_face ctx font Cairo.FONT_SLANT_NORMAL Cairo.FONT_WEIGHT_NORMAL;
Cairo.set_font_size ctx font_size;
Cairo.show_text ctx txt;
perform_draw ctx fill stroke
let path_extent ctx fill stroke =
if stroke <> None then Cairo.stroke_extents ctx else Cairo.fill_extents ctx
let compute_extent ctx e =
Cairo.new_path ctx;
match e with
| Path (cmd, fill, stroke) ->
List.iter
(fun c ->
match c with
| Move_to (x, y) -> Cairo.move_to ctx x y
| Curve_to (x1, y1, x2, y2, x3, y3) -> Cairo.curve_to ctx x1 y1 x2 y2 x3 y3)
cmd;
path_extent ctx fill stroke
| Ellipse (cx, cy, rx, ry, fill, stroke) ->
Cairo.save ctx;
Cairo.translate ctx cx cy;
Cairo.scale ctx rx ry;
Cairo.arc ctx 0. 0. 1. 0. (2. *. pi);
Cairo.restore ctx;
path_extent ctx fill stroke
| Polygon (points, fill, stroke) -> (
match points with
| (x, y) :: rem ->
Cairo.move_to ctx x y;
List.iter (fun (x, y) -> Cairo.line_to ctx x y) rem;
Cairo.close_path ctx;
path_extent ctx fill stroke
| [] -> assert false)
| Text (x, y, txt, font, font_size, fill, stroke) ->
let ext = Cairo.text_extents ctx txt in
( x -. (ext.Cairo.text_width /. 2.)
, y +. ext.Cairo.y_bearing
, x +. (ext.Cairo.text_width /. 2.)
, y +. ext.Cairo.y_bearing +. ext.Cairo.text_height )
let ctx = Cairo.create s
let scale = float h /. height
let _ =
Cairo.scale ctx scale scale;
Cairo.translate ctx 364. 22443.
let convert (r, g, b) =
let c i = float i /. 255.99 in
c r, c g, c b
let named_colors =
let colors = Hashtbl.create 101 in
List.iter
(fun (nm, v) -> Hashtbl.add colors nm (convert v))
[ "aliceblue", (240, 248, 255)
; "antiquewhite", (250, 235, 215)
; "aqua", (0, 255, 255)
; "aquamarine", (127, 255, 212)
; "azure", (240, 255, 255)
; "beige", (245, 245, 220)
; "bisque", (255, 228, 196)
; "black", (0, 0, 0)
; "blanchedalmond", (255, 235, 205)
; "blue", (0, 0, 255)
; "blueviolet", (138, 43, 226)
; "brown", (165, 42, 42)
; "burlywood", (222, 184, 135)
; "cadetblue", (95, 158, 160)
; "chartreuse", (127, 255, 0)
; "chocolate", (210, 105, 30)
; "coral", (255, 127, 80)
; "cornflowerblue", (100, 149, 237)
; "cornsilk", (255, 248, 220)
; "crimson", (220, 20, 60)
; "cyan", (0, 255, 255)
; "darkblue", (0, 0, 139)
; "darkcyan", (0, 139, 139)
; "darkgoldenrod", (184, 134, 11)
; "darkgray", (169, 169, 169)
; "darkgreen", (0, 100, 0)
; "darkgrey", (169, 169, 169)
; "darkkhaki", (189, 183, 107)
; "darkmagenta", (139, 0, 139)
; "darkolivegreen", (85, 107, 47)
; "darkorange", (255, 140, 0)
; "darkorchid", (153, 50, 204)
; "darkred", (139, 0, 0)
; "darksalmon", (233, 150, 122)
; "darkseagreen", (143, 188, 143)
; "darkslateblue", (72, 61, 139)
; "darkslategray", (47, 79, 79)
; "darkslategrey", (47, 79, 79)
; "darkturquoise", (0, 206, 209)
; "darkviolet", (148, 0, 211)
; "deeppink", (255, 20, 147)
; "deepskyblue", (0, 191, 255)
; "dimgray", (105, 105, 105)
; "dimgrey", (105, 105, 105)
; "dodgerblue", (30, 144, 255)
; "firebrick", (178, 34, 34)
; "floralwhite", (255, 250, 240)
; "forestgreen", (34, 139, 34)
; "fuchsia", (255, 0, 255)
; "gainsboro", (220, 220, 220)
; "ghostwhite", (248, 248, 255)
; "gold", (255, 215, 0)
; "goldenrod", (218, 165, 32)
; "gray", (128, 128, 128)
; "grey", (128, 128, 128)
; "green", (0, 128, 0)
; "greenyellow", (173, 255, 47)
; "honeydew", (240, 255, 240)
; "hotpink", (255, 105, 180)
; "indianred", (205, 92, 92)
; "indigo", (75, 0, 130)
; "ivory", (255, 255, 240)
; "khaki", (240, 230, 140)
; "lavender", (230, 230, 250)
; "lavenderblush", (255, 240, 245)
; "lawngreen", (124, 252, 0)
; "lemonchiffon", (255, 250, 205)
; "lightblue", (173, 216, 230)
; "lightcoral", (240, 128, 128)
; "lightcyan", (224, 255, 255)
; "lightgoldenrodyellow", (250, 250, 210)
; "lightgray", (211, 211, 211)
; "lightgreen", (144, 238, 144)
; "lightgrey", (211, 211, 211)
; "lightpink", (255, 182, 193)
; "lightsalmon", (255, 160, 122)
; "lightseagreen", (32, 178, 170)
; "lightskyblue", (135, 206, 250)
; "lightslategray", (119, 136, 153)
; "lightslategrey", (119, 136, 153)
; "lightsteelblue", (176, 196, 222)
; "lightyellow", (255, 255, 224)
; "lime", (0, 255, 0)
; "limegreen", (50, 205, 50)
; "linen", (250, 240, 230)
; "magenta", (255, 0, 255)
; "maroon", (128, 0, 0)
; "mediumaquamarine", (102, 205, 170)
; "mediumblue", (0, 0, 205)
; "mediumorchid", (186, 85, 211)
; "mediumpurple", (147, 112, 219)
; "mediumseagreen", (60, 179, 113)
; "mediumslateblue", (123, 104, 238)
; "mediumspringgreen", (0, 250, 154)
; "mediumturquoise", (72, 209, 204)
; "mediumvioletred", (199, 21, 133)
; "midnightblue", (25, 25, 112)
; "mintcream", (245, 255, 250)
; "mistyrose", (255, 228, 225)
; "moccasin", (255, 228, 181)
; "navajowhite", (255, 222, 173)
; "navy", (0, 0, 128)
; "oldlace", (253, 245, 230)
; "olive", (128, 128, 0)
; "olivedrab", (107, 142, 35)
; "orange", (255, 165, 0)
; "orangered", (255, 69, 0)
; "orchid", (218, 112, 214)
; "palegoldenrod", (238, 232, 170)
; "palegreen", (152, 251, 152)
; "paleturquoise", (175, 238, 238)
; "palevioletred", (219, 112, 147)
; "papayawhip", (255, 239, 213)
; "peachpuff", (255, 218, 185)
; "peru", (205, 133, 63)
; "pink", (255, 192, 203)
; "plum", (221, 160, 221)
; "powderblue", (176, 224, 230)
; "purple", (128, 0, 128)
; "red", (255, 0, 0)
; "rosybrown", (188, 143, 143)
; "royalblue", (65, 105, 225)
; "saddlebrown", (139, 69, 19)
; "salmon", (250, 128, 114)
; "sandybrown", (244, 164, 96)
; "seagreen", (46, 139, 87)
; "seashell", (255, 245, 238)
; "sienna", (160, 82, 45)
; "silver", (192, 192, 192)
; "skyblue", (135, 206, 235)
; "slateblue", (106, 90, 205)
; "slategray", (112, 128, 144)
; "slategrey", (112, 128, 144)
; "snow", (255, 250, 250)
; "springgreen", (0, 255, 127)
; "steelblue", (70, 130, 180)
; "tan", (210, 180, 140)
; "teal", (0, 128, 128)
; "thistle", (216, 191, 216)
; "tomato", (255, 99, 71)
; "turquoise", (64, 224, 208)
; "violet", (238, 130, 238)
; "wheat", (245, 222, 179)
; "white", (255, 255, 255)
; "whitesmoke", (245, 245, 245)
; "yellow", (255, 255, 0)
; "yellowgreen", (154, 205, 50)
];
colors
let svg_name nm = "", nm
let d_attr = "", "d"
let x_attr = "", "x"
let y_attr = "", "y"
let cx_attr = "", "cx"
let cy_attr = "", "cy"
let rx_attr = "", "rx"
let ry_attr = "", "ry"
let points_attr = "", "points"
let text_anchor_attr = "", "text-anchor"
let font_family_attr = "", "font-family"
let font_size_attr = "", "font-size"
let fill_attr = "", "fill"
let stroke_attr = "", "stroke"
let stack = ref []
let push e = stack := e :: !stack
let skip_whitespace i =
match Xmlm.peek i with
| `Data s -> ignore (Xmlm.input i)
| _ -> ()
let end_tag i =
let e = Xmlm.input i in
assert (e = `El_end)
let rec empty_tag i =
match Xmlm.input i with
| `Data _ -> empty_tag i (* Whitespace *)
| `El_end -> ()
| _ -> assert false
let rec text_tag i =
match Xmlm.input i with
| `Data s ->
empty_tag i;
s
| `El_end -> ""
| _ -> assert false
let comma_wsp = Str.regexp "[\x20\x09\x0D\x0A,]+"
let cmd = Str.regexp "[a-zA-Z]"
let rec parse_curve_to args rem =
match args with
| [] -> rem
| x1 :: y1 :: x2 :: y2 :: x3 :: z3 :: r ->
Curve_to (x1, y1, x2, y2, x3, z3) :: parse_curve_to r rem
| _ -> assert false
let rec parse_cmds l =
match l with
| Str.Delim cmd :: Str.Text args :: rem -> (
let args = List.map float_of_string (Str.split comma_wsp args) in
let rem = parse_cmds rem in
match cmd, args with
| "M", [ x; y ] -> Move_to (x, y) :: rem
| "C", (_ :: _ as args) -> parse_curve_to args rem
| _ -> assert false)
| [] -> []
| _ -> assert false
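(* Parse an SVG path data string; only absolute "M" (move-to) and "C" (cubic Bézier curve-to) commands are handled here. *)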
let parse_path s =
let l = Str.full_split cmd s in
parse_cmds l
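(* A paint value is either "none", a "#rrggbb" hex triplet, or one of the SVG named colors above. *)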
let parse_color c =
if c = "none"
then None
else if String.length c = 7 && c.[0] = '#'
then
let conv s = int_of_string ("0x" ^ s) in
let c = conv (String.sub c 1 2), conv (String.sub c 3 2), conv (String.sub c 5 2) in
Some (convert c)
else
Some
(try Hashtbl.find named_colors c
with Not_found ->
Format.eprintf "%s@." c;
assert false)
let read_path attrs i =
let d = List.assoc d_attr attrs in
let cmd = parse_path d in
let fill = parse_color (List.assoc fill_attr attrs) in
let stroke = parse_color (List.assoc stroke_attr attrs) in
let e = Path (cmd, fill, stroke) in
push e;
empty_tag i
let read_ellipse attrs i =
let cx = float_of_string (List.assoc cx_attr attrs) in
let cy = float_of_string (List.assoc cy_attr attrs) in
let rx = float_of_string (List.assoc rx_attr attrs) in
let ry = float_of_string (List.assoc ry_attr attrs) in
let fill = parse_color (List.assoc fill_attr attrs) in
let stroke = parse_color (List.assoc stroke_attr attrs) in
let e = Ellipse (cx, cy, rx, ry, fill, stroke) in
push e;
empty_tag i
let rec group l =
match l with
| x :: y :: r -> (x, y) :: group r
| [] -> []
| _ -> assert false
let read_polygon attrs i =
let points = List.assoc points_attr attrs in
let points = group (List.map float_of_string (Str.split comma_wsp points)) in
let fill = parse_color (List.assoc fill_attr attrs) in
let stroke = parse_color (List.assoc stroke_attr attrs) in
let e = Polygon (points, fill, stroke) in
push e;
empty_tag i
let read_text attrs i =
let fill = parse_color (try List.assoc fill_attr attrs with Not_found -> "black") in
let stroke =
parse_color (try List.assoc stroke_attr attrs with Not_found -> "none")
in
let x = float_of_string (List.assoc x_attr attrs) in
let y = float_of_string (List.assoc y_attr attrs) in
let font = List.assoc font_family_attr attrs in
let font_size = float_of_string (List.assoc font_size_attr attrs) in
let txt = text_tag i in
let e = Text (x, y, txt, font, font_size, fill, stroke) in
push e
let rec read_element nm attrs i =
skip_whitespace i;
match Xmlm.input i with
| `El_end -> ()
| `Data d -> (
match Xmlm.input i with
| `El_end -> ()
| _ -> assert false)
| `El_start ((_, nm'), attrs') ->
Format.eprintf " % s " nm ' ;
List.iter ( fun ( ( _ , ) , _ ) - > Format.eprintf " % s " nm ) attrs ' ;
Format.eprintf " @. " ;
Format.eprintf "%s" nm';
List.iter (fun ((_, nm), _) -> Format.eprintf " %s" nm) attrs';
Format.eprintf "@.";
*)
(match nm' with
| "path" -> ignore (read_path attrs' i)
| "ellipse" -> ignore (read_ellipse attrs' i)
| "polygon" -> ignore (read_polygon attrs' i)
| "text" -> ignore (read_text attrs' i)
| _ -> read_element nm' attrs' i);
read_element nm attrs i
| _ -> assert false
let _ =
let ch = open_in "/tmp/foo.svg" in
let i = Xmlm.make_input (`Channel ch) in
(match Xmlm.input i with
| `Dtd (Some nm) -> ()
| _ -> assert false);
match Xmlm.input i with
| `El_start ((_, nm), attrs) ->
assert (nm = "svg");
read_element nm attrs i
| _ -> assert false
let l = List.rev !stack
let bboxes = ref []
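(* Axis-aligned bounding-box overlap test; each box is given as (xmin, ymin, xmax, ymax). *)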
let intersects (x1, y1, x2, y2) (x3, y3, x4, y4) =
x1 <= x4 && y1 <= y4 && x3 <= x2 && y3 <= y2
let redraw w range ev =
(*
  let t1 = Unix.gettimeofday () in
*)
let ctx = Cairo_lablgtk.create w#misc#window in
Cairo.save ctx;
if !bboxes = [] then bboxes := List.map (fun e -> compute_extent ctx e) l;
Cairo.new_path ctx;
Cairo_lablgtk.region ctx (GdkEvent.Expose.region ev);
let rect = Gdk.Rectangle.create 0 0 0 0 in
Gdk.Region.get_clipbox (GdkEvent.Expose.region ev) rect;
Cairo.clip ctx;
let scale = scale *. ((1. /. scale) ** range#adjustment#value) in
Cairo.scale ctx scale scale;
Cairo.translate ctx 364. 22443.;
let bbox =
let x = (float (Gdk.Rectangle.x rect) /. scale) -. 364. in
let y = (float (Gdk.Rectangle.y rect) /. scale) -. 22443. in
( x
, y
, x +. (float (Gdk.Rectangle.width rect) /. scale)
, y +. (float (Gdk.Rectangle.height rect) /. scale) )
in
(*
  let (x1, y1, x2, y2) = bbox in
  Format.eprintf "%f %f %f %f (%f)@." x1 y1 x2 y2 scale;
*)
List.iter2 (fun box e -> if intersects box bbox then draw_element ctx e) !bboxes l;
Cairo.restore ctx;
(*
  let t2 = Unix.gettimeofday () in
  Format.eprintf "%f@." (t2 -. t1);
*)
true
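(* The slider value in [0., 1.] interpolates the zoom factor between the initial fit-to-window scale (at 0.) and a 1:1 rendering (at 1.). *)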
let slider_changed (area : GMisc.drawing_area) range () =
let scale = scale *. ((1. /. scale) ** range#adjustment#value) in
area#misc#set_size_request
~width:(truncate (width *. scale))
~height:(truncate (height *. scale))
();
GtkBase.Widget.queue_draw area#as_widget
let _ =
ignore (GMain.Main.init ());
let initial_size = 600 in
let w = GWindow.window () in
ignore (w#connect#destroy GMain.quit);
let b = GPack.vbox ~spacing:6 ~border_width:12 ~packing:w#add () in
let f =
GBin.scrolled_window
~packing:(b#pack ~expand:true)
in
let area =
GMisc.drawing_area
~width:initial_size
~height:initial_size
~packing:f#add_with_viewport
()
in
area#misc#set_size_request
~width:(truncate (width *. scale))
~height:(truncate (height *. scale))
();
let slider = GRange.scale `HORIZONTAL ~draw_value:false ~packing:b#pack () in
slider#adjustment#set_bounds ~lower:0. ~upper:1. ~step_incr:0.1 ();
ignore (area#event#connect#expose (redraw area slider));
ignore (slider#connect#value_changed (slider_changed area slider));
w#show ();
GMain.main ()
(*
let _ =
  let l = List.rev !stack in
  Format.eprintf "len: %d@." (List.length l);
  let t1 = Unix.gettimeofday () in
  List.iter (fun e -> draw_element ctx e) l;
  let t2 = Unix.gettimeofday () in
  Format.eprintf "%f@." (t2 -. t1);
  (*
  let ch = open_out "/tmp/foo.mar" in
  Marshal.to_channel ch l [];
  close_out ch;
  *)
  Cairo_png.surface_write_to_file s "/tmp/foo.png"
*)
|
18c526dec5d33504c20d0c1f3b9eee157b3d9d1a7a6394b2afe79b5c27b99e42 | byorgey/species | Interval.hs | # LANGUAGE NoImplicitPrelude
, CPP
#
, CPP
#-}
-----------------------------------------------------------------------------
-- |
-- Module : Math.Combinatorics.Species.Util.Interval
-- Copyright   :  (c) Brent Yorgey 2010
-- License : BSD-style (see LICENSE)
-- Maintainer :
-- Stability : experimental
--
-- A simple implementation of intervals of natural numbers, for use in
-- tracking the possible sizes of structures of a species. For
-- example, the species @x + x^2 + x^3@ will correspond to the
-- interval [1,3].
--
-----------------------------------------------------------------------------
module Math.Combinatorics.Species.Util.Interval
(
-- * The 'NatO' type
NatO, omega, natO
-- * The 'Interval' type
, Interval, iLow, iHigh
-- * Interval operations
, decrI, union, intersect, elem, toList
-- * Constructing intervals
, natsI, fromI, emptyI, omegaI
) where
#if MIN_VERSION_numeric_prelude(0,2,0)
import NumericPrelude hiding (min, max, elem)
import Prelude (min, max)
#else
import NumericPrelude
import PreludeBase hiding (elem)
#endif
import qualified Algebra.Additive as Additive
import qualified Algebra.Ring as Ring
-- | 'NatO' is an explicit representation of the co-inductive type
-- which admits an infinite value, omega. Our intuition for the
-- semantics of 'NatO' comes from thinking of it as an efficient
-- representation of lazy unary natural numbers, except that we can
-- actually test for omega in finite time.
data NatO = Nat Integer | Omega
deriving (Eq, Ord, Show)
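-- For example, @Nat 2 + Nat 3 == Nat 5@ while @Nat 2 + Omega == Omega@, and
-- the derived 'Ord' instance makes every finite @Nat n@ smaller than 'Omega'.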
-- | The infinite 'NatO' value.
omega :: NatO
omega = Omega
-- | Eliminator for 'NatO' values.
natO :: (Integer -> a) -> a -> NatO -> a
natO _ o Omega = o
natO f _ (Nat n) = f n
-- | Decrement a possibly infinite natural.  Zero and omega are both
-- fixed points of 'decr'.
decr :: NatO -> NatO
decr (Nat 0) = Nat 0
decr (Nat n) = Nat (n-1)
decr Omega = Omega
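-- For example, @decr (Nat 3) == Nat 2@, while @decr (Nat 0)@ and @decr Omega@
-- return their argument unchanged.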
-- | 'NatO' forms an additive monoid, with zero as the identity.  This
-- doesn't quite fit since Additive.C is supposed to be for groups,
-- so the 'negate' method just throws an error. But we'll never use
-- it and 'NatO' won't be directly exposed to users of the species
-- library anyway.
instance Additive.C NatO where
zero = Nat 0
Nat m + Nat n = Nat (m + n)
_ + _ = Omega
negate = error "naturals with omega only form a semiring"
-- | In fact, 'NatO' forms a semiring, with 1 as the multiplicative
-- unit.
instance Ring.C NatO where
one = Nat 1
Nat 0 * _ = Nat 0
_ * Nat 0 = Nat 0
Nat m * Nat n = Nat (m * n)
_ * _ = Omega
fromInteger = Nat
-- | An 'Interval' is a closed range of consecutive integers. Both
--   endpoints are represented as 'NatO' values.  For example, [2,5]
--   represents the values 2,3,4,5; [2,omega] represents all integers
--   greater than 1; intervals where the first endpoint is greater than the
--   second also represent the empty interval.
data Interval = I { iLow :: NatO -- ^ Get the lower endpoint of an 'Interval'
, iHigh :: NatO -- ^ Get the upper endpoint of an 'Interval'
}
deriving Show
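-- For example, @I (Nat 2) (Nat 5)@ covers 2 through 5, @I (Nat 2) Omega@ is
-- unbounded above, and any interval with @iLow > iHigh@ (such as 'emptyI') is empty.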
-- | Decrement both endpoints of an interval.
decrI :: Interval -> Interval
decrI (I l h) = I (decr l) (decr h)
-- | The union of two intervals is the smallest interval containing
-- both.
union :: Interval -> Interval -> Interval
union (I l1 h1) (I l2 h2) = I (min l1 l2) (max h1 h2)
-- | The intersection of two intervals is the largest interval
-- contained in both.
intersect :: Interval -> Interval -> Interval
intersect (I l1 h1) (I l2 h2) = I (max l1 l2) (min h1 h2)
-- | Intervals can be added by adding their endpoints pointwise.
instance Additive.C Interval where
zero = I zero zero
(I l1 h1) + (I l2 h2) = I (l1 + l2) (h1 + h2)
negate = error "Interval negation: intervals only form a semiring"
-- | Intervals form a semiring, with the multiplication operation
-- being pointwise multiplication of their endpoints.
instance Ring.C Interval where
one = I one one
(I l1 h1) * (I l2 h2) = I (l1 * l2) (h1 * h2)
fromInteger n = I (Nat n) (Nat n)
-- | Test a given integer for interval membership.
elem :: Integer -> Interval -> Bool
elem n (I lo Omega) = lo <= fromInteger n
elem n (I lo (Nat hi)) = lo <= fromInteger n && n <= hi
-- | Convert an interval to a list of Integers.
toList :: Interval -> [Integer]
toList (I Omega Omega) = []
toList (I lo hi) | lo > hi = []
toList (I (Nat lo) Omega) = [lo..]
toList (I (Nat lo) (Nat hi)) = [lo..hi]
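-- For example, @toList (I (Nat 2) (Nat 5)) == [2,3,4,5]@, while
-- @toList (fromI (Nat 3))@ is the infinite list @[3..]@.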
-- | The range [0,omega] containing all natural numbers.
natsI :: Interval
natsI = I zero Omega
-- | Construct an open range [n,omega].
fromI :: NatO -> Interval
fromI n = I n Omega
-- | The empty interval.
emptyI :: Interval
emptyI = I one zero
-- | The interval which contains only omega.
omegaI :: Interval
omegaI = I Omega Omega | null | https://raw.githubusercontent.com/byorgey/species/5f1d99095e41b860e2bfbc3ba034109a8101846f/Math/Combinatorics/Species/Util/Interval.hs | haskell | ---------------------------------------------------------------------------
|
Module : Math.Combinatorics.Species.Util.Interval
License : BSD-style (see LICENSE)
Maintainer :
Stability : experimental
A simple implementation of intervals of natural numbers, for use in
tracking the possible sizes of structures of a species. For
interval [1,3].
---------------------------------------------------------------------------
* The 'NatO' type
* The 'Interval' type
* Interval operations
* Constructing intervals
which admits an infinite value, omega. Our intuition for the
semantics of 'NatO' comes from thinking of it as an efficient
representation of lazy unary natural numbers, except that we can
actually test for omega in finite time.
| The infinite 'NatO' value.
| Eliminator for 'NatO' values.
fixed points of 'decr'.
doesn't quite fit since Additive.C is supposed to be for groups,
so the 'negate' method just throws an error. But we'll never use
it and 'NatO' won't be directly exposed to users of the species
library anyway.
unit.
| An 'Interval' is a closed range of consecutive integers. Both
^ Get the lower endpoint of an 'Interval'
^ Get the upper endpoint of an 'Interval'
| Decrement both endpoints of an interval.
both.
contained in both.
| Intervals can be added by adding their endpoints pointwise.
| Intervals form a semiring, with the multiplication operation
being pointwise multiplication of their endpoints.
| Test a given integer for interval membership.
| The range [0,omega] containing all natural numbers.
| Construct an open range [n,omega].
| The empty interval.
| The interval which contains only omega. | # LANGUAGE NoImplicitPrelude
, CPP
#
, CPP
#-}
-- Copyright   :  (c) Brent Yorgey 2010
-- example, the species @x + x^2 + x^3@ will correspond to the interval [1,3].
module Math.Combinatorics.Species.Util.Interval
(
NatO, omega, natO
, Interval, iLow, iHigh
, decrI, union, intersect, elem, toList
, natsI, fromI, emptyI, omegaI
) where
#if MIN_VERSION_numeric_prelude(0,2,0)
import NumericPrelude hiding (min, max, elem)
import Prelude (min, max)
#else
import NumericPrelude
import PreludeBase hiding (elem)
#endif
import qualified Algebra.Additive as Additive
import qualified Algebra.Ring as Ring
-- | 'NatO' is an explicit representation of the co-inductive type which admits an infinite value, omega.
data NatO = Nat Integer | Omega
deriving (Eq, Ord, Show)
omega :: NatO
omega = Omega
natO :: (Integer -> a) -> a -> NatO -> a
natO _ o Omega = o
natO f _ (Nat n) = f n
-- | Decrement a possibly infinite natural.  Zero and omega are both fixed points of 'decr'.
decr :: NatO -> NatO
decr (Nat 0) = Nat 0
decr (Nat n) = Nat (n-1)
decr Omega = Omega
-- | 'NatO' forms an additive monoid, with zero as the identity.
instance Additive.C NatO where
zero = Nat 0
Nat m + Nat n = Nat (m + n)
_ + _ = Omega
negate = error "naturals with omega only form a semiring"
-- | In fact, 'NatO' forms a semiring, with 1 as the multiplicative unit.
instance Ring.C NatO where
one = Nat 1
Nat 0 * _ = Nat 0
_ * Nat 0 = Nat 0
Nat m * Nat n = Nat (m * n)
_ * _ = Omega
fromInteger = Nat
-- | An 'Interval' is a closed range of consecutive integers.  Both
--   endpoints are represented as 'NatO' values.  For example, [2,5]
--   represents the values 2,3,4,5; [2,omega] represents all integers
--   greater than 1; intervals where the first endpoint is greater than the
--   second also represent the empty interval.
data Interval = I { iLow  :: NatO  -- ^ Get the lower endpoint of an 'Interval'
                  , iHigh :: NatO  -- ^ Get the upper endpoint of an 'Interval'
}
deriving Show
decrI :: Interval -> Interval
decrI (I l h) = I (decr l) (decr h)
-- | The union of two intervals is the smallest interval containing both.
union :: Interval -> Interval -> Interval
union (I l1 h1) (I l2 h2) = I (min l1 l2) (max h1 h2)
-- | The intersection of two intervals is the largest interval contained in both.
intersect :: Interval -> Interval -> Interval
intersect (I l1 h1) (I l2 h2) = I (max l1 l2) (min h1 h2)
instance Additive.C Interval where
zero = I zero zero
(I l1 h1) + (I l2 h2) = I (l1 + l2) (h1 + h2)
negate = error "Interval negation: intervals only form a semiring"
instance Ring.C Interval where
one = I one one
(I l1 h1) * (I l2 h2) = I (l1 * l2) (h1 * h2)
fromInteger n = I (Nat n) (Nat n)
elem :: Integer -> Interval -> Bool
elem n (I lo Omega) = lo <= fromInteger n
elem n (I lo (Nat hi)) = lo <= fromInteger n && n <= hi
-- | Convert an interval to a list of Integers.
toList :: Interval -> [Integer]
toList (I Omega Omega) = []
toList (I lo hi) | lo > hi = []
toList (I (Nat lo) Omega) = [lo..]
toList (I (Nat lo) (Nat hi)) = [lo..hi]
natsI :: Interval
natsI = I zero Omega
fromI :: NatO -> Interval
fromI n = I n Omega
emptyI :: Interval
emptyI = I one zero
omegaI :: Interval
omegaI = I Omega Omega |
16069f265a0a582c9cc14da8eb7177902426db845c3d23e6b87a61de6e6f27f9 | wargrey/w3s | parser.rkt | #lang typed/racket/base
;;; -syntax/#parsing
(provide (all-defined-out))
(require racket/string)
(require racket/symbol)
(require "digicore.rkt")
(require "condition.rkt")
(require "variables.rkt")
(require "selector.rkt")
(require "stdin.rkt")
(require "misc.rkt")
(require (for-syntax racket/base))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
(define-syntax (define-css-parser-entry stx)
;;; -syntax/#parser-entry-points
(syntax-case stx [: lambda]
[(_ id #:-> ->T (lambda [/dev/cssin [args : T defval ...] ...] body ...))
(syntax/loc stx
(begin (define (css-parse [/dev/cssin : Input-Port] [args : T defval ...] ...) : ->T body ...)
(define (id [/dev/stdin : CSS-Stdin (current-input-port)] [args : T defval ...] ...) : ->T
(define /dev/cssin : Input-Port (css-open-input-port /dev/stdin))
(dynamic-wind (λ [] '(css-open-input-port has already enabled line counting))
(λ [] (css-parse /dev/cssin args ...))
(λ [] (close-input-port /dev/cssin))))))]))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;;; -syntax/#parser-entry-points
(define-css-parser-entry css-parse-stylesheet #:-> (Listof CSS-Syntax-Rule)
;;; -syntax/#parse-stylesheet
;;; -syntax/#declaration-rule-list
(lambda [/dev/cssin]
(css-consume-stylesheet /dev/cssin)))
(define-css-parser-entry css-parse-rules #:-> (Listof CSS-Syntax-Rule)
;;; -syntax/#parse-list-of-rules
;;; -syntax/#declaration-rule-list
(lambda [/dev/cssin]
(css-consume-rules /dev/cssin #false)))
(define-css-parser-entry css-parse-rule #:-> (U CSS-Syntax-Rule CSS-Syntax-Error)
;;; -syntax/#parse-rule
(lambda [/dev/cssin]
(define stx (css-read-syntax/skip-whitespace /dev/cssin))
(define retval : (U CSS-Qualified-Rule CSS-@Rule CSS-Syntax-Error)
(cond [(eof-object? stx) (make+exn:css:empty #false)]
[(css:@keyword? stx) (css-consume-@rule /dev/cssin stx)]
[else (css-consume-qualified-rule /dev/cssin stx)]))
(define end (css-read-syntax/skip-whitespace /dev/cssin))
(cond [(or (eof-object? end) (exn? retval)) retval]
[else (make+exn:css:overconsumption end)])))
(define-css-parser-entry css-parse-declaration #:-> (U CSS-Declaration CSS-Syntax-Error)
;;; -syntax/#declaration
;;; -syntax/#parse-declaration
;;; -conditional/#at-ruledef-supports
(lambda [/dev/cssin]
(define token (css-read-syntax/skip-whitespace /dev/cssin))
(cond [(not (css:ident? token)) (make+exn:css:type:identifier (and (css-token? token) token))]
[else (let-values ([(components _) (css-consume-components /dev/cssin)])
(css-components->declaration token components))])))
(define-css-parser-entry css-parse-declarations #:-> (Listof (U CSS-Declaration CSS-@Rule))
;;; -syntax/#parse-list-of-declarations
;;; -syntax/#consume-a-list-of-declarations
(lambda [/dev/cssin]
(let consume-declaration+@rule ([mixed-list : (Listof (U CSS-Declaration CSS-@Rule)) null])
(define token (css-read-syntax /dev/cssin))
(cond [(eof-object? token) (reverse mixed-list)]
[(or (css:whitespace? token) (css:semicolon? token)) (consume-declaration+@rule mixed-list)]
[(css:@keyword? token) (consume-declaration+@rule (cons (css-consume-@rule /dev/cssin token) mixed-list))]
[else (let-values ([(components _) (css-consume-components /dev/cssin #\;)])
(define ?declaration : (U CSS-Declaration CSS-Syntax-Error)
(cond [(css:ident? token) (css-components->declaration token components)]
[else (make+exn:css:type:identifier token)]))
(consume-declaration+@rule (css-cons ?declaration mixed-list)))]))))
(define-css-parser-entry css-parse-component-value #:-> (U CSS-Token CSS-Syntax-Error)
;;; -syntax/#parse-component-value
(lambda [/dev/cssin]
(define token (css-read-syntax/skip-whitespace /dev/cssin))
(cond [(eof-object? token) (make+exn:css:empty #false)]
[else (let ([retval (css-consume-component-value /dev/cssin token)])
(define end (css-read-syntax/skip-whitespace /dev/cssin))
(cond [(eof-object? end) retval]
[else (make+exn:css:overconsumption end)]))])))
(define-css-parser-entry css-parse-component-values #:-> (Listof CSS-Token)
;;; -syntax/#parse-list-of-component-values
(lambda [/dev/cssin]
(define-values (components _) (css-consume-components /dev/cssin))
components))
(define-css-parser-entry css-parse-component-valueses #:-> (Listof (Listof CSS-Token))
;;; -syntax/#parse-comma-separated-list-of-component-values
(lambda [/dev/cssin]
(css-consume-componentses /dev/cssin #:omit-comma? #false)))
(define-css-parser-entry css-parse-media-queries #:-> (Listof CSS-Media-Query)
;;; /#media-types
;;; /#mq-list
;;; /#mq-syntax
;;; /#typedef-media-query-list
;;; /#error-handling
(lambda [/dev/cssin [rulename : CSS-Syntax-Any #false]]
(for/list : (Listof CSS-Media-Query) ([entry (in-list (css-consume-componentses /dev/cssin #:omit-comma? #true))])
(with-handlers ([exn:css? (λ [[errcss : exn:css]] errcss)])
(define-values (token tokens) (css-car entry))
(define-values (next rest) (css-car tokens))
(cond [(css:ident-norm=:=? token 'not)
(cond [(css:ident? next) (css-components->media-type+query next #false rest)]
[else (css-components->negation token tokens #true)])]
[(css:ident? token)
(define-values (?type ?<and>)
(cond [(css:ident-norm=:=? token 'only) (values next rest)]
[else (values token tokens)]))
(cond [(not ?type) (make+exn:css:malformed ?type)]
[(css:ident? ?type) (css-components->media-type+query ?type #true ?<and>)]
[else (make+exn:css:type:identifier ?type)])]
[else (css-components->feature-query entry #true rulename)])))))
(define-css-parser-entry css-parse-feature-query #:-> CSS-Feature-Query
;;; /#media-types
;;; -conditional/#at-supports
(lambda [/dev/cssin [rulename : CSS-Syntax-Any #false]]
(define-values (conditions _) (css-consume-components /dev/cssin))
(with-handlers ([exn:css? (λ [[errcss : exn:css]] errcss)])
(css-components->feature-query conditions #false rulename))))
(define-css-parser-entry css-parse-selectors #:-> (U (Listof+ CSS-Complex-Selector) CSS-Syntax-Error)
;;; /#structure
;;; /#parse-selector
/#selector-list
;;; /#grouping
(lambda [/dev/cssin]
(define-values (components _) (css-consume-components /dev/cssin))
(css-components->selectors components #false)))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
(define css-consume-stylesheet : (-> Input-Port (Listof CSS-Syntax-Rule))
;;; -syntax/#parse-stylesheet
;;; -syntax/#declaration-rule-list
(lambda [css]
(define rules : (Listof CSS-Syntax-Rule) (css-consume-rules css #true))
(define rule : (Option CSS-Syntax-Rule) (and (pair? rules) (car rules)))
(if (and (css-@rule? rule) (css:@keyword-norm=:=? (css-@rule-name rule) '#:@charset))
(cdr rules)
rules)))
(define css-consume-rules : (-> Input-Port Boolean (Listof CSS-Syntax-Rule))
;;; -syntax/#consume-list-of-rules
(lambda [css toplevel?]
(let consume-rules ([rules : (Listof CSS-Syntax-Rule) null])
(define token (css-read-syntax css))
(cond [(eof-object? token) (reverse rules)]
[(css:whitespace? token) (consume-rules rules)]
[(css:@keyword? token) (consume-rules (css-cons (css-consume-@rule css token) rules))]
[(css:cd? token) (consume-rules (if toplevel? rules (css-cons (css-consume-qualified-rule css token) rules)))]
[else (consume-rules (css-cons (css-consume-qualified-rule css token) rules))]))))
(define css-consume-@rule : (-> Input-Port CSS:@Keyword CSS-@Rule)
;;; -syntax/#at-rule
;;; -syntax/#consume-an-at-rule
(lambda [css reconsumed-at-token]
(define-values (prelude ?block) (css-consume-rule-item css #:@rule? #true))
(css-@rule reconsumed-at-token prelude ?block)))
(define css-consume-qualified-rule : (-> Input-Port CSS-Token (U CSS-Qualified-Rule CSS-@Rule CSS-Syntax-Error))
;;; -syntax/#qualified-rule
(lambda [css reconsumed]
(define head (css-consume-component-value css reconsumed))
(define-values (prelude ?block) (css-consume-rule-item css #:@rule? #false))
(cond [(css:block? ?block) (css-qualified-rule (cons head prelude) ?block)]
[else (make+exn:css:missing-block (cons head prelude))])))
(define css-consume-component-value : (-> Input-Port CSS-Token CSS-Token)
;;; -syntax/#component-value
;;; -syntax/#consume-a-component-value
;;; -syntax/#consume-a-function
;;; -syntax/#consume-simple-block
(lambda [css reconsumed]
(cond [(css:delim? reconsumed)
(case (css:delim-datum reconsumed)
[(#\{) (css-consume-simple-block css reconsumed #\})]
[(#\[) (css-consume-simple-block css reconsumed #\])]
[(#\() (css-consume-simple-block css reconsumed #\))]
[else reconsumed])]
[(css:function? reconsumed) (css-consume-function css reconsumed)]
[else reconsumed])))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
(struct css-@rule ([name : CSS:@Keyword] [prelude : (Listof CSS-Token)] [block : (Option CSS:Block)]) #:transparent #:type-name CSS-@Rule)
(struct css-qualified-rule ([prelude : (Listof+ CSS-Token)] [block : CSS:Block]) #:transparent #:type-name CSS-Qualified-Rule)
(define css-consume-rule-item : (-> Input-Port #:@rule? Boolean (Values (Listof CSS-Token) (Option CSS:Block)))
;;; -syntax/#qualified-rule
;;; -syntax/#consume-a-qualified-rule
(lambda [css #:@rule? at-rule?]
(let consume-item ([prelude : (Listof CSS-Token) null]
[simple-block : (Option CSS:Block) #false])
(define token (css-read-syntax css))
(cond [(or (eof-object? token) (and at-rule? (css:semicolon? token)))
(when (eof-object? token) (make+exn:css:missing-delimiter prelude))
(values (reverse prelude) simple-block)]
[(css:delim=:=? token #\{) (values (reverse prelude) (css-consume-simple-block css token #\}))]
[(css:block=:=? token #\{) (values (reverse prelude) token)]
[else (consume-item (cons (css-consume-component-value css token) prelude) simple-block)]))))
(define css-consume-simple-block : (-> Input-Port CSS:Delim Char CSS:Block)
;;; -syntax/#consume-simple-block
(lambda [css open close-char]
(define-values (components close end-token) (css-consume-block-body css open close-char))
(syn-remake-token [open end-token] css:block (css:delim-datum open) components #false)))
(define css-consume-function : (-> Input-Port CSS:Function (U CSS:Function CSS:URL))
;;; -syntax/#consume-a-function
;;; -values/#functional-notations
;;; -values/#urls
(lambda [css func]
(define fname : Symbol (css:function-datum func))
(cond [(not (symbol-unreadable? fname)) func]
[else (let-values ([(components close end-token) (css-consume-block-body css func #\))]
[(fnorm) (css:function-norm func)])
(if (eq? fnorm 'url)
(let-values ([(href modifiers) (css-car components)])
(syn-remake-token func css:url
(if (css:string? href) (css:string-datum href) "")
(css-url-modifiers-filter func modifiers)
#false))
(let ([freadable (string->symbol (symbol->immutable-string fname))])
(syn-remake-token [func end-token] css:function freadable fnorm
(cond [(eq? fnorm 'var) components] ; whitespaces are meaningful in var()
[else (filter-not css:whitespace? components)])
#false))))])))
(define css-consume-block-body : (-> Input-Port CSS-Token Char (Values (Listof CSS-Token) CSS-Syntax-Terminal CSS-Token))
;;; -syntax/#consume-simple-block
;;; -syntax/#consume-a-function
(lambda [css start-token close-char]
(let consume-body ([components : (Listof CSS-Token) null])
(define token (css-read-syntax css))
(cond [(css:close=:=? token close-char) (values (reverse components) token token)]
[(not (eof-object? token)) (consume-body (cons (css-consume-component-value css token) components))]
[else (let ([end-token (if (null? components) start-token (car components))])
(make+exn:css:missing-delimiter #false)
(values (reverse components) #false end-token))]))))
(define css-consume-components : (->* (Input-Port) ((Option Char) Boolean) (Values (Listof CSS-Token) CSS-Syntax-Terminal))
;;; -syntax/#parse-list-of-component-values
(lambda [css [terminating-char #false] [omit-terminate? #false]]
(let consume-component ([stnenopmoc : (Listof CSS-Token) null])
(define token (css-read-syntax css))
(cond [(eof-object? token) (values (reverse stnenopmoc) #false)]
[(and terminating-char (css:delim=:=? token terminating-char))
(define next (css-peek-syntax/skip-whitespace css))
(cond [(and omit-terminate? (css-null? stnenopmoc))
(cond [(and (eof-object? next) (css-read-syntax/skip-whitespace css))
(make+exn:css:overconsumption token)
(values (reverse stnenopmoc) #false)]
[else (make+exn:css:empty token)
(css-consume-components css terminating-char omit-terminate?)])]
[(eof-object? next)
(css-read-syntax/skip-whitespace css)
(values (reverse stnenopmoc) #false)]
[else (values (reverse stnenopmoc) token)])]
[else (consume-component (cons (css-consume-component-value css token) stnenopmoc))]))))
(define css-consume-componentses : (-> Input-Port [#:omit-comma? Boolean] (Listof (Listof CSS-Token)))
;;; -syntax/#parse-comma-separated-list-of-component-values
(lambda [css #:omit-comma? [omit-comma? #true]]
(let consume-components ([componentses : (Listof (Listof CSS-Token)) null])
(define-values (components terminating-token) (css-consume-components css #\, omit-comma?))
(cond [(or terminating-token) (consume-components (cons components componentses))]
[(not omit-comma?) (reverse (cons components componentses))]
[else (filter (inst css-pair? CSS-Token) (reverse (cons components componentses)))]))))
(define css-components->declaration : (-> CSS:Ident (Listof CSS-Token) (U CSS-Declaration CSS-Syntax-Error))
;;; -syntax/#consume-declaration
;;; -cascade/#importance
;;; -values/#component-whitespace
-syntax/#typedef-declaration-value
;;; -variables/#defining-variables
(lambda [id-token components]
(define-values (?: value-list) (css-car components))
(cond [(not (css:colon? ?:)) (make+exn:css:missing-colon id-token)]
[else (let ([var? (and (css:ident=<-? id-token symbol-unreadable?) #true)])
(define-values (?values important? lazy?) (css-any->declaration-value id-token value-list var?))
(if (exn? ?values) ?values (css-declaration id-token ?values important? lazy?)))])))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
(define css-components->media-type+query : (-> CSS:Ident Boolean (Listof CSS-Token) CSS-Media-Query)
;;; /#media-types
;;; /#typedef-media-query
(lambda [media only? conditions]
(define downcased-type : Symbol (css:ident-norm media))
(define-values (?and ?conditions) (css-car conditions))
(when (css-deprecate-media-type) (make+exn:css:deprecated media))
(cond [(memq downcased-type '(only not and or)) (make+exn:css:misplaced media)]
[(not ?and) (if only? (box downcased-type) (CSS-Not (box downcased-type)))]
[(not (css:ident-norm=:=? ?and 'and)) (make+exn:css:unrecognized ?and)]
[(css-null? ?conditions) (make+exn:css:missing-feature ?and)]
[else (cons (if only? (box downcased-type) (CSS-Not (box downcased-type)))
(css-components->junction ?conditions 'and #false #true))])))
(define css-components->feature-query : (-> (Listof CSS-Token) Boolean CSS-Syntax-Any CSS-Feature-Query)
;;; /#mq-only
;;; /#mq-syntax
(lambda [conditions media? alt]
(define-values (token rest) (css-car conditions))
(define-values (op chain) (css-car rest))
(cond [(not token) (throw-exn:css:missing-feature alt)]
[(css:ident-norm=:=? token 'not) (css-components->negation token rest media?)]
[(not op) (css-component->feature-query token media?)]
[(css:ident-norm=<-? op '(and or)) (css-components->junction chain (css:ident-norm op) token media?)]
[else (throw-exn:css:unrecognized op)])))
(define css-component->feature-query : (-> CSS-Token Boolean CSS-Feature-Query)
;;; -syntax/#preserved-tokens
;;; -conditional/#at-supports
;;; /#mq-features
;;; /#mq-syntax
/#mq-boolean-context
;;; /#mq-range-context
(lambda [condition media?]
(cond [(css:block=:=? condition #\()
(define subany : (Listof CSS-Token) (css:block-components condition))
(define-values (name any-values) (css-car subany))
(define-values (op value-list) (css-car any-values))
(cond [(css:block=:=? name #\() (css-components->feature-query subany media? condition)]
[(css:ident-norm=:=? name 'not) (css-components->negation name any-values media?)]
[(and (css:ident? name) (css:colon? op))
(define descriptor (css-components->declaration name any-values))
(cond [(exn? descriptor) (if media? (throw-exn:css:enclosed condition) (raise descriptor))]
[(and media?) (css-declaration->media-query descriptor condition)]
[else descriptor])]
[(and media?)
(cond [(and (css:ident? name) (not op)) (css-make-media-feature name #false #\? #false)]
[else (css-components->media-range-query subany condition)])]
[(not name) (throw-exn:css:empty condition)]
[(css:ident? name) (throw-exn:css:missing-colon condition)]
[(css:function? condition) (throw-exn:css:enclosed condition)]
[else (throw-exn:css:type:identifier condition)])]
[else (throw-exn:css:missing-feature condition)])))
(define css-components->negation : (-> CSS:Ident (Listof CSS-Token) Boolean CSS-Not)
;;; /#typedef-media-not
(lambda [<not> tokens media?]
(define-values (token rest) (css-car tokens))
(cond [(not token) (throw-exn:css:missing-feature <not>)]
[(css:ident-norm=:=? token 'not) (throw-exn:css:misplaced token)]
[(css-null? rest) (CSS-Not (css-component->feature-query token media?))]
[else (throw-exn:css:overconsumption rest)])))
(define css-components->junction : (-> (Listof CSS-Token) Symbol (Option CSS-Token) Boolean (U CSS-And CSS-Or))
/#typedef-media-and
/#typedef-media-or
(lambda [conditions op ?head media?]
(let components->junction ([junctions : (Listof CSS-Token) (if (false? ?head) null (list ?head))]
[--conditions : (Listof CSS-Token) conditions])
(define-values (condition rest) (css-car --conditions))
(define-values (token others) (css-car rest))
(cond [(not condition) (junctions->conditional-query junctions op media?)]
[(css:ident-norm=:=? condition 'not) (throw-exn:css:misplaced condition)]
[(or (not token) (css:ident-norm=:=? token op)) (components->junction (cons condition junctions) others)]
[(css:ident-norm=<-? token '(and or)) (throw-exn:css:misplaced token)]
[else (throw-exn:css:overconsumption token)]))))
(define css-components->media-range-query : (-> (Listof CSS-Token) CSS:Block CSS-Feature-Query)
;;; /#mq-features
;;; /#mq-range-context
(lambda [components broken-condition]
(define-values (value0 rest0) (css-car-media-value components))
(define-values (d0 op0 po0 rest1) (css-car-comparison-operator rest0))
(define-values (value1 rest2) (css-car-media-value rest1))
(define-values (d1 op1 po1 rest3) (css-car-comparison-operator rest2))
(define-values (value2 terminal) (css-car-media-value rest3))
(cond [(not value0) (throw-exn:css:empty broken-condition)]
[(not d0) (throw-exn:css:missing-delimiter components)]
[(not value1) (throw-exn:css:missing-value rest0)]
[(and (css:ident? value0) (css:delim? d1)) (throw-exn:css:enclosed broken-condition)]
[(and (eq? op0 #\=) (css:delim? d1)) (throw-exn:css:overconsumption broken-condition)]
[(css:ident? value0) (css-make-media-feature value0 value1 op0 d0)]
[(and (not d1) (css:ident? value1)) (css-make-media-feature value1 value0 po0 d0)]
[(not (css:ident? value1)) (throw-exn:css:type:identifier value1)]
[(or (not value2) (css:ident? value2)) (throw-exn:css:missing-value rest2)]
[(css-pair? terminal) (throw-exn:css:overconsumption terminal)]
[(not (eq? (css:delim-datum d0) (css:delim-datum d1))) (throw-exn:css:malformed (list d0 value1 d1))]
[else (CSS-And (list (css-make-media-feature value1 value0 po0 d0)
(css-make-media-feature value1 value2 op1 d1)))])))
(define css-declaration->media-query : (-> CSS-Declaration CSS:Block CSS-Feature-Query)
;;; /#mq-features
(lambda [property broken-condition]
(define-values (media-value rest) (css-car-media-value (css-declaration-values property)))
(cond [(not media-value) (throw-exn:css:enclosed broken-condition)]
[(css-pair? rest) (throw-exn:css:enclosed broken-condition)]
[else (css-make-media-feature (css-declaration-name property) media-value #\: #false)])))
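;; e.g. the token sequence `>` `=` collapses into the single relation #\≥, together
;; with its mirror #\≤ for use when the operands appear in reversed order.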
(define css-car-comparison-operator : (-> (Listof CSS-Token) (Values (Option CSS:Delim) Char Char (Listof CSS-Token)))
;;; /#mq-range-context
(lambda [components]
(define-values (d rest) (css-car components))
(define-values (?= terminal) (css-car/cdr rest))
(cond [(not d) (values #false #\≠ #\≠ rest)]
[(not (css:delim? d)) (throw-exn:css:type d)]
[else (case (css:delim-datum d)
[(#\=) (values d #\= #\= rest)]
[(#\>) (if (css:delim=:=? ?= #\=) (values d #\≥ #\≤ terminal) (values d #\> #\< rest))]
[(#\<) (if (css:delim=:=? ?= #\=) (values d #\≤ #\≥ terminal) (values d #\< #\> rest))]
[else (throw-exn:css:range d)])])))
(define css-car-media-value : (-> (Listof CSS-Token) (Values (Option CSS-Media-Value) (Listof CSS-Token)))
;;; /#typedef-mf-value
;;; /#typedef-ratio
(lambda [components]
(define-values (value rest) (css-car components))
(define-values (?/ ?rest) (css-car rest))
(define-values (?int terminal) (css-car ?rest))
(cond [(not value) (values #false rest)]
[(css:slash? ?/)
(define width : (Option Positive-Integer) (css:integer=<-? value exact-positive-integer?))
(define height : (Option Positive-Integer) (css:integer=<-? ?int exact-positive-integer?))
(values (cond [(and width height (css-token? ?int)) (syn-remake-token [value ?int] css:ratio (/ width height))]
[(css-number? value) (throw-exn:css:range value)]
[(css-number? ?int) (throw-exn:css:range ?int)]
[else (throw-exn:css:type (filter css-token? (list value ?/ ?int)))])
terminal)]
[(or (css:ident? value) (css-numeric? value)) (values value rest)]
[else (values (throw-exn:css:type value) rest)])))
(define css-make-media-feature : (-> CSS:Ident (Option CSS-Media-Value) Char (Option CSS:Delim) (U Symbol CSS-Media-Feature-Query))
;;; /#mq-features
(lambda [desc-name ?value ophint ?op]
(define errobj : (Listof CSS-Token) (filter css-token? (list desc-name ?op ?value)))
(define name : String (symbol->immutable-string (css:ident-norm desc-name)))
(define-values (downcased-name op min/max?)
(cond [(string-prefix? name "min-") (values (string->symbol (substring name 4)) #\≥ #true)]
[(string-prefix? name "max-") (values (string->symbol (substring name 4)) #\≤ #true)]
[else (values (string->symbol name) ophint #false)]))
(when (and min/max?)
(cond [(or (not ?value) (css:delim? ?op)) (throw-exn:css:misplaced errobj)]
[(not (css-numeric? ?value)) (throw-exn:css:type errobj)]))
(define feature-filter : (U Void (CSS:Filter CSS-Media-Datum))
((default-css-media-feature-filters) downcased-name min/max? (λ [] (void (make+exn:css:deprecated desc-name)))))
(cond [(void? feature-filter) (throw-exn:css:unrecognized errobj)]
[(false? ?value) downcased-name]
[else (let ([datum (feature-filter ?value)])
(cond [(false? datum) (throw-exn:css:type ?value desc-name)]
[(exn:css? datum) (css-log-syntax-error datum desc-name) (raise datum)]
[else (vector downcased-name op datum)]))])))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
(define css-components->selectors : (-> (Listof CSS-Token) CSS-Namespace-Hint (U (Listof+ CSS-Complex-Selector) CSS-Syntax-Error))
;;; /#structure
;;; /#parse-selector
/#selector-list
;;; /#grouping
(lambda [components namespaces]
(with-handlers ([exn:css? (λ [[errcss : exn:css]] errcss)])
(define-values (head-complex-selector ?eof ?rest) (css-car-complex-selector components namespaces))
(let extract-complex-selector ([srotceles : (Listof CSS-Complex-Selector) null]
[terminal : (Option CSS:Delim) ?eof]
[rest : (Listof CSS-Token) ?rest])
(if (css-null? rest)
(cond [(not terminal) (cons head-complex-selector (reverse srotceles))]
[else (throw-exn:css:overconsumption terminal)])
(let-values ([(complex-selector ?terminal ?rest) (css-car-complex-selector rest namespaces)])
(extract-complex-selector (cons complex-selector srotceles) ?terminal ?rest)))))))
(define css-car-complex-selector : (-> (Listof CSS-Token) CSS-Namespace-Hint
(Values CSS-Complex-Selector (Option CSS:Delim) (Listof CSS-Token)))
;;; /#structure
;;; /#combinators
;;; /#grammar
(lambda [components namespaces]
(define-values (head-compound-selector rest) (css-car-compound-selector components #false namespaces))
(let extract-selector ([srotceles : (Listof+ CSS-Compound-Selector) (list head-compound-selector)]
[tokens : (Listof CSS-Token) rest])
(define-values (?terminal rest) (css-car tokens))
(define-values (token ?selectors) (css-car/cdr tokens))
(cond [(or (not ?terminal) (css:comma? ?terminal)) (values srotceles ?terminal rest)]
[(not (css-selector-combinator? token)) (throw-exn:css:unrecognized ?terminal)]
[else (let*-values ([(combinator ?selectors) (css-car-combinator token ?selectors)]
[(?selector ?rest) (css-car ?selectors)])
(cond [(or (not ?selector) (css:comma? ?selector)) (throw-exn:css:overconsumption ?selectors)]
[else (let-values ([(selector rest) (css-car-compound-selector ?selectors combinator namespaces)])
(extract-selector (cons selector srotceles) rest))]))]))))
(define css-car-compound-selector : (-> (Listof CSS-Token) (Option CSS-Selector-Combinator) CSS-Namespace-Hint
(Values CSS-Compound-Selector (Listof CSS-Token)))
;;; /#structure
;;; /#grammar
;;; -namespaces/#css-qnames
-drafts/issues/202
(lambda [components combinator namespaces]
(define-values (head heads) (css-car components))
(define-values (typename namespace simple-selector-components)
(cond [(css:ident? head) (css-car-elemental-selector head heads namespaces)]
[(css:delim=<-? head '(#\| #\*)) (css-car-elemental-selector head heads namespaces)]
[(or (not head) (css:comma? head)) (throw-exn:css:empty head)]
[else (values #true (or (css-declared-namespace namespaces '||) #true) (cons head heads))]))
(define-values (:classes :children selector-components) (css-car-:class-selectors simple-selector-components))
(let extract-simple-selector ([sessalc : (Listof Symbol) null]
[sdi : (Listof Keyword) null]
[setubirtta : (Listof CSS-Attribute-Selector) null]
[pseudo-element : (Option CSS-::Element-Selector) #false]
[selector-tokens : (Listof CSS-Token) selector-components])
(define-values (token tokens) (css-car/cdr selector-tokens))
(cond [(or (not token) (css:comma? token) (css-selector-combinator? token))
(values (CSS-Compound-Selector combinator namespace typename (reverse sdi) (reverse sessalc)
(reverse setubirtta) :classes :children pseudo-element)
selector-tokens)]
[(and pseudo-element) (throw-exn:css:overconsumption token)]
[(css:delim=:=? token #\.)
(define-values (next rest) (css-car/cdr tokens))
(cond [(not (css:ident? next)) (throw-exn:css:type:identifier next)]
[else (extract-simple-selector (cons (css:ident-datum next) sessalc) sdi setubirtta pseudo-element rest)])]
[(css:colon? token)
(define-values (?pseudo-classes ?pseudo-children ?rest) (css-car-:class-selectors tokens))
(define-values (next rest) (css-car/cdr ?rest))
(cond [(null? ?pseudo-classes) (throw-exn:css:misplaced (list token (car tokens)))]
[(pair? ?pseudo-children) (throw-exn:css:malformed token)]
[else (let ([pclass (car ?pseudo-classes)])
(define pelement : CSS-::Element-Selector
(CSS-::Element-Selector (css-:class-selector-name pclass)
#false ; seems no ::element is of form function
(cdr ?pseudo-classes)))
(extract-simple-selector sessalc sdi setubirtta pelement ?rest))])]
[(css:block=:=? token #\[)
(define attribute-selector : CSS-Attribute-Selector (css-simple-block->attribute-selector token namespaces))
(extract-simple-selector sessalc sdi (cons attribute-selector setubirtta) pseudo-element tokens)]
[(css:hash? token) (extract-simple-selector sessalc (cons (css:hash-datum token) sdi) setubirtta pseudo-element tokens)]
[else (throw-exn:css:unrecognized token)]))))
(define css-car-combinator : (-> (U CSS:WhiteSpace CSS:Delim) (Listof CSS-Token) (Values CSS-Selector-Combinator (Listof CSS-Token)))
;;; /#structure
;;; /#grammar
(lambda [token tokens]
(case (cond [(css:whitespace? token) #\space] [(css:delim? token) (css:delim-datum token)] [else #\null])
[(#\space)
(define-values (next tail) (css-car tokens))
(cond [(css-selector-combinator? next) (css-car-combinator next tail)]
[else (values '>> tokens)])]
[(#\>)
(define-values (next tail) (css-car/cdr tokens))
(define-values (next2 tail2) (css-car tail))
(cond [(css:delim=:=? next #\>) (values '>> tail2)]
[else (values '> tail)])]
[(#\+) (values '+ tokens)]
[(#\~) (values '~ tokens)]
[(#\tab) (values '|| tokens)]
[else (throw-exn:css:unrecognized token)])))
(define css-car-elemental-selector : (-> (U CSS:Ident CSS:Delim) (Listof CSS-Token) CSS-Namespace-Hint
(Values (U Symbol True) (U Symbol Boolean) (Listof CSS-Token)))
;;; /#structure
;;; /#elemental-selectors
;;; -namespaces/#css-qnames
(lambda [token tokens namespaces]
(define-values (next rest next2 rest2) (css-car/cadr tokens))
(cond [(css:vbar? token)
(cond [(css:ident? next) (values (css:ident-datum next) #false rest)]
[(css:delim=:=? next #\*) (values #true #false rest)]
[else (throw-exn:css:type:identifier next)])]
[(css:vbar? next)
(define ns : (U Symbol Boolean) (css-declared-namespace namespaces token))
(cond [(false? ns) (throw-exn:css:namespace token)]
[(css:ident? next2) (values (css:ident-datum next2) ns rest2)]
[(css:delim=:=? next2 #\*) (values #true ns rest2)]
[else (throw-exn:css:type:identifier (list token next))])]
[else (let ([ns (or (css-declared-namespace namespaces '||) #true)])
(cond [(css:delim? token) (values #true ns tokens)]
[else (values (css:ident-datum token) ns tokens)]))])))
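;; e.g. `:nth-child(2n+1)` is parsed by css-extract-An+B below and wrapped into a
;; CSS-:Child-Selector whose predicate encodes the parsed An+B step and offset.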
(define css-car-:class-selectors : (-> (Listof CSS-Token) (Values (Listof CSS-:Class-Selector) (Listof CSS-:Child-Selector) (Listof CSS-Token)))
;;; /#structure
;;; /#elemental-selectors
;;; /#pseudo-classes
(lambda [components]
(let extract-:class-selector ([srotceles : (Listof CSS-:Class-Selector) null]
[nerdlihc : (Listof CSS-:Child-Selector) null]
[tokens : (Listof CSS-Token) components])
(define-values (maybe: rest ?id rest2) (css-car/cadr tokens))
(cond [(or (not (css:colon? maybe:)) (css:colon? ?id)) (values (reverse srotceles) (reverse nerdlihc) tokens)]
[(css:ident? ?id)
(let ([name (css:ident-datum ?id)])
(case name
[(first-child) (extract-:class-selector srotceles (cons (CSS-:Child-Selector name (css-An+B-predicate 0 1 #false)) nerdlihc) rest2)]
[(last-child) (extract-:class-selector srotceles (cons (CSS-:Child-Selector name (css-An+B-predicate 0 1 #true)) nerdlihc) rest2)]
[(only-child) (extract-:class-selector srotceles (cons (CSS-:Child-Selector name :only-child) nerdlihc) rest2)]
[else (extract-:class-selector (cons (CSS-:Class-Selector name) srotceles) nerdlihc rest2)]))]
[(css:function=<-? ?id '(nth-child nth-of-type nth-col))
(let ([A.B (css-extract-An+B (css:function-arguments ?id))])
(cond [(not A.B) (throw-exn:css:type:An+B ?id)]
[else (let* ([predicate (css-An+B-predicate (car A.B) (cdr A.B) #false)]
[selector (CSS-:Child-Selector (css:function-norm ?id) predicate)])
(extract-:class-selector srotceles (cons selector nerdlihc) rest2))]))]
[(css:function=<-? ?id '(nth-last-child nth-of-last-type nth-last-col))
(let ([A.B (css-extract-An+B (css:function-arguments ?id))])
(cond [(not A.B) (throw-exn:css:type:An+B ?id)]
[else (let* ([predicate (css-An+B-predicate (car A.B) (cdr A.B) #true)]
[selector (CSS-:Child-Selector (css:function-norm ?id) predicate)])
(extract-:class-selector srotceles (cons selector nerdlihc) rest2))]))]
[(css:function? ?id)
(let ([selector (CSS-:Function-Selector (css:function-norm ?id) (css:function-arguments ?id))])
(extract-:class-selector (cons selector srotceles) nerdlihc rest2))]
[else (throw-exn:css:type:identifier maybe:)]))))
(define css-simple-block->attribute-selector : (-> CSS:Block CSS-Namespace-Hint CSS-Attribute-Selector)
;;; /#attribute-selectors
;;; /#attrnmsp
/#attribute-case
;;; -namespaces/#css-qnames
(lambda [block namespaces]
(define-values (1st rest1) (css-car (css:block-components block)))
(define-values (2nd rest2 3rd rest3) (css-car/cadr rest1))
(define-values (attrname quirkname namespace op-part)
(cond [(not 1st) (throw-exn:css:empty block)]
[(or (css:match? 1st) (css:delim=:=? 1st #\=))
(throw-exn:css:type:identifier block)]
[(or (not 2nd) (css:match? 2nd) (css:delim=:=? 2nd #\=) (css:whitespace? 2nd))
; WARNING: the namespace behavior for attributes is different from that for elements
(cond [(css:ident? 1st) (values (css:ident-datum 1st) (css:ident-norm 1st) #false rest1)]
[else (throw-exn:css:type:identifier 1st)])]
[(or (not 3rd) (css:match? 3rd) (css:delim=:=? 3rd #\=) (css:whitespace? 3rd))
(cond [(and (css:vbar? 1st) (css:ident? 2nd)) (values (css:ident-datum 2nd) (css:ident-norm 2nd) #false rest2)]
[(css:vbar? 2nd) (throw-exn:css:type:identifier 2nd)]
[else (throw-exn:css:unrecognized 1st)])]
[(and (or (css:ident? 1st) (css:delim=:=? 1st #\*)) (css:vbar? 2nd) (css:ident? 3rd))
(define ns (css-declared-namespace namespaces 1st))
(cond [(false? ns) (throw-exn:css:namespace 1st)]
[else (values (css:ident-datum 3rd) (css:ident-norm 3rd) ns rest3)])]
[(and (or (css:ident? 1st) (css:delim=:=? 1st #\*)) (css:vbar? 2nd))
(throw-exn:css:type:identifier 3rd)]
[(or (css:ident? 1st) (css:delim=:=? 1st #\*))
(throw-exn:css:unrecognized 2nd)]
[else (throw-exn:css:unrecognized 1st)]))
(define-values (op value-part value ci-part) (css-car/cadr op-part))
(define-values (i terminal) (css-car ci-part))
(unless (not op)
(cond [(not value) (throw-exn:css:missing-value op)]
[(nor (not i) (css:ident-norm=:=? i 'i)) (throw-exn:css:overconsumption i)]
[(css-pair? terminal) (throw-exn:css:overconsumption terminal)]))
(define val : (U String Symbol)
(cond [(css:string? value) (css:string-datum value)]
[(css:ident? value) (css:ident-datum value)]
[(or (css:whitespace? value) (not value)) ""]
[else (throw-exn:css:type value)]))
(cond [(or (css:whitespace? op) (not op)) (CSS-Attribute-Selector attrname quirkname namespace)]
[(css:delim=:=? op #\=) (CSS-Attribute~Selector attrname quirkname namespace #\= val (css:ident? i))]
[(css:match? op) (CSS-Attribute~Selector attrname quirkname namespace (css:match-datum op) val (css:ident? i))]
[else (throw-exn:css:unrecognized op)])))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
(define-type CSS-Syntax-Terminal (U CSS:Delim CSS:Close False))
(define-type CSS-Syntax-Rule (U CSS-Qualified-Rule CSS-@Rule))
(define-type CSS-Media-Value (U CSS-Numeric CSS:Ident CSS:Ratio))
(define css-components->declarations : (-> (Listof CSS-Token) (Listof CSS-Declaration))
(lambda [components]
(let make-style-rule ([seitreporp : (Listof CSS-Declaration) null] [tokens : (Listof CSS-Token) components])
(define-values (id any-values) (css-car tokens))
(define-values (:values rest)
(let collect : (Values (Listof CSS-Token) (Listof CSS-Token)) ([seulav : (Listof CSS-Token) null]
[rest : (Listof CSS-Token) any-values])
(define-values (head tail) (css-car/cdr rest))
(cond [(or (not head) (css:semicolon? head)) (values (reverse seulav) tail)]
[(and (css:block=:=? head #\{) (css:@keyword? id)) (values (reverse (cons head seulav)) tail)]
[else (collect (cons head seulav) tail)])))
(cond [(not id) (reverse seitreporp)]
[(css:ident? id) (make-style-rule (css-cons (css-components->declaration id :values) seitreporp) rest)]
[else (make-style-rule (css-cons (make+exn:css:type:identifier (cons id :values)) seitreporp) rest)]))))
(define css-selector-combinator? : (-> CSS-Syntax-Any Boolean : #:+ (U CSS:WhiteSpace CSS:Delim))
(lambda [token]
(or (css:whitespace? token)
(and (css:delim=<-? token '(#\~ #\+ #\> #\tab))
#true))))
(define junctions->conditional-query : (-> (Listof CSS-Token) Symbol Boolean (U CSS-And CSS-Or))
(lambda [junctions op media?]
(define queries : (Listof CSS-Feature-Query)
(for/list ([junction (in-list (reverse junctions))])
(css-component->feature-query junction media?)))
(if (eq? op 'and) (CSS-And queries) (CSS-Or queries))))
| null | https://raw.githubusercontent.com/wargrey/w3s/2323ac18cdb3a04868485907fdc3b8e8220dd11c/css/digitama/syntax/parser.rkt | racket | -syntax/#parsing
-syntax/#parser-entry-points
-syntax/#parser-entry-points
-syntax/#parse-stylesheet
-syntax/#declaration-rule-list
-syntax/#parse-list-of-rules
-syntax/#declaration-rule-list
-syntax/#parse-rule
-syntax/#declaration
-syntax/#parse-declaration
-conditional/#at-ruledef-supports
-syntax/#parse-list-of-declarations
-syntax/#consume-a-list-of-declarations
)])
-syntax/#parse-component-value
-syntax/#parse-list-of-component-values
-syntax/#parse-comma-separated-list-of-component-values
/#media-types
/#mq-list
/#mq-syntax
/#typedef-media-query-list
/#error-handling
/#media-types
-conditional/#at-supports
/#structure
/#parse-selector
/#grouping
-syntax/#parse-stylesheet
-syntax/#declaration-rule-list
-syntax/#consume-list-of-rules
-syntax/#at-rule
-syntax/#consume-an-at-rule
-syntax/#qualified-rule
-syntax/#component-value
-syntax/#consume-a-component-value
-syntax/#consume-a-function
-syntax/#consume-simple-block
-syntax/#qualified-rule
-syntax/#consume-a-qualified-rule
-syntax/#consume-simple-block
-syntax/#consume-a-function
-values/#functional-notations
-values/#urls
whitespaces are meaningful in var()
-syntax/#consume-simple-block
-syntax/#consume-a-function
-syntax/#parse-list-of-component-values
-syntax/#parse-comma-separated-list-of-component-values
-syntax/#consume-declaration
-cascade/#importance
-values/#component-whitespace
-variables/#defining-variables
/#media-types
/#typedef-media-query
/#mq-only
/#mq-syntax
-syntax/#preserved-tokens
-conditional/#at-supports
/#mq-features
/#mq-syntax
/#mq-range-context
/#typedef-media-not
/#mq-features
/#mq-range-context
/#mq-features
/#mq-range-context
/#typedef-mf-value
/#typedef-ratio
/#mq-features
/#structure
/#parse-selector
/#grouping
/#structure
/#combinators
/#grammar
/#structure
/#grammar
-namespaces/#css-qnames
seems no ::element is of form function
/#structure
/#grammar
/#structure
/#elemental-selectors
-namespaces/#css-qnames
/#structure
/#elemental-selectors
/#pseudo-classes
/#attribute-selectors
/#attrnmsp
-namespaces/#css-qnames
WARNING: the namespace behavior for attributes is different from that for elements
| #lang typed/racket/base
(provide (all-defined-out))
(require racket/string)
(require racket/symbol)
(require "digicore.rkt")
(require "condition.rkt")
(require "variables.rkt")
(require "selector.rkt")
(require "stdin.rkt")
(require "misc.rkt")
(require (for-syntax racket/base))
(define-syntax (define-css-parser-entry stx)
(syntax-case stx [: lambda]
[(_ id #:-> ->T (lambda [/dev/cssin [args : T defval ...] ...] body ...))
(syntax/loc stx
(begin (define (css-parse [/dev/cssin : Input-Port] [args : T defval ...] ...) : ->T body ...)
(define (id [/dev/stdin : CSS-Stdin (current-input-port)] [args : T defval ...] ...) : ->T
(define /dev/cssin : Input-Port (css-open-input-port /dev/stdin))
(dynamic-wind (λ [] '(css-open-input-port has already enabled line counting))
(λ [] (css-parse /dev/cssin args ...))
(λ [] (close-input-port /dev/cssin))))))]))
(define-css-parser-entry css-parse-stylesheet #:-> (Listof CSS-Syntax-Rule)
(lambda [/dev/cssin]
(css-consume-stylesheet /dev/cssin)))
(define-css-parser-entry css-parse-rules #:-> (Listof CSS-Syntax-Rule)
(lambda [/dev/cssin]
(css-consume-rules /dev/cssin #false)))
(define-css-parser-entry css-parse-rule #:-> (U CSS-Syntax-Rule CSS-Syntax-Error)
(lambda [/dev/cssin]
(define stx (css-read-syntax/skip-whitespace /dev/cssin))
(define retval : (U CSS-Qualified-Rule CSS-@Rule CSS-Syntax-Error)
(cond [(eof-object? stx) (make+exn:css:empty #false)]
[(css:@keyword? stx) (css-consume-@rule /dev/cssin stx)]
[else (css-consume-qualified-rule /dev/cssin stx)]))
(define end (css-read-syntax/skip-whitespace /dev/cssin))
(cond [(or (eof-object? end) (exn? retval)) retval]
[else (make+exn:css:overconsumption end)])))
(define-css-parser-entry css-parse-declaration #:-> (U CSS-Declaration CSS-Syntax-Error)
(lambda [/dev/cssin]
(define token (css-read-syntax/skip-whitespace /dev/cssin))
(cond [(not (css:ident? token)) (make+exn:css:type:identifier (and (css-token? token) token))]
[else (let-values ([(components _) (css-consume-components /dev/cssin)])
(css-components->declaration token components))])))
(define-css-parser-entry css-parse-declarations #:-> (Listof (U CSS-Declaration CSS-@Rule))
(lambda [/dev/cssin]
(let consume-declaration+@rule ([mixed-list : (Listof (U CSS-Declaration CSS-@Rule)) null])
(define token (css-read-syntax /dev/cssin))
(cond [(eof-object? token) (reverse mixed-list)]
[(or (css:whitespace? token) (css:semicolon? token)) (consume-declaration+@rule mixed-list)]
[(css:@keyword? token) (consume-declaration+@rule (cons (css-consume-@rule /dev/cssin token) mixed-list))]
[else (let-values ([(components _) (css-consume-components /dev/cssin #\;)])
(define ?declaration : (U CSS-Declaration CSS-Syntax-Error)
(cond [(css:ident? token) (css-components->declaration token components)]
[else (make+exn:css:type:identifier token)]))
(consume-declaration+@rule (css-cons ?declaration mixed-list)))]))))
(define-css-parser-entry css-parse-component-value #:-> (U CSS-Token CSS-Syntax-Error)
(lambda [/dev/cssin]
(define token (css-read-syntax/skip-whitespace /dev/cssin))
(cond [(eof-object? token) (make+exn:css:empty #false)]
[else (let ([retval (css-consume-component-value /dev/cssin token)])
(define end (css-read-syntax/skip-whitespace /dev/cssin))
(cond [(eof-object? end) retval]
[else (make+exn:css:overconsumption end)]))])))
(define-css-parser-entry css-parse-component-values #:-> (Listof CSS-Token)
(lambda [/dev/cssin]
(define-values (components _) (css-consume-components /dev/cssin))
components))
(define-css-parser-entry css-parse-component-valueses #:-> (Listof (Listof CSS-Token))
(lambda [/dev/cssin]
(css-consume-componentses /dev/cssin #:omit-comma? #false)))
(define-css-parser-entry css-parse-media-queries #:-> (Listof CSS-Media-Query)
(lambda [/dev/cssin [rulename : CSS-Syntax-Any #false]]
(for/list : (Listof CSS-Media-Query) ([entry (in-list (css-consume-componentses /dev/cssin #:omit-comma? #true))])
(with-handlers ([exn:css? (λ [[errcss : exn:css]] errcss)])
(define-values (token tokens) (css-car entry))
(define-values (next rest) (css-car tokens))
(cond [(css:ident-norm=:=? token 'not)
(cond [(css:ident? next) (css-components->media-type+query next #false rest)]
[else (css-components->negation token tokens #true)])]
[(css:ident? token)
(define-values (?type ?<and>)
(cond [(css:ident-norm=:=? token 'only) (values next rest)]
[else (values token tokens)]))
(cond [(not ?type) (make+exn:css:malformed ?type)]
[(css:ident? ?type) (css-components->media-type+query ?type #true ?<and>)]
[else (make+exn:css:type:identifier ?type)])]
[else (css-components->feature-query entry #true rulename)])))))
(define-css-parser-entry css-parse-feature-query #:-> CSS-Feature-Query
(lambda [/dev/cssin [rulename : CSS-Syntax-Any #false]]
(define-values (conditions _) (css-consume-components /dev/cssin))
(with-handlers ([exn:css? (λ [[errcss : exn:css]] errcss)])
(css-components->feature-query conditions #false rulename))))
(define-css-parser-entry css-parse-selectors #:-> (U (Listof+ CSS-Complex-Selector) CSS-Syntax-Error)
/#selector-list
(lambda [/dev/cssin]
(define-values (components _) (css-consume-components /dev/cssin))
(css-components->selectors components #false)))
(define css-consume-stylesheet : (-> Input-Port (Listof CSS-Syntax-Rule))
(lambda [css]
(define rules : (Listof CSS-Syntax-Rule) (css-consume-rules css #true))
(define rule : (Option CSS-Syntax-Rule) (and (pair? rules) (car rules)))
(if (and (css-@rule? rule) (css:@keyword-norm=:=? (css-@rule-name rule) '#:@charset))
(cdr rules)
rules)))
(define css-consume-rules : (-> Input-Port Boolean (Listof CSS-Syntax-Rule))
(lambda [css toplevel?]
(let consume-rules ([rules : (Listof CSS-Syntax-Rule) null])
(define token (css-read-syntax css))
(cond [(eof-object? token) (reverse rules)]
[(css:whitespace? token) (consume-rules rules)]
[(css:@keyword? token) (consume-rules (css-cons (css-consume-@rule css token) rules))]
[(css:cd? token) (consume-rules (if toplevel? rules (css-cons (css-consume-qualified-rule css token) rules)))]
[else (consume-rules (css-cons (css-consume-qualified-rule css token) rules))]))))
(define css-consume-@rule : (-> Input-Port CSS:@Keyword CSS-@Rule)
(lambda [css reconsumed-at-token]
(define-values (prelude ?block) (css-consume-rule-item css #:@rule? #true))
(css-@rule reconsumed-at-token prelude ?block)))
(define css-consume-qualified-rule : (-> Input-Port CSS-Token (U CSS-Qualified-Rule CSS-@Rule CSS-Syntax-Error))
(lambda [css reconsumed]
(define head (css-consume-component-value css reconsumed))
(define-values (prelude ?block) (css-consume-rule-item css #:@rule? #false))
(cond [(css:block? ?block) (css-qualified-rule (cons head prelude) ?block)]
[else (make+exn:css:missing-block (cons head prelude))])))
(define css-consume-component-value : (-> Input-Port CSS-Token CSS-Token)
(lambda [css reconsumed]
(cond [(css:delim? reconsumed)
(case (css:delim-datum reconsumed)
[(#\{) (css-consume-simple-block css reconsumed #\})]
[(#\[) (css-consume-simple-block css reconsumed #\])]
[(#\() (css-consume-simple-block css reconsumed #\))]
[else reconsumed])]
[(css:function? reconsumed) (css-consume-function css reconsumed)]
[else reconsumed])))
(struct css-@rule ([name : CSS:@Keyword] [prelude : (Listof CSS-Token)] [block : (Option CSS:Block)]) #:transparent #:type-name CSS-@Rule)
(struct css-qualified-rule ([prelude : (Listof+ CSS-Token)] [block : CSS:Block]) #:transparent #:type-name CSS-Qualified-Rule)
(define css-consume-rule-item : (-> Input-Port #:@rule? Boolean (Values (Listof CSS-Token) (Option CSS:Block)))
(lambda [css #:@rule? at-rule?]
(let consume-item ([prelude : (Listof CSS-Token) null]
[simple-block : (Option CSS:Block) #false])
(define token (css-read-syntax css))
(cond [(or (eof-object? token) (and at-rule? (css:semicolon? token)))
(when (eof-object? token) (make+exn:css:missing-delimiter prelude))
(values (reverse prelude) simple-block)]
[(css:delim=:=? token #\{) (values (reverse prelude) (css-consume-simple-block css token #\}))]
[(css:block=:=? token #\{) (values (reverse prelude) token)]
[else (consume-item (cons (css-consume-component-value css token) prelude) simple-block)]))))
(define css-consume-simple-block : (-> Input-Port CSS:Delim Char CSS:Block)
(lambda [css open close-char]
(define-values (components close end-token) (css-consume-block-body css open close-char))
(syn-remake-token [open end-token] css:block (css:delim-datum open) components #false)))
(define css-consume-function : (-> Input-Port CSS:Function (U CSS:Function CSS:URL))
(lambda [css func]
(define fname : Symbol (css:function-datum func))
(cond [(not (symbol-unreadable? fname)) func]
[else (let-values ([(components close end-token) (css-consume-block-body css func #\))]
[(fnorm) (css:function-norm func)])
(if (eq? fnorm 'url)
(let-values ([(href modifiers) (css-car components)])
(syn-remake-token func css:url
(if (css:string? href) (css:string-datum href) "")
(css-url-modifiers-filter func modifiers)
#false))
(let ([freadable (string->symbol (symbol->immutable-string fname))])
(syn-remake-token [func end-token] css:function freadable fnorm
(cond [(eq? fnorm 'var) components] ; whitespaces are meaningful in var()
[else (filter-not css:whitespace? components)])
#false))))])))
(define css-consume-block-body : (-> Input-Port CSS-Token Char (Values (Listof CSS-Token) CSS-Syntax-Terminal CSS-Token))
(lambda [css start-token close-char]
(let consume-body ([components : (Listof CSS-Token) null])
(define token (css-read-syntax css))
(cond [(css:close=:=? token close-char) (values (reverse components) token token)]
[(not (eof-object? token)) (consume-body (cons (css-consume-component-value css token) components))]
[else (let ([end-token (if (null? components) start-token (car components))])
(make+exn:css:missing-delimiter #false)
(values (reverse components) #false end-token))]))))
(define css-consume-components : (->* (Input-Port) ((Option Char) Boolean) (Values (Listof CSS-Token) CSS-Syntax-Terminal))
(lambda [css [terminating-char #false] [omit-terminate? #false]]
(let consume-component ([stnenopmoc : (Listof CSS-Token) null])
(define token (css-read-syntax css))
(cond [(eof-object? token) (values (reverse stnenopmoc) #false)]
[(and terminating-char (css:delim=:=? token terminating-char))
(define next (css-peek-syntax/skip-whitespace css))
(cond [(and omit-terminate? (css-null? stnenopmoc))
(cond [(and (eof-object? next) (css-read-syntax/skip-whitespace css))
(make+exn:css:overconsumption token)
(values (reverse stnenopmoc) #false)]
[else (make+exn:css:empty token)
(css-consume-components css terminating-char omit-terminate?)])]
[(eof-object? next)
(css-read-syntax/skip-whitespace css)
(values (reverse stnenopmoc) #false)]
[else (values (reverse stnenopmoc) token)])]
[else (consume-component (cons (css-consume-component-value css token) stnenopmoc))]))))
(define css-consume-componentses : (-> Input-Port [#:omit-comma? Boolean] (Listof (Listof CSS-Token)))
(lambda [css #:omit-comma? [omit-comma? #true]]
(let consume-components ([componentses : (Listof (Listof CSS-Token)) null])
(define-values (components terminating-token) (css-consume-components css #\, omit-comma?))
(cond [(or terminating-token) (consume-components (cons components componentses))]
[(not omit-comma?) (reverse (cons components componentses))]
[else (filter (inst css-pair? CSS-Token) (reverse (cons components componentses)))]))))
(define css-components->declaration : (-> CSS:Ident (Listof CSS-Token) (U CSS-Declaration CSS-Syntax-Error))
-syntax/#typedef-declaration-value
(lambda [id-token components]
(define-values (?: value-list) (css-car components))
(cond [(not (css:colon? ?:)) (make+exn:css:missing-colon id-token)]
[else (let ([var? (and (css:ident=<-? id-token symbol-unreadable?) #true)])
(define-values (?values important? lazy?) (css-any->declaration-value id-token value-list var?))
(if (exn? ?values) ?values (css-declaration id-token ?values important? lazy?)))])))
(define css-components->media-type+query : (-> CSS:Ident Boolean (Listof CSS-Token) CSS-Media-Query)
(lambda [media only? conditions]
(define downcased-type : Symbol (css:ident-norm media))
(define-values (?and ?conditions) (css-car conditions))
(when (css-deprecate-media-type) (make+exn:css:deprecated media))
(cond [(memq downcased-type '(only not and or)) (make+exn:css:misplaced media)]
[(not ?and) (if only? (box downcased-type) (CSS-Not (box downcased-type)))]
[(not (css:ident-norm=:=? ?and 'and)) (make+exn:css:unrecognized ?and)]
[(css-null? ?conditions) (make+exn:css:missing-feature ?and)]
[else (cons (if only? (box downcased-type) (CSS-Not (box downcased-type)))
(css-components->junction ?conditions 'and #false #true))])))
(define css-components->feature-query : (-> (Listof CSS-Token) Boolean CSS-Syntax-Any CSS-Feature-Query)
(lambda [conditions media? alt]
(define-values (token rest) (css-car conditions))
(define-values (op chain) (css-car rest))
(cond [(not token) (throw-exn:css:missing-feature alt)]
[(css:ident-norm=:=? token 'not) (css-components->negation token rest media?)]
[(not op) (css-component->feature-query token media?)]
[(css:ident-norm=<-? op '(and or)) (css-components->junction chain (css:ident-norm op) token media?)]
[else (throw-exn:css:unrecognized op)])))
(define css-component->feature-query : (-> CSS-Token Boolean CSS-Feature-Query)
/#mq-boolean-context
(lambda [condition media?]
(cond [(css:block=:=? condition #\()
(define subany : (Listof CSS-Token) (css:block-components condition))
(define-values (name any-values) (css-car subany))
(define-values (op value-list) (css-car any-values))
(cond [(css:block=:=? name #\() (css-components->feature-query subany media? condition)]
[(css:ident-norm=:=? name 'not) (css-components->negation name any-values media?)]
[(and (css:ident? name) (css:colon? op))
(define descriptor (css-components->declaration name any-values))
(cond [(exn? descriptor) (if media? (throw-exn:css:enclosed condition) (raise descriptor))]
[(and media?) (css-declaration->media-query descriptor condition)]
[else descriptor])]
[(and media?)
(cond [(and (css:ident? name) (not op)) (css-make-media-feature name #false #\? #false)]
[else (css-components->media-range-query subany condition)])]
[(not name) (throw-exn:css:empty condition)]
[(css:ident? name) (throw-exn:css:missing-colon condition)]
[(css:function? condition) (throw-exn:css:enclosed condition)]
[else (throw-exn:css:type:identifier condition)])]
[else (throw-exn:css:missing-feature condition)])))
(define css-components->negation : (-> CSS:Ident (Listof CSS-Token) Boolean CSS-Not)
(lambda [<not> tokens media?]
(define-values (token rest) (css-car tokens))
(cond [(not token) (throw-exn:css:missing-feature <not>)]
[(css:ident-norm=:=? token 'not) (throw-exn:css:misplaced token)]
[(css-null? rest) (CSS-Not (css-component->feature-query token media?))]
[else (throw-exn:css:overconsumption rest)])))
(define css-components->junction : (-> (Listof CSS-Token) Symbol (Option CSS-Token) Boolean (U CSS-And CSS-Or))
/#typedef-media-and
/#typedef-media-or
(lambda [conditions op ?head media?]
(let components->junction ([junctions : (Listof CSS-Token) (if (false? ?head) null (list ?head))]
[--conditions : (Listof CSS-Token) conditions])
(define-values (condition rest) (css-car --conditions))
(define-values (token others) (css-car rest))
(cond [(not condition) (junctions->conditional-query junctions op media?)]
[(css:ident-norm=:=? condition 'not) (throw-exn:css:misplaced condition)]
[(or (not token) (css:ident-norm=:=? token op)) (components->junction (cons condition junctions) others)]
[(css:ident-norm=<-? token '(and or)) (throw-exn:css:misplaced token)]
[else (throw-exn:css:overconsumption token)]))))
(define css-components->media-range-query : (-> (Listof CSS-Token) CSS:Block CSS-Feature-Query)
(lambda [components broken-condition]
(define-values (value0 rest0) (css-car-media-value components))
(define-values (d0 op0 po0 rest1) (css-car-comparison-operator rest0))
(define-values (value1 rest2) (css-car-media-value rest1))
(define-values (d1 op1 po1 rest3) (css-car-comparison-operator rest2))
(define-values (value2 terminal) (css-car-media-value rest3))
(cond [(not value0) (throw-exn:css:empty broken-condition)]
[(not d0) (throw-exn:css:missing-delimiter components)]
[(not value1) (throw-exn:css:missing-value rest0)]
[(and (css:ident? value0) (css:delim? d1)) (throw-exn:css:enclosed broken-condition)]
[(and (eq? op0 #\=) (css:delim? d1)) (throw-exn:css:overconsumption broken-condition)]
[(css:ident? value0) (css-make-media-feature value0 value1 op0 d0)]
[(and (not d1) (css:ident? value1)) (css-make-media-feature value1 value0 po0 d0)]
[(not (css:ident? value1)) (throw-exn:css:type:identifier value1)]
[(or (not value2) (css:ident? value2)) (throw-exn:css:missing-value rest2)]
[(css-pair? terminal) (throw-exn:css:overconsumption terminal)]
[(not (eq? (css:delim-datum d0) (css:delim-datum d1))) (throw-exn:css:malformed (list d0 value1 d1))]
[else (CSS-And (list (css-make-media-feature value1 value0 po0 d0)
(css-make-media-feature value1 value2 op1 d1)))])))
(define css-declaration->media-query : (-> CSS-Declaration CSS:Block CSS-Feature-Query)
(lambda [property broken-condition]
(define-values (media-value rest) (css-car-media-value (css-declaration-values property)))
(cond [(not media-value) (throw-exn:css:enclosed broken-condition)]
[(css-pair? rest) (throw-exn:css:enclosed broken-condition)]
[else (css-make-media-feature (css-declaration-name property) media-value #\: #false)])))
(define css-car-comparison-operator : (-> (Listof CSS-Token) (Values (Option CSS:Delim) Char Char (Listof CSS-Token)))
(lambda [components]
(define-values (d rest) (css-car components))
(define-values (?= terminal) (css-car/cdr rest))
(cond [(not d) (values #false #\≠ #\≠ rest)]
[(not (css:delim? d)) (throw-exn:css:type d)]
[else (case (css:delim-datum d)
[(#\=) (values d #\= #\= rest)]
[(#\>) (if (css:delim=:=? ?= #\=) (values d #\≥ #\≤ terminal) (values d #\> #\< rest))]
[(#\<) (if (css:delim=:=? ?= #\=) (values d #\≤ #\≥ terminal) (values d #\< #\> rest))]
[else (throw-exn:css:range d)])])))
(define css-car-media-value : (-> (Listof CSS-Token) (Values (Option CSS-Media-Value) (Listof CSS-Token)))
(lambda [components]
(define-values (value rest) (css-car components))
(define-values (?/ ?rest) (css-car rest))
(define-values (?int terminal) (css-car ?rest))
(cond [(not value) (values #false rest)]
[(css:slash? ?/)
(define width : (Option Positive-Integer) (css:integer=<-? value exact-positive-integer?))
(define height : (Option Positive-Integer) (css:integer=<-? ?int exact-positive-integer?))
(values (cond [(and width height (css-token? ?int)) (syn-remake-token [value ?int] css:ratio (/ width height))]
[(css-number? value) (throw-exn:css:range value)]
[(css-number? ?int) (throw-exn:css:range ?int)]
[else (throw-exn:css:type (filter css-token? (list value ?/ ?int)))])
terminal)]
[(or (css:ident? value) (css-numeric? value)) (values value rest)]
[else (values (throw-exn:css:type value) rest)])))
(define css-make-media-feature : (-> CSS:Ident (Option CSS-Media-Value) Char (Option CSS:Delim) (U Symbol CSS-Media-Feature-Query))
(lambda [desc-name ?value ophint ?op]
(define errobj : (Listof CSS-Token) (filter css-token? (list desc-name ?op ?value)))
(define name : String (symbol->immutable-string (css:ident-norm desc-name)))
(define-values (downcased-name op min/max?)
(cond [(string-prefix? name "min-") (values (string->symbol (substring name 4)) #\≥ #true)]
[(string-prefix? name "max-") (values (string->symbol (substring name 4)) #\≤ #true)]
[else (values (string->symbol name) ophint #false)]))
(when (and min/max?)
(cond [(or (not ?value) (css:delim? ?op)) (throw-exn:css:misplaced errobj)]
[(not (css-numeric? ?value)) (throw-exn:css:type errobj)]))
(define feature-filter : (U Void (CSS:Filter CSS-Media-Datum))
((default-css-media-feature-filters) downcased-name min/max? (λ [] (void (make+exn:css:deprecated desc-name)))))
(cond [(void? feature-filter) (throw-exn:css:unrecognized errobj)]
[(false? ?value) downcased-name]
[else (let ([datum (feature-filter ?value)])
(cond [(false? datum) (throw-exn:css:type ?value desc-name)]
[(exn:css? datum) (css-log-syntax-error datum desc-name) (raise datum)]
[else (vector downcased-name op datum)]))])))
(define css-components->selectors : (-> (Listof CSS-Token) CSS-Namespace-Hint (U (Listof+ CSS-Complex-Selector) CSS-Syntax-Error))
/#selector-list
(lambda [components namespaces]
(with-handlers ([exn:css? (λ [[errcss : exn:css]] errcss)])
(define-values (head-complex-selector ?eof ?rest) (css-car-complex-selector components namespaces))
(let extract-complex-selector ([srotceles : (Listof CSS-Complex-Selector) null]
[terminal : (Option CSS:Delim) ?eof]
[rest : (Listof CSS-Token) ?rest])
(if (css-null? rest)
(cond [(not terminal) (cons head-complex-selector (reverse srotceles))]
[else (throw-exn:css:overconsumption terminal)])
(let-values ([(complex-selector ?terminal ?rest) (css-car-complex-selector rest namespaces)])
(extract-complex-selector (cons complex-selector srotceles) ?terminal ?rest)))))))
(define css-car-complex-selector : (-> (Listof CSS-Token) CSS-Namespace-Hint
(Values CSS-Complex-Selector (Option CSS:Delim) (Listof CSS-Token)))
(lambda [components namespaces]
(define-values (head-compound-selector rest) (css-car-compound-selector components #false namespaces))
(let extract-selector ([srotceles : (Listof+ CSS-Compound-Selector) (list head-compound-selector)]
[tokens : (Listof CSS-Token) rest])
(define-values (?terminal rest) (css-car tokens))
(define-values (token ?selectors) (css-car/cdr tokens))
(cond [(or (not ?terminal) (css:comma? ?terminal)) (values srotceles ?terminal rest)]
[(not (css-selector-combinator? token)) (throw-exn:css:unrecognized ?terminal)]
[else (let*-values ([(combinator ?selectors) (css-car-combinator token ?selectors)]
[(?selector ?rest) (css-car ?selectors)])
(cond [(or (not ?selector) (css:comma? ?selector)) (throw-exn:css:overconsumption ?selectors)]
[else (let-values ([(selector rest) (css-car-compound-selector ?selectors combinator namespaces)])
(extract-selector (cons selector srotceles) rest))]))]))))
(define css-car-compound-selector : (-> (Listof CSS-Token) (Option CSS-Selector-Combinator) CSS-Namespace-Hint
(Values CSS-Compound-Selector (Listof CSS-Token)))
-drafts/issues/202
(lambda [components combinator namespaces]
(define-values (head heads) (css-car components))
(define-values (typename namespace simple-selector-components)
(cond [(css:ident? head) (css-car-elemental-selector head heads namespaces)]
[(css:delim=<-? head '(#\| #\*)) (css-car-elemental-selector head heads namespaces)]
[(or (not head) (css:comma? head)) (throw-exn:css:empty head)]
[else (values #true (or (css-declared-namespace namespaces '||) #true) (cons head heads))]))
(define-values (:classes :children selector-components) (css-car-:class-selectors simple-selector-components))
(let extract-simple-selector ([sessalc : (Listof Symbol) null]
[sdi : (Listof Keyword) null]
[setubirtta : (Listof CSS-Attribute-Selector) null]
[pseudo-element : (Option CSS-::Element-Selector) #false]
[selector-tokens : (Listof CSS-Token) selector-components])
(define-values (token tokens) (css-car/cdr selector-tokens))
(cond [(or (not token) (css:comma? token) (css-selector-combinator? token))
(values (CSS-Compound-Selector combinator namespace typename (reverse sdi) (reverse sessalc)
(reverse setubirtta) :classes :children pseudo-element)
selector-tokens)]
[(and pseudo-element) (throw-exn:css:overconsumption token)]
[(css:delim=:=? token #\.)
(define-values (next rest) (css-car/cdr tokens))
(cond [(not (css:ident? next)) (throw-exn:css:type:identifier next)]
[else (extract-simple-selector (cons (css:ident-datum next) sessalc) sdi setubirtta pseudo-element rest)])]
[(css:colon? token)
(define-values (?pseudo-classes ?pseudo-children ?rest) (css-car-:class-selectors tokens))
(define-values (next rest) (css-car/cdr ?rest))
(cond [(null? ?pseudo-classes) (throw-exn:css:misplaced (list token (car tokens)))]
[(pair? ?pseudo-children) (throw-exn:css:malformed token)]
[else (let ([pclass (car ?pseudo-classes)])
(define pelement : CSS-::Element-Selector
(CSS-::Element-Selector (css-:class-selector-name pclass)
(cdr ?pseudo-classes)))
(extract-simple-selector sessalc sdi setubirtta pelement ?rest))])]
[(css:block=:=? token #\[)
(define attribute-selector : CSS-Attribute-Selector (css-simple-block->attribute-selector token namespaces))
(extract-simple-selector sessalc sdi (cons attribute-selector setubirtta) pseudo-element tokens)]
[(css:hash? token) (extract-simple-selector sessalc (cons (css:hash-datum token) sdi) setubirtta pseudo-element tokens)]
[else (throw-exn:css:unrecognized token)]))))
(define css-car-combinator : (-> (U CSS:WhiteSpace CSS:Delim) (Listof CSS-Token) (Values CSS-Selector-Combinator (Listof CSS-Token)))
(lambda [token tokens]
(case (cond [(css:whitespace? token) #\space] [(css:delim? token) (css:delim-datum token)] [else #\null])
[(#\space)
(define-values (next tail) (css-car tokens))
(cond [(css-selector-combinator? next) (css-car-combinator next tail)]
[else (values '>> tokens)])]
[(#\>)
(define-values (next tail) (css-car/cdr tokens))
(define-values (next2 tail2) (css-car tail))
(cond [(css:delim=:=? next #\>) (values '>> tail2)]
[else (values '> tail)])]
[(#\+) (values '+ tokens)]
[(#\~) (values '~ tokens)]
[(#\tab) (values '|| tokens)]
[else (throw-exn:css:unrecognized token)])))
(define css-car-elemental-selector : (-> (U CSS:Ident CSS:Delim) (Listof CSS-Token) CSS-Namespace-Hint
(Values (U Symbol True) (U Symbol Boolean) (Listof CSS-Token)))
(lambda [token tokens namespaces]
(define-values (next rest next2 rest2) (css-car/cadr tokens))
(cond [(css:vbar? token)
(cond [(css:ident? next) (values (css:ident-datum next) #false rest)]
[(css:delim=:=? next #\*) (values #true #false rest)]
[else (throw-exn:css:type:identifier next)])]
[(css:vbar? next)
(define ns : (U Symbol Boolean) (css-declared-namespace namespaces token))
(cond [(false? ns) (throw-exn:css:namespace token)]
[(css:ident? next2) (values (css:ident-datum next2) ns rest2)]
[(css:delim=:=? next2 #\*) (values #true ns rest2)]
[else (throw-exn:css:type:identifier (list token next))])]
[else (let ([ns (or (css-declared-namespace namespaces '||) #true)])
(cond [(css:delim? token) (values #true ns tokens)]
[else (values (css:ident-datum token) ns tokens)]))])))
(define css-car-:class-selectors : (-> (Listof CSS-Token) (Values (Listof CSS-:Class-Selector) (Listof CSS-:Child-Selector) (Listof CSS-Token)))
(lambda [components]
(let extract-:class-selector ([srotceles : (Listof CSS-:Class-Selector) null]
[nerdlihc : (Listof CSS-:Child-Selector) null]
[tokens : (Listof CSS-Token) components])
(define-values (maybe: rest ?id rest2) (css-car/cadr tokens))
(cond [(or (not (css:colon? maybe:)) (css:colon? ?id)) (values (reverse srotceles) (reverse nerdlihc) tokens)]
[(css:ident? ?id)
(let ([name (css:ident-datum ?id)])
(case name
[(first-child) (extract-:class-selector srotceles (cons (CSS-:Child-Selector name (css-An+B-predicate 0 1 #false)) nerdlihc) rest2)]
[(last-child) (extract-:class-selector srotceles (cons (CSS-:Child-Selector name (css-An+B-predicate 0 1 #true)) nerdlihc) rest2)]
[(only-child) (extract-:class-selector srotceles (cons (CSS-:Child-Selector name :only-child) nerdlihc) rest2)]
[else (extract-:class-selector (cons (CSS-:Class-Selector name) srotceles) nerdlihc rest2)]))]
[(css:function=<-? ?id '(nth-child nth-of-type nth-col))
(let ([A.B (css-extract-An+B (css:function-arguments ?id))])
(cond [(not A.B) (throw-exn:css:type:An+B ?id)]
[else (let* ([predicate (css-An+B-predicate (car A.B) (cdr A.B) #false)]
[selector (CSS-:Child-Selector (css:function-norm ?id) predicate)])
(extract-:class-selector srotceles (cons selector nerdlihc) rest2))]))]
[(css:function=<-? ?id '(nth-last-child nth-of-last-type nth-last-col))
(let ([A.B (css-extract-An+B (css:function-arguments ?id))])
(cond [(not A.B) (throw-exn:css:type:An+B ?id)]
[else (let* ([predicate (css-An+B-predicate (car A.B) (cdr A.B) #true)]
[selector (CSS-:Child-Selector (css:function-norm ?id) predicate)])
(extract-:class-selector srotceles (cons selector nerdlihc) rest2))]))]
[(css:function? ?id)
(let ([selector (CSS-:Function-Selector (css:function-norm ?id) (css:function-arguments ?id))])
(extract-:class-selector (cons selector srotceles) nerdlihc rest2))]
[else (throw-exn:css:type:identifier maybe:)]))))
(define css-simple-block->attribute-selector : (-> CSS:Block CSS-Namespace-Hint CSS-Attribute-Selector)
/#attribute-case
(lambda [block namespaces]
(define-values (1st rest1) (css-car (css:block-components block)))
(define-values (2nd rest2 3rd rest3) (css-car/cadr rest1))
(define-values (attrname quirkname namespace op-part)
(cond [(not 1st) (throw-exn:css:empty block)]
[(or (css:match? 1st) (css:delim=:=? 1st #\=))
(throw-exn:css:type:identifier block)]
[(or (not 2nd) (css:match? 2nd) (css:delim=:=? 2nd #\=) (css:whitespace? 2nd))
(cond [(css:ident? 1st) (values (css:ident-datum 1st) (css:ident-norm 1st) #false rest1)]
[else (throw-exn:css:type:identifier 1st)])]
[(or (not 3rd) (css:match? 3rd) (css:delim=:=? 3rd #\=) (css:whitespace? 3rd))
(cond [(and (css:vbar? 1st) (css:ident? 2nd)) (values (css:ident-datum 2nd) (css:ident-norm 2nd) #false rest2)]
[(css:vbar? 2nd) (throw-exn:css:type:identifier 2nd)]
[else (throw-exn:css:unrecognized 1st)])]
[(and (or (css:ident? 1st) (css:delim=:=? 1st #\*)) (css:vbar? 2nd) (css:ident? 3rd))
(define ns (css-declared-namespace namespaces 1st))
(cond [(false? ns) (throw-exn:css:namespace 1st)]
[else (values (css:ident-datum 3rd) (css:ident-norm 3rd) ns rest3)])]
[(and (or (css:ident? 1st) (css:delim=:=? 1st #\*)) (css:vbar? 2nd))
(throw-exn:css:type:identifier 3rd)]
[(or (css:ident? 1st) (css:delim=:=? 1st #\*))
(throw-exn:css:unrecognized 2nd)]
[else (throw-exn:css:unrecognized 1st)]))
(define-values (op value-part value ci-part) (css-car/cadr op-part))
(define-values (i terminal) (css-car ci-part))
(unless (not op)
(cond [(not value) (throw-exn:css:missing-value op)]
[(nor (not i) (css:ident-norm=:=? i 'i)) (throw-exn:css:overconsumption i)]
[(css-pair? terminal) (throw-exn:css:overconsumption terminal)]))
(define val : (U String Symbol)
(cond [(css:string? value) (css:string-datum value)]
[(css:ident? value) (css:ident-datum value)]
[(or (css:whitespace? value) (not value)) ""]
[else (throw-exn:css:type value)]))
(cond [(or (css:whitespace? op) (not op)) (CSS-Attribute-Selector attrname quirkname namespace)]
[(css:delim=:=? op #\=) (CSS-Attribute~Selector attrname quirkname namespace #\= val (css:ident? i))]
[(css:match? op) (CSS-Attribute~Selector attrname quirkname namespace (css:match-datum op) val (css:ident? i))]
[else (throw-exn:css:unrecognized op)])))
(define-type CSS-Syntax-Terminal (U CSS:Delim CSS:Close False))
(define-type CSS-Syntax-Rule (U CSS-Qualified-Rule CSS-@Rule))
(define-type CSS-Media-Value (U CSS-Numeric CSS:Ident CSS:Ratio))
(define css-components->declarations : (-> (Listof CSS-Token) (Listof CSS-Declaration))
(lambda [components]
(let make-style-rule ([seitreporp : (Listof CSS-Declaration) null] [tokens : (Listof CSS-Token) components])
(define-values (id any-values) (css-car tokens))
(define-values (:values rest)
(let collect : (Values (Listof CSS-Token) (Listof CSS-Token)) ([seulav : (Listof CSS-Token) null]
[rest : (Listof CSS-Token) any-values])
(define-values (head tail) (css-car/cdr rest))
(cond [(or (not head) (css:semicolon? head)) (values (reverse seulav) tail)]
[(and (css:block=:=? head #\{) (css:@keyword? id)) (values (reverse (cons head seulav)) tail)]
[else (collect (cons head seulav) tail)])))
(cond [(not id) (reverse seitreporp)]
[(css:ident? id) (make-style-rule (css-cons (css-components->declaration id :values) seitreporp) rest)]
[else (make-style-rule (css-cons (make+exn:css:type:identifier (cons id :values)) seitreporp) rest)]))))
(define css-selector-combinator? : (-> CSS-Syntax-Any Boolean : #:+ (U CSS:WhiteSpace CSS:Delim))
(lambda [token]
(or (css:whitespace? token)
(and (css:delim=<-? token '(#\~ #\+ #\> #\tab))
#true))))
(define junctions->conditional-query : (-> (Listof CSS-Token) Symbol Boolean (U CSS-And CSS-Or))
(lambda [junctions op media?]
(define queries : (Listof CSS-Feature-Query)
(for/list ([junction (in-list (reverse junctions))])
(css-component->feature-query junction media?)))
(if (eq? op 'and) (CSS-And queries) (CSS-Or queries))))
|
a603475df74f7453713c67ec821dfb0ea4059a7eba4395d15bcb4235c8ad882e | rabbitmq/khepri | khepri_path.erl | %% This Source Code Form is subject to the terms of the Mozilla Public
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
%% Copyright © 2021-2023 VMware, Inc. or its affiliates. All rights reserved.
%%
%% @doc Khepri path API.
%%
%% A path is the type used by Khepri to reference nodes in the tree structure.
%% A path describes how to reach a node from the root node.
%%
%% A path, or <em>native path</em>, is a list of components. Components can be
%% Erlang atoms and binaries. Example:
%%
%% ```
%% %% Native path.
%% Path = [stock, wood, <<"oak">>].
%% '''
%%
%% A path may contain conditions to tune how a node is matched or to match
%% multiple nodes at once. This is called a <em>path pattern</em>. A path
%% pattern may contain conditions in addition to regular components (Erlang
%% atoms and binaries). See {@link khepri_condition} to learn more about
%% conditions. Example:
%%
%% ```
%% %% Path pattern with a condition on `wood'.
%% PathPattern = [stock,
%% #if_all{conditions = [wood,
%% #if_node_exists{exists = true}]},
%% oak].
%% '''
%%
%% To be user-friendly, string-based and binary-based <em>Unix-like paths</em>
%% are accepted by most functions. The syntax of these <em>Unix paths</em> is
%% described in the {@link unix_path()} type documentation. Example:
%%
%% ```
%% %% Unix path, equivalent of the first native path example.
%% UnixPath = "/:stock/:wood/oak".
%% '''
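%%
%% For instance, the two examples above should convert into each other with
%% {@link from_string/1} and {@link to_string/1}:
%%
%% ```
%% [stock, wood, <<"oak">>] = khepri_path:from_string("/:stock/:wood/oak"),
%% "/:stock/:wood/oak" = khepri_path:to_string([stock, wood, <<"oak">>]).
%% '''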
-module(khepri_path).
-include_lib("stdlib/include/assert.hrl").
-include("include/khepri.hrl").
-include("src/khepri_error.hrl").
-export([compile/1,
from_string/1,
from_binary/1,
to_string/1,
to_binary/1,
sigil_p/2,
sigil_P/2,
combine_with_conditions/2,
targets_specific_node/1,
component_targets_specific_node/1,
is_valid/1,
ensure_is_valid/1,
abspath/2,
realpath/1,
pattern_includes_root_node/1]).
-ifdef(TEST).
-export([component_to_string/1]).
-endif.
-type node_id() :: atom() | binary().
%% A node name.
-type component() :: node_id() |
?KHEPRI_ROOT_NODE |
?THIS_KHEPRI_NODE |
?PARENT_KHEPRI_NODE.
%% Component name in a path to a node.
-type native_path() :: [component()].
%% Native path to a node.
%%
%% A native path is a list of atoms, binaries and special components.
%%
%% It is called <em>native</em> because it requires no further processing
%% (unlike {@link unix_path()}) and is the format used internally by the state
%% machine.
%%
%% Special components are:
%% <ol>
%% <li>`?KHEPRI_ROOT_NODE' to explicitly mark the root node. A path is absolute
%% by default. Using `?KHEPRI_ROOT_NODE' is only useful when manipulating the
%% root node itself (querying it or storing something in the root node).</li>
%% <li>`?THIS_KHEPRI_NODE' to make a relative path (the default being an
%% absolute path). This is mostly useful for {@link
%% khepri_condition:keep_while()} to make it easy to put a condition on the
%% node itself.</li>
%% <li>`?PARENT_KHEPRI_NODE' to target the parent of a node, with the same
%% benefits and use cases as `?THIS_KHEPRI_NODE'.</li>
%% </ol>
%%
%% Example:
%%
%% ```
%% %% Native path.
%% Path = [stock, wood, <<"oak">>].
%% '''
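%%
%% A relative native path simply starts with one of the special components,
%% e.g. a path to a node named `oak' under the parent of the current node:
%%
%% ```
%% RelativePath = [?PARENT_KHEPRI_NODE, <<"oak">>].
%% '''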
-type native_pattern() :: [pattern_component()].
%% Path pattern which may match zero, one or more nodes.
%%
%% A native pattern is a list of atoms, binaries, special components and
%% conditions.
%%
%% It is called <em>native</em> because it requires no further processing
%% (unlike {@link unix_pattern()}) and is the format used internally by the
%% state machine.
%%
%% See {@link native_path()} for a description of special components.
%%
%% Conditions are any condition defined by {@link
%% khepri_condition:condition()}.
%%
%% Example:
%%
%% ```
%% %% Path pattern with a condition on `wood'.
%% PathPattern = [stock,
%% #if_all{conditions = [wood,
%% #if_node_exists{exists = true}]},
%% oak].
%% '''
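%%
%% A native pattern can also use the wildcard conditions; for instance the
%% following should match every node directly under `wood':
%%
%% ```
%% PathPattern = [stock, wood, ?KHEPRI_WILDCARD_STAR].
%% '''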
-type unix_path() :: string() | binary().
%% Unix-like path to a node.
%%
%% These <em>Unix paths</em> have the following syntax:
%%
%% <ul>
%% <li>Path components are separated by a forward slash, `/'.</li>
%% <li>Atom-based node IDs are prefixed with a `:' character: `:wood'.</li>
%% <li>Binary-based node IDs are written as-is: `oak'.</li>
%% <li>Atom and binaries can be percent-encoded.</li>
%% <li>An absolute path must start with `/', otherwise it is considered a
%% relative path</li>
%% <li>`.' and `..' represent `?THIS_KHEPRI_NODE' and `?PARENT_KHEPRI_NODE'
%% respectively</li>
%% <li>Simple glob patterns are accepted:
%% <ul>
%% <li>`abc*def' is the same as `#if_name_matches{regex = "^abc.*def$"}'</li>
%% <li>`*' is the same as `?KHEPRI_WILDCARD_STAR' or `#if_name_matches{regex =
%% any}'</li>
%% <li>`**' is the same as `?KHEPRI_WILDCARD_STAR_STAR' or
%% `if_path_matches{regex = any}'</li>
%% </ul></li>
%% </ul>
%%
%% <strong>Warning</strong>: There is no special handling of Unicode in tree
%% node names. To use Unicode, it is recommended to either use a native path or
%% a binary-based Unix-like path. If using a string-based Unix-like path, the
%% behavior is undefined and the call may crash. Matching against node names is
%% also undefined behavior and may crash, regardless of the type of path being
%% used. It will be improved in the future.
%%
%% Example:
%% ```
%% %% Unix path, equivalent of the first native path example.
%% UnixPath = "/:stock/:wood/oak".
%% '''
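%%
%% A relative Unix path relies on `.' and `..'. For example, the following
%% should be equivalent to the native path `[?PARENT_KHEPRI_NODE, <<"wood">>]':
%%
%% ```
%% RelativeUnixPath = "../wood".
%% '''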
-type unix_pattern() :: string() | binary().
%% Unix-like path pattern to a node.
%%
%% It accepts the following special characters:
%% <ol>
%% <li>`*' anywhere in a path component behaves like a {@link
%% khepri_condition:if_name_matches()}.</li>
%% <li>`**' as a path component behaves like a {@link
%% khepri_condition:if_path_matches()}.</li>
%% </ol>
%%
%% A Unix-like path pattern can't express all the conditions of a native path
%% pattern currently.
%%
%% Otherwise it works as a {@link unix_path()} and has the same syntax and
%% limitations.
%%
%% Example:
%% ```
%% %% Unix path pattern, matching multiple types of oak.
%% UnixPathPattern = "/:stock/:wood/*oak".
%% '''
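%%
%% Parsing the pattern above with {@link from_string/1} should yield a native
%% pattern ending with an `#if_name_matches{}' condition:
%%
%% ```
%% [stock, wood, #if_name_matches{regex = "^.*oak$"}] =
%% khepri_path:from_string("/:stock/:wood/*oak").
%% '''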
-type path() :: native_path() | unix_path().
%% Path to a node.
-type pattern() :: native_pattern() | unix_pattern().
%% Path pattern which may match zero, one or more nodes.
-type pattern_component() :: component() | khepri_condition:condition().
%% Path pattern component which may match zero, one or more nodes.
-export_type([path/0,
native_path/0,
unix_path/0,
pattern/0,
native_pattern/0,
unix_pattern/0,
component/0,
pattern_component/0,
node_id/0]).
-define(
reject_invalid_path(Path),
?khepri_misuse(invalid_path, #{path => Path})).
-define(
reject_invalid_path(Path, Component),
?khepri_misuse(invalid_path, #{path => Path,
component => Component})).
-spec compile(PathPattern) -> PathPattern when
PathPattern :: native_pattern().
%% @private
compile(PathPattern) ->
lists:map(fun khepri_condition:compile/1, PathPattern).
-spec from_string(String) -> PathPattern when
String :: pattern(),
PathPattern :: native_pattern().
%% @doc Converts a Unix-like path to a native path.
%%
%% The Unix-like string can be either an Erlang string or an Erlang binary.
%%
%% For convenience, a native path is also accepted and returned as-is.
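%%
%% For example, string, binary and native inputs should all be handled:
%%
%% ```
%% [stock, wood, <<"oak">>] = khepri_path:from_string("/:stock/:wood/oak"),
%% [stock, wood, <<"oak">>] = khepri_path:from_string(<<"/:stock/:wood/oak">>),
%% [stock, wood, <<"oak">>] = khepri_path:from_string([stock, wood, <<"oak">>]).
%% '''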
from_string([$/, $/ | MaybeString]) ->
%% The path starts with two forward slashes. Therefore the path starts
%% with an empty binary.
from_string([$/ | MaybeString], [<<>>, ?KHEPRI_ROOT_NODE]);
from_string([$/ | MaybeString]) ->
from_string(MaybeString, [?KHEPRI_ROOT_NODE]);
from_string(MaybeString) when is_list(MaybeString) ->
from_string(MaybeString, []);
from_string(Binary) when is_binary(Binary) ->
String = erlang:binary_to_list(Binary),
from_string(String);
from_string(NotPath) ->
?reject_invalid_path(NotPath).
-spec from_binary(String) -> PathPattern when
String :: pattern(),
PathPattern :: native_pattern().
%% @doc Converts a Unix-like path to a native path.
%%
%% This is the same as calling `from_string(String)'. Therefore, it accepts
%% Erlang strings or binaries and native paths.
%%
%% @see from_string/1.
from_binary(MaybeString) ->
from_string(MaybeString).
-spec sigil_p(PathPattern, Options) -> NativePathPattern when
PathPattern :: pattern(),
Options :: [char()],
NativePathPattern :: native_pattern().
%% @doc Elixir sigil to parse Unix-like path using the `~p"/:path/:to/node"'
%% syntax.
%%
%% The lowercase `~p' sigil means that the string will go through
%% interpolation first before this function is called.
%%
%% @see sigil_P/2.
%%
%% @private
sigil_p(PathPattern, _Options) ->
from_string(PathPattern).
-spec sigil_P(PathPattern, Options) -> NativePathPattern when
PathPattern :: pattern(),
Options :: [char()],
NativePathPattern :: native_pattern().
%% @doc Elixir sigil to parse Unix-like path using the `~P"/:path/:to/node"'
%% syntax.
%%
%% The uppercase `~P' sigil means that the string will NOT go through
%% interpolation first before this function is called.
%%
%% @see sigil_p/2.
%%
%% @private
sigil_P(PathPattern, _Options) ->
from_string(PathPattern).
from_string([Component | _] = Rest, ReversedPath)
when ?IS_KHEPRI_NODE_ID(Component) orelse
?IS_KHEPRI_CONDITION(Component) ->
finalize_path(Rest, ReversedPath);
from_string([Char, Component | _] = Rest, ReversedPath)
when ?IS_SPECIAL_KHEPRI_PATH_COMPONENT(Char) andalso
(?IS_KHEPRI_NODE_ID(Component) orelse
?IS_KHEPRI_CONDITION(Component)) ->
finalize_path(Rest, ReversedPath);
from_string([?PARENT_KHEPRI_NODE, $/ | _] = Rest, ReversedPath) ->
%% If the character used to represent the parent node in a regular path
%% (`^') appears alone in a path component, it's a regular path. Other
%% special path components may appear alone in both forms though.
finalize_path(Rest, ReversedPath);
from_string([Char] = Rest, [] = ReversedPath)
when ?IS_SPECIAL_KHEPRI_PATH_COMPONENT(Char) ->
finalize_path(Rest, ReversedPath);
from_string([$/, $/ | Rest], ReversedPath) ->
%% Two consecutive forward slashes mean there is an empty binary
%% component.
ReversedPath1 = prepend_component(<<>>, ReversedPath),
from_string([$/ | Rest], ReversedPath1);
from_string([$/ | Rest], ReversedPath) ->
from_string(Rest, ReversedPath);
from_string([$: | Rest], ReversedPath) ->
parse_atom_from_string(Rest, ReversedPath);
from_string([Char | _] = Rest, ReversedPath) when is_integer(Char) ->
parse_binary_from_string(Rest, ReversedPath);
from_string([], ReversedPath) ->
finalize_path([], ReversedPath);
from_string(Rest, ReversedPath) ->
NotPath = lists:reverse(ReversedPath) ++ Rest,
?reject_invalid_path(NotPath).
parse_atom_from_string(Rest, ReversedPath) ->
parse_atom_from_string(Rest, "", ReversedPath).
parse_atom_from_string([$/ | _] = Rest, Acc, ReversedPath) ->
Component = finalize_atom_component(Acc),
ReversedPath1 = prepend_component(Component, ReversedPath),
from_string(Rest, ReversedPath1);
parse_atom_from_string([Char | Rest], Acc, ReversedPath)
when is_integer(Char) ->
Acc1 = [Char | Acc],
parse_atom_from_string(Rest, Acc1, ReversedPath);
parse_atom_from_string([] = Rest, Acc, ReversedPath) ->
Component = finalize_atom_component(Acc),
ReversedPath1 = prepend_component(Component, ReversedPath),
from_string(Rest, ReversedPath1).
finalize_atom_component(Acc) ->
Acc1 = lists:reverse(Acc),
Acc2 = percent_decode_string(Acc1),
erlang:list_to_atom(Acc2).
parse_binary_from_string(Rest, ReversedPath) ->
parse_binary_from_string(Rest, "", ReversedPath).
parse_binary_from_string([$/ | _] = Rest, Acc, ReversedPath) ->
Component = finalize_binary_componenent(Acc),
ReversedPath1 = prepend_component(Component, ReversedPath),
from_string(Rest, ReversedPath1);
parse_binary_from_string([Char | Rest], Acc, ReversedPath)
when is_integer(Char) ->
Acc1 = [Char | Acc],
parse_binary_from_string(Rest, Acc1, ReversedPath);
parse_binary_from_string([] = Rest, Acc, ReversedPath) ->
Component = finalize_binary_componenent(Acc),
ReversedPath1 = prepend_component(Component, ReversedPath),
from_string(Rest, ReversedPath1).
finalize_binary_componenent(Acc) ->
Acc1 = lists:reverse(Acc),
case Acc1 of
"." ->
?THIS_KHEPRI_NODE;
".." ->
?PARENT_KHEPRI_NODE;
"*" ->
?KHEPRI_WILDCARD_STAR;
"**" ->
?KHEPRI_WILDCARD_STAR_STAR;
_ ->
Acc2 = percent_decode_string(Acc1),
case re:run(Acc2, "\\*", [{capture, none}]) of
match ->
ReOpts = [global, {return, list}],
Regex = re:replace(Acc2, "\\*", ".*", ReOpts),
#if_name_matches{regex = "^" ++ Regex ++ "$"};
nomatch ->
erlang:list_to_binary(Acc2)
end
end.
prepend_component(Component, []) when ?IS_KHEPRI_NODE_ID(Component) ->
%% This is a relative path.
[Component, ?THIS_KHEPRI_NODE];
prepend_component(Component, ReversedPath) ->
[Component | ReversedPath].
finalize_path(Rest, []) ->
Rest;
finalize_path(Rest, ReversedPath) ->
case lists:reverse(ReversedPath) ++ Rest of
[?KHEPRI_ROOT_NODE | Path] -> Path;
Path -> Path
end.
-spec to_string(NativePath) -> UnixPath when
NativePath :: native_path(),
UnixPath :: string().
%% @doc Converts a native path to a string.
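%%
%% For example, the root node and a regular native path should render as:
%%
%% ```
%% "/" = khepri_path:to_string([]),
%% "/:stock/:wood/oak" = khepri_path:to_string([stock, wood, <<"oak">>]).
%% '''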
to_string([] = Path) ->
to_string(Path, "/", false);
to_string([?KHEPRI_ROOT_NODE | Path]) ->
to_string(Path, "/", false);
to_string([?THIS_KHEPRI_NODE] = Path) ->
to_string(Path, "", false);
to_string([?THIS_KHEPRI_NODE, <<>> | _] = Path) ->
%% Special case: a relative path starting with an empty binary. We need to
%% keep the leading '.' because we rely on forward slashes to "encode" the
%% empty binary. If we don't keep the '.', the path will become absolute.
to_string(Path, "", false);
to_string([?THIS_KHEPRI_NODE | Path]) ->
to_string(Path, "", false);
to_string([?PARENT_KHEPRI_NODE | _] = Path) ->
to_string(Path, "", false);
to_string(Path) ->
to_string(Path, "", true).
to_string([<<>> = Component], Result, NeedSlash) ->
Component1 = component_to_string(Component),
Result1 = append_string_component(Component1, Result, NeedSlash) ++ [$/],
Result1;
to_string([Component | Rest], Result, NeedSlash) ->
Component1 = component_to_string(Component),
Result1 = append_string_component(Component1, Result, NeedSlash),
to_string(Rest, Result1, true);
to_string([], Result, _NeedSlash) ->
Result.
append_string_component(Component, Result, true) ->
Result ++ [$/ | Component];
append_string_component(Component, Result, false) ->
Result ++ Component.
-spec to_binary(NativePath) -> UnixPath when
NativePath :: native_path(),
UnixPath :: binary().
%% @doc Converts a native path to a binary.
to_binary(Path) ->
String = to_string(Path),
erlang:list_to_binary(String).
-spec component_to_string(component()) -> string().
%% @private
component_to_string(?KHEPRI_ROOT_NODE) ->
"/";
component_to_string(?THIS_KHEPRI_NODE) ->
".";
component_to_string(?PARENT_KHEPRI_NODE) ->
"..";
component_to_string(Component) when is_atom(Component) ->
":" ++ percent_encode_string(erlang:atom_to_list(Component));
component_to_string(Component) when is_binary(Component) ->
percent_encode_string(erlang:binary_to_list(Component)).
-define(IS_HEX(Digit), (is_integer(Digit) andalso
((Digit >= $0 andalso Digit =< $9) orelse
(Digit >= $A andalso Digit =< $F) orelse
(Digit >= $a andalso Digit =< $f)))).
percent_decode_string(String) when is_list(String) ->
percent_decode_string(String, "").
percent_decode_string([$%, Digit1, Digit2 | Rest], PercentDecoded)
when ?IS_HEX(Digit1) andalso ?IS_HEX(Digit2) ->
Char = erlang:list_to_integer([Digit1, Digit2], 16),
PercentDecoded1 = PercentDecoded ++ [Char],
percent_decode_string(Rest, PercentDecoded1);
percent_decode_string([Char | Rest], PercentDecoded) ->
PercentDecoded1 = PercentDecoded ++ [Char],
percent_decode_string(Rest, PercentDecoded1);
percent_decode_string([], PercentDecoded) ->
PercentDecoded.
percent_encode_string(String) when is_list(String) ->
percent_encode_string(String, "").
percent_encode_string([Char | Rest], PercentEncoded)
when is_integer(Char) andalso
((Char >= $A andalso Char =< $Z) orelse
(Char >= $a andalso Char =< $z) orelse
(Char >= $0 andalso Char =< $9) orelse
(Char =:= $. andalso PercentEncoded =/= "") orelse
Char =:= $- orelse Char =:= $_ orelse Char =:= $~) ->
PercentEncoded1 = PercentEncoded ++ [Char],
percent_encode_string(Rest, PercentEncoded1);
percent_encode_string([Char | Rest], PercentEncoded) ->
PEChar = lists:flatten(io_lib:format("%~2.16.0B", [Char])),
PercentEncoded1 = PercentEncoded ++ PEChar,
percent_encode_string(Rest, PercentEncoded1);
percent_encode_string([], PercentEncoded) ->
PercentEncoded.
-spec combine_with_conditions(PathPattern, Conditions) -> PathPattern when
PathPattern :: native_pattern(),
Conditions :: [khepri_condition:condition()].
combine_with_conditions(Path, []) ->
Path;
combine_with_conditions(Path, Conditions) ->
[ChildName | Rest] = lists:reverse(Path),
Combined = #if_all{conditions = [ChildName | Conditions]},
lists:reverse([Combined | Rest]).
-spec targets_specific_node(PathPattern) -> Ret when
PathPattern :: native_pattern(),
Ret :: {true, Path} | false,
Path :: native_path().
targets_specific_node(PathPattern) ->
targets_specific_node(PathPattern, []).
targets_specific_node([Condition | Rest], Path) ->
case component_targets_specific_node(Condition) of
{true, Component} -> targets_specific_node(Rest, [Component | Path]);
false -> false
end;
targets_specific_node([], Path) ->
{true, lists:reverse(Path)}.
-spec component_targets_specific_node(ComponentPattern) -> Ret when
ComponentPattern :: pattern_component(),
Ret :: {true, Component} | false,
Component :: component().
%% @private
component_targets_specific_node(ChildName)
when ?IS_KHEPRI_PATH_COMPONENT(ChildName) ->
{true, ChildName};
component_targets_specific_node(#if_not{condition = Cond}) ->
component_targets_specific_node(Cond);
component_targets_specific_node(#if_all{conditions = []}) ->
false;
component_targets_specific_node(#if_all{conditions = Conds}) ->
lists:foldl(
fun
(Cond, {true, _} = True) ->
case component_targets_specific_node(Cond) of
True -> True;
{true, _} -> false;
false -> True
end;
(Cond, false) ->
case component_targets_specific_node(Cond) of
{true, _} = True -> True;
false -> false
end;
(Cond, undefined) ->
component_targets_specific_node(Cond)
end, undefined, Conds);
component_targets_specific_node(#if_any{conditions = []}) ->
false;
component_targets_specific_node(#if_any{conditions = Conds}) ->
lists:foldl(
fun
(Cond, {true, _} = True) ->
case component_targets_specific_node(Cond) of
True -> True;
{true, _} -> false;
false -> false
end;
(_, false) ->
false;
(Cond, undefined) ->
component_targets_specific_node(Cond)
end, undefined, Conds);
component_targets_specific_node(_) ->
false.
-spec is_valid(PathPattern) -> IsValid when
PathPattern :: native_pattern(),
IsValid :: true | {false, ComponentPattern},
ComponentPattern :: pattern_component().
is_valid(PathPattern) when is_list(PathPattern) ->
lists:foldl(
fun
(_, {false, _} = False) -> False;
(Component, _) -> khepri_condition:is_valid(Component)
end, true, PathPattern);
is_valid(NotPathPattern) ->
{false, NotPathPattern}.
-spec ensure_is_valid(PathPattern) -> ok | no_return() when
PathPattern :: native_pattern().
ensure_is_valid(PathPattern) ->
case is_valid(PathPattern) of
true ->
ok;
{false, Component} ->
?reject_invalid_path(PathPattern, Component)
end.
-spec abspath(Path, BasePath) -> Path when
Path :: native_pattern(),
BasePath :: native_pattern().
abspath([FirstComponent | _] = AbsolutePath, _)
when FirstComponent =/= ?THIS_KHEPRI_NODE andalso
FirstComponent =/= ?PARENT_KHEPRI_NODE ->
AbsolutePath;
abspath([_ | _] = RelativePath, BasePath) ->
realpath(BasePath ++ RelativePath, []);
abspath([] = PathToRoot, _) ->
PathToRoot.
-spec realpath(Path) -> Path when
Path :: native_pattern().
realpath(Path) ->
realpath(Path, []).
realpath([?KHEPRI_ROOT_NODE | Rest], _Result) ->
realpath(Rest, []);
realpath([?THIS_KHEPRI_NODE | Rest], Result) ->
realpath(Rest, Result);
realpath([?PARENT_KHEPRI_NODE | Rest], [_ | Result]) ->
realpath(Rest, Result);
realpath([?PARENT_KHEPRI_NODE | Rest], [] = Result) ->
realpath(Rest, Result);
realpath([Component | Rest], Result) ->
realpath(Rest, [Component | Result]);
realpath([], Result) ->
lists:reverse(Result).
pattern_includes_root_node(Path) ->
[] =:= realpath(Path).
| null | https://raw.githubusercontent.com/rabbitmq/khepri/3527362ad9f59cff36231eb4e7b34dc066aa0f50/src/khepri_path.erl | erlang |
A path describes how to reach a node from the root node.
A path, or <em>native path</em>, is a list of components. Components can be
```
% Native path .
Path = [stock, wood, <<"oak">>].
'''
A path may contain conditions to tune how a node is matched or to match
multiple nodes at once. This is called a <em>path pattern</em>. A path
atoms and binaries). See {@link khepri_condition} to learn more about
conditions. Example:
```
%% Path pattern with a condition on `wood'.
PathPattern = [stock,
#if_all{conditions = [wood,
#if_node_exists{exists = true}]},
oak].
'''
To be user-friendly, string-based and binary-based <em>Unix-like paths</em>
are accepted by most functions. The syntax of these <em>Unix paths</em> is
described in the {@link unix_path()} type documentation. Example:
```
% Unix path , equivalent of the first native path example .
UnixPath = "/:stock/:wood/oak".
'''
A node name.
Component name in a path to a node.
A native path is a list of atoms, binaries and special components.
(unlike {@link unix_path()}) and is the format used internally by the state
machine.
Special components are:
<ol>
<li>`?KHEPRI_ROOT_NODE' to explicitly mark the root node. A path is absolute
root node itself (querying it or storing something in the root node).</li>
<li>`?THIS_KHEPRI_NODE' to make a relative path (the default being an
absolute path). This is mostly useful for {@link
khepri_condition:keep_while()} to make it easy to put a condition on the
<li>`?PARENT_KHEPRI_NODE' to target the parent of a node, with the same
benefits and use cases as `?THIS_KHEPRI_NODE'.</li>
</ol>
Example:
```
% Native path .
Path = [stock, wood, <<"oak">>].
'''
A native pattern is a list of atoms, binaries, special components and
conditions.
(unlike {@link unix_pattern()}) and is the format used internally by the
state machine.
See {@link native_path()} for a description of special components.
Conditions are any condition defined by {@link
khepri_condition:condition()}.
Example:
```
%% Path pattern with a condition on `wood'.
PathPattern = [stock,
#if_all{conditions = [wood,
#if_node_exists{exists = true}]},
oak].
'''
Unix-like path to a node.
These <em>Unix paths</em> have the following syntax:
<ul>
<li>Path components are separated by a forward slash, `/'.</li>
<li>Atom-based node IDs are prefixed with a `:' character: `:wood'.</li>
<li>Binary-based node IDs are written as-is: `oak'.</li>
<li>Atom and binaries can be percent-encoded.</li>
<li>An absolute path must start with `/', otherwise it is considered a
<li>`.' and `..' represent `?THIS_KHEPRI_NODE' and `?PARENT_KHEPRI_NODE'
respectively</li>
<li>Simple glob patterns are accepted:
<ul>
<li>`*' is the same as `?KHEPRI_WILDCARD_STAR' or `#if_name_matches{regex =
<li>`**' is the same as `?KHEPRI_WILDCARD_STAR_STAR' or
`if_path_matches{regex = any}'</li>
</ul></li>
</ul>
a binary-based Unix-like path. If using a string-based Unix-like path, the
behavior is undefined and the call may crash. Matching against node names is
also undefined behavior and may crash, regardless of the type of path being
used. It will be improved in the future.
Example:
```
% Unix path , equivalent of the first native path example .
UnixPath = "/:stock/:wood/oak".
'''
Unix-like path pattern to a node.
It accepts the following special characters:
<ol>
<li>`*' anywhere in a path component behaves like a {@link
<li>`**' as a path component behaves like a {@link
khepri_condition:if_path_matches()}.</li>
</ol>
A Unix-like path pattern can't express all the conditions of a native path
pattern currently.
Otherwise it works as a {@link unix_path()} and has the same syntax and
limitations.
Example:
```
%% Unix path pattern, matching multiple types of oak.
UnixPathPattern = "/:stock/:wood/*oak".
'''
Path to a node.
@doc Converts a Unix-like path to a native path.
For convenience, a native path is also accepted and returned as-is.
with an empty binary.
@doc Converts a Unix-like path to a native path.
This is the same as calling `from_string(String)'. Therefore, it accepts
@see from_string/1.
@doc Elixir sigil to parse Unix-like path using the `~p"/:path/:to/node"'
syntax.
The lowercase `~p' sigil means that the string will go through
@see sigil_P/2.
@doc Elixir sigil to parse Unix-like path using the `~P"/:path/:to/node"'
syntax.
The uppercase `~P' sigil means that the string will NOT go through
@see sigil_p/2.
If the character used to represent the parent node in a regular path
(`^') appears alone in a path component, it's a regular path. Other
special path components may appear alone in both forms though.
component.
This is a relative path.
@doc Converts a native path to a string.
Special case: a relative path starting with an empty binary. We need to
keep the leading '.' because we rely on forward slashes to "encode" the
empty binary. If we don't keep the '.', the path will become absolute.
@doc Converts a native path to a binary. |
%% This Source Code Form is subject to the terms of the Mozilla Public
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at /.
%%
%% Copyright © 2021-2023 VMware, Inc. or its affiliates. All rights reserved.
%%
%% @doc Khepri path API.
%%
%% A path is the type used by Khepri to reference nodes in the tree structure.
%% Erlang atoms and binaries. Example:
%% pattern may contain conditions in addition to regular components (Erlang
-module(khepri_path).
-include_lib("stdlib/include/assert.hrl").
-include("include/khepri.hrl").
-include("src/khepri_error.hrl").
-export([compile/1,
from_string/1,
from_binary/1,
to_string/1,
to_binary/1,
sigil_p/2,
sigil_P/2,
combine_with_conditions/2,
targets_specific_node/1,
component_targets_specific_node/1,
is_valid/1,
ensure_is_valid/1,
abspath/2,
realpath/1,
pattern_includes_root_node/1]).
-ifdef(TEST).
-export([component_to_string/1]).
-endif.
-type node_id() :: atom() | binary().
-type component() :: node_id() |
?KHEPRI_ROOT_NODE |
?THIS_KHEPRI_NODE |
?PARENT_KHEPRI_NODE.
-type native_path() :: [component()].
%% Native path to a node.
%% It is called <em>native</em> because it requires no further processing
%% by default. Using `?' is only useful when manipulating the
%% node itself.</li>
-type native_pattern() :: [pattern_component()].
%% Path pattern which may match zero, one or more nodes.
%% It is called <em>native</em> because it requires no further processing
-type unix_path() :: string() | binary().
%% relative path</li>
%% <li>`abc*def' is the same as `#if_name_matches{regex = "^abc.*def$"}'</li>
%% any}'</li>
%% <strong>Warning</strong>: There is no special handling of Unicode in tree
%% node names. To use Unicode, it is recommended to either use a native path or
-type unix_pattern() :: string() | binary().
%% khepri_condition:if_name_matches()}.</li>
-type path() :: native_path() | unix_path().
-type pattern() :: native_pattern() | unix_pattern().
%% Path pattern which may match zero, one or more nodes.
-type pattern_component() :: component() | khepri_condition:condition().
%% Path pattern component which may match zero, one or more nodes.
-export_type([path/0,
native_path/0,
unix_path/0,
pattern/0,
native_pattern/0,
unix_pattern/0,
component/0,
pattern_component/0,
node_id/0]).
-define(
reject_invalid_path(Path),
?khepri_misuse(invalid_path, #{path => Path})).
-define(
reject_invalid_path(Path, Component),
?khepri_misuse(invalid_path, #{path => Path,
component => Component})).
-spec compile(PathPattern) -> PathPattern when
PathPattern :: native_pattern().
%% @private
compile(PathPattern) ->
lists:map(fun khepri_condition:compile/1, PathPattern).
-spec from_string(String) -> PathPattern when
String :: pattern(),
PathPattern :: native_pattern().
%% The Unix-like string can be either an Erlang string or an Erlang binary.
from_string([$/, $/ | MaybeString]) ->
% The path starts with two forward slashes. Therefore the path starts
% with an empty binary.
from_string([$/ | MaybeString], [<<>>, ?KHEPRI_ROOT_NODE]);
from_string([$/ | MaybeString]) ->
from_string(MaybeString, [?KHEPRI_ROOT_NODE]);
from_string(MaybeString) when is_list(MaybeString) ->
from_string(MaybeString, []);
from_string(Binary) when is_binary(Binary) ->
String = erlang:binary_to_list(Binary),
from_string(String);
from_string(NotPath) ->
?reject_invalid_path(NotPath).
-spec from_binary(String) -> PathPattern when
String :: pattern(),
PathPattern :: native_pattern().
%% Erlang strings or binaries and native paths.
from_binary(MaybeString) ->
from_string(MaybeString).
-spec sigil_p(PathPattern, Options) -> NativePathPattern when
PathPattern :: pattern(),
Options :: [char()],
NativePathPattern :: native_pattern().
%% interpolation first before this function is called.
%% @private
sigil_p(PathPattern, _Options) ->
from_string(PathPattern).
-spec sigil_P(PathPattern, Options) -> NativePathPattern when
PathPattern :: pattern(),
Options :: [char()],
NativePathPattern :: native_pattern().
%% interpolation first before this function is called.
%% @private
sigil_P(PathPattern, _Options) ->
from_string(PathPattern).
from_string([Component | _] = Rest, ReversedPath)
when ?IS_KHEPRI_NODE_ID(Component) orelse
?IS_KHEPRI_CONDITION(Component) ->
finalize_path(Rest, ReversedPath);
from_string([Char, Component | _] = Rest, ReversedPath)
when ?IS_SPECIAL_KHEPRI_PATH_COMPONENT(Char) andalso
(?IS_KHEPRI_NODE_ID(Component) orelse
?IS_KHEPRI_CONDITION(Component)) ->
finalize_path(Rest, ReversedPath);
from_string([?PARENT_KHEPRI_NODE, $/ | _] = Rest, ReversedPath) ->
finalize_path(Rest, ReversedPath);
from_string([Char] = Rest, [] = ReversedPath)
when ?IS_SPECIAL_KHEPRI_PATH_COMPONENT(Char) ->
finalize_path(Rest, ReversedPath);
from_string([$/, $/ | Rest], ReversedPath) ->
% Two consecutive forward slashes mean there is an empty binary component.
ReversedPath1 = prepend_component(<<>>, ReversedPath),
from_string([$/ | Rest], ReversedPath1);
from_string([$/ | Rest], ReversedPath) ->
from_string(Rest, ReversedPath);
from_string([$: | Rest], ReversedPath) ->
parse_atom_from_string(Rest, ReversedPath);
from_string([Char | _] = Rest, ReversedPath) when is_integer(Char) ->
parse_binary_from_string(Rest, ReversedPath);
from_string([], ReversedPath) ->
finalize_path([], ReversedPath);
from_string(Rest, ReversedPath) ->
NotPath = lists:reverse(ReversedPath) ++ Rest,
?reject_invalid_path(NotPath).
parse_atom_from_string(Rest, ReversedPath) ->
parse_atom_from_string(Rest, "", ReversedPath).
parse_atom_from_string([$/ | _] = Rest, Acc, ReversedPath) ->
Component = finalize_atom_component(Acc),
ReversedPath1 = prepend_component(Component, ReversedPath),
from_string(Rest, ReversedPath1);
parse_atom_from_string([Char | Rest], Acc, ReversedPath)
when is_integer(Char) ->
Acc1 = [Char | Acc],
parse_atom_from_string(Rest, Acc1, ReversedPath);
parse_atom_from_string([] = Rest, Acc, ReversedPath) ->
Component = finalize_atom_component(Acc),
ReversedPath1 = prepend_component(Component, ReversedPath),
from_string(Rest, ReversedPath1).
finalize_atom_component(Acc) ->
Acc1 = lists:reverse(Acc),
Acc2 = percent_decode_string(Acc1),
erlang:list_to_atom(Acc2).
parse_binary_from_string(Rest, ReversedPath) ->
parse_binary_from_string(Rest, "", ReversedPath).
parse_binary_from_string([$/ | _] = Rest, Acc, ReversedPath) ->
Component = finalize_binary_componenent(Acc),
ReversedPath1 = prepend_component(Component, ReversedPath),
from_string(Rest, ReversedPath1);
parse_binary_from_string([Char | Rest], Acc, ReversedPath)
when is_integer(Char) ->
Acc1 = [Char | Acc],
parse_binary_from_string(Rest, Acc1, ReversedPath);
parse_binary_from_string([] = Rest, Acc, ReversedPath) ->
Component = finalize_binary_componenent(Acc),
ReversedPath1 = prepend_component(Component, ReversedPath),
from_string(Rest, ReversedPath1).
finalize_binary_componenent(Acc) ->
Acc1 = lists:reverse(Acc),
case Acc1 of
"." ->
?THIS_KHEPRI_NODE;
".." ->
?PARENT_KHEPRI_NODE;
"*" ->
?KHEPRI_WILDCARD_STAR;
"**" ->
?KHEPRI_WILDCARD_STAR_STAR;
_ ->
Acc2 = percent_decode_string(Acc1),
case re:run(Acc2, "\\*", [{capture, none}]) of
match ->
ReOpts = [global, {return, list}],
Regex = re:replace(Acc2, "\\*", ".*", ReOpts),
#if_name_matches{regex = "^" ++ Regex ++ "$"};
nomatch ->
erlang:list_to_binary(Acc2)
end
end.
prepend_component(Component, []) when ?IS_KHEPRI_NODE_ID(Component) ->
[Component, ?THIS_KHEPRI_NODE];
prepend_component(Component, ReversedPath) ->
[Component | ReversedPath].
finalize_path(Rest, []) ->
Rest;
finalize_path(Rest, ReversedPath) ->
case lists:reverse(ReversedPath) ++ Rest of
[?KHEPRI_ROOT_NODE | Path] -> Path;
Path -> Path
end.
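%% Illustrative conversion (the expected result is inferred from the clauses
%% above and mirrors the equivalence shown in the module documentation):
%% ```
%% from_string("/:stock/:wood/oak").
%% %% => [stock, wood, <<"oak">>]
%% '''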
-spec to_string(NativePath) -> UnixPath when
NativePath :: native_path(),
UnixPath :: string().
to_string([] = Path) ->
to_string(Path, "/", false);
to_string([?KHEPRI_ROOT_NODE | Path]) ->
to_string(Path, "/", false);
to_string([?THIS_KHEPRI_NODE] = Path) ->
to_string(Path, "", false);
to_string([?THIS_KHEPRI_NODE, <<>> | _] = Path) ->
to_string(Path, "", false);
to_string([?THIS_KHEPRI_NODE | Path]) ->
to_string(Path, "", false);
to_string([?PARENT_KHEPRI_NODE | _] = Path) ->
to_string(Path, "", false);
to_string(Path) ->
to_string(Path, "", true).
to_string([<<>> = Component], Result, NeedSlash) ->
Component1 = component_to_string(Component),
Result1 = append_string_component(Component1, Result, NeedSlash) ++ [$/],
Result1;
to_string([Component | Rest], Result, NeedSlash) ->
Component1 = component_to_string(Component),
Result1 = append_string_component(Component1, Result, NeedSlash),
to_string(Rest, Result1, true);
to_string([], Result, _NeedSlash) ->
Result.
append_string_component(Component, Result, true) ->
Result ++ [$/ | Component];
append_string_component(Component, Result, false) ->
Result ++ Component.
-spec to_binary(NativePath) -> UnixPath when
NativePath :: native_path(),
UnixPath :: binary().
to_binary(Path) ->
String = to_string(Path),
erlang:list_to_binary(String).
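%% Illustrative example (result inferred from to_string/3 and
%% component_to_string/1 in this module):
%% ```
%% to_string([stock, wood, <<"oak">>]).  %% => "/:stock/:wood/oak"
%% to_binary([stock, wood, <<"oak">>]).  %% => <<"/:stock/:wood/oak">>
%% '''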
-spec component_to_string(component()) -> string().
%% @private
component_to_string(?KHEPRI_ROOT_NODE) ->
"/";
component_to_string(?THIS_KHEPRI_NODE) ->
".";
component_to_string(?PARENT_KHEPRI_NODE) ->
"..";
component_to_string(Component) when is_atom(Component) ->
":" ++ percent_encode_string(erlang:atom_to_list(Component));
component_to_string(Component) when is_binary(Component) ->
percent_encode_string(erlang:binary_to_list(Component)).
-define(IS_HEX(Digit), (is_integer(Digit) andalso
((Digit >= $0 andalso Digit =< $9) orelse
(Digit >= $A andalso Digit =< $F) orelse
(Digit >= $a andalso Digit =< $f)))).
percent_decode_string(String) when is_list(String) ->
percent_decode_string(String, "").
percent_decode_string([$%, Digit1, Digit2 | Rest], PercentDecoded)
when ?IS_HEX(Digit1) andalso ?IS_HEX(Digit2) ->
Char = erlang:list_to_integer([Digit1, Digit2], 16),
PercentDecoded1 = PercentDecoded ++ [Char],
percent_decode_string(Rest, PercentDecoded1);
percent_decode_string([Char | Rest], PercentDecoded) ->
PercentDecoded1 = PercentDecoded ++ [Char],
percent_decode_string(Rest, PercentDecoded1);
percent_decode_string([], PercentDecoded) ->
PercentDecoded.
percent_encode_string(String) when is_list(String) ->
percent_encode_string(String, "").
percent_encode_string([Char | Rest], PercentEncoded)
when is_integer(Char) andalso
((Char >= $A andalso Char =< $Z) orelse
(Char >= $a andalso Char =< $z) orelse
(Char >= $0 andalso Char =< $9) orelse
(Char =:= $. andalso PercentEncoded =/= "") orelse
Char =:= $- orelse Char =:= $_ orelse Char =:= $~) ->
PercentEncoded1 = PercentEncoded ++ [Char],
percent_encode_string(Rest, PercentEncoded1);
percent_encode_string([Char | Rest], PercentEncoded) ->
PEChar = lists:flatten(io_lib:format("%~2.16.0B", [Char])),
PercentEncoded1 = PercentEncoded ++ PEChar,
percent_encode_string(Rest, PercentEncoded1);
percent_encode_string([], PercentEncoded) ->
PercentEncoded.
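%% Illustrative round trip ("red oak" is a made-up value; the escaping of the
%% space character follows from the encoding rules above):
%% ```
%% percent_encode_string("red oak").   %% => "red%20oak"
%% percent_decode_string("red%20oak"). %% => "red oak"
%% '''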
-spec combine_with_conditions(PathPattern, Conditions) -> PathPattern when
PathPattern :: native_pattern(),
Conditions :: [khepri_condition:condition()].
combine_with_conditions(Path, []) ->
Path;
combine_with_conditions(Path, Conditions) ->
[ChildName | Rest] = lists:reverse(Path),
Combined = #if_all{conditions = [ChildName | Conditions]},
lists:reverse([Combined | Rest]).
-spec targets_specific_node(PathPattern) -> Ret when
PathPattern :: native_pattern(),
Ret :: {true, Path} | false,
Path :: native_path().
targets_specific_node(PathPattern) ->
targets_specific_node(PathPattern, []).
targets_specific_node([Condition | Rest], Path) ->
case component_targets_specific_node(Condition) of
{true, Component} -> targets_specific_node(Rest, [Component | Path]);
false -> false
end;
targets_specific_node([], Path) ->
{true, lists:reverse(Path)}.
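%% Sketch of the behaviour, reusing the path pattern from the module
%% documentation (result inferred from the clauses above):
%% ```
%% targets_specific_node([stock,
%%                        #if_all{conditions = [wood,
%%                                              #if_node_exists{exists = true}]},
%%                        oak]).
%% %% => {true, [stock, wood, oak]}
%% '''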
-spec component_targets_specific_node(ComponentPattern) -> Ret when
ComponentPattern :: pattern_component(),
Ret :: {true, Component} | false,
Component :: component().
%% @private
component_targets_specific_node(ChildName)
when ?IS_KHEPRI_PATH_COMPONENT(ChildName) ->
{true, ChildName};
component_targets_specific_node(#if_not{condition = Cond}) ->
component_targets_specific_node(Cond);
component_targets_specific_node(#if_all{conditions = []}) ->
false;
component_targets_specific_node(#if_all{conditions = Conds}) ->
lists:foldl(
fun
(Cond, {true, _} = True) ->
case component_targets_specific_node(Cond) of
True -> True;
{true, _} -> false;
false -> True
end;
(Cond, false) ->
case component_targets_specific_node(Cond) of
{true, _} = True -> True;
false -> false
end;
(Cond, undefined) ->
component_targets_specific_node(Cond)
end, undefined, Conds);
component_targets_specific_node(#if_any{conditions = []}) ->
false;
component_targets_specific_node(#if_any{conditions = Conds}) ->
lists:foldl(
fun
(Cond, {true, _} = True) ->
case component_targets_specific_node(Cond) of
True -> True;
{true, _} -> false;
false -> false
end;
(_, false) ->
false;
(Cond, undefined) ->
component_targets_specific_node(Cond)
end, undefined, Conds);
component_targets_specific_node(_) ->
false.
-spec is_valid(PathPattern) -> IsValid when
PathPattern :: native_pattern(),
IsValid :: true | {false, ComponentPattern},
ComponentPattern :: pattern_component().
is_valid(PathPattern) when is_list(PathPattern) ->
lists:foldl(
fun
(_, {false, _} = False) -> False;
(Component, _) -> khepri_condition:is_valid(Component)
end, true, PathPattern);
is_valid(NotPathPattern) ->
{false, NotPathPattern}.
-spec ensure_is_valid(PathPattern) -> ok | no_return() when
PathPattern :: native_pattern().
ensure_is_valid(PathPattern) ->
case is_valid(PathPattern) of
true ->
ok;
{false, Component} ->
?reject_invalid_path(PathPattern, Component)
end.
-spec abspath(Path, BasePath) -> Path when
Path :: native_pattern(),
BasePath :: native_pattern().
abspath([FirstComponent | _] = AbsolutePath, _)
when FirstComponent =/= ?THIS_KHEPRI_NODE andalso
FirstComponent =/= ?PARENT_KHEPRI_NODE ->
AbsolutePath;
abspath([_ | _] = RelativePath, BasePath) ->
realpath(BasePath ++ RelativePath, []);
abspath([] = PathToRoot, _) ->
PathToRoot.
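%% Illustrative example (`pine' is a made-up node name; the result follows
%% from appending the relative path to the base path and resolving it with
%% realpath/2):
%% ```
%% abspath([?PARENT_KHEPRI_NODE, pine], [stock, wood, oak]).
%% %% => [stock, wood, pine]
%% '''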
-spec realpath(Path) -> Path when
Path :: native_pattern().
realpath(Path) ->
realpath(Path, []).
realpath([?KHEPRI_ROOT_NODE | Rest], _Result) ->
realpath(Rest, []);
realpath([?THIS_KHEPRI_NODE | Rest], Result) ->
realpath(Rest, Result);
realpath([?PARENT_KHEPRI_NODE | Rest], [_ | Result]) ->
realpath(Rest, Result);
realpath([?PARENT_KHEPRI_NODE | Rest], [] = Result) ->
realpath(Rest, Result);
realpath([Component | Rest], Result) ->
realpath(Rest, [Component | Result]);
realpath([], Result) ->
lists:reverse(Result).
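%% Illustrative example: `?THIS_KHEPRI_NODE' components are dropped and
%% `?PARENT_KHEPRI_NODE' removes the preceding component (result inferred
%% from the clauses above):
%% ```
%% realpath([stock, ?THIS_KHEPRI_NODE, wood, ?PARENT_KHEPRI_NODE, oak]).
%% %% => [stock, oak]
%% '''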
pattern_includes_root_node(Path) ->
[] =:= realpath(Path).
|
76a6986e6fe764638282e85c4828cd581a0bab2bd84933d8cddd492514b45d95 | TOTBWF/teenytt | Univ.hs | -- | Refiner rules for the universe.
module TeenyTT.Elaborator.Refiner.Univ
( formation
, el
) where
import TeenyTT.Core.Domain qualified as D
import TeenyTT.Core.Syntax qualified as S
import TeenyTT.Elaborator.Tactic qualified as T
formation :: T.Tp
formation = T.Tp $ pure S.Univ
el :: T.Chk -> T.Tp
el tac = T.Tp do
tm <- T.runChk tac D.VUniv
pure $ S.El tm
| null | https://raw.githubusercontent.com/TOTBWF/teenytt/0b5f149508a0a4efe9818b3fb8d76e00dc898f56/src/TeenyTT/Elaborator/Refiner/Univ.hs | haskell | | Refiner rules for the universe. | module TeenyTT.Elaborator.Refiner.Univ
( formation
, el
) where
import TeenyTT.Core.Domain qualified as D
import TeenyTT.Core.Syntax qualified as S
import TeenyTT.Elaborator.Tactic qualified as T
formation :: T.Tp
formation = T.Tp $ pure S.Univ
el :: T.Chk -> T.Tp
el tac = T.Tp do
tm <- T.runChk tac D.VUniv
pure $ S.El tm
|
1b3376d19da51e77829bebcdf07b81906da5813c13b8f12de1a737bd2660c5a5 | zadean/xqerl | op_dateTime_greater_than_SUITE.erl | -module('op_dateTime_greater_than_SUITE').
-include_lib("common_test/include/ct.hrl").
-export([
all/0,
groups/0,
suite/0
]).
-export([
init_per_suite/1,
init_per_group/2,
end_per_group/2,
end_per_suite/1
]).
-export(['op-dateTime-greater-than2args-1'/1]).
-export(['op-dateTime-greater-than2args-2'/1]).
-export(['op-dateTime-greater-than2args-3'/1]).
-export(['op-dateTime-greater-than2args-4'/1]).
-export(['op-dateTime-greater-than2args-5'/1]).
-export(['op-dateTime-greater-than2args-6'/1]).
-export(['op-dateTime-greater-than2args-7'/1]).
-export(['op-dateTime-greater-than2args-8'/1]).
-export(['op-dateTime-greater-than2args-9'/1]).
-export(['op-dateTime-greater-than2args-10'/1]).
-export(['op-dateTime-greater-than-3'/1]).
-export(['op-dateTime-greater-than-4'/1]).
-export(['op-dateTime-greater-than-5'/1]).
-export(['op-dateTime-greater-than-6'/1]).
-export(['op-dateTime-greater-than-7'/1]).
-export(['op-dateTime-greater-than-8'/1]).
-export(['op-dateTime-greater-than-9'/1]).
-export(['op-dateTime-greater-than-10'/1]).
-export(['op-dateTime-greater-than-11'/1]).
-export(['op-dateTime-greater-than-12'/1]).
-export(['op-dateTime-greater-than-13'/1]).
-export(['op-dateTime-greater-than-14'/1]).
-export(['K-DateTimeGT-1'/1]).
-export(['K-DateTimeGT-2'/1]).
-export(['K-DateTimeGT-3'/1]).
-export(['K-DateTimeGT-4'/1]).
-export(['K-DateTimeGT-5'/1]).
-export(['K-DateTimeGT-6'/1]).
-export(['cbcl-dateTime-greater-than-001'/1]).
-export(['cbcl-dateTime-greater-than-002'/1]).
-export(['cbcl-dateTime-greater-than-003'/1]).
-export(['cbcl-dateTime-greater-than-004'/1]).
-export(['cbcl-dateTime-greater-than-005'/1]).
-export(['cbcl-dateTime-greater-than-006'/1]).
-export(['cbcl-dateTime-greater-than-007'/1]).
-export(['cbcl-dateTime-greater-than-008'/1]).
-export(['cbcl-dateTime-greater-than-009'/1]).
-export(['cbcl-dateTime-greater-than-010'/1]).
-export(['cbcl-dateTime-greater-than-011'/1]).
-export(['cbcl-dateTime-greater-than-012'/1]).
-export(['cbcl-dateTime-greater-than-013'/1]).
-export(['cbcl-dateTime-greater-than-014'/1]).
-export(['cbcl-dateTime-greater-than-015'/1]).
-export(['cbcl-dateTime-greater-than-016'/1]).
suite() -> [{timetrap, {seconds, 180}}].
init_per_group(_, Config) -> Config.
end_per_group(_, _Config) ->
xqerl_code_server:unload(all).
end_per_suite(_Config) ->
ct:timetrap({seconds, 60}),
xqerl_code_server:unload(all).
init_per_suite(Config) ->
{ok, _} = application:ensure_all_started(xqerl),
DD = filename:dirname(filename:dirname(filename:dirname(?config(data_dir, Config)))),
TD = filename:join(DD, "QT3-test-suite"),
__BaseDir = filename:join(TD, "op"),
[{base_dir, __BaseDir} | Config].
all() ->
[
{group, group_0},
{group, group_1}
].
groups() ->
[
{group_0, [parallel], [
'op-dateTime-greater-than2args-1',
'op-dateTime-greater-than2args-2',
'op-dateTime-greater-than2args-3',
'op-dateTime-greater-than2args-4',
'op-dateTime-greater-than2args-5',
'op-dateTime-greater-than2args-6',
'op-dateTime-greater-than2args-7',
'op-dateTime-greater-than2args-8',
'op-dateTime-greater-than2args-9',
'op-dateTime-greater-than2args-10',
'op-dateTime-greater-than-3',
'op-dateTime-greater-than-4',
'op-dateTime-greater-than-5',
'op-dateTime-greater-than-6',
'op-dateTime-greater-than-7',
'op-dateTime-greater-than-8',
'op-dateTime-greater-than-9',
'op-dateTime-greater-than-10',
'op-dateTime-greater-than-11',
'op-dateTime-greater-than-12',
'op-dateTime-greater-than-13',
'op-dateTime-greater-than-14',
'K-DateTimeGT-1'
]},
{group_1, [parallel], [
'K-DateTimeGT-2',
'K-DateTimeGT-3',
'K-DateTimeGT-4',
'K-DateTimeGT-5',
'K-DateTimeGT-6',
'cbcl-dateTime-greater-than-001',
'cbcl-dateTime-greater-than-002',
'cbcl-dateTime-greater-than-003',
'cbcl-dateTime-greater-than-004',
'cbcl-dateTime-greater-than-005',
'cbcl-dateTime-greater-than-006',
'cbcl-dateTime-greater-than-007',
'cbcl-dateTime-greater-than-008',
'cbcl-dateTime-greater-than-009',
'cbcl-dateTime-greater-than-010',
'cbcl-dateTime-greater-than-011',
'cbcl-dateTime-greater-than-012',
'cbcl-dateTime-greater-than-013',
'cbcl-dateTime-greater-than-014',
'cbcl-dateTime-greater-than-015',
'cbcl-dateTime-greater-than-016'
]}
].
'op-dateTime-greater-than2args-1'(Config) ->
__BaseDir = ?config(base_dir, Config),
Qry = "xs:dateTime(\"1970-01-01T00:00:00Z\") gt xs:dateTime(\"1970-01-01T00:00:00Z\")",
Qry1 = Qry,
io:format("Qry1: ~p~n", [Qry1]),
Res =
try
Mod = xqerl_code_server:compile(
filename:join(__BaseDir, "op-dateTime-greater-than2args-1.xq"),
Qry1
),
xqerl:run(Mod)
of
D -> D
catch
_:E -> E
end,
Out =
case xqerl_test:assert_false(Res) of
true -> {comment, "Empty"};
{false, F} -> F
end,
case Out of
{comment, C} -> {comment, C};
Err -> ct:fail(Err)
end.
'op-dateTime-greater-than2args-2'(Config) ->
__BaseDir = ?config(base_dir, Config),
Qry = "xs:dateTime(\"1996-04-07T01:40:52Z\") gt xs:dateTime(\"1970-01-01T00:00:00Z\")",
Qry1 = Qry,
io:format("Qry1: ~p~n", [Qry1]),
Res =
try
Mod = xqerl_code_server:compile(
filename:join(__BaseDir, "op-dateTime-greater-than2args-2.xq"),
Qry1
),
xqerl:run(Mod)
of
D -> D
catch
_:E -> E
end,
Out =
case xqerl_test:assert_true(Res) of
true -> {comment, "Empty"};
{false, F} -> F
end,
case Out of
{comment, C} -> {comment, C};
Err -> ct:fail(Err)
end.
'op-dateTime-greater-than2args-3'(Config) ->
__BaseDir = ?config(base_dir, Config),
Qry = "xs:dateTime(\"2030-12-31T23:59:59Z\") gt xs:dateTime(\"1970-01-01T00:00:00Z\")",
Qry1 = Qry,
io:format("Qry1: ~p~n", [Qry1]),
Res =
try
Mod = xqerl_code_server:compile(
filename:join(__BaseDir, "op-dateTime-greater-than2args-3.xq"),
Qry1
),
xqerl:run(Mod)
of
D -> D
catch
_:E -> E
end,
Out =
case xqerl_test:assert_true(Res) of
true -> {comment, "Empty"};
{false, F} -> F
end,
case Out of
{comment, C} -> {comment, C};
Err -> ct:fail(Err)
end.
'op-dateTime-greater-than2args-4'(Config) ->
__BaseDir = ?config(base_dir, Config),
Qry = "xs:dateTime(\"1970-01-01T00:00:00Z\") gt xs:dateTime(\"1996-04-07T01:40:52Z\")",
Qry1 = Qry,
io:format("Qry1: ~p~n", [Qry1]),
Res =
try
Mod = xqerl_code_server:compile(
filename:join(__BaseDir, "op-dateTime-greater-than2args-4.xq"),
Qry1
),
xqerl:run(Mod)
of
D -> D
catch
_:E -> E
end,
Out =
case xqerl_test:assert_false(Res) of
true -> {comment, "Empty"};
{false, F} -> F
end,
case Out of
{comment, C} -> {comment, C};
Err -> ct:fail(Err)
end.
'op-dateTime-greater-than2args-5'(Config) ->
__BaseDir = ?config(base_dir, Config),
Qry = "xs:dateTime(\"1970-01-01T00:00:00Z\") gt xs:dateTime(\"2030-12-31T23:59:59Z\")",
Qry1 = Qry,
io:format("Qry1: ~p~n", [Qry1]),
Res =
try
Mod = xqerl_code_server:compile(
filename:join(__BaseDir, "op-dateTime-greater-than2args-5.xq"),
Qry1
),
xqerl:run(Mod)
of
D -> D
catch
_:E -> E
end,
Out =
case xqerl_test:assert_false(Res) of
true -> {comment, "Empty"};
{false, F} -> F
end,
case Out of
{comment, C} -> {comment, C};
Err -> ct:fail(Err)
end.
'op-dateTime-greater-than2args-6'(Config) ->
__BaseDir = ?config(base_dir, Config),
Qry = "xs:dateTime(\"1970-01-01T00:00:00Z\") le xs:dateTime(\"1970-01-01T00:00:00Z\")",
Qry1 = Qry,
io:format("Qry1: ~p~n", [Qry1]),
Res =
try
Mod = xqerl_code_server:compile(
filename:join(__BaseDir, "op-dateTime-greater-than2args-6.xq"),
Qry1
),
xqerl:run(Mod)
of
D -> D
catch
_:E -> E
end,
Out =
case xqerl_test:assert_true(Res) of
true -> {comment, "Empty"};
{false, F} -> F
end,
case Out of
{comment, C} -> {comment, C};
Err -> ct:fail(Err)
end.
'op-dateTime-greater-than2args-7'(Config) ->
__BaseDir = ?config(base_dir, Config),
Qry = "xs:dateTime(\"1996-04-07T01:40:52Z\") le xs:dateTime(\"1970-01-01T00:00:00Z\")",
Qry1 = Qry,
io:format("Qry1: ~p~n", [Qry1]),
Res =
try
Mod = xqerl_code_server:compile(
filename:join(__BaseDir, "op-dateTime-greater-than2args-7.xq"),
Qry1
),
xqerl:run(Mod)
of
D -> D
catch
_:E -> E
end,
Out =
case xqerl_test:assert_false(Res) of
true -> {comment, "Empty"};
{false, F} -> F
end,
case Out of
{comment, C} -> {comment, C};
Err -> ct:fail(Err)
end.
'op-dateTime-greater-than2args-8'(Config) ->
__BaseDir = ?config(base_dir, Config),
Qry = "xs:dateTime(\"2030-12-31T23:59:59Z\") le xs:dateTime(\"1970-01-01T00:00:00Z\")",
Qry1 = Qry,
io:format("Qry1: ~p~n", [Qry1]),
Res =
try
Mod = xqerl_code_server:compile(
filename:join(__BaseDir, "op-dateTime-greater-than2args-8.xq"),
Qry1
),
xqerl:run(Mod)
of
D -> D
catch
_:E -> E
end,
Out =
case xqerl_test:assert_false(Res) of
true -> {comment, "Empty"};
{false, F} -> F
end,
case Out of
{comment, C} -> {comment, C};
Err -> ct:fail(Err)
end.
'op-dateTime-greater-than2args-9'(Config) ->
__BaseDir = ?config(base_dir, Config),
Qry = "xs:dateTime(\"1970-01-01T00:00:00Z\") le xs:dateTime(\"1996-04-07T01:40:52Z\")",
Qry1 = Qry,
io:format("Qry1: ~p~n", [Qry1]),
Res =
try
Mod = xqerl_code_server:compile(
filename:join(__BaseDir, "op-dateTime-greater-than2args-9.xq"),
Qry1
),
xqerl:run(Mod)
of
D -> D
catch
_:E -> E
end,
Out =
case xqerl_test:assert_true(Res) of
true -> {comment, "Empty"};
{false, F} -> F
end,
case Out of
{comment, C} -> {comment, C};
Err -> ct:fail(Err)
end.
'op-dateTime-greater-than2args-10'(Config) ->
__BaseDir = ?config(base_dir, Config),
Qry = "xs:dateTime(\"1970-01-01T00:00:00Z\") le xs:dateTime(\"2030-12-31T23:59:59Z\")",
Qry1 = Qry,
io:format("Qry1: ~p~n", [Qry1]),
Res =
try
Mod = xqerl_code_server:compile(
filename:join(__BaseDir, "op-dateTime-greater-than2args-10.xq"),
Qry1
),
xqerl:run(Mod)
of
D -> D
catch
_:E -> E
end,
Out =
case xqerl_test:assert_true(Res) of
true -> {comment, "Empty"};
{false, F} -> F
end,
case Out of
{comment, C} -> {comment, C};
Err -> ct:fail(Err)
end.
'op-dateTime-greater-than-3'(Config) ->
__BaseDir = ?config(base_dir, Config),
Qry =
"fn:not((xs:dateTime(\"2004-04-02T12:00:00Z\") gt xs:dateTime(\"2003-04-02T12:00:00Z\")))",
Qry1 = Qry,
io:format("Qry1: ~p~n", [Qry1]),
Res =
try
Mod = xqerl_code_server:compile(
filename:join(__BaseDir, "op-dateTime-greater-than-3.xq"),
Qry1
),
xqerl:run(Mod)
of
D -> D
catch
_:E -> E
end,
Out =
case xqerl_test:assert_false(Res) of
true -> {comment, "Empty"};
{false, F} -> F
end,
case Out of
{comment, C} -> {comment, C};
Err -> ct:fail(Err)
end.
'op-dateTime-greater-than-4'(Config) ->
__BaseDir = ?config(base_dir, Config),
Qry = "fn:not(xs:dateTime(\"2002-04-02T12:00:00Z\") ge xs:dateTime(\"2002-04-02T12:00:00Z\"))",
Qry1 = Qry,
io:format("Qry1: ~p~n", [Qry1]),
Res =
try
Mod = xqerl_code_server:compile(
filename:join(__BaseDir, "op-dateTime-greater-than-4.xq"),
Qry1
),
xqerl:run(Mod)
of
D -> D
catch
_:E -> E
end,
Out =
case xqerl_test:assert_false(Res) of
true -> {comment, "Empty"};
{false, F} -> F
end,
case Out of
{comment, C} -> {comment, C};
Err -> ct:fail(Err)
end.
'op-dateTime-greater-than-5'(Config) ->
__BaseDir = ?config(base_dir, Config),
Qry = "fn:not(xs:dateTime(\"2002-04-02T12:00:00Z\") gt xs:dateTime(\"2002-05-02T12:00:00Z\"))",
Qry1 = Qry,
io:format("Qry1: ~p~n", [Qry1]),
Res =
try
Mod = xqerl_code_server:compile(
filename:join(__BaseDir, "op-dateTime-greater-than-5.xq"),
Qry1
),
xqerl:run(Mod)
of
D -> D
catch
_:E -> E
end,
Out =
case xqerl_test:assert_true(Res) of
true -> {comment, "Empty"};
{false, F} -> F
end,
case Out of
{comment, C} -> {comment, C};
Err -> ct:fail(Err)
end.
'op-dateTime-greater-than-6'(Config) ->
__BaseDir = ?config(base_dir, Config),
Qry = "fn:not(xs:dateTime(\"2002-04-02T12:00:00Z\") ge xs:dateTime(\"2008-04-02T12:00:00Z\"))",
Qry1 = Qry,
io:format("Qry1: ~p~n", [Qry1]),
Res =
try
Mod = xqerl_code_server:compile(
filename:join(__BaseDir, "op-dateTime-greater-than-6.xq"),
Qry1
),
xqerl:run(Mod)
of
D -> D
catch
_:E -> E
end,
Out =
case xqerl_test:assert_true(Res) of
true -> {comment, "Empty"};
{false, F} -> F
end,
case Out of
{comment, C} -> {comment, C};
Err -> ct:fail(Err)
end.
'op-dateTime-greater-than-7'(Config) ->
__BaseDir = ?config(base_dir, Config),
Qry =
"(xs:dateTime(\"2002-04-02T12:00:00Z\") gt xs:dateTime(\"2002-04-02T12:01:00Z\")) and (xs:dateTime(\"2003-04-02T12:00:00Z\") gt xs:dateTime(\"2002-04-02T12:00:00Z\"))",
Qry1 = Qry,
io:format("Qry1: ~p~n", [Qry1]),
Res =
try
Mod = xqerl_code_server:compile(
filename:join(__BaseDir, "op-dateTime-greater-than-7.xq"),
Qry1
),
xqerl:run(Mod)
of
D -> D
catch
_:E -> E
end,
Out =
case xqerl_test:assert_false(Res) of
true -> {comment, "Empty"};
{false, F} -> F
end,
case Out of
{comment, C} -> {comment, C};
Err -> ct:fail(Err)
end.
'op-dateTime-greater-than-8'(Config) ->
__BaseDir = ?config(base_dir, Config),
Qry =
"(xs:dateTime(\"2002-04-02T12:00:00Z\") ge xs:dateTime(\"2005-04-02T12:00:20Z\")) and (xs:dateTime(\"2002-04-02T12:10:00Z\") ge xs:dateTime(\"2002-04-03T12:00:00Z\"))",
Qry1 = Qry,
io:format("Qry1: ~p~n", [Qry1]),
Res =
try
Mod = xqerl_code_server:compile(
filename:join(__BaseDir, "op-dateTime-greater-than-8.xq"),
Qry1
),
xqerl:run(Mod)
of
D -> D
catch
_:E -> E
end,
Out =
case xqerl_test:assert_false(Res) of
true -> {comment, "Empty"};
{false, F} -> F
end,
case Out of
{comment, C} -> {comment, C};
Err -> ct:fail(Err)
end.
'op-dateTime-greater-than-9'(Config) ->
__BaseDir = ?config(base_dir, Config),
Qry =
"(xs:dateTime(\"2002-06-02T12:00:10Z\") gt xs:dateTime(\"2000-04-04T12:00:00Z\")) or (xs:dateTime(\"2002-04-02T13:00:10Z\") gt xs:dateTime(\"2001-04-02T10:00:00Z\"))",
Qry1 = Qry,
io:format("Qry1: ~p~n", [Qry1]),
Res =
try
Mod = xqerl_code_server:compile(
filename:join(__BaseDir, "op-dateTime-greater-than-9.xq"),
Qry1
),
xqerl:run(Mod)
of
D -> D
catch
_:E -> E
end,
Out =
case xqerl_test:assert_true(Res) of
true -> {comment, "Empty"};
{false, F} -> F
end,
case Out of
{comment, C} -> {comment, C};
Err -> ct:fail(Err)
end.
'op-dateTime-greater-than-10'(Config) ->
__BaseDir = ?config(base_dir, Config),
Qry =
"(xs:dateTime(\"2002-04-03T12:00:10Z\") ge xs:dateTime(\"1990-04-02T12:10:00Z\")) or (xs:dateTime(\"1975-04-03T12:10:00Z\") ge xs:dateTime(\"2000-02-02T12:00:09Z\"))",
Qry1 = Qry,
io:format("Qry1: ~p~n", [Qry1]),
Res =
try
Mod = xqerl_code_server:compile(
filename:join(__BaseDir, "op-dateTime-greater-than-10.xq"),
Qry1
),
xqerl:run(Mod)
of
D -> D
catch
_:E -> E
end,
Out =
case xqerl_test:assert_true(Res) of
true -> {comment, "Empty"};
{false, F} -> F
end,
case Out of
{comment, C} -> {comment, C};
Err -> ct:fail(Err)
end.
'op-dateTime-greater-than-11'(Config) ->
__BaseDir = ?config(base_dir, Config),
Qry =
"(xs:dateTime(\"1990-04-02T12:00:10Z\") gt xs:dateTime(\"2006-06-02T12:10:00Z\")) or (fn:true())",
Qry1 = Qry,
io:format("Qry1: ~p~n", [Qry1]),
Res =
try
Mod = xqerl_code_server:compile(
filename:join(__BaseDir, "op-dateTime-greater-than-11.xq"),
Qry1
),
xqerl:run(Mod)
of
D -> D
catch
_:E -> E
end,
Out =
case xqerl_test:assert_true(Res) of
true -> {comment, "Empty"};
{false, F} -> F
end,
case Out of
{comment, C} -> {comment, C};
Err -> ct:fail(Err)
end.
'op-dateTime-greater-than-12'(Config) ->
__BaseDir = ?config(base_dir, Config),
Qry =
"(xs:dateTime(\"1970-04-02T12:00:20Z\") ge xs:dateTime(\"1980-04-02T12:00:20Z\")) or (fn:true())",
Qry1 = Qry,
io:format("Qry1: ~p~n", [Qry1]),
Res =
try
Mod = xqerl_code_server:compile(
filename:join(__BaseDir, "op-dateTime-greater-than-12.xq"),
Qry1
),
xqerl:run(Mod)
of
D -> D
catch
_:E -> E
end,
Out =
case xqerl_test:assert_true(Res) of
true -> {comment, "Empty"};
{false, F} -> F
end,
case Out of
{comment, C} -> {comment, C};
Err -> ct:fail(Err)
end.
'op-dateTime-greater-than-13'(Config) ->
__BaseDir = ?config(base_dir, Config),
Qry =
"(xs:dateTime(\"1981-04-02T12:00:00Z\") gt xs:dateTime(\"2003-04-02T12:10:00Z\")) or (fn:false())",
Qry1 = Qry,
io:format("Qry1: ~p~n", [Qry1]),
Res =
try
Mod = xqerl_code_server:compile(
filename:join(__BaseDir, "op-dateTime-greater-than-13.xq"),
Qry1
),
xqerl:run(Mod)
of
D -> D
catch
_:E -> E
end,
Out =
case xqerl_test:assert_false(Res) of
true -> {comment, "Empty"};
{false, F} -> F
end,
case Out of
{comment, C} -> {comment, C};
Err -> ct:fail(Err)
end.
'op-dateTime-greater-than-14'(Config) ->
__BaseDir = ?config(base_dir, Config),
Qry =
"(xs:dateTime(\"1976-04-03T12:00:00Z\") ge xs:dateTime(\"2002-07-02T12:00:30Z\")) or (fn:false())",
Qry1 = Qry,
io:format("Qry1: ~p~n", [Qry1]),
Res =
try
Mod = xqerl_code_server:compile(
filename:join(__BaseDir, "op-dateTime-greater-than-14.xq"),
Qry1
),
xqerl:run(Mod)
of
D -> D
catch
_:E -> E
end,
Out =
case xqerl_test:assert_false(Res) of
true -> {comment, "Empty"};
{false, F} -> F
end,
case Out of
{comment, C} -> {comment, C};
Err -> ct:fail(Err)
end.
'K-DateTimeGT-1'(Config) ->
__BaseDir = ?config(base_dir, Config),
Qry = "xs:dateTime(\"2004-07-13T23:01:04.12\") gt xs:dateTime(\"2004-07-12T23:01:04.12\")",
Qry1 = Qry,
io:format("Qry1: ~p~n", [Qry1]),
Res =
try
Mod = xqerl_code_server:compile(filename:join(__BaseDir, "K-DateTimeGT-1.xq"), Qry1),
xqerl:run(Mod)
of
D -> D
catch
_:E -> E
end,
Out =
case xqerl_test:assert_true(Res) of
true -> {comment, "Empty"};
{false, F} -> F
end,
case Out of
{comment, C} -> {comment, C};
Err -> ct:fail(Err)
end.
'K-DateTimeGT-2'(Config) ->
__BaseDir = ?config(base_dir, Config),
Qry = "not(xs:dateTime(\"2004-07-12T23:01:04.12\") gt xs:dateTime(\"2004-07-12T23:01:04.12\"))",
Qry1 = Qry,
io:format("Qry1: ~p~n", [Qry1]),
Res =
try
Mod = xqerl_code_server:compile(filename:join(__BaseDir, "K-DateTimeGT-2.xq"), Qry1),
xqerl:run(Mod)
of
D -> D
catch
_:E -> E
end,
Out =
case xqerl_test:assert_true(Res) of
true -> {comment, "Empty"};
{false, F} -> F
end,
case Out of
{comment, C} -> {comment, C};
Err -> ct:fail(Err)
end.
'K-DateTimeGT-3'(Config) ->
__BaseDir = ?config(base_dir, Config),
Qry = "not(xs:dateTime(\"2004-07-12T23:01:04.12\") gt xs:dateTime(\"2004-07-13T23:01:04.12\"))",
Qry1 = Qry,
io:format("Qry1: ~p~n", [Qry1]),
Res =
try
Mod = xqerl_code_server:compile(filename:join(__BaseDir, "K-DateTimeGT-3.xq"), Qry1),
xqerl:run(Mod)
of
D -> D
catch
_:E -> E
end,
Out =
case xqerl_test:assert_true(Res) of
true -> {comment, "Empty"};
{false, F} -> F
end,
case Out of
{comment, C} -> {comment, C};
Err -> ct:fail(Err)
end.
'K-DateTimeGT-4'(Config) ->
__BaseDir = ?config(base_dir, Config),
Qry = "xs:dateTime(\"2004-07-12T23:01:04.12\") ge xs:dateTime(\"2004-07-12T23:01:04.12\")",
Qry1 = Qry,
io:format("Qry1: ~p~n", [Qry1]),
Res =
try
Mod = xqerl_code_server:compile(filename:join(__BaseDir, "K-DateTimeGT-4.xq"), Qry1),
xqerl:run(Mod)
of
D -> D
catch
_:E -> E
end,
Out =
case xqerl_test:assert_true(Res) of
true -> {comment, "Empty"};
{false, F} -> F
end,
case Out of
{comment, C} -> {comment, C};
Err -> ct:fail(Err)
end.
'K-DateTimeGT-5'(Config) ->
__BaseDir = ?config(base_dir, Config),
Qry = "xs:dateTime(\"2004-07-13T23:01:04.12\") ge xs:dateTime(\"2004-07-12T23:01:04.12\")",
Qry1 = Qry,
io:format("Qry1: ~p~n", [Qry1]),
Res =
try
Mod = xqerl_code_server:compile(filename:join(__BaseDir, "K-DateTimeGT-5.xq"), Qry1),
xqerl:run(Mod)
of
D -> D
catch
_:E -> E
end,
Out =
case xqerl_test:assert_true(Res) of
true -> {comment, "Empty"};
{false, F} -> F
end,
case Out of
{comment, C} -> {comment, C};
Err -> ct:fail(Err)
end.
'K-DateTimeGT-6'(Config) ->
__BaseDir = ?config(base_dir, Config),
Qry = "not(xs:dateTime(\"2004-07-11T23:01:04.12\") ge xs:dateTime(\"2004-07-12T23:01:04.12\"))",
Qry1 = Qry,
io:format("Qry1: ~p~n", [Qry1]),
Res =
try
Mod = xqerl_code_server:compile(filename:join(__BaseDir, "K-DateTimeGT-6.xq"), Qry1),
xqerl:run(Mod)
of
D -> D
catch
_:E -> E
end,
Out =
case xqerl_test:assert_true(Res) of
true -> {comment, "Empty"};
{false, F} -> F
end,
case Out of
{comment, C} -> {comment, C};
Err -> ct:fail(Err)
end.
'cbcl-dateTime-greater-than-001'(Config) ->
__BaseDir = ?config(base_dir, Config),
Qry =
"declare function local:two-digit($number as xs:integer) { let $string := string($number) return if (string-length($string) lt 2) then concat('0', $string) else $string }; declare function local:dateTime($year as xs:integer, $month as xs:integer, $day as xs:integer) { let $m := local:two-digit($month), $d := local:two-digit($day) return xs:dateTime(concat($year, '-', $m, '-', $d, \"T12:00:00\")) }; not(local:dateTime(2008, 05, 12) gt xs:dateTime(\"1972-12-15T12:00:00\"))",
Qry1 = Qry,
io:format("Qry1: ~p~n", [Qry1]),
Res =
try
Mod = xqerl_code_server:compile(
filename:join(__BaseDir, "cbcl-dateTime-greater-than-001.xq"),
Qry1
),
xqerl:run(Mod)
of
D -> D
catch
_:E -> E
end,
Out =
case xqerl_test:assert_false(Res) of
true -> {comment, "Empty"};
{false, F} -> F
end,
case Out of
{comment, C} -> {comment, C};
Err -> ct:fail(Err)
end.
'cbcl-dateTime-greater-than-002'(Config) ->
__BaseDir = ?config(base_dir, Config),
Qry = "xs:dateTime(\"2008-01-30T00:01:00\") gt xs:dateTime(\"2008-01-31T01:00:00+09:00\")",
Qry1 = Qry,
io:format("Qry1: ~p~n", [Qry1]),
Res =
try
Mod = xqerl_code_server:compile(
filename:join(__BaseDir, "cbcl-dateTime-greater-than-002.xq"),
Qry1
),
xqerl:run(Mod)
of
D -> D
catch
_:E -> E
end,
Out =
case xqerl_test:assert_false(Res) of
true -> {comment, "Empty"};
{false, F} -> F
end,
case Out of
{comment, C} -> {comment, C};
Err -> ct:fail(Err)
end.
'cbcl-dateTime-greater-than-003'(Config) ->
__BaseDir = ?config(base_dir, Config),
Qry = "xs:dateTime(\"2008-01-31T00:01:00+09:00\") gt xs:dateTime(\"2008-01-30T00:01:00\")",
Qry1 = Qry,
io:format("Qry1: ~p~n", [Qry1]),
Res =
try
Mod = xqerl_code_server:compile(
filename:join(__BaseDir, "cbcl-dateTime-greater-than-003.xq"),
Qry1
),
xqerl:run(Mod)
of
D -> D
catch
_:E -> E
end,
Out =
case xqerl_test:assert_true(Res) of
true -> {comment, "Empty"};
{false, F} -> F
end,
case Out of
{comment, C} -> {comment, C};
Err -> ct:fail(Err)
end.
'cbcl-dateTime-greater-than-004'(Config) ->
__BaseDir = ?config(base_dir, Config),
Qry = "xs:dateTime(\"2008-01-31T00:01:00\") gt xs:dateTime(\"2008-01-31T00:01:00+09:00\")",
Qry1 = Qry,
io:format("Qry1: ~p~n", [Qry1]),
Res =
try
Mod = xqerl_code_server:compile(
filename:join(__BaseDir, "cbcl-dateTime-greater-than-004.xq"),
Qry1
),
xqerl:run(Mod)
of
D -> D
catch
_:E -> E
end,
Out =
case xqerl_test:assert_true(Res) of
true -> {comment, "Empty"};
{false, F} -> F
end,
case Out of
{comment, C} -> {comment, C};
Err -> ct:fail(Err)
end.
'cbcl-dateTime-greater-than-005'(Config) ->
__BaseDir = ?config(base_dir, Config),
Qry = "xs:dateTime(\"2008-01-31T00:01:00+09:00\") gt xs:dateTime(\"2008-01-31T00:01:00\")",
Qry1 = Qry,
io:format("Qry1: ~p~n", [Qry1]),
Res =
try
Mod = xqerl_code_server:compile(
filename:join(__BaseDir, "cbcl-dateTime-greater-than-005.xq"),
Qry1
),
xqerl:run(Mod)
of
D -> D
catch
_:E -> E
end,
Out =
case xqerl_test:assert_false(Res) of
true -> {comment, "Empty"};
{false, F} -> F
end,
case Out of
{comment, C} -> {comment, C};
Err -> ct:fail(Err)
end.
'cbcl-dateTime-greater-than-006'(Config) ->
__BaseDir = ?config(base_dir, Config),
Qry =
"declare function local:two-digit($number as xs:integer) { let $string := string($number) return if (string-length($string) lt 2) then concat('0', $string) else $string }; declare function local:dateTime($year as xs:integer, $month as xs:integer, $day as xs:integer) { let $m := local:two-digit($month), $d := local:two-digit($day) return xs:dateTime(concat($year, '-', $m, '-', $d, \"T12:00:00\")) }; not(local:dateTime(2008, 05, 12) ge xs:dateTime(\"1972-12-15T12:00:00\"))",
Qry1 = Qry,
io:format("Qry1: ~p~n", [Qry1]),
Res =
try
Mod = xqerl_code_server:compile(
filename:join(__BaseDir, "cbcl-dateTime-greater-than-006.xq"),
Qry1
),
xqerl:run(Mod)
of
D -> D
catch
_:E -> E
end,
Out =
case xqerl_test:assert_false(Res) of
true -> {comment, "Empty"};
{false, F} -> F
end,
case Out of
{comment, C} -> {comment, C};
Err -> ct:fail(Err)
end.
'cbcl-dateTime-greater-than-007'(Config) ->
__BaseDir = ?config(base_dir, Config),
Qry = "xs:dateTime(\"2008-01-30T00:01:00\") ge xs:dateTime(\"2008-01-31T00:01:00+09:00\")",
Qry1 = Qry,
io:format("Qry1: ~p~n", [Qry1]),
Res =
try
Mod = xqerl_code_server:compile(
filename:join(__BaseDir, "cbcl-dateTime-greater-than-007.xq"),
Qry1
),
xqerl:run(Mod)
of
D -> D
catch
_:E -> E
end,
Out =
case xqerl_test:assert_false(Res) of
true -> {comment, "Empty"};
{false, F} -> F
end,
case Out of
{comment, C} -> {comment, C};
Err -> ct:fail(Err)
end.
'cbcl-dateTime-greater-than-008'(Config) ->
__BaseDir = ?config(base_dir, Config),
Qry = "xs:dateTime(\"2008-01-31T00:01:00+09:00\") ge xs:dateTime(\"2008-01-30T00:01:00\")",
Qry1 = Qry,
io:format("Qry1: ~p~n", [Qry1]),
Res =
try
Mod = xqerl_code_server:compile(
filename:join(__BaseDir, "cbcl-dateTime-greater-than-008.xq"),
Qry1
),
xqerl:run(Mod)
of
D -> D
catch
_:E -> E
end,
Out =
case xqerl_test:assert_true(Res) of
true -> {comment, "Empty"};
{false, F} -> F
end,
case Out of
{comment, C} -> {comment, C};
Err -> ct:fail(Err)
end.
'cbcl-dateTime-greater-than-009'(Config) ->
__BaseDir = ?config(base_dir, Config),
Qry = "xs:dateTime(\"2008-01-31T00:01:00\") ge xs:dateTime(\"2008-01-31T00:01:00+09:00\")",
Qry1 = Qry,
io:format("Qry1: ~p~n", [Qry1]),
Res =
try
Mod = xqerl_code_server:compile(
filename:join(__BaseDir, "cbcl-dateTime-greater-than-009.xq"),
Qry1
),
xqerl:run(Mod)
of
D -> D
catch
_:E -> E
end,
Out =
case xqerl_test:assert_true(Res) of
true -> {comment, "Empty"};
{false, F} -> F
end,
case Out of
{comment, C} -> {comment, C};
Err -> ct:fail(Err)
end.
'cbcl-dateTime-greater-than-010'(Config) ->
__BaseDir = ?config(base_dir, Config),
Qry = "xs:dateTime(\"2008-01-31T00:01:00+09:00\") ge xs:dateTime(\"2008-01-31T00:01:00\")",
Qry1 = Qry,
io:format("Qry1: ~p~n", [Qry1]),
Res =
try
Mod = xqerl_code_server:compile(
filename:join(__BaseDir, "cbcl-dateTime-greater-than-010.xq"),
Qry1
),
xqerl:run(Mod)
of
D -> D
catch
_:E -> E
end,
Out =
case xqerl_test:assert_false(Res) of
true -> {comment, "Empty"};
{false, F} -> F
end,
case Out of
{comment, C} -> {comment, C};
Err -> ct:fail(Err)
end.
'cbcl-dateTime-greater-than-011'(Config) ->
__BaseDir = ?config(base_dir, Config),
Qry =
"declare function local:dateTime($dateTime as xs:dateTime, $null as xs:boolean) { if ($null) then () else $dateTime }; exists(local:dateTime(xs:dateTime(\"1972-12-15T12:00:00\"), fn:true()) gt xs:dateTime(\"1972-12-15T12:00:00\"))",
Qry1 = Qry,
io:format("Qry1: ~p~n", [Qry1]),
Res =
try
Mod = xqerl_code_server:compile(
filename:join(__BaseDir, "cbcl-dateTime-greater-than-011.xq"),
Qry1
),
xqerl:run(Mod)
of
D -> D
catch
_:E -> E
end,
Out =
case xqerl_test:assert_false(Res) of
true -> {comment, "Empty"};
{false, F} -> F
end,
case Out of
{comment, C} -> {comment, C};
Err -> ct:fail(Err)
end.
'cbcl-dateTime-greater-than-012'(Config) ->
__BaseDir = ?config(base_dir, Config),
Qry =
"declare function local:dateTime($dateTime as xs:dateTime, $null as xs:boolean) { if ($null) then () else $dateTime }; local:dateTime(xs:dateTime(\"1972-12-15T12:00:00\"), fn:false()) gt xs:dateTime(\"1972-12-15T12:00:00\")",
Qry1 = Qry,
io:format("Qry1: ~p~n", [Qry1]),
Res =
try
Mod = xqerl_code_server:compile(
filename:join(__BaseDir, "cbcl-dateTime-greater-than-012.xq"),
Qry1
),
xqerl:run(Mod)
of
D -> D
catch
_:E -> E
end,
Out =
case xqerl_test:assert_false(Res) of
true -> {comment, "Empty"};
{false, F} -> F
end,
case Out of
{comment, C} -> {comment, C};
Err -> ct:fail(Err)
end.
'cbcl-dateTime-greater-than-013'(Config) ->
__BaseDir = ?config(base_dir, Config),
Qry =
"declare function local:dateTime($dateTime as xs:dateTime, $null as xs:boolean) { if ($null) then () else $dateTime }; exists(local:dateTime(xs:dateTime(\"1972-12-15T12:00:00\"), fn:true()) le xs:dateTime(\"1972-12-15T12:00:00\"))",
Qry1 = Qry,
io:format("Qry1: ~p~n", [Qry1]),
Res =
try
Mod = xqerl_code_server:compile(
filename:join(__BaseDir, "cbcl-dateTime-greater-than-013.xq"),
Qry1
),
xqerl:run(Mod)
of
D -> D
catch
_:E -> E
end,
Out =
case xqerl_test:assert_false(Res) of
true -> {comment, "Empty"};
{false, F} -> F
end,
case Out of
{comment, C} -> {comment, C};
Err -> ct:fail(Err)
end.
'cbcl-dateTime-greater-than-014'(Config) ->
__BaseDir = ?config(base_dir, Config),
Qry =
"declare function local:dateTime($dateTime as xs:dateTime, $null as xs:boolean) { if ($null) then () else $dateTime }; local:dateTime(xs:dateTime(\"1972-12-15T12:00:00\"), fn:false()) le xs:dateTime(\"1972-12-15T12:00:00\")",
Qry1 = Qry,
io:format("Qry1: ~p~n", [Qry1]),
Res =
try
Mod = xqerl_code_server:compile(
filename:join(__BaseDir, "cbcl-dateTime-greater-than-014.xq"),
Qry1
),
xqerl:run(Mod)
of
D -> D
catch
_:E -> E
end,
Out =
case xqerl_test:assert_true(Res) of
true -> {comment, "Empty"};
{false, F} -> F
end,
case Out of
{comment, C} -> {comment, C};
Err -> ct:fail(Err)
end.
'cbcl-dateTime-greater-than-015'(Config) ->
__BaseDir = ?config(base_dir, Config),
Qry =
"declare function local:dateTime($dateTime as xs:string, $timezone as xs:string) { xs:dateTime( concat($dateTime, $timezone) ) }; adjust-dateTime-to-timezone(local:dateTime(\"1972-12-14T00:00:00\", \"-12:00\")) gt adjust-dateTime-to-timezone(xs:dateTime(\"1972-12-15T00:00:00+12:00\"))",
Qry1 = Qry,
io:format("Qry1: ~p~n", [Qry1]),
Res =
try
Mod = xqerl_code_server:compile(
filename:join(__BaseDir, "cbcl-dateTime-greater-than-015.xq"),
Qry1
),
xqerl:run(Mod)
of
D -> D
catch
_:E -> E
end,
Out =
case xqerl_test:assert_false(Res) of
true -> {comment, "Empty"};
{false, F} -> F
end,
case Out of
{comment, C} -> {comment, C};
Err -> ct:fail(Err)
end.
'cbcl-dateTime-greater-than-016'(Config) ->
__BaseDir = ?config(base_dir, Config),
Qry =
"declare function local:dateTime($dateTime as xs:string, $timezone as xs:string) { xs:dateTime( concat($dateTime, $timezone) ) }; adjust-dateTime-to-timezone(local:dateTime(\"1972-12-14T00:00:00\", \"-12:00\")) ge adjust-dateTime-to-timezone(xs:dateTime(\"1972-12-15T00:00:00+12:00\"))",
Qry1 = Qry,
io:format("Qry1: ~p~n", [Qry1]),
Res =
try
Mod = xqerl_code_server:compile(
filename:join(__BaseDir, "cbcl-dateTime-greater-than-016.xq"),
Qry1
),
xqerl:run(Mod)
of
D -> D
catch
_:E -> E
end,
Out =
case xqerl_test:assert_true(Res) of
true -> {comment, "Empty"};
{false, F} -> F
end,
case Out of
{comment, C} -> {comment, C};
Err -> ct:fail(Err)
end.
| null | https://raw.githubusercontent.com/zadean/xqerl/1a94833e996435495922346010ce918b4b0717f2/test/op/op_dateTime_greater_than_SUITE.erl | erlang | -module('op_dateTime_greater_than_SUITE').
-include_lib("common_test/include/ct.hrl").
-export([
all/0,
groups/0,
suite/0
]).
-export([
init_per_suite/1,
init_per_group/2,
end_per_group/2,
end_per_suite/1
]).
-export(['op-dateTime-greater-than2args-1'/1]).
-export(['op-dateTime-greater-than2args-2'/1]).
-export(['op-dateTime-greater-than2args-3'/1]).
-export(['op-dateTime-greater-than2args-4'/1]).
-export(['op-dateTime-greater-than2args-5'/1]).
-export(['op-dateTime-greater-than2args-6'/1]).
-export(['op-dateTime-greater-than2args-7'/1]).
-export(['op-dateTime-greater-than2args-8'/1]).
-export(['op-dateTime-greater-than2args-9'/1]).
-export(['op-dateTime-greater-than2args-10'/1]).
-export(['op-dateTime-greater-than-3'/1]).
-export(['op-dateTime-greater-than-4'/1]).
-export(['op-dateTime-greater-than-5'/1]).
-export(['op-dateTime-greater-than-6'/1]).
-export(['op-dateTime-greater-than-7'/1]).
-export(['op-dateTime-greater-than-8'/1]).
-export(['op-dateTime-greater-than-9'/1]).
-export(['op-dateTime-greater-than-10'/1]).
-export(['op-dateTime-greater-than-11'/1]).
-export(['op-dateTime-greater-than-12'/1]).
-export(['op-dateTime-greater-than-13'/1]).
-export(['op-dateTime-greater-than-14'/1]).
-export(['K-DateTimeGT-1'/1]).
-export(['K-DateTimeGT-2'/1]).
-export(['K-DateTimeGT-3'/1]).
-export(['K-DateTimeGT-4'/1]).
-export(['K-DateTimeGT-5'/1]).
-export(['K-DateTimeGT-6'/1]).
-export(['cbcl-dateTime-greater-than-001'/1]).
-export(['cbcl-dateTime-greater-than-002'/1]).
-export(['cbcl-dateTime-greater-than-003'/1]).
-export(['cbcl-dateTime-greater-than-004'/1]).
-export(['cbcl-dateTime-greater-than-005'/1]).
-export(['cbcl-dateTime-greater-than-006'/1]).
-export(['cbcl-dateTime-greater-than-007'/1]).
-export(['cbcl-dateTime-greater-than-008'/1]).
-export(['cbcl-dateTime-greater-than-009'/1]).
-export(['cbcl-dateTime-greater-than-010'/1]).
-export(['cbcl-dateTime-greater-than-011'/1]).
-export(['cbcl-dateTime-greater-than-012'/1]).
-export(['cbcl-dateTime-greater-than-013'/1]).
-export(['cbcl-dateTime-greater-than-014'/1]).
-export(['cbcl-dateTime-greater-than-015'/1]).
-export(['cbcl-dateTime-greater-than-016'/1]).
suite() -> [{timetrap, {seconds, 180}}].
init_per_group(_, Config) -> Config.
end_per_group(_, _Config) ->
xqerl_code_server:unload(all).
end_per_suite(_Config) ->
ct:timetrap({seconds, 60}),
xqerl_code_server:unload(all).
init_per_suite(Config) ->
{ok, _} = application:ensure_all_started(xqerl),
DD = filename:dirname(filename:dirname(filename:dirname(?config(data_dir, Config)))),
TD = filename:join(DD, "QT3-test-suite"),
__BaseDir = filename:join(TD, "op"),
[{base_dir, __BaseDir} | Config].
all() ->
[
{group, group_0},
{group, group_1}
].
groups() ->
[
{group_0, [parallel], [
'op-dateTime-greater-than2args-1',
'op-dateTime-greater-than2args-2',
'op-dateTime-greater-than2args-3',
'op-dateTime-greater-than2args-4',
'op-dateTime-greater-than2args-5',
'op-dateTime-greater-than2args-6',
'op-dateTime-greater-than2args-7',
'op-dateTime-greater-than2args-8',
'op-dateTime-greater-than2args-9',
'op-dateTime-greater-than2args-10',
'op-dateTime-greater-than-3',
'op-dateTime-greater-than-4',
'op-dateTime-greater-than-5',
'op-dateTime-greater-than-6',
'op-dateTime-greater-than-7',
'op-dateTime-greater-than-8',
'op-dateTime-greater-than-9',
'op-dateTime-greater-than-10',
'op-dateTime-greater-than-11',
'op-dateTime-greater-than-12',
'op-dateTime-greater-than-13',
'op-dateTime-greater-than-14',
'K-DateTimeGT-1'
]},
{group_1, [parallel], [
'K-DateTimeGT-2',
'K-DateTimeGT-3',
'K-DateTimeGT-4',
'K-DateTimeGT-5',
'K-DateTimeGT-6',
'cbcl-dateTime-greater-than-001',
'cbcl-dateTime-greater-than-002',
'cbcl-dateTime-greater-than-003',
'cbcl-dateTime-greater-than-004',
'cbcl-dateTime-greater-than-005',
'cbcl-dateTime-greater-than-006',
'cbcl-dateTime-greater-than-007',
'cbcl-dateTime-greater-than-008',
'cbcl-dateTime-greater-than-009',
'cbcl-dateTime-greater-than-010',
'cbcl-dateTime-greater-than-011',
'cbcl-dateTime-greater-than-012',
'cbcl-dateTime-greater-than-013',
'cbcl-dateTime-greater-than-014',
'cbcl-dateTime-greater-than-015',
'cbcl-dateTime-greater-than-016'
]}
].
'op-dateTime-greater-than2args-1'(Config) ->
__BaseDir = ?config(base_dir, Config),
Qry = "xs:dateTime(\"1970-01-01T00:00:00Z\") gt xs:dateTime(\"1970-01-01T00:00:00Z\")",
Qry1 = Qry,
io:format("Qry1: ~p~n", [Qry1]),
Res =
try
Mod = xqerl_code_server:compile(
filename:join(__BaseDir, "op-dateTime-greater-than2args-1.xq"),
Qry1
),
xqerl:run(Mod)
of
D -> D
catch
_:E -> E
end,
Out =
case xqerl_test:assert_false(Res) of
true -> {comment, "Empty"};
{false, F} -> F
end,
case Out of
{comment, C} -> {comment, C};
Err -> ct:fail(Err)
end.
'op-dateTime-greater-than2args-2'(Config) ->
__BaseDir = ?config(base_dir, Config),
Qry = "xs:dateTime(\"1996-04-07T01:40:52Z\") gt xs:dateTime(\"1970-01-01T00:00:00Z\")",
Qry1 = Qry,
io:format("Qry1: ~p~n", [Qry1]),
Res =
try
Mod = xqerl_code_server:compile(
filename:join(__BaseDir, "op-dateTime-greater-than2args-2.xq"),
Qry1
),
xqerl:run(Mod)
of
D -> D
catch
_:E -> E
end,
Out =
case xqerl_test:assert_true(Res) of
true -> {comment, "Empty"};
{false, F} -> F
end,
case Out of
{comment, C} -> {comment, C};
Err -> ct:fail(Err)
end.
'op-dateTime-greater-than2args-3'(Config) ->
__BaseDir = ?config(base_dir, Config),
Qry = "xs:dateTime(\"2030-12-31T23:59:59Z\") gt xs:dateTime(\"1970-01-01T00:00:00Z\")",
Qry1 = Qry,
io:format("Qry1: ~p~n", [Qry1]),
Res =
try
Mod = xqerl_code_server:compile(
filename:join(__BaseDir, "op-dateTime-greater-than2args-3.xq"),
Qry1
),
xqerl:run(Mod)
of
D -> D
catch
_:E -> E
end,
Out =
case xqerl_test:assert_true(Res) of
true -> {comment, "Empty"};
{false, F} -> F
end,
case Out of
{comment, C} -> {comment, C};
Err -> ct:fail(Err)
end.
'op-dateTime-greater-than2args-4'(Config) ->
__BaseDir = ?config(base_dir, Config),
Qry = "xs:dateTime(\"1970-01-01T00:00:00Z\") gt xs:dateTime(\"1996-04-07T01:40:52Z\")",
Qry1 = Qry,
io:format("Qry1: ~p~n", [Qry1]),
Res =
try
Mod = xqerl_code_server:compile(
filename:join(__BaseDir, "op-dateTime-greater-than2args-4.xq"),
Qry1
),
xqerl:run(Mod)
of
D -> D
catch
_:E -> E
end,
Out =
case xqerl_test:assert_false(Res) of
true -> {comment, "Empty"};
{false, F} -> F
end,
case Out of
{comment, C} -> {comment, C};
Err -> ct:fail(Err)
end.
'op-dateTime-greater-than2args-5'(Config) ->
__BaseDir = ?config(base_dir, Config),
Qry = "xs:dateTime(\"1970-01-01T00:00:00Z\") gt xs:dateTime(\"2030-12-31T23:59:59Z\")",
Qry1 = Qry,
io:format("Qry1: ~p~n", [Qry1]),
Res =
try
Mod = xqerl_code_server:compile(
filename:join(__BaseDir, "op-dateTime-greater-than2args-5.xq"),
Qry1
),
xqerl:run(Mod)
of
D -> D
catch
_:E -> E
end,
Out =
case xqerl_test:assert_false(Res) of
true -> {comment, "Empty"};
{false, F} -> F
end,
case Out of
{comment, C} -> {comment, C};
Err -> ct:fail(Err)
end.
'op-dateTime-greater-than2args-6'(Config) ->
__BaseDir = ?config(base_dir, Config),
Qry = "xs:dateTime(\"1970-01-01T00:00:00Z\") le xs:dateTime(\"1970-01-01T00:00:00Z\")",
Qry1 = Qry,
io:format("Qry1: ~p~n", [Qry1]),
Res =
try
Mod = xqerl_code_server:compile(
filename:join(__BaseDir, "op-dateTime-greater-than2args-6.xq"),
Qry1
),
xqerl:run(Mod)
of
D -> D
catch
_:E -> E
end,
Out =
case xqerl_test:assert_true(Res) of
true -> {comment, "Empty"};
{false, F} -> F
end,
case Out of
{comment, C} -> {comment, C};
Err -> ct:fail(Err)
end.
'op-dateTime-greater-than2args-7'(Config) ->
__BaseDir = ?config(base_dir, Config),
Qry = "xs:dateTime(\"1996-04-07T01:40:52Z\") le xs:dateTime(\"1970-01-01T00:00:00Z\")",
Qry1 = Qry,
io:format("Qry1: ~p~n", [Qry1]),
Res =
try
Mod = xqerl_code_server:compile(
filename:join(__BaseDir, "op-dateTime-greater-than2args-7.xq"),
Qry1
),
xqerl:run(Mod)
of
D -> D
catch
_:E -> E
end,
Out =
case xqerl_test:assert_false(Res) of
true -> {comment, "Empty"};
{false, F} -> F
end,
case Out of
{comment, C} -> {comment, C};
Err -> ct:fail(Err)
end.
'op-dateTime-greater-than2args-8'(Config) ->
__BaseDir = ?config(base_dir, Config),
Qry = "xs:dateTime(\"2030-12-31T23:59:59Z\") le xs:dateTime(\"1970-01-01T00:00:00Z\")",
Qry1 = Qry,
io:format("Qry1: ~p~n", [Qry1]),
Res =
try
Mod = xqerl_code_server:compile(
filename:join(__BaseDir, "op-dateTime-greater-than2args-8.xq"),
Qry1
),
xqerl:run(Mod)
of
D -> D
catch
_:E -> E
end,
Out =
case xqerl_test:assert_false(Res) of
true -> {comment, "Empty"};
{false, F} -> F
end,
case Out of
{comment, C} -> {comment, C};
Err -> ct:fail(Err)
end.
'op-dateTime-greater-than2args-9'(Config) ->
__BaseDir = ?config(base_dir, Config),
Qry = "xs:dateTime(\"1970-01-01T00:00:00Z\") le xs:dateTime(\"1996-04-07T01:40:52Z\")",
Qry1 = Qry,
io:format("Qry1: ~p~n", [Qry1]),
Res =
try
Mod = xqerl_code_server:compile(
filename:join(__BaseDir, "op-dateTime-greater-than2args-9.xq"),
Qry1
),
xqerl:run(Mod)
of
D -> D
catch
_:E -> E
end,
Out =
case xqerl_test:assert_true(Res) of
true -> {comment, "Empty"};
{false, F} -> F
end,
case Out of
{comment, C} -> {comment, C};
Err -> ct:fail(Err)
end.
'op-dateTime-greater-than2args-10'(Config) ->
__BaseDir = ?config(base_dir, Config),
Qry = "xs:dateTime(\"1970-01-01T00:00:00Z\") le xs:dateTime(\"2030-12-31T23:59:59Z\")",
Qry1 = Qry,
io:format("Qry1: ~p~n", [Qry1]),
Res =
try
Mod = xqerl_code_server:compile(
filename:join(__BaseDir, "op-dateTime-greater-than2args-10.xq"),
Qry1
),
xqerl:run(Mod)
of
D -> D
catch
_:E -> E
end,
Out =
case xqerl_test:assert_true(Res) of
true -> {comment, "Empty"};
{false, F} -> F
end,
case Out of
{comment, C} -> {comment, C};
Err -> ct:fail(Err)
end.
'op-dateTime-greater-than-3'(Config) ->
__BaseDir = ?config(base_dir, Config),
Qry =
"fn:not((xs:dateTime(\"2004-04-02T12:00:00Z\") gt xs:dateTime(\"2003-04-02T12:00:00Z\")))",
Qry1 = Qry,
io:format("Qry1: ~p~n", [Qry1]),
Res =
try
Mod = xqerl_code_server:compile(
filename:join(__BaseDir, "op-dateTime-greater-than-3.xq"),
Qry1
),
xqerl:run(Mod)
of
D -> D
catch
_:E -> E
end,
Out =
case xqerl_test:assert_false(Res) of
true -> {comment, "Empty"};
{false, F} -> F
end,
case Out of
{comment, C} -> {comment, C};
Err -> ct:fail(Err)
end.
'op-dateTime-greater-than-4'(Config) ->
__BaseDir = ?config(base_dir, Config),
Qry = "fn:not(xs:dateTime(\"2002-04-02T12:00:00Z\") ge xs:dateTime(\"2002-04-02T12:00:00Z\"))",
Qry1 = Qry,
io:format("Qry1: ~p~n", [Qry1]),
Res =
try
Mod = xqerl_code_server:compile(
filename:join(__BaseDir, "op-dateTime-greater-than-4.xq"),
Qry1
),
xqerl:run(Mod)
of
D -> D
catch
_:E -> E
end,
Out =
case xqerl_test:assert_false(Res) of
true -> {comment, "Empty"};
{false, F} -> F
end,
case Out of
{comment, C} -> {comment, C};
Err -> ct:fail(Err)
end.
'op-dateTime-greater-than-5'(Config) ->
__BaseDir = ?config(base_dir, Config),
Qry = "fn:not(xs:dateTime(\"2002-04-02T12:00:00Z\") gt xs:dateTime(\"2002-05-02T12:00:00Z\"))",
Qry1 = Qry,
io:format("Qry1: ~p~n", [Qry1]),
Res =
try
Mod = xqerl_code_server:compile(
filename:join(__BaseDir, "op-dateTime-greater-than-5.xq"),
Qry1
),
xqerl:run(Mod)
of
D -> D
catch
_:E -> E
end,
Out =
case xqerl_test:assert_true(Res) of
true -> {comment, "Empty"};
{false, F} -> F
end,
case Out of
{comment, C} -> {comment, C};
Err -> ct:fail(Err)
end.
'op-dateTime-greater-than-6'(Config) ->
__BaseDir = ?config(base_dir, Config),
Qry = "fn:not(xs:dateTime(\"2002-04-02T12:00:00Z\") ge xs:dateTime(\"2008-04-02T12:00:00Z\"))",
Qry1 = Qry,
io:format("Qry1: ~p~n", [Qry1]),
Res =
try
Mod = xqerl_code_server:compile(
filename:join(__BaseDir, "op-dateTime-greater-than-6.xq"),
Qry1
),
xqerl:run(Mod)
of
D -> D
catch
_:E -> E
end,
Out =
case xqerl_test:assert_true(Res) of
true -> {comment, "Empty"};
{false, F} -> F
end,
case Out of
{comment, C} -> {comment, C};
Err -> ct:fail(Err)
end.
'op-dateTime-greater-than-7'(Config) ->
__BaseDir = ?config(base_dir, Config),
Qry =
"(xs:dateTime(\"2002-04-02T12:00:00Z\") gt xs:dateTime(\"2002-04-02T12:01:00Z\")) and (xs:dateTime(\"2003-04-02T12:00:00Z\") gt xs:dateTime(\"2002-04-02T12:00:00Z\"))",
Qry1 = Qry,
io:format("Qry1: ~p~n", [Qry1]),
Res =
try
Mod = xqerl_code_server:compile(
filename:join(__BaseDir, "op-dateTime-greater-than-7.xq"),
Qry1
),
xqerl:run(Mod)
of
D -> D
catch
_:E -> E
end,
Out =
case xqerl_test:assert_false(Res) of
true -> {comment, "Empty"};
{false, F} -> F
end,
case Out of
{comment, C} -> {comment, C};
Err -> ct:fail(Err)
end.
'op-dateTime-greater-than-8'(Config) ->
__BaseDir = ?config(base_dir, Config),
Qry =
"(xs:dateTime(\"2002-04-02T12:00:00Z\") ge xs:dateTime(\"2005-04-02T12:00:20Z\")) and (xs:dateTime(\"2002-04-02T12:10:00Z\") ge xs:dateTime(\"2002-04-03T12:00:00Z\"))",
Qry1 = Qry,
io:format("Qry1: ~p~n", [Qry1]),
Res =
try
Mod = xqerl_code_server:compile(
filename:join(__BaseDir, "op-dateTime-greater-than-8.xq"),
Qry1
),
xqerl:run(Mod)
of
D -> D
catch
_:E -> E
end,
Out =
case xqerl_test:assert_false(Res) of
true -> {comment, "Empty"};
{false, F} -> F
end,
case Out of
{comment, C} -> {comment, C};
Err -> ct:fail(Err)
end.
'op-dateTime-greater-than-9'(Config) ->
__BaseDir = ?config(base_dir, Config),
Qry =
"(xs:dateTime(\"2002-06-02T12:00:10Z\") gt xs:dateTime(\"2000-04-04T12:00:00Z\")) or (xs:dateTime(\"2002-04-02T13:00:10Z\") gt xs:dateTime(\"2001-04-02T10:00:00Z\"))",
Qry1 = Qry,
io:format("Qry1: ~p~n", [Qry1]),
Res =
try
Mod = xqerl_code_server:compile(
filename:join(__BaseDir, "op-dateTime-greater-than-9.xq"),
Qry1
),
xqerl:run(Mod)
of
D -> D
catch
_:E -> E
end,
Out =
case xqerl_test:assert_true(Res) of
true -> {comment, "Empty"};
{false, F} -> F
end,
case Out of
{comment, C} -> {comment, C};
Err -> ct:fail(Err)
end.
'op-dateTime-greater-than-10'(Config) ->
__BaseDir = ?config(base_dir, Config),
Qry =
"(xs:dateTime(\"2002-04-03T12:00:10Z\") ge xs:dateTime(\"1990-04-02T12:10:00Z\")) or (xs:dateTime(\"1975-04-03T12:10:00Z\") ge xs:dateTime(\"2000-02-02T12:00:09Z\"))",
Qry1 = Qry,
io:format("Qry1: ~p~n", [Qry1]),
Res =
try
Mod = xqerl_code_server:compile(
filename:join(__BaseDir, "op-dateTime-greater-than-10.xq"),
Qry1
),
xqerl:run(Mod)
of
D -> D
catch
_:E -> E
end,
Out =
case xqerl_test:assert_true(Res) of
true -> {comment, "Empty"};
{false, F} -> F
end,
case Out of
{comment, C} -> {comment, C};
Err -> ct:fail(Err)
end.
'op-dateTime-greater-than-11'(Config) ->
__BaseDir = ?config(base_dir, Config),
Qry =
"(xs:dateTime(\"1990-04-02T12:00:10Z\") gt xs:dateTime(\"2006-06-02T12:10:00Z\")) or (fn:true())",
Qry1 = Qry,
io:format("Qry1: ~p~n", [Qry1]),
Res =
try
Mod = xqerl_code_server:compile(
filename:join(__BaseDir, "op-dateTime-greater-than-11.xq"),
Qry1
),
xqerl:run(Mod)
of
D -> D
catch
_:E -> E
end,
Out =
case xqerl_test:assert_true(Res) of
true -> {comment, "Empty"};
{false, F} -> F
end,
case Out of
{comment, C} -> {comment, C};
Err -> ct:fail(Err)
end.
'op-dateTime-greater-than-12'(Config) ->
__BaseDir = ?config(base_dir, Config),
Qry =
"(xs:dateTime(\"1970-04-02T12:00:20Z\") ge xs:dateTime(\"1980-04-02T12:00:20Z\")) or (fn:true())",
Qry1 = Qry,
io:format("Qry1: ~p~n", [Qry1]),
Res =
try
Mod = xqerl_code_server:compile(
filename:join(__BaseDir, "op-dateTime-greater-than-12.xq"),
Qry1
),
xqerl:run(Mod)
of
D -> D
catch
_:E -> E
end,
Out =
case xqerl_test:assert_true(Res) of
true -> {comment, "Empty"};
{false, F} -> F
end,
case Out of
{comment, C} -> {comment, C};
Err -> ct:fail(Err)
end.
'op-dateTime-greater-than-13'(Config) ->
__BaseDir = ?config(base_dir, Config),
Qry =
"(xs:dateTime(\"1981-04-02T12:00:00Z\") gt xs:dateTime(\"2003-04-02T12:10:00Z\")) or (fn:false())",
Qry1 = Qry,
io:format("Qry1: ~p~n", [Qry1]),
Res =
try
Mod = xqerl_code_server:compile(
filename:join(__BaseDir, "op-dateTime-greater-than-13.xq"),
Qry1
),
xqerl:run(Mod)
of
D -> D
catch
_:E -> E
end,
Out =
case xqerl_test:assert_false(Res) of
true -> {comment, "Empty"};
{false, F} -> F
end,
case Out of
{comment, C} -> {comment, C};
Err -> ct:fail(Err)
end.
'op-dateTime-greater-than-14'(Config) ->
__BaseDir = ?config(base_dir, Config),
Qry =
"(xs:dateTime(\"1976-04-03T12:00:00Z\") ge xs:dateTime(\"2002-07-02T12:00:30Z\")) or (fn:false())",
Qry1 = Qry,
io:format("Qry1: ~p~n", [Qry1]),
Res =
try
Mod = xqerl_code_server:compile(
filename:join(__BaseDir, "op-dateTime-greater-than-14.xq"),
Qry1
),
xqerl:run(Mod)
of
D -> D
catch
_:E -> E
end,
Out =
case xqerl_test:assert_false(Res) of
true -> {comment, "Empty"};
{false, F} -> F
end,
case Out of
{comment, C} -> {comment, C};
Err -> ct:fail(Err)
end.
'K-DateTimeGT-1'(Config) ->
__BaseDir = ?config(base_dir, Config),
Qry = "xs:dateTime(\"2004-07-13T23:01:04.12\") gt xs:dateTime(\"2004-07-12T23:01:04.12\")",
Qry1 = Qry,
io:format("Qry1: ~p~n", [Qry1]),
Res =
try
Mod = xqerl_code_server:compile(filename:join(__BaseDir, "K-DateTimeGT-1.xq"), Qry1),
xqerl:run(Mod)
of
D -> D
catch
_:E -> E
end,
Out =
case xqerl_test:assert_true(Res) of
true -> {comment, "Empty"};
{false, F} -> F
end,
case Out of
{comment, C} -> {comment, C};
Err -> ct:fail(Err)
end.
'K-DateTimeGT-2'(Config) ->
__BaseDir = ?config(base_dir, Config),
Qry = "not(xs:dateTime(\"2004-07-12T23:01:04.12\") gt xs:dateTime(\"2004-07-12T23:01:04.12\"))",
Qry1 = Qry,
io:format("Qry1: ~p~n", [Qry1]),
Res =
try
Mod = xqerl_code_server:compile(filename:join(__BaseDir, "K-DateTimeGT-2.xq"), Qry1),
xqerl:run(Mod)
of
D -> D
catch
_:E -> E
end,
Out =
case xqerl_test:assert_true(Res) of
true -> {comment, "Empty"};
{false, F} -> F
end,
case Out of
{comment, C} -> {comment, C};
Err -> ct:fail(Err)
end.
'K-DateTimeGT-3'(Config) ->
__BaseDir = ?config(base_dir, Config),
Qry = "not(xs:dateTime(\"2004-07-12T23:01:04.12\") gt xs:dateTime(\"2004-07-13T23:01:04.12\"))",
Qry1 = Qry,
io:format("Qry1: ~p~n", [Qry1]),
Res =
try
Mod = xqerl_code_server:compile(filename:join(__BaseDir, "K-DateTimeGT-3.xq"), Qry1),
xqerl:run(Mod)
of
D -> D
catch
_:E -> E
end,
Out =
case xqerl_test:assert_true(Res) of
true -> {comment, "Empty"};
{false, F} -> F
end,
case Out of
{comment, C} -> {comment, C};
Err -> ct:fail(Err)
end.
'K-DateTimeGT-4'(Config) ->
__BaseDir = ?config(base_dir, Config),
Qry = "xs:dateTime(\"2004-07-12T23:01:04.12\") ge xs:dateTime(\"2004-07-12T23:01:04.12\")",
Qry1 = Qry,
io:format("Qry1: ~p~n", [Qry1]),
Res =
try
Mod = xqerl_code_server:compile(filename:join(__BaseDir, "K-DateTimeGT-4.xq"), Qry1),
xqerl:run(Mod)
of
D -> D
catch
_:E -> E
end,
Out =
case xqerl_test:assert_true(Res) of
true -> {comment, "Empty"};
{false, F} -> F
end,
case Out of
{comment, C} -> {comment, C};
Err -> ct:fail(Err)
end.
'K-DateTimeGT-5'(Config) ->
__BaseDir = ?config(base_dir, Config),
Qry = "xs:dateTime(\"2004-07-13T23:01:04.12\") ge xs:dateTime(\"2004-07-12T23:01:04.12\")",
Qry1 = Qry,
io:format("Qry1: ~p~n", [Qry1]),
Res =
try
Mod = xqerl_code_server:compile(filename:join(__BaseDir, "K-DateTimeGT-5.xq"), Qry1),
xqerl:run(Mod)
of
D -> D
catch
_:E -> E
end,
Out =
case xqerl_test:assert_true(Res) of
true -> {comment, "Empty"};
{false, F} -> F
end,
case Out of
{comment, C} -> {comment, C};
Err -> ct:fail(Err)
end.
'K-DateTimeGT-6'(Config) ->
__BaseDir = ?config(base_dir, Config),
Qry = "not(xs:dateTime(\"2004-07-11T23:01:04.12\") ge xs:dateTime(\"2004-07-12T23:01:04.12\"))",
Qry1 = Qry,
io:format("Qry1: ~p~n", [Qry1]),
Res =
try
Mod = xqerl_code_server:compile(filename:join(__BaseDir, "K-DateTimeGT-6.xq"), Qry1),
xqerl:run(Mod)
of
D -> D
catch
_:E -> E
end,
Out =
case xqerl_test:assert_true(Res) of
true -> {comment, "Empty"};
{false, F} -> F
end,
case Out of
{comment, C} -> {comment, C};
Err -> ct:fail(Err)
end.
'cbcl-dateTime-greater-than-001'(Config) ->
__BaseDir = ?config(base_dir, Config),
Qry =
"declare function local:two-digit($number as xs:integer) { let $string := string($number) return if (string-length($string) lt 2) then concat('0', $string) else $string }; declare function local:dateTime($year as xs:integer, $month as xs:integer, $day as xs:integer) { let $m := local:two-digit($month), $d := local:two-digit($day) return xs:dateTime(concat($year, '-', $m, '-', $d, \"T12:00:00\")) }; not(local:dateTime(2008, 05, 12) gt xs:dateTime(\"1972-12-15T12:00:00\"))",
Qry1 = Qry,
io:format("Qry1: ~p~n", [Qry1]),
Res =
try
Mod = xqerl_code_server:compile(
filename:join(__BaseDir, "cbcl-dateTime-greater-than-001.xq"),
Qry1
),
xqerl:run(Mod)
of
D -> D
catch
_:E -> E
end,
Out =
case xqerl_test:assert_false(Res) of
true -> {comment, "Empty"};
{false, F} -> F
end,
case Out of
{comment, C} -> {comment, C};
Err -> ct:fail(Err)
end.
'cbcl-dateTime-greater-than-002'(Config) ->
__BaseDir = ?config(base_dir, Config),
Qry = "xs:dateTime(\"2008-01-30T00:01:00\") gt xs:dateTime(\"2008-01-31T01:00:00+09:00\")",
Qry1 = Qry,
io:format("Qry1: ~p~n", [Qry1]),
Res =
try
Mod = xqerl_code_server:compile(
filename:join(__BaseDir, "cbcl-dateTime-greater-than-002.xq"),
Qry1
),
xqerl:run(Mod)
of
D -> D
catch
_:E -> E
end,
Out =
case xqerl_test:assert_false(Res) of
true -> {comment, "Empty"};
{false, F} -> F
end,
case Out of
{comment, C} -> {comment, C};
Err -> ct:fail(Err)
end.
'cbcl-dateTime-greater-than-003'(Config) ->
__BaseDir = ?config(base_dir, Config),
Qry = "xs:dateTime(\"2008-01-31T00:01:00+09:00\") gt xs:dateTime(\"2008-01-30T00:01:00\")",
Qry1 = Qry,
io:format("Qry1: ~p~n", [Qry1]),
Res =
try
Mod = xqerl_code_server:compile(
filename:join(__BaseDir, "cbcl-dateTime-greater-than-003.xq"),
Qry1
),
xqerl:run(Mod)
of
D -> D
catch
_:E -> E
end,
Out =
case xqerl_test:assert_true(Res) of
true -> {comment, "Empty"};
{false, F} -> F
end,
case Out of
{comment, C} -> {comment, C};
Err -> ct:fail(Err)
end.
'cbcl-dateTime-greater-than-004'(Config) ->
__BaseDir = ?config(base_dir, Config),
Qry = "xs:dateTime(\"2008-01-31T00:01:00\") gt xs:dateTime(\"2008-01-31T00:01:00+09:00\")",
Qry1 = Qry,
io:format("Qry1: ~p~n", [Qry1]),
Res =
try
Mod = xqerl_code_server:compile(
filename:join(__BaseDir, "cbcl-dateTime-greater-than-004.xq"),
Qry1
),
xqerl:run(Mod)
of
D -> D
catch
_:E -> E
end,
Out =
case xqerl_test:assert_true(Res) of
true -> {comment, "Empty"};
{false, F} -> F
end,
case Out of
{comment, C} -> {comment, C};
Err -> ct:fail(Err)
end.
'cbcl-dateTime-greater-than-005'(Config) ->
__BaseDir = ?config(base_dir, Config),
Qry = "xs:dateTime(\"2008-01-31T00:01:00+09:00\") gt xs:dateTime(\"2008-01-31T00:01:00\")",
Qry1 = Qry,
io:format("Qry1: ~p~n", [Qry1]),
Res =
try
Mod = xqerl_code_server:compile(
filename:join(__BaseDir, "cbcl-dateTime-greater-than-005.xq"),
Qry1
),
xqerl:run(Mod)
of
D -> D
catch
_:E -> E
end,
Out =
case xqerl_test:assert_false(Res) of
true -> {comment, "Empty"};
{false, F} -> F
end,
case Out of
{comment, C} -> {comment, C};
Err -> ct:fail(Err)
end.
'cbcl-dateTime-greater-than-006'(Config) ->
__BaseDir = ?config(base_dir, Config),
Qry =
"declare function local:two-digit($number as xs:integer) { let $string := string($number) return if (string-length($string) lt 2) then concat('0', $string) else $string }; declare function local:dateTime($year as xs:integer, $month as xs:integer, $day as xs:integer) { let $m := local:two-digit($month), $d := local:two-digit($day) return xs:dateTime(concat($year, '-', $m, '-', $d, \"T12:00:00\")) }; not(local:dateTime(2008, 05, 12) ge xs:dateTime(\"1972-12-15T12:00:00\"))",
Qry1 = Qry,
io:format("Qry1: ~p~n", [Qry1]),
Res =
try
Mod = xqerl_code_server:compile(
filename:join(__BaseDir, "cbcl-dateTime-greater-than-006.xq"),
Qry1
),
xqerl:run(Mod)
of
D -> D
catch
_:E -> E
end,
Out =
case xqerl_test:assert_false(Res) of
true -> {comment, "Empty"};
{false, F} -> F
end,
case Out of
{comment, C} -> {comment, C};
Err -> ct:fail(Err)
end.
'cbcl-dateTime-greater-than-007'(Config) ->
__BaseDir = ?config(base_dir, Config),
Qry = "xs:dateTime(\"2008-01-30T00:01:00\") ge xs:dateTime(\"2008-01-31T00:01:00+09:00\")",
Qry1 = Qry,
io:format("Qry1: ~p~n", [Qry1]),
Res =
try
Mod = xqerl_code_server:compile(
filename:join(__BaseDir, "cbcl-dateTime-greater-than-007.xq"),
Qry1
),
xqerl:run(Mod)
of
D -> D
catch
_:E -> E
end,
Out =
case xqerl_test:assert_false(Res) of
true -> {comment, "Empty"};
{false, F} -> F
end,
case Out of
{comment, C} -> {comment, C};
Err -> ct:fail(Err)
end.
'cbcl-dateTime-greater-than-008'(Config) ->
__BaseDir = ?config(base_dir, Config),
Qry = "xs:dateTime(\"2008-01-31T00:01:00+09:00\") ge xs:dateTime(\"2008-01-30T00:01:00\")",
Qry1 = Qry,
io:format("Qry1: ~p~n", [Qry1]),
Res =
try
Mod = xqerl_code_server:compile(
filename:join(__BaseDir, "cbcl-dateTime-greater-than-008.xq"),
Qry1
),
xqerl:run(Mod)
of
D -> D
catch
_:E -> E
end,
Out =
case xqerl_test:assert_true(Res) of
true -> {comment, "Empty"};
{false, F} -> F
end,
case Out of
{comment, C} -> {comment, C};
Err -> ct:fail(Err)
end.
'cbcl-dateTime-greater-than-009'(Config) ->
__BaseDir = ?config(base_dir, Config),
Qry = "xs:dateTime(\"2008-01-31T00:01:00\") ge xs:dateTime(\"2008-01-31T00:01:00+09:00\")",
Qry1 = Qry,
io:format("Qry1: ~p~n", [Qry1]),
Res =
try
Mod = xqerl_code_server:compile(
filename:join(__BaseDir, "cbcl-dateTime-greater-than-009.xq"),
Qry1
),
xqerl:run(Mod)
of
D -> D
catch
_:E -> E
end,
Out =
case xqerl_test:assert_true(Res) of
true -> {comment, "Empty"};
{false, F} -> F
end,
case Out of
{comment, C} -> {comment, C};
Err -> ct:fail(Err)
end.
'cbcl-dateTime-greater-than-010'(Config) ->
__BaseDir = ?config(base_dir, Config),
Qry = "xs:dateTime(\"2008-01-31T00:01:00+09:00\") ge xs:dateTime(\"2008-01-31T00:01:00\")",
Qry1 = Qry,
io:format("Qry1: ~p~n", [Qry1]),
Res =
try
Mod = xqerl_code_server:compile(
filename:join(__BaseDir, "cbcl-dateTime-greater-than-010.xq"),
Qry1
),
xqerl:run(Mod)
of
D -> D
catch
_:E -> E
end,
Out =
case xqerl_test:assert_false(Res) of
true -> {comment, "Empty"};
{false, F} -> F
end,
case Out of
{comment, C} -> {comment, C};
Err -> ct:fail(Err)
end.
'cbcl-dateTime-greater-than-011'(Config) ->
__BaseDir = ?config(base_dir, Config),
Qry =
"declare function local:dateTime($dateTime as xs:dateTime, $null as xs:boolean) { if ($null) then () else $dateTime }; exists(local:dateTime(xs:dateTime(\"1972-12-15T12:00:00\"), fn:true()) gt xs:dateTime(\"1972-12-15T12:00:00\"))",
Qry1 = Qry,
io:format("Qry1: ~p~n", [Qry1]),
Res =
try
Mod = xqerl_code_server:compile(
filename:join(__BaseDir, "cbcl-dateTime-greater-than-011.xq"),
Qry1
),
xqerl:run(Mod)
of
D -> D
catch
_:E -> E
end,
Out =
case xqerl_test:assert_false(Res) of
true -> {comment, "Empty"};
{false, F} -> F
end,
case Out of
{comment, C} -> {comment, C};
Err -> ct:fail(Err)
end.
'cbcl-dateTime-greater-than-012'(Config) ->
__BaseDir = ?config(base_dir, Config),
Qry =
"declare function local:dateTime($dateTime as xs:dateTime, $null as xs:boolean) { if ($null) then () else $dateTime }; local:dateTime(xs:dateTime(\"1972-12-15T12:00:00\"), fn:false()) gt xs:dateTime(\"1972-12-15T12:00:00\")",
Qry1 = Qry,
io:format("Qry1: ~p~n", [Qry1]),
Res =
try
Mod = xqerl_code_server:compile(
filename:join(__BaseDir, "cbcl-dateTime-greater-than-012.xq"),
Qry1
),
xqerl:run(Mod)
of
D -> D
catch
_:E -> E
end,
Out =
case xqerl_test:assert_false(Res) of
true -> {comment, "Empty"};
{false, F} -> F
end,
case Out of
{comment, C} -> {comment, C};
Err -> ct:fail(Err)
end.
'cbcl-dateTime-greater-than-013'(Config) ->
__BaseDir = ?config(base_dir, Config),
Qry =
"declare function local:dateTime($dateTime as xs:dateTime, $null as xs:boolean) { if ($null) then () else $dateTime }; exists(local:dateTime(xs:dateTime(\"1972-12-15T12:00:00\"), fn:true()) le xs:dateTime(\"1972-12-15T12:00:00\"))",
Qry1 = Qry,
io:format("Qry1: ~p~n", [Qry1]),
Res =
try
Mod = xqerl_code_server:compile(
filename:join(__BaseDir, "cbcl-dateTime-greater-than-013.xq"),
Qry1
),
xqerl:run(Mod)
of
D -> D
catch
_:E -> E
end,
Out =
case xqerl_test:assert_false(Res) of
true -> {comment, "Empty"};
{false, F} -> F
end,
case Out of
{comment, C} -> {comment, C};
Err -> ct:fail(Err)
end.
'cbcl-dateTime-greater-than-014'(Config) ->
__BaseDir = ?config(base_dir, Config),
Qry =
"declare function local:dateTime($dateTime as xs:dateTime, $null as xs:boolean) { if ($null) then () else $dateTime }; local:dateTime(xs:dateTime(\"1972-12-15T12:00:00\"), fn:false()) le xs:dateTime(\"1972-12-15T12:00:00\")",
Qry1 = Qry,
io:format("Qry1: ~p~n", [Qry1]),
Res =
try
Mod = xqerl_code_server:compile(
filename:join(__BaseDir, "cbcl-dateTime-greater-than-014.xq"),
Qry1
),
xqerl:run(Mod)
of
D -> D
catch
_:E -> E
end,
Out =
case xqerl_test:assert_true(Res) of
true -> {comment, "Empty"};
{false, F} -> F
end,
case Out of
{comment, C} -> {comment, C};
Err -> ct:fail(Err)
end.
'cbcl-dateTime-greater-than-015'(Config) ->
__BaseDir = ?config(base_dir, Config),
Qry =
"declare function local:dateTime($dateTime as xs:string, $timezone as xs:string) { xs:dateTime( concat($dateTime, $timezone) ) }; adjust-dateTime-to-timezone(local:dateTime(\"1972-12-14T00:00:00\", \"-12:00\")) gt adjust-dateTime-to-timezone(xs:dateTime(\"1972-12-15T00:00:00+12:00\"))",
Qry1 = Qry,
io:format("Qry1: ~p~n", [Qry1]),
Res =
try
Mod = xqerl_code_server:compile(
filename:join(__BaseDir, "cbcl-dateTime-greater-than-015.xq"),
Qry1
),
xqerl:run(Mod)
of
D -> D
catch
_:E -> E
end,
Out =
case xqerl_test:assert_false(Res) of
true -> {comment, "Empty"};
{false, F} -> F
end,
case Out of
{comment, C} -> {comment, C};
Err -> ct:fail(Err)
end.
'cbcl-dateTime-greater-than-016'(Config) ->
__BaseDir = ?config(base_dir, Config),
Qry =
"declare function local:dateTime($dateTime as xs:string, $timezone as xs:string) { xs:dateTime( concat($dateTime, $timezone) ) }; adjust-dateTime-to-timezone(local:dateTime(\"1972-12-14T00:00:00\", \"-12:00\")) ge adjust-dateTime-to-timezone(xs:dateTime(\"1972-12-15T00:00:00+12:00\"))",
Qry1 = Qry,
io:format("Qry1: ~p~n", [Qry1]),
Res =
try
Mod = xqerl_code_server:compile(
filename:join(__BaseDir, "cbcl-dateTime-greater-than-016.xq"),
Qry1
),
xqerl:run(Mod)
of
D -> D
catch
_:E -> E
end,
Out =
case xqerl_test:assert_true(Res) of
true -> {comment, "Empty"};
{false, F} -> F
end,
case Out of
{comment, C} -> {comment, C};
Err -> ct:fail(Err)
end.
38480e6781ed07c4ec64cc63e0fbe04572f42b29c8fbf5665855b9e65f1aff1c | wilbowma/cur | rackunit-ntac.rkt | #lang racket/base
;; this file contains shim test forms for old cur test files,
;; and some new test forms, eg check-ntac-trace
(provide check-ntac-fail check-ntac-trace ntac/trace ntac/trace/raw :: check-equal?)
(require rackunit
(only-in rackunit/turnstile+ check-type)
syntax/parse/define
(for-syntax racket/base
racket/port
racket/pretty
racket/string
racket/format
syntax/parse
rackunit
macrotypes/stx-utils)
(only-in cur/ntac/base ntac ntac/debug)
(only-in cur/ntac/standard exn:fail:ntac:goal?))
(define-simple-macro (:: e t) (check-type e : t))
(define-syntax check-ntac-fail
(syntax-parser
[(_ e msg)
#:when (check-exn
(λ (exn)
(and
(exn:fail:ntac:goal? exn)
(regexp-match? (syntax-e #'msg) (exn-message exn))))
(λ () (local-expand #'e 'expression null)))
#'(void)]))
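;; Hedged usage sketch (not taken from this repo's tests): `check-ntac-fail`
;; wraps a proof script and a regexp that the expected ntac goal error should
;; match. The goal, tactic and message below are hypothetical placeholders:
;; (check-ntac-fail
;;  (ntac (forall [x : Nat] Nat) by-assumption)
;;  #rx"no matching assumption")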
(define-syntax ntac/trace
(syntax-parser
[(_ . ts)
#:with d-f (datum->syntax this-syntax 'display-focus)
#`(ntac/debug . #,(stx-appendmap (λ (t) (list t #'d-f)) #'ts))]))
(define-syntax ntac/trace/raw
(syntax-parser
[(_ . ts)
#:with disp-foc (datum->syntax this-syntax 'display-focus/raw)
#`(ntac . #,(stx-appendmap (λ (t) (list t #'disp-foc)) #'ts))]))
(define-syntax check-ntac-trace
(syntax-parser
[(_ t ... (~datum ~>) . expected)
#:do[(define expected-str
(clean-up
(string-join
(append (expected-stx->strs #'expected)
(list "Proof complete.\n"))
"")))
(define actual-trace
(clean-up
(with-output-to-string
(λ ()
(local-expand #'(ntac/trace t ...) 'expression null)))))]
#:fail-unless (equal? expected-str actual-trace)
(format "trace not equal, expected:\n~s\ngot:\n~s\n"
expected-str actual-trace)
(syntax/loc this-syntax (check-true #t))]))
(begin-for-syntax
(define (clean-up str)
(string-join
(filter
(λ (s) (not (or (string-prefix? s "***")
(string-prefix? s "step")
(string-prefix? s "(subgoal")
(string=? s ""))))
(string-split str "\n"))
"\n"))
(define expected-stx->strs
(syntax-parser
[() null]
[((~datum --------------------------------) . rst)
(cons "--------------------------------\n"
(expected-stx->strs #'rst))]
[(X:id (~datum :) ty . rst) ; env binding
(cons (format "~a : ~a\n" (syntax->datum #'X) (syntax->datum #'ty))
(expected-stx->strs #'rst))]
[(other . rst) ; goal, add extra newline
(let ([other-datum (stx->datum #'other)])
(if (> (string-length (~a other-datum)) 80)
(cons
(~a (substring
(with-output-to-string (λ () (pretty-print other-datum)))
1))
(cons "\n" (expected-stx->strs #'rst)))
(cons (~a (syntax->datum #'other))
(cons "\n\n" (expected-stx->strs #'rst)))))])))
a377c137f098876a5e0790d4efb5e000a4c6dd1454487931f1724ab514c7f083 | mbuczko/cerber-oauth2-provider | client.clj | (ns cerber.stores.client
"Functions handling OAuth2 client storage."
(:require [cerber.stores.token :as token]
[cerber
[db :as db]
[error :as error]
[helpers :as helpers]
[mappers :as mappers]
[store :refer :all]]
[failjure.core :as f]))
(def client-store (atom :not-initialized))
(defrecord Client [id secret info redirects grants scopes approved? enabled? created-at modified-at activated-at blocked-at])
(defrecord SqlClientStore []
Store
(fetch-one [this [client-id]]
(some-> (db/find-client {:id client-id})
mappers/row->client))
(revoke-one! [this [client-id]]
(db/delete-client {:id client-id}))
(store! [this k client]
(= 1 (db/insert-client (-> client
(update :scopes helpers/coll->str)
(update :grants helpers/coll->str)
(update :redirects helpers/coll->str)))))
(modify! [this k client]
(if (:enabled? client)
(db/enable-client client)
(db/disable-client client)))
(purge! [this]
(db/clear-clients))
(close! [this]
))
(defmulti create-client-store (fn [type config] type))
(defmethod create-client-store :in-memory [_ _]
(->MemoryStore "clients" (atom {})))
(defmethod create-client-store :redis [_ redis-spec]
(->RedisStore "clients" redis-spec))
(defmethod create-client-store :sql [_ db-conn]
(when db-conn
(db/bind-queries db-conn)
(->SqlClientStore)))
(defn init-store
"Initializes client store according to given type and configuration."
[type config]
(reset! client-store (create-client-store type config)))
(defn validate-uri
"Returns java.net.URL instance of given uri or failure info in case of error."
[uri]
(if (empty? uri)
(error/internal-error "redirect-uri cannot be empty")
(if (or (>= (.indexOf uri "#") 0)
(>= (.indexOf uri "..") 0)
(.matches uri ".*\\s+.*"))
(error/internal-error "Illegal characters in redirect URI")
(try
(java.net.URL. uri)
(catch Exception e (error/internal-error (.getMessage e)))))))
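;; Behaviour sketch of `validate-uri`, based on the checks above (return values
;; are paraphrased; error/internal-error is assumed to yield a failjure-style failure):
;; (validate-uri "http://localhost/cb")   ;; => a java.net.URL instance
;; (validate-uri "http://localhost/cb#x") ;; => failure "Illegal characters in redirect URI"
;; (validate-uri "")                      ;; => failure "redirect-uri cannot be empty"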
(defn validate-redirects
"Goes through all redirects and returns list of validation failures."
[redirects]
(filter f/failed? (map validate-uri redirects)))
(defn find-client
"Returns a client with given id if any found or nil otherwise."
[client-id]
(when-let [found (and client-id (fetch-one @client-store [client-id]))]
(map->Client found)))
(defn revoke-client
"Revokes previously generated client and all tokens generated to this client so far."
[client]
(let [id (:id client)]
(revoke-one! @client-store [id])
(token/revoke-client-tokens client)))
(defn purge-clients
"Removes clients from store."
[]
(purge! @client-store))
(defn enable-client
"Enables client. Returns true if client has been enabled successfully or false otherwise."
[client]
(= 1 (modify! @client-store [:id] (assoc client :enabled? true :activated-at (helpers/now)))))
(defn disable-client
"Disables client. Returns true if client has been disabled successfully or false otherwise."
[client]
(token/revoke-client-tokens client)
(= 1 (modify! @client-store [:id] (assoc client :enabled? false :blocked-at (helpers/now)))))
(defn create-client
"Creates and returns a new client."
[grants redirects {:keys [info scopes enabled? approved? id secret]}]
(let [result (validate-redirects redirects)
client {:id (or id (helpers/generate-secret))
:secret (or secret (helpers/generate-secret))
:info info
:approved? (boolean approved?)
:enabled? (boolean enabled?)
:scopes scopes
:grants grants
:redirects redirects
:activated-at (helpers/now)
:created-at (helpers/now)}]
(if (seq result)
(error/internal-error (first result))
(if (store! @client-store [:id] client)
(map->Client client)
(error/internal-error "Cannot store client")))))
(defn grant-allowed?
[client grant]
(when-let [grants (:grants client)]
(.contains grants grant)))
(defn redirect-uri-valid?
[client redirect-uri]
(.contains (:redirects client) redirect-uri))
(defn scopes-valid?
"Checks whether given scopes are valid ones assigned to client."
[client scopes]
(let [client-scopes (:scopes client)]
(or (empty? scopes)
(every? #(.contains client-scopes %) scopes))))
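;; Hedged REPL sketch of the public API above. The store type, grant names and
;; redirect URIs are illustrative assumptions, not values mandated by cerber:
(comment
  (init-store :in-memory nil)
  (let [client (create-client ["authorization_code"]
                              ["http://localhost/callback"]
                              {:info "demo app"
                               :scopes ["photo:read"]
                               :approved? true
                               :enabled? true})]
    (find-client (:id client))))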
44ac329c1aea721a1decccc5b1b046052b8abb2879faa2bae255809267e5d8c1 | tweag/asterius | Task.hs | # LANGUAGE DuplicateRecordFields #
module Asterius.Main.Task
( Target (..),
Task,
target,
optimizeLevel,
shrinkLevel,
inputHS,
inputEntryMJS,
outputDirectory,
outputBaseName,
hasMain,
bundle,
debug,
run,
verboseErr,
yolo,
consoleHistory,
extraGHCFlags,
exportFunctions,
extraRootSymbols,
gcThreshold,
defTask,
)
where
import Asterius.Types (EntitySymbol)
data Target
= Node
| Browser
deriving (Eq)
data Task
= Task
{ target :: Target,
optimizeLevel, shrinkLevel :: Int,
inputHS :: FilePath,
inputEntryMJS :: Maybe FilePath,
outputDirectory :: FilePath,
outputBaseName :: String,
hasMain, bundle, debug, run, verboseErr, yolo, consoleHistory :: Bool,
extraGHCFlags :: [String],
exportFunctions, extraRootSymbols :: [EntitySymbol],
gcThreshold :: Int
}
defTask :: Task
defTask = Task
{ target = Node,
optimizeLevel = 0,
shrinkLevel = 0,
inputHS = error "Asterius.Main.parseTask: missing inputHS",
outputDirectory = error "Asterius.Main.parseTask: missing outputDirectory",
outputBaseName = error "Asterius.Main.parseTask: missing outputBaseName",
inputEntryMJS = Nothing,
hasMain = True,
bundle = False,
debug = False,
run = False,
verboseErr = False,
yolo = False,
consoleHistory = False,
extraGHCFlags = [],
exportFunctions = [],
extraRootSymbols = [],
gcThreshold = 64
}
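-- Hedged usage sketch (illustrative values only, not Asterius defaults):
-- a browser-targeted task built by record update on 'defTask'.
exampleTask :: Task
exampleTask = defTask
  { target = Browser,
    inputHS = "Main.hs",
    outputDirectory = "out",
    outputBaseName = "Main",
    bundle = True
  }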
31fe425fd162bffdf419f52147ee4c42891552bd27e92b77f28f7fdc711d9aec | mfoemmel/erlang-otp | snmp_test_mgr.erl | %%
%% %CopyrightBegin%
%%
%% Copyright Ericsson AB 1996-2009. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
%% compliance with the License. You should have received a copy of the
%% Erlang Public License along with this software. If not, it can be
%% retrieved online at http://www.erlang.org/.
%%
%% Software distributed under the License is distributed on an "AS IS"
%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
%% the License for the specific language governing rights and limitations
%% under the License.
%%
%% %CopyrightEnd%
%%
-module(snmp_test_mgr).
%%----------------------------------------------------------------------
%% This module implements a simple SNMP manager for Erlang.
%%----------------------------------------------------------------------
%% c(snmp_test_mgr).
%% snmp_test_mgr:start().
%% snmp_test_mgr:g([[sysContact,0]]).
%% snmp_test_mgr:start([{engine_id, "mbjk's engine"}, v3, {agent, "clip"}, {mibs, ["../mibs/SNMPv2-MIB"]}]).
%% snmp_test_mgr:start([{engine_id, "agentEngine"}, {user, "iwl_test"}, {dir, "mgr_conf"}, {sec_level, authPriv}, v3, {agent, "clip"}]).
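%%
%% A typical test sequence might then look like this (host, port and MIB
%% names are made-up examples, not defaults of this module):
%%
%% snmp_test_mgr:start([{agent, "localhost"}, {agent_udp, 4000},
%%                      {community, "public"}, {mibs, ["STANDARD-MIB"]}]),
%% snmp_test_mgr:g([[sysDescr, 0]]),
%% ok = snmp_test_mgr:expect(1, any),
%% snmp_test_mgr:stop().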
%% User interface
-export([start_link/1, start/1, stop/0,
d/0, discovery/0,
g/1, s/1, gn/1, gn/0, r/0, gb/3, rpl/1,
send_bytes/1,
expect/2,expect/3,expect/4,expect/6,get_response/2,
receive_response/0,
purify_oid/1,
oid_to_name/1, name_to_oid/1]).
%% Internal exports
-export([get_oid_from_varbind/1,
var_and_value_to_varbind/2, flatten_oid/2, make_vb/1]).
-export([init/1, handle_call/3, handle_cast/2, handle_info/2, terminate/2]).
-include_lib("snmp/include/snmp_types.hrl").
-include_lib("snmp/include/STANDARD-MIB.hrl").
-record(state,{dbg = true,
quiet,
parent,
timeout = 3500,
print_traps = true,
mini_mib,
packet_server,
last_sent_pdu,
last_received_pdu}).
-define(SERVER, ?MODULE).
-define(PACK_SERV, snmp_test_mgr_misc).
start_link(Options) ->
gen_server:start_link({local, ?SERVER}, ?MODULE, {Options, self()}, []).
start(Options) ->
gen_server:start({local, ?SERVER}, ?MODULE, {Options, self()}, []).
stop() ->
call(stop).
d() ->
discovery().
discovery() ->
call(discovery).
g(Oids) ->
cast({get, Oids}).
%% VarsAndValues is: {PlainOid, o|s|i, Value} (unknown) | {Oid, Value}
s(VarsAndValues) ->
cast({set, VarsAndValues}).
gn(Oids) when is_list(Oids) ->
cast({get_next, Oids});
gn(N) when is_integer(N) ->
cast({iter_get_next, N}).
gn() ->
cast(iter_get_next).
r() ->
cast(resend_pdu).
gb(NonRepeaters, MaxRepetitions, Oids) ->
cast({bulk, {NonRepeaters, MaxRepetitions, Oids}}).
rpl(RespPdu) ->
cast({response, RespPdu}).
send_bytes(Bytes) ->
cast({send_bytes, Bytes}).
purify_oid(Oid) ->
call({purify_oid, Oid}, 5000).
oid_to_name(Oid) ->
call({oid_to_name, Oid}, 5000).
name_to_oid(Name) ->
call({name_to_oid, Name}, 5000).
%%----------------------------------------------------------------------
%% Purpose: For writing test sequences
%% Y = any (varbinds) | trap | timeout | VarBinds | ErrStatus
%% Returns: ok|{error, Id, Reason}
%%----------------------------------------------------------------------
expect(Id,Y) -> echo_errors(expect_impl(Id,Y)).
expect(Id,v2trap,VBs) -> echo_errors(expect_impl(Id,v2trap,VBs));
expect(Id,report,VBs) -> echo_errors(expect_impl(Id,report,VBs));
expect(Id,{inform, Reply},VBs) ->
echo_errors(expect_impl(Id,{inform,Reply},VBs)).
expect(Id,Err,Idx,VBs) -> echo_errors(expect_impl(Id,Err,Idx,VBs)).
expect(Id,trap, Enterp, Generic, Specific, ExpectedVarbinds) ->
echo_errors(expect_impl(Id,trap,Enterp,Generic,
Specific,ExpectedVarbinds)).
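%% Examples of expectations handled below (the ids are arbitrary test ids):
%% ok = expect(1, any),       %% accept any response PDU
%% ok = expect(2, timeout),   %% expect no message within the timeout
%% ok = expect(3, noSuchName). %% expect a response with this error-status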
%%-----------------------------------------------------------------
%% Purpose: For writing test sequences
%%-----------------------------------------------------------------
get_response(Id, Vars) -> echo_errors(get_response_impl(Id, Vars)).
%%----------------------------------------------------------------------
%% Receives a response from the agent.
%% Returns: a PDU or {error, Reason}.
%% It doesn't receive traps though.
%%----------------------------------------------------------------------
receive_response() ->
receive_response(get_timeout()).
receive_response(Timeout) ->
d("await response within ~w ms",[Timeout]),
receive
{snmp_pdu, PDU} when is_record(PDU, pdu) ->
d("received PDU: ~n\t~p",[PDU]),
PDU
after Timeout ->
d("response timeout",[]),
{error, timeout}
end.
get_timeout() ->
case get(receive_response_timeout) of
Int when is_integer(Int) and (Int > 0) ->
Int;
_ ->
get_timeout(os:type())
end.
get_timeout(vxworks) -> 7000;
get_timeout(_) -> 3500.
%%----------------------------------------------------------------------
%% Receives a trap from the agent.
%% Returns: TrapPdu|{error, Reason}
%%----------------------------------------------------------------------
receive_trap(Timeout) ->
d("await trap within ~w ms",[Timeout]),
receive
{snmp_pdu, PDU} when is_record(PDU, trappdu) ->
d("received trap-PDU: ~n\t~p",[PDU]),
PDU
after Timeout ->
d("trap timeout",[]),
{error, timeout}
end.
%%----------------------------------------------------------------------
%% Options: List of
%% {agent_udp, UDPPort}, {agent, Agent}
%% Optional:
%% {community, String ("public" is default)}, quiet,
%% {mibs, List of Filenames}, {trap_udp, UDPPort (default 5000)},
%%----------------------------------------------------------------------
init({Options, CallerPid}) ->
put(sname, mgr),
put(verbosity, debug),
{A1,A2,A3} = erlang:now(),
random:seed(A1,A2,A3),
case (catch is_options_ok(Options)) of
true ->
put(debug, get_value(debug, Options, false)),
d("init -> (~p) extract options",[self()]),
PacksDbg = get_value(packet_server_debug, Options, false),
io:format("[~w] ~p -> PacksDbg: ~p~n", [?MODULE, self(), PacksDbg]),
RecBufSz = get_value(recbuf, Options, 1024),
io:format("[~w] ~p -> RecBufSz: ~p~n", [?MODULE, self(), RecBufSz]),
Mibs = get_value(mibs, Options, []),
io:format("[~w] ~p -> Mibs: ~p~n", [?MODULE, self(), Mibs]),
Udp = get_value(agent_udp, Options, 4000),
io:format("[~w] ~p -> Udp: ~p~n", [?MODULE, self(), Udp]),
User = get_value(user, Options, "initial"),
io:format("[~w] ~p -> User: ~p~n", [?MODULE, self(), User]),
EngineId = get_value(engine_id, Options, "agentEngine"),
io:format("[~w] ~p -> EngineId: ~p~n", [?MODULE, self(), EngineId]),
CtxEngineId = get_value(context_engine_id, Options, EngineId),
io:format("[~w] ~p -> CtxEngineId: ~p~n", [?MODULE, self(), CtxEngineId]),
TrapUdp = get_value(trap_udp, Options, 5000),
io:format("[~w] ~p -> TrapUdp: ~p~n", [?MODULE, self(), TrapUdp]),
Dir = get_value(dir, Options, "."),
io:format("[~w] ~p -> Dir: ~p~n", [?MODULE, self(), Dir]),
SecLevel = get_value(sec_level, Options, noAuthNoPriv),
io:format("[~w] ~p -> SecLevel: ~p~n", [?MODULE, self(), SecLevel]),
MiniMIB = snmp_mini_mib:create(Mibs),
io:format("[~w] ~p -> MiniMIB: ~p~n", [?MODULE, self(), MiniMIB]),
Version = case lists:member(v2, Options) of
true -> 'version-2';
false ->
case lists:member(v3, Options) of
true -> 'version-3';
false -> 'version-1'
end
end,
io:format("[~w] ~p -> Version: ~p~n", [?MODULE, self(), Version]),
Com = case Version of
'version-3' ->
get_value(context, Options, "");
_ ->
get_value(community, Options, "public")
end,
io:format("[~w] ~p -> Com: ~p~n", [?MODULE, self(), Com]),
VsnHdrD =
{Com, User, EngineId, CtxEngineId, mk_seclevel(SecLevel)},
io:format("[~w] ~p -> VsnHdrD: ~p~n", [?MODULE, self(), VsnHdrD]),
AgIp = case snmp_misc:assq(agent, Options) of
{value, Tuple4} when is_tuple(Tuple4) andalso
(size(Tuple4) =:= 4) ->
Tuple4;
{value, Host} when is_list(Host) ->
{ok, Ip} = snmp_misc:ip(Host),
Ip
end,
io:format("[~w] ~p -> AgIp: ~p~n", [?MODULE, self(), AgIp]),
Quiet = lists:member(quiet, Options),
io:format("[~w] ~p -> Quiet: ~p~n", [?MODULE, self(), Quiet]),
PackServ = start_packet_server(Quiet, Options, CallerPid,
AgIp, Udp, TrapUdp,
VsnHdrD, Version, Dir, RecBufSz,
PacksDbg),
d("init -> packet server: ~p",[PackServ]),
State = #state{parent = CallerPid,
quiet = Quiet,
mini_mib = MiniMIB,
packet_server = PackServ},
d("init -> done",[]),
{ok, State};
{error, Reason} ->
{stop,Reason}
end.
start_packet_server(false, _Options, _CallerPid, AgIp, Udp, TrapUdp,
VsnHdrD, Version, Dir, RecBufSz, PacksDbg) ->
d("start_packet_server -> entry", []),
?PACK_SERV:start_link_packet({msg, self()},
AgIp, Udp, TrapUdp,
VsnHdrD, Version, Dir, RecBufSz,
PacksDbg);
start_packet_server(true, Options, CallerPid, AgIp, Udp, TrapUdp,
VsnHdrD, Version, Dir, RecBufSz, PacksDbg) ->
Type = get_value(receive_type, Options, pdu),
d("start_packet_server -> entry with"
"~n CallerPid: ~p"
"~n when"
"~n Type: ~p",[CallerPid, Type]),
?PACK_SERV:start_link_packet({Type, CallerPid},
AgIp, Udp, TrapUdp,
VsnHdrD, Version, Dir, RecBufSz,
PacksDbg).
is_options_ok([{mibs,List}|Opts]) when is_list(List) ->
is_options_ok(Opts);
is_options_ok([quiet|Opts]) ->
is_options_ok(Opts);
is_options_ok([{agent,_}|Opts]) ->
is_options_ok(Opts);
is_options_ok([{agent_udp,Int}|Opts]) when is_integer(Int) ->
is_options_ok(Opts);
is_options_ok([{trap_udp,Int}|Opts]) when is_integer(Int) ->
is_options_ok(Opts);
is_options_ok([{community,List}|Opts]) when is_list(List) ->
is_options_ok(Opts);
is_options_ok([{dir,List}|Opts]) when is_list(List) ->
is_options_ok(Opts);
is_options_ok([{sec_level,noAuthNoPriv}|Opts]) ->
is_options_ok(Opts);
is_options_ok([{sec_level,authNoPriv}|Opts]) ->
is_options_ok(Opts);
is_options_ok([{sec_level,authPriv}|Opts]) ->
is_options_ok(Opts);
is_options_ok([{context,List}|Opts]) when is_list(List) ->
is_options_ok(Opts);
is_options_ok([{user,List}|Opts]) when is_list(List) ->
is_options_ok(Opts);
is_options_ok([{engine_id,List}|Opts]) when is_list(List) ->
is_options_ok(Opts);
is_options_ok([{context_engine_id,List}|Opts]) when is_list(List) ->
is_options_ok(Opts);
is_options_ok([v1|Opts]) ->
is_options_ok(Opts);
is_options_ok([v2|Opts]) ->
is_options_ok(Opts);
is_options_ok([v3|Opts]) ->
is_options_ok(Opts);
is_options_ok([{debug,Bool}|Opts]) ->
case is_bool(Bool) of
ok ->
is_options_ok(Opts);
error ->
{error, {bad_option, debug, Bool}}
end;
is_options_ok([{packet_server_debug,Bool}|Opts]) ->
case is_bool(Bool) of
ok ->
is_options_ok(Opts);
error ->
{error, {bad_option, packet_server_debug, Bool}}
end;
is_options_ok([{recbuf,Sz}|Opts]) when (0 < Sz) and (Sz =< 65535) ->
is_options_ok(Opts);
is_options_ok([InvOpt|_]) ->
{error,{invalid_option,InvOpt}};
is_options_ok([]) -> true.
is_bool(true) -> ok;
is_bool(false) -> ok;
is_bool(_) -> error.
mk_seclevel(noAuthNoPriv) -> 0;
mk_seclevel(authNoPriv) -> 1;
mk_seclevel(authPriv) -> 3.
handle_call({purify_oid, Oid}, _From, #state{mini_mib = MiniMib} = State) ->
d("handle_call -> purify_oid for ~p",[Oid]),
Reply = (catch purify_oid(Oid, MiniMib)),
{reply, Reply, State};
handle_call({find_pure_oid, XOid}, _From, State) ->
d("handle_call -> find_pure_oid for ~p",[XOid]),
{reply, catch flatten_oid(XOid, State#state.mini_mib), State};
handle_call({oid_to_name, Oid}, _From, State) ->
d("handle_call -> oid_to_name for Oid: ~p",[Oid]),
Reply =
case lists:keysearch(Oid, 1, State#state.mini_mib) of
{value, {_Oid, Name, _Type}} ->
{ok, Name};
false ->
{error, {no_such_oid, Oid}}
end,
{reply, Reply, State};
handle_call({name_to_oid, Name}, _From, State) ->
d("handle_call -> name_to_oid for Name: ~p",[Name]),
Reply =
case lists:keysearch(Name, 2, State#state.mini_mib) of
{value, {Oid, _Name, _Type}} ->
{ok, Oid};
false ->
{error, {no_such_name, Name}}
end,
{reply, Reply, State};
handle_call(stop, _From, #state{mini_mib = MiniMIB} = State) ->
d("handle_call -> stop request",[]),
snmp_mini_mib:delete(MiniMIB),
{stop, normal, ok, State#state{mini_mib = undefined}};
handle_call(discovery, _From, State) ->
d("handle_call -> discovery",[]),
{Reply, NewState} = execute_discovery(State),
{reply, Reply, NewState}.
handle_cast({get, Oids}, State) ->
d("handle_cast -> get request for ~p", [Oids]),
{noreply, execute_request(get, Oids, State)};
handle_cast({set, VariablesAndValues}, State) ->
d("handle_cast -> set request for ~p", [VariablesAndValues]),
{noreply, execute_request(set, VariablesAndValues, State)};
handle_cast({get_next, Oids}, State) ->
d("handle_cast -> get-next request for ~p", [Oids]),
{noreply, execute_request(get_next, Oids, State)};
handle_cast(iter_get_next, State)
when is_record(State#state.last_received_pdu, pdu) ->
d("handle_cast -> iter_get_next request", []),
PrevPDU = State#state.last_received_pdu,
Oids = [get_oid_from_varbind(Vb) || Vb <- PrevPDU#pdu.varbinds],
{noreply, execute_request(get_next, Oids, State)};
handle_cast(iter_get_next, State) ->
?PACK_SERV:error("[Iterated get-next] No Response PDU to "
"start iterating from.", []),
{noreply, State};
handle_cast({iter_get_next, N}, State) ->
d("handle_cast -> iter_get_next(~p) request",[N]),
if
is_record(State#state.last_received_pdu, pdu) ->
PDU = get_next_iter_impl(N, State#state.last_received_pdu,
State#state.mini_mib,
State#state.packet_server),
{noreply, State#state{last_received_pdu = PDU}};
true ->
?PACK_SERV:error("[Iterated get-next] No Response PDU to "
"start iterating from.", []),
{noreply, State}
end;
handle_cast(resend_pdu, #state{last_sent_pdu = PDU} = State) ->
d("handle_cast -> resend_pdu request when"
"~n PDU = ~p", [PDU]),
send_pdu(PDU#pdu{request_id = make_request_id()},
State#state.mini_mib,
State#state.packet_server),
{noreply, State};
handle_cast({bulk, Args}, State) ->
d("handle_bulk -> bulk request for ~p", [Args]),
{noreply, execute_request(bulk, Args, State)};
handle_cast({response, RespPdu}, State) ->
d("handle_cast -> response request with ~p", [RespPdu]),
?PACK_SERV:send_pdu(RespPdu, State#state.packet_server),
{noreply, State};
handle_cast({send_bytes, Bytes}, State) ->
d("handle_cast -> send-bytes request for ~p bytes", [sizeOf(Bytes)]),
?PACK_SERV:send_bytes(Bytes, State#state.packet_server),
{noreply, State};
handle_cast(Msg, State) ->
d("handle_cast -> unknown message: "
"~n ~p", [Msg]),
{noreply, State}.
handle_info({snmp_msg, Msg, Ip, Udp}, State) ->
io:format("* Got PDU: ~s", [?PACK_SERV:format_hdr(Msg)]),
PDU = ?PACK_SERV:get_pdu(Msg),
echo_pdu(PDU, State#state.mini_mib),
case PDU#pdu.type of
'inform-request' ->
%% Generate a response...
RespPDU = PDU#pdu{type = 'get-response',
error_status = noError,
error_index = 0},
RespMsg = ?PACK_SERV:set_pdu(Msg, RespPDU),
?PACK_SERV:send_msg(RespMsg, State#state.packet_server, Ip, Udp);
_Else ->
ok
end,
{noreply, State#state{last_received_pdu = PDU}};
handle_info(Info, State) ->
d("handle_info -> unknown info: "
"~n ~p", [Info]),
{noreply, State}.
terminate(Reason, State) ->
d("terminate -> with Reason: ~n\t~p",[Reason]),
?PACK_SERV:stop(State#state.packet_server).
%%----------------------------------------------------------------------
%% Returns: A new State
%%----------------------------------------------------------------------
execute_discovery(State) ->
Pdu = make_discovery_pdu(),
Reply = ?PACK_SERV:send_discovery_pdu(Pdu, State#state.packet_server),
{Reply, State#state{last_sent_pdu = Pdu}}.
execute_request(Operation, Data, State) ->
case (catch make_pdu(Operation, Data, State#state.mini_mib)) of
{error, {Format, Data2}} ->
report_error(State, Format, Data2),
State;
{error, _Reason} ->
State;
PDU when is_record(PDU, pdu) ->
send_pdu(PDU, State#state.mini_mib, State#state.packet_server),
State#state{last_sent_pdu = PDU}
end.
report_error(#state{quiet = true, parent = Pid}, Format, Args) ->
Reason = lists:flatten(io_lib:format(Format, Args)),
Pid ! {oid_error, Reason};
report_error(_, Format, Args) ->
?PACK_SERV:error(Format, Args).
get_oid_from_varbind(#varbind{oid = Oid}) -> Oid.
send_pdu(PDU, _MiniMIB, PackServ) ->
?PACK_SERV:send_pdu(PDU, PackServ).
%%----------------------------------------------------------------------
%% Purpose: Unnesting of oids like [myTable, 3, 4, "hej", 45] to
%% [1,2,3,3,4,104,101,106,45]
%%----------------------------------------------------------------------
purify_oid([A|T], MiniMib) when is_atom(A) ->
Oid2 =
case snmp_mini_mib:oid(MiniMib, A) of
false ->
throw({error, {unknown_aliasname, A}});
Oid ->
lists:flatten([Oid|T])
end,
{ok, verify_pure_oid(Oid2)};
purify_oid(L, _) when is_list(L) ->
{ok, verify_pure_oid(lists:flatten(L))};
purify_oid(X, _) ->
{error, {invalid_oid, X}}.
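%% Examples (assuming the loaded mini-MIB maps sysDescr to [1,3,6,1,2,1,1,1]):
%% purify_oid([sysDescr, 0], MiniMib) -> {ok, [1,3,6,1,2,1,1,1,0]}
%% purify_oid([1,3,a], MiniMib) -> throws {error, {not_pure_oid, a}}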
verify_pure_oid([]) ->
[];
verify_pure_oid([H | T]) when is_integer(H) and (H >= 0) ->
[H | verify_pure_oid(T)];
verify_pure_oid([H | _]) ->
throw({error, {not_pure_oid, H}}).
flatten_oid(XOid, DB) ->
Oid = case XOid of
[A|T] when is_atom(A) ->
[remove_atom(A, DB)|T];
L when is_list(L) ->
XOid;
Shit ->
throw({error,
{"Invalid oid, not a list of integers: ~w", [Shit]}})
end,
check_is_pure_oid(lists:flatten(Oid)).
remove_atom(AliasName, DB) when is_atom(AliasName) ->
case snmp_mini_mib:oid(DB, AliasName) of
false ->
throw({error, {"Unknown aliasname in oid: ~w", [AliasName]}});
Oid ->
Oid
end;
remove_atom(X, _DB) ->
X.
%%----------------------------------------------------------------------
%% Throws if not a list of integers
%%----------------------------------------------------------------------
check_is_pure_oid([]) -> [];
check_is_pure_oid([X | T]) when is_integer(X) and (X >= 0) ->
[X | check_is_pure_oid(T)];
check_is_pure_oid([X | _T]) ->
throw({error, {"Invalid oid, it contains a non-integer: ~w", [X]}}).
get_next_iter_impl(0, PrevPDU, _MiniMIB, _PackServ) ->
PrevPDU;
get_next_iter_impl(N, PrevPDU, MiniMIB, PackServ) ->
Oids = [get_oid_from_varbind(Vb) || Vb <- PrevPDU#pdu.varbinds],
PDU = make_pdu(get_next, Oids, MiniMIB),
send_pdu(PDU, MiniMIB, PackServ),
case receive_response() of
{error, timeout} ->
io:format("(timeout)~n"),
get_next_iter_impl(N, PrevPDU, MiniMIB, PackServ);
{error, _Reason} ->
PrevPDU;
RPDU when is_record(RPDU, pdu) ->
io:format("(~w)", [N]),
echo_pdu(RPDU, MiniMIB),
get_next_iter_impl(N-1, RPDU, MiniMIB, PackServ)
end.
%%--------------------------------------------------
%% Used to resend a PDU. Takes the old PDU and
%% generates a fresh one (with a new requestID).
%%--------------------------------------------------
make_pdu(set, VarsAndValues, MiniMIB) ->
VBs = [var_and_value_to_varbind(VAV, MiniMIB) || VAV <- VarsAndValues],
make_pdu_impl(set, VBs);
make_pdu(bulk, {NonRepeaters, MaxRepetitions, Oids}, MiniMIB) ->
Foids = [flatten_oid(Oid, MiniMIB) || Oid <- Oids],
#pdu{type = 'get-bulk-request',
request_id = make_request_id(),
error_status = NonRepeaters,
error_index = MaxRepetitions,
varbinds = [make_vb(Oid) || Oid <- Foids]};
make_pdu(Operation, Oids, MiniMIB) ->
Foids = [flatten_oid(Oid, MiniMIB) || Oid <- Oids],
make_pdu_impl(Operation, Foids).
make_pdu_impl(get, Oids) ->
#pdu{type = 'get-request',
request_id = make_request_id(),
error_status = noError,
error_index = 0,
varbinds = [make_vb(Oid) || Oid <- Oids]};
make_pdu_impl(get_next, Oids) ->
#pdu{type = 'get-next-request',
request_id = make_request_id(),
error_status = noError,
error_index = 0,
varbinds = [make_vb(Oid) || Oid <- Oids]};
make_pdu_impl(set, Varbinds) ->
#pdu{type = 'set-request',
request_id = make_request_id(),
error_status = noError,
error_index = 0,
varbinds = Varbinds}.
make_discovery_pdu() ->
make_pdu_impl(get, []).
var_and_value_to_varbind({Oid, Type, Value}, MiniMIB) ->
Oid2 = flatten_oid(Oid, MiniMIB),
#varbind{oid = Oid2,
variabletype = char_to_type(Type),
value = Value};
var_and_value_to_varbind({XOid, Value}, MiniMIB) ->
Oid = flatten_oid(XOid, MiniMIB),
#varbind{oid = Oid,
variabletype = snmp_mini_mib:type(MiniMIB, Oid),
value = Value}.
char_to_type(o) ->
'OBJECT IDENTIFIER';
char_to_type(i) ->
'INTEGER';
char_to_type(u) ->
'Unsigned32';
char_to_type(g) -> % Gauge, Gauge32
'Unsigned32';
char_to_type(s) ->
'OCTET STRING'.
make_vb(Oid) ->
#varbind{oid = Oid, variabletype = 'NULL', value = 'NULL'}.
make_request_id() ->
random:uniform(16#FFFFFFF-1).
echo_pdu(PDU, MiniMIB) ->
io:format("~s", [snmp_misc:format_pdu(PDU, MiniMIB)]).
%%----------------------------------------------------------------------
%% Test Sequence
%%----------------------------------------------------------------------
echo_errors({error, Id, {ExpectedFormat, ExpectedData}, {Format, Data}})->
io:format("* Unexpected Behaviour * Id: ~w.~n"
" Expected: " ++ ExpectedFormat ++ "~n"
" Got: " ++ Format ++ "~n",
[Id] ++ ExpectedData ++ Data),
{error, Id, {ExpectedFormat, ExpectedData}, {Format, Data}};
echo_errors(ok) -> ok;
echo_errors({ok, Val}) -> {ok, Val}.
get_response_impl(Id, Vars) ->
case receive_response() of
#pdu{type = 'get-response',
error_status = noError,
error_index = 0,
varbinds = VBs} ->
match_vars(Id, find_pure_oids2(Vars), VBs, []);
#pdu{type = Type2,
request_id = ReqId,
error_status = Err2,
error_index = Index2} ->
{error,
Id,
{"Type: ~w, ErrStat: ~w, Idx: ~w, RequestId: ~w",
['get-response', noError, 0, ReqId]},
{"Type: ~w, ErrStat: ~w, Idx: ~w",
[Type2, Err2, Index2]}};
{error, Reason} ->
format_reason(Id, Reason)
end.
%%----------------------------------------------------------------------
%% Returns: ok | {error, Id, {ExpectedFormat, ExpectedData}, {Format, Data}}
%%----------------------------------------------------------------------
expect_impl(Id, any) ->
io:format("expect_impl(~w, any) -> entry ~n", [Id]),
case receive_response() of
PDU when is_record(PDU, pdu) -> ok;
{error, Reason} -> format_reason(Id, Reason)
end;
expect_impl(Id, return) ->
io:format("expect_impl(~w, return) -> entry ~n", [Id]),
case receive_response() of
PDU when is_record(PDU, pdu) -> {ok, PDU};
{error, Reason} -> format_reason(Id, Reason)
end;
expect_impl(Id, trap) ->
io:format("expect_impl(~w, trap) -> entry ~n", [Id]),
case receive_trap(3500) of
PDU when is_record(PDU, trappdu) -> ok;
{error, Reason} -> format_reason(Id, Reason)
end;
expect_impl(Id, timeout) ->
io:format("expect_impl(~w, timeout) -> entry ~n", [Id]),
receive
X ->
io:format("expect_impl(~w, timeout) -> "
"received unexpected message: ~n~p~n", [Id, X]),
{error, Id, {"Timeout", []}, {"Message ~w", [X]}}
after 3500 ->
ok
end;
expect_impl(Id, Err) when is_atom(Err) ->
io:format("expect_impl(~w, ~w) -> entry ~n", [Id, Err]),
case receive_response() of
#pdu{error_status = Err} ->
ok;
#pdu{request_id = ReqId,
error_status = OtherErr} ->
io:format("expect_impl(~w, ~w) -> "
"received pdu (~w) with unexpected error-status: "
"~n~p~n", [Id, Err, ReqId, OtherErr]),
{error, Id, {"ErrorStatus: ~w, RequestId: ~w", [Err,ReqId]},
{"ErrorStatus: ~w", [OtherErr]}};
{error, Reason} ->
format_reason(Id, Reason)
end;
expect_impl(Id, ExpectedVarbinds) when is_list(ExpectedVarbinds) ->
io:format("expect_impl(~w) -> entry with"
"~n ExpectedVarbinds: ~p~n", [Id, ExpectedVarbinds]),
case receive_response() of
#pdu{type = 'get-response',
error_status = noError,
error_index = 0,
varbinds = VBs} ->
io:format("expect_impl(~w) -> received pdu with"
"~n VBs: ~p~n", [Id, VBs]),
check_vars(Id, find_pure_oids(ExpectedVarbinds), VBs);
#pdu{type = Type2,
request_id = ReqId,
error_status = Err2,
error_index = Index2} ->
io:format("expect_impl(~w) -> received unexpected pdu with"
"~n Type2: ~p"
"~n ReqId: ~p"
"~n Err2: ~p"
"~n Index2: ~p"
"~n", [Id, Type2, ReqId, Err2, Index2]),
{error, Id, {"Type: ~w, ErrStat: ~w, Idx: ~w, RequestId: ~w",
['get-response', noError, 0, ReqId]},
{"Type: ~w, ErrStat: ~w, Idx: ~w", [Type2, Err2, Index2]}};
{error, Reason} ->
format_reason(Id, Reason)
end.
expect_impl(Id, v2trap, ExpectedVarbinds) when is_list(ExpectedVarbinds) ->
io:format("expect_impl(~w, v2trap) -> entry with"
"~n ExpectedVarbinds: ~p~n", [Id, ExpectedVarbinds]),
case receive_response() of
#pdu{type = 'snmpv2-trap',
error_status = noError,
error_index = 0,
varbinds = VBs} ->
io:format("expect_impl(~w, v2trap) -> received pdu with"
"~n VBs: ~p~n", [Id, VBs]),
check_vars(Id, find_pure_oids(ExpectedVarbinds), VBs);
#pdu{type = Type2,
request_id = ReqId,
error_status = Err2,
error_index = Index2} ->
io:format("expect_impl(~w, v2trap) -> received unexpected pdu with"
"~n Type2: ~p"
"~n ReqId: ~p"
"~n Err2: ~p"
"~n Index2: ~p"
"~n", [Id, Type2, ReqId, Err2, Index2]),
{error, Id, {"Type: ~w, ErrStat: ~w, Idx: ~w, RequestId: ~w",
['snmpv2-trap', noError, 0, ReqId]},
{"Type: ~w, ErrStat: ~w, Idx: ~w", [Type2, Err2, Index2]}};
{error, Reason} ->
format_reason(Id, Reason)
end;
expect_impl(Id, report, ExpectedVarbinds) when is_list(ExpectedVarbinds) ->
io:format("expect_impl(~w, report) -> entry with"
"~n ExpectedVarbinds: ~p~n", [Id, ExpectedVarbinds]),
case receive_response() of
#pdu{type = 'report',
error_status = noError,
error_index = 0,
varbinds = VBs} ->
io:format("expect_impl(~w, report) -> received pdu with"
"~n VBs: ~p~n", [Id, VBs]),
check_vars(Id, find_pure_oids(ExpectedVarbinds), VBs);
#pdu{type = Type2,
request_id = ReqId,
error_status = Err2,
error_index = Index2} ->
io:format("expect_impl(~w, report) -> received unexpected pdu with"
"~n Type2: ~p"
"~n ReqId: ~p"
"~n Err2: ~p"
"~n Index2: ~p"
"~n", [Id, Type2, ReqId, Err2, Index2]),
{error, Id, {"Type: ~w, ErrStat: ~w, Idx: ~w, RequestId: ~w",
[report, noError, 0, ReqId]},
{"Type: ~w, ErrStat: ~w, Idx: ~w", [Type2, Err2, Index2]}};
{error, Reason} ->
format_reason(Id, Reason)
end;
expect_impl(Id, {inform, Reply}, ExpectedVarbinds)
when is_list(ExpectedVarbinds) ->
io:format("expect_impl(~w, inform) -> entry with"
"~n Reply: ~p"
"~n ExpectedVarbinds: ~p"
"~n", [Id, Reply, ExpectedVarbinds]),
Resp = receive_response(),
case Resp of
#pdu{type = 'inform-request',
error_status = noError,
error_index = 0,
varbinds = VBs} ->
io:format("expect_impl(~w, inform) -> received pdu with"
"~n VBs: ~p~n", [Id, VBs]),
case check_vars(Id, find_pure_oids(ExpectedVarbinds), VBs) of
ok when (Reply == true) ->
io:format("expect_impl(~w, inform) -> send ok response"
"~n", [Id]),
RespPDU = Resp#pdu{type = 'get-response',
error_status = noError,
error_index = 0},
?MODULE:rpl(RespPDU),
ok;
ok when (element(1, Reply) == error) ->
io:format("expect_impl(~w, inform) -> send error response"
"~n", [Id]),
{error, Status, Index} = Reply,
RespPDU = Resp#pdu{type = 'get-response',
error_status = Status,
error_index = Index},
?MODULE:rpl(RespPDU),
ok;
ok when (Reply == false) ->
io:format("expect_impl(~w, inform) -> no response sent"
"~n", [Id]),
ok;
Else ->
io:format("expect_impl(~w, inform) -> "
"~n Else: ~p"
"~n", [Id, Else]),
Else
end;
#pdu{type = Type2,
request_id = ReqId,
error_status = Err2,
error_index = Index2} ->
io:format("expect_impl(~w, inform) -> received unexpected pdu with"
"~n Type2: ~p"
"~n ReqId: ~p"
"~n Err2: ~p"
"~n Index2: ~p"
"~n", [Id, Type2, ReqId, Err2, Index2]),
{error, Id, {"Type: ~w, ErrStat: ~w, Idx: ~w, RequestId: ~w",
['inform-request', noError, 0, ReqId]},
{"Type: ~w, ErrStat: ~w, Idx: ~w", [Type2, Err2, Index2]}};
{error, Reason} ->
io:format("expect_impl(~w, inform) -> receive failed"
"~n Reason: ~p"
"~n", [Id, Reason]),
format_reason(Id, Reason)
end.
expect_impl(Id, Err, Index, any) ->
io:format("expect_impl(~w, any) -> entry with"
"~n Err: ~p"
"~n Index: ~p"
"~n", [Id, Err, Index]),
case receive_response() of
#pdu{type = 'get-response',
error_status = Err,
error_index = Index} ->
io:format("expect_impl(~w, any) -> received expected pdu"
"~n", [Id]),
ok;
#pdu{type = 'get-response', error_status = Err} when (Index == any) ->
io:format("expect_impl(~w, any) -> received expected pdu (any)"
"~n", [Id]),
ok;
#pdu{type = 'get-response',
request_id = ReqId,
error_status = Err,
error_index = Idx} when is_list(Index) ->
io:format("expect_impl(~w, any) -> received pdu: "
"~n ReqId: ~p"
"~n Err: ~p"
"~n Idx: ~p"
"~n", [Id, ReqId, Err, Idx]),
case lists:member(Idx, Index) of
true ->
ok;
false ->
{error, Id, {"ErrStat: ~w, Idx: ~w, RequestId: ~w",
[Err, Index, ReqId]},
{"ErrStat: ~w, Idx: ~w", [Err, Idx]}}
end;
#pdu{type = Type2,
request_id = ReqId,
error_status = Err2,
error_index = Index2} ->
io:format("expect_impl(~w, any) -> received unexpected pdu: "
"~n Type2: ~p"
"~n ReqId: ~p"
"~n Err2: ~p"
"~n Index2: ~p"
"~n", [Id, Type2, ReqId, Err2, Index2]),
{error, Id, {"Type: ~w, ErrStat: ~w, Idx: ~w, RequestId: ~w",
['get-response', Err, Index, ReqId]},
{"Type: ~w, ErrStat: ~w, Idx: ~w", [Type2, Err2, Index2]}};
{error, Reason} ->
format_reason(Id, Reason)
end;
expect_impl(Id, Err, Index, ExpectedVarbinds) ->
io:format("expect_impl(~w) -> entry with"
"~n Err: ~p"
"~n Index: ~p"
"~n ExpectedVarbinds: ~p"
"~n", [Id, Err, Index, ExpectedVarbinds]),
PureVBs = find_pure_oids(ExpectedVarbinds),
case receive_response() of
#pdu{type = 'get-response',
error_status = Err,
error_index = Index,
varbinds = VBs} ->
check_vars(Id, PureVBs, VBs);
#pdu{type = 'get-response',
error_status = Err,
varbinds = VBs} when (Index == any) ->
check_vars(Id, PureVBs, VBs);
#pdu{type = 'get-response',
request_id = ReqId,
error_status = Err,
error_index = Idx,
varbinds = VBs} when is_list(Index) ->
case lists:member(Idx, Index) of
true ->
check_vars(Id, PureVBs, VBs);
false ->
{error,Id,
{"ErrStat: ~w, Idx: ~w, Varbinds: ~w, RequestId: ~w",
[Err,Index,PureVBs,ReqId]},
{"ErrStat: ~w, Idx: ~w, Varbinds: ~w",
[Err,Idx,VBs]}}
end;
#pdu{type = Type2,
request_id = ReqId,
error_status = Err2,
error_index = Index2,
varbinds = VBs} ->
{error,Id,
{"Type: ~w, ErrStat: ~w, Idx: ~w, Varbinds: ~w, RequestId: ~w",
['get-response',Err,Index,PureVBs,ReqId]},
{"Type: ~w, ErrStat: ~w Idx: ~w Varbinds: ~w",
[Type2,Err2,Index2,VBs]}};
{error, Reason} ->
format_reason(Id, Reason)
end.
expect_impl(Id, trap, Enterp, Generic, Specific, ExpectedVarbinds) ->
PureE = find_pure_oid(Enterp),
case receive_trap(3500) of
#trappdu{enterprise = PureE,
generic_trap = Generic,
specific_trap = Specific,
varbinds = VBs} ->
check_vars(Id, find_pure_oids(ExpectedVarbinds), VBs);
#trappdu{enterprise = Ent2,
generic_trap = G2,
specific_trap = Spec2,
varbinds = VBs} ->
{error, Id,
{"Enterprise: ~w, Generic: ~w, Specific: ~w, Varbinds: ~w",
[PureE, Generic, Specific, ExpectedVarbinds]},
{"Enterprise: ~w, Generic: ~w, Specific: ~w, Varbinds: ~w",
[Ent2, G2, Spec2, VBs]}};
{error, Reason} ->
format_reason(Id, Reason)
end.
format_reason(Id, Reason) ->
{error, Id, {"?", []}, {"~w", [Reason]}}.
%%----------------------------------------------------------------------
%% Args: Id, ExpectedVarbinds, Varbinds
%% Returns: ok
%% Fails: if not ok
%%----------------------------------------------------------------------
check_vars(_Id,[], []) ->
ok;
check_vars(Id,Vars, []) ->
{error, Id, {"More Varbinds (~w)", [Vars]}, {"Too few", []}};
check_vars(Id,[], Varbinds) ->
{error,Id, {"Fewer Varbinds", []}, {"Too many (~w)", [Varbinds]}};
check_vars(Id,[{_XOid, any} | Vars], [#varbind{oid = _Oid} |Vbs]) ->
check_vars(Id,Vars, Vbs);
check_vars(Id,[{Oid, Val} | Vars], [#varbind{oid = Oid, value = Val} |Vbs]) ->
check_vars(Id,Vars, Vbs);
check_vars(Id,[{Oid, Val} | _], [#varbind{oid = Oid, value = Val2} |_]) ->
{error, Id, {" Varbind: ~w = ~w", [Oid, Val]}, {"Value: ~w", [Val2]}};
check_vars(Id,[{Oid, _Val} | _], [#varbind{oid = Oid2, value = _Val2} |_]) ->
{error, Id, {"Oid: ~w", [Oid]}, {"Oid: ~w", [Oid2]}}.
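%% Sketch (hypothetical values): a {Oid, any} expectation matches every value
%% for that OID, so
%%   check_vars(1, [{[1,3,6,1,2,1,1,5,0], any}],
%%              [#varbind{oid = [1,3,6,1,2,1,1,5,0], value = "x"}])
%% returns ok, while a differing value or OID yields an {error, ...} tuple.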
match_vars(Id, [Oid|T], [#varbind{oid = Oid, value = Value} | Vbs], Res) ->
match_vars(Id, T, Vbs, [Value | Res]);
match_vars(_Id, [], [], Res) ->
{ok, lists:reverse(Res)};
match_vars(Id, [Oid | _], [#varbind{oid = Oid2}], _Res) ->
{error, Id, {" Oid: ~w", [Oid]}, {"Oid2: ~w", [Oid2]}};
match_vars(Id, Vars, [], _Res) ->
{error, Id, {"More Varbinds (~w)", [Vars]}, {"Too few", []}};
match_vars(Id, [], Varbinds, _Res) ->
{error,Id, {"Fewer Varbinds", []}, {"Too many (~w)", [Varbinds]}}.
find_pure_oids([]) -> [];
find_pure_oids([{XOid, Q}|T]) ->
[{find_pure_oid(XOid), Q} | find_pure_oids(T)].
find_pure_oids2([]) -> [];
find_pure_oids2([XOid|T]) ->
[find_pure_oid(XOid) | find_pure_oids2(T)].
%%----------------------------------------------------------------------
%% Returns: Oid
%% Fails: malformed oids
%%----------------------------------------------------------------------
find_pure_oid(XOid) ->
case gen_server:call(?MODULE, {find_pure_oid, XOid}, infinity) of
{error, {Format, Data}} ->
ok = io:format(Format, Data),
exit(malformed_oid);
Oid when is_list(Oid) -> Oid
end.
get_value(Opt, Opts, Default) ->
case snmp_misc:assq(Opt,Opts) of
{value, C} -> C;
false -> Default
end.
%%----------------------------------------------------------------------
call(Req) ->
call(Req, infinity).
call(Req, To) ->
gen_server:call(?SERVER, Req, To).
cast(Msg) ->
gen_server:cast(?SERVER, Msg).
%%----------------------------------------------------------------------
%% Debug
%%----------------------------------------------------------------------
sizeOf(L) when is_list(L) ->
length(lists:flatten(L));
sizeOf(B) when is_binary(B) ->
size(B).
d(F,A) -> d(get(debug),F,A).
d(true,F,A) ->
io:format("*** [~s] MGR_DBG *** " ++ F ++ "~n",
[format_timestamp(now())|A]);
d(_,_F,_A) ->
ok.
format_timestamp({_N1, _N2, N3} = Now) ->
{Date, Time} = calendar:now_to_datetime(Now),
{YYYY,MM,DD} = Date,
{Hour,Min,Sec} = Time,
FormatDate =
io_lib:format("~.4w:~.2.0w:~.2.0w ~.2.0w:~.2.0w:~.2.0w.~w",
[YYYY,MM,DD,Hour,Min,Sec,round(N3/1000)]),
lists:flatten(FormatDate).
| null | https://raw.githubusercontent.com/mfoemmel/erlang-otp/9c6fdd21e4e6573ca6f567053ff3ac454d742bc2/lib/snmp/test/snmp_test_mgr.erl | erlang |
%CopyrightBegin%
compliance with the License. You should have received a copy of the
Erlang Public License along with this software. If not, it can be
retrieved online at http://www.erlang.org/.
basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
the License for the specific language governing rights and limitations
under the License.
%CopyrightEnd%
----------------------------------------------------------------------
----------------------------------------------------------------------
c(snmp_test_mgr).
snmp_test_mgr:start().
snmp_test_mgr:g([[sysContact,0]]).
User interface
Internal exports
----------------------------------------------------------------------
Purpose: For writing test sequences
Returns: ok|{error, Id, Reason}
----------------------------------------------------------------------
-----------------------------------------------------------------
Purpose: For writing test sequences
-----------------------------------------------------------------
----------------------------------------------------------------------
Receives a response from the agent.
It doesn't receive traps though.
----------------------------------------------------------------------
----------------------------------------------------------------------
Receives a trap from the agent.
Returns: TrapPdu|{error, Reason}
----------------------------------------------------------------------
----------------------------------------------------------------------
Options: List of
Optional:
{community, String ("public" is default}, quiet,
----------------------------------------------------------------------
Generate a response...
----------------------------------------------------------------------
----------------------------------------------------------------------
----------------------------------------------------------------------
[1,2,3,3,4,104,101,106,45]
----------------------------------------------------------------------
----------------------------------------------------------------------
Throws if not a list of integers
----------------------------------------------------------------------
--------------------------------------------------
generates a fresh one (with a new requestID).
--------------------------------------------------
----------------------------------------------------------------------
Test Sequence
----------------------------------------------------------------------
----------------------------------------------------------------------
----------------------------------------------------------------------
----------------------------------------------------------------------
Returns: ok
Fails: if not ok
----------------------------------------------------------------------
----------------------------------------------------------------------
Returns: Oid
Fails: malformed oids
----------------------------------------------------------------------
----------------------------------------------------------------------
----------------------------------------------------------------------
Debug
---------------------------------------------------------------------- | Copyright Ericsson AB 1996 - 2009 . All Rights Reserved .
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
%% compliance with the License.
%%
%% Software distributed under the License is distributed on an "AS IS"
%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied.
-module(snmp_test_mgr).
%% This module implements a simple SNMP manager for Erlang.
%% Example start-ups (values are site-specific; the dir/mibs option keys
%% follow the options accepted by init/1 below):
%%   snmp_test_mgr:start([{engine_id, "mbjk's engine"}, v3, {agent, "clip"},
%%                        {mibs, ["../mibs/SNMPv2-MIB"]}]).
%%   snmp_test_mgr:start([{engine_id, "agentEngine"}, {user, "iwl_test"},
%%                        {dir, "mgr_conf"}, {sec_level, authPriv}, v3,
%%                        {agent, "clip"}]).
-export([start_link/1, start/1, stop/0,
d/0, discovery/0,
g/1, s/1, gn/1, gn/0, r/0, gb/3, rpl/1,
send_bytes/1,
expect/2,expect/3,expect/4,expect/6,get_response/2,
receive_response/0,
purify_oid/1,
oid_to_name/1, name_to_oid/1]).
-export([get_oid_from_varbind/1,
var_and_value_to_varbind/2, flatten_oid/2, make_vb/1]).
-export([init/1, handle_call/3, handle_cast/2, handle_info/2, terminate/2]).
-include_lib("snmp/include/snmp_types.hrl").
-include_lib("snmp/include/STANDARD-MIB.hrl").
-record(state,{dbg = true,
quiet,
parent,
timeout = 3500,
print_traps = true,
mini_mib,
packet_server,
last_sent_pdu,
last_received_pdu}).
-define(SERVER, ?MODULE).
-define(PACK_SERV, snmp_test_mgr_misc).
start_link(Options) ->
gen_server:start_link({local, ?SERVER}, ?MODULE, {Options, self()}, []).
start(Options) ->
gen_server:start({local, ?SERVER}, ?MODULE, {Options, self()}, []).
stop() ->
call(stop).
d() ->
discovery().
discovery() ->
call(discovery).
g(Oids) ->
cast({get, Oids}).
%% VarsAndValues is: {PlainOid, o|s|i, Value} (unknown) | {Oid, Value}
s(VarsAndValues) ->
cast({set, VarsAndValues}).
gn(Oids) when is_list(Oids) ->
cast({get_next, Oids});
gn(N) when is_integer(N) ->
cast({iter_get_next, N}).
gn() ->
cast(iter_get_next).
r() ->
cast(resend_pdu).
gb(NonRepeaters, MaxRepetitions, Oids) ->
cast({bulk, {NonRepeaters, MaxRepetitions, Oids}}).
rpl(RespPdu) ->
cast({response, RespPdu}).
send_bytes(Bytes) ->
cast({send_bytes, Bytes}).
purify_oid(Oid) ->
call({purify_oid, Oid}, 5000).
oid_to_name(Oid) ->
call({oid_to_name, Oid}, 5000).
name_to_oid(Name) ->
call({name_to_oid, Name}, 5000).
%% Y = any (varbinds) | trap | timeout | VarBinds | ErrStatus
expect(Id,Y) -> echo_errors(expect_impl(Id,Y)).
expect(Id,v2trap,VBs) -> echo_errors(expect_impl(Id,v2trap,VBs));
expect(Id,report,VBs) -> echo_errors(expect_impl(Id,report,VBs));
expect(Id,{inform, Reply},VBs) ->
echo_errors(expect_impl(Id,{inform,Reply},VBs)).
expect(Id,Err,Idx,VBs) -> echo_errors(expect_impl(Id,Err,Idx,VBs)).
expect(Id,trap, Enterp, Generic, Specific, ExpectedVarbinds) ->
echo_errors(expect_impl(Id,trap,Enterp,Generic,
Specific,ExpectedVarbinds)).
get_response(Id, Vars) -> echo_errors(get_response_impl(Id, Vars)).
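%% Usage sketch (assumes the manager is started and an agent is reachable;
%% the OID below is sysDescr.0 and is only illustrative):
%%   snmp_test_mgr:g([[1,3,6,1,2,1,1,1,0]]),
%%   snmp_test_mgr:expect(1, any).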
%% Returns: a PDU or {error, Reason}.
receive_response() ->
receive_response(get_timeout()).
receive_response(Timeout) ->
d("await response within ~w ms",[Timeout]),
receive
{snmp_pdu, PDU} when is_record(PDU, pdu) ->
d("received PDU: ~n\t~p",[PDU]),
PDU
after Timeout ->
d("response timeout",[]),
{error, timeout}
end.
get_timeout() ->
case get(receive_response_timeout) of
Int when is_integer(Int) and (Int > 0) ->
Int;
_ ->
get_timeout(os:type())
end.
get_timeout(vxworks) -> 7000;
get_timeout(_) -> 3500.
receive_trap(Timeout) ->
d("await trap within ~w ms",[Timeout]),
receive
{snmp_pdu, PDU} when is_record(PDU, trappdu) ->
d("received trap-PDU: ~n\t~p",[PDU]),
PDU
after Timeout ->
d("trap timeout",[]),
{error, timeout}
end.
%% Options: {agent_udp, UDPPort}, {agent, Agent},
%%          {mibs, List of Filenames}, {trap_udp, UDPPort} (default 5000),
init({Options, CallerPid}) ->
put(sname, mgr),
put(verbosity, debug),
{A1,A2,A3} = erlang:now(),
random:seed(A1,A2,A3),
case (catch is_options_ok(Options)) of
true ->
put(debug, get_value(debug, Options, false)),
d("init -> (~p) extract options",[self()]),
PacksDbg = get_value(packet_server_debug, Options, false),
io:format("[~w] ~p -> PacksDbg: ~p~n", [?MODULE, self(), PacksDbg]),
RecBufSz = get_value(recbuf, Options, 1024),
io:format("[~w] ~p -> RecBufSz: ~p~n", [?MODULE, self(), RecBufSz]),
Mibs = get_value(mibs, Options, []),
io:format("[~w] ~p -> Mibs: ~p~n", [?MODULE, self(), Mibs]),
Udp = get_value(agent_udp, Options, 4000),
io:format("[~w] ~p -> Udp: ~p~n", [?MODULE, self(), Udp]),
User = get_value(user, Options, "initial"),
io:format("[~w] ~p -> User: ~p~n", [?MODULE, self(), User]),
EngineId = get_value(engine_id, Options, "agentEngine"),
io:format("[~w] ~p -> EngineId: ~p~n", [?MODULE, self(), EngineId]),
CtxEngineId = get_value(context_engine_id, Options, EngineId),
io:format("[~w] ~p -> CtxEngineId: ~p~n", [?MODULE, self(), CtxEngineId]),
TrapUdp = get_value(trap_udp, Options, 5000),
io:format("[~w] ~p -> TrapUdp: ~p~n", [?MODULE, self(), TrapUdp]),
Dir = get_value(dir, Options, "."),
io:format("[~w] ~p -> Dir: ~p~n", [?MODULE, self(), Dir]),
SecLevel = get_value(sec_level, Options, noAuthNoPriv),
io:format("[~w] ~p -> SecLevel: ~p~n", [?MODULE, self(), SecLevel]),
MiniMIB = snmp_mini_mib:create(Mibs),
io:format("[~w] ~p -> MiniMIB: ~p~n", [?MODULE, self(), MiniMIB]),
Version = case lists:member(v2, Options) of
true -> 'version-2';
false ->
case lists:member(v3, Options) of
true -> 'version-3';
false -> 'version-1'
end
end,
io:format("[~w] ~p -> Version: ~p~n", [?MODULE, self(), Version]),
Com = case Version of
'version-3' ->
get_value(context, Options, "");
_ ->
get_value(community, Options, "public")
end,
io:format("[~w] ~p -> Com: ~p~n", [?MODULE, self(), Com]),
VsnHdrD =
{Com, User, EngineId, CtxEngineId, mk_seclevel(SecLevel)},
io:format("[~w] ~p -> VsnHdrD: ~p~n", [?MODULE, self(), VsnHdrD]),
AgIp = case snmp_misc:assq(agent, Options) of
{value, Tuple4} when is_tuple(Tuple4) andalso
(size(Tuple4) =:= 4) ->
Tuple4;
{value, Host} when is_list(Host) ->
{ok, Ip} = snmp_misc:ip(Host),
Ip
end,
io:format("[~w] ~p -> AgIp: ~p~n", [?MODULE, self(), AgIp]),
Quiet = lists:member(quiet, Options),
io:format("[~w] ~p -> Quiet: ~p~n", [?MODULE, self(), Quiet]),
PackServ = start_packet_server(Quiet, Options, CallerPid,
AgIp, Udp, TrapUdp,
VsnHdrD, Version, Dir, RecBufSz,
PacksDbg),
d("init -> packet server: ~p",[PackServ]),
State = #state{parent = CallerPid,
quiet = Quiet,
mini_mib = MiniMIB,
packet_server = PackServ},
d("init -> done",[]),
{ok, State};
{error, Reason} ->
{stop,Reason}
end.
start_packet_server(false, _Options, _CallerPid, AgIp, Udp, TrapUdp,
VsnHdrD, Version, Dir, RecBufSz, PacksDbg) ->
d("start_packet_server -> entry", []),
?PACK_SERV:start_link_packet({msg, self()},
AgIp, Udp, TrapUdp,
VsnHdrD, Version, Dir, RecBufSz,
PacksDbg);
start_packet_server(true, Options, CallerPid, AgIp, Udp, TrapUdp,
VsnHdrD, Version, Dir, RecBufSz, PacksDbg) ->
Type = get_value(receive_type, Options, pdu),
d("start_packet_server -> entry with"
"~n CallerPid: ~p"
"~n when"
"~n Type: ~p",[CallerPid, Type]),
?PACK_SERV:start_link_packet({Type, CallerPid},
AgIp, Udp, TrapUdp,
VsnHdrD, Version, Dir, RecBufSz,
PacksDbg).
is_options_ok([{mibs,List}|Opts]) when is_list(List) ->
is_options_ok(Opts);
is_options_ok([quiet|Opts]) ->
is_options_ok(Opts);
is_options_ok([{agent,_}|Opts]) ->
is_options_ok(Opts);
is_options_ok([{agent_udp,Int}|Opts]) when is_integer(Int) ->
is_options_ok(Opts);
is_options_ok([{trap_udp,Int}|Opts]) when is_integer(Int) ->
is_options_ok(Opts);
is_options_ok([{community,List}|Opts]) when is_list(List) ->
is_options_ok(Opts);
is_options_ok([{dir,List}|Opts]) when is_list(List) ->
is_options_ok(Opts);
is_options_ok([{sec_level,noAuthNoPriv}|Opts]) ->
is_options_ok(Opts);
is_options_ok([{sec_level,authNoPriv}|Opts]) ->
is_options_ok(Opts);
is_options_ok([{sec_level,authPriv}|Opts]) ->
is_options_ok(Opts);
is_options_ok([{context,List}|Opts]) when is_list(List) ->
is_options_ok(Opts);
is_options_ok([{user,List}|Opts]) when is_list(List) ->
is_options_ok(Opts);
is_options_ok([{engine_id,List}|Opts]) when is_list(List) ->
is_options_ok(Opts);
is_options_ok([{context_engine_id,List}|Opts]) when is_list(List) ->
is_options_ok(Opts);
is_options_ok([v1|Opts]) ->
is_options_ok(Opts);
is_options_ok([v2|Opts]) ->
is_options_ok(Opts);
is_options_ok([v3|Opts]) ->
is_options_ok(Opts);
is_options_ok([{debug,Bool}|Opts]) ->
case is_bool(Bool) of
ok ->
is_options_ok(Opts);
error ->
{error, {bad_option, debug, Bool}}
end;
is_options_ok([{packet_server_debug,Bool}|Opts]) ->
case is_bool(Bool) of
ok ->
is_options_ok(Opts);
error ->
{error, {bad_option, packet_server_debug, Bool}}
end;
is_options_ok([{recbuf,Sz}|Opts]) when (0 < Sz) and (Sz =< 65535) ->
is_options_ok(Opts);
is_options_ok([InvOpt|_]) ->
{error,{invalid_option,InvOpt}};
is_options_ok([]) -> true.
is_bool(true) -> ok;
is_bool(false) -> ok;
is_bool(_) -> error.
mk_seclevel(noAuthNoPriv) -> 0;
mk_seclevel(authNoPriv) -> 1;
mk_seclevel(authPriv) -> 3.
handle_call({purify_oid, Oid}, _From, #state{mini_mib = MiniMib} = State) ->
d("handle_call -> purify_oid for ~p",[Oid]),
Reply = (catch purify_oid(Oid, MiniMib)),
{reply, Reply, State};
handle_call({find_pure_oid, XOid}, _From, State) ->
d("handle_call -> find_pure_oid for ~p",[XOid]),
{reply, catch flatten_oid(XOid, State#state.mini_mib), State};
handle_call({oid_to_name, Oid}, _From, State) ->
d("handle_call -> oid_to_name for Oid: ~p",[Oid]),
Reply =
case lists:keysearch(Oid, 1, State#state.mini_mib) of
{value, {_Oid, Name, _Type}} ->
{ok, Name};
false ->
{error, {no_such_oid, Oid}}
end,
{reply, Reply, State};
handle_call({name_to_oid, Name}, _From, State) ->
d("handle_call -> name_to_oid for Name: ~p",[Name]),
Reply =
case lists:keysearch(Name, 2, State#state.mini_mib) of
{value, {Oid, _Name, _Type}} ->
{ok, Oid};
false ->
{error, {no_such_name, Name}}
end,
{reply, Reply, State};
handle_call(stop, _From, #state{mini_mib = MiniMIB} = State) ->
d("handle_call -> stop request",[]),
snmp_mini_mib:delete(MiniMIB),
{stop, normal, ok, State#state{mini_mib = undefined}};
handle_call(discovery, _From, State) ->
d("handle_call -> discovery",[]),
{Reply, NewState} = execute_discovery(State),
{reply, Reply, NewState}.
handle_cast({get, Oids}, State) ->
d("handle_cast -> get request for ~p", [Oids]),
{noreply, execute_request(get, Oids, State)};
handle_cast({set, VariablesAndValues}, State) ->
d("handle_cast -> set request for ~p", [VariablesAndValues]),
{noreply, execute_request(set, VariablesAndValues, State)};
handle_cast({get_next, Oids}, State) ->
d("handle_cast -> get-next request for ~p", [Oids]),
{noreply, execute_request(get_next, Oids, State)};
handle_cast(iter_get_next, State)
when is_record(State#state.last_received_pdu, pdu) ->
d("handle_cast -> iter_get_next request", []),
PrevPDU = State#state.last_received_pdu,
Oids = [get_oid_from_varbind(Vb) || Vb <- PrevPDU#pdu.varbinds],
{noreply, execute_request(get_next, Oids, State)};
handle_cast(iter_get_next, State) ->
?PACK_SERV:error("[Iterated get-next] No Response PDU to "
"start iterating from.", []),
{noreply, State};
handle_cast({iter_get_next, N}, State) ->
d("handle_cast -> iter_get_next(~p) request",[N]),
if
is_record(State#state.last_received_pdu, pdu) ->
PDU = get_next_iter_impl(N, State#state.last_received_pdu,
State#state.mini_mib,
State#state.packet_server),
{noreply, State#state{last_received_pdu = PDU}};
true ->
?PACK_SERV:error("[Iterated get-next] No Response PDU to "
"start iterating from.", []),
{noreply, State}
end;
handle_cast(resend_pdu, #state{last_sent_pdu = PDU} = State) ->
d("handle_cast -> resend_pdu request when"
"~n PDU = ~p", [PDU]),
send_pdu(PDU#pdu{request_id = make_request_id()},
State#state.mini_mib,
State#state.packet_server),
{noreply, State};
handle_cast({bulk, Args}, State) ->
d("handle_bulk -> bulk request for ~p", [Args]),
{noreply, execute_request(bulk, Args, State)};
handle_cast({response, RespPdu}, State) ->
d("handle_cast -> response request with ~p", [RespPdu]),
?PACK_SERV:send_pdu(RespPdu, State#state.packet_server),
{noreply, State};
handle_cast({send_bytes, Bytes}, State) ->
d("handle_cast -> send-bytes request for ~p bytes", [sizeOf(Bytes)]),
?PACK_SERV:send_bytes(Bytes, State#state.packet_server),
{noreply, State};
handle_cast(Msg, State) ->
d("handle_cast -> unknown message: "
"~n ~p", [Msg]),
{noreply, State}.
handle_info({snmp_msg, Msg, Ip, Udp}, State) ->
io:format("* Got PDU: ~s", [?PACK_SERV:format_hdr(Msg)]),
PDU = ?PACK_SERV:get_pdu(Msg),
echo_pdu(PDU, State#state.mini_mib),
case PDU#pdu.type of
'inform-request' ->
RespPDU = PDU#pdu{type = 'get-response',
error_status = noError,
error_index = 0},
RespMsg = ?PACK_SERV:set_pdu(Msg, RespPDU),
?PACK_SERV:send_msg(RespMsg, State#state.packet_server, Ip, Udp);
_Else ->
ok
end,
{noreply, State#state{last_received_pdu = PDU}};
handle_info(Info, State) ->
d("handle_info -> unknown info: "
"~n ~p", [Info]),
{noreply, State}.
terminate(Reason, State) ->
d("terminate -> with Reason: ~n\t~p",[Reason]),
?PACK_SERV:stop(State#state.packet_server).
%% Returns: A new State
execute_discovery(State) ->
Pdu = make_discovery_pdu(),
Reply = ?PACK_SERV:send_discovery_pdu(Pdu, State#state.packet_server),
{Reply, State#state{last_sent_pdu = Pdu}}.
execute_request(Operation, Data, State) ->
case (catch make_pdu(Operation, Data, State#state.mini_mib)) of
{error, {Format, Data2}} ->
report_error(State, Format, Data2),
State;
{error, _Reason} ->
State;
PDU when is_record(PDU, pdu) ->
send_pdu(PDU, State#state.mini_mib, State#state.packet_server),
State#state{last_sent_pdu = PDU}
end.
report_error(#state{quiet = true, parent = Pid}, Format, Args) ->
Reason = lists:flatten(io_lib:format(Format, Args)),
Pid ! {oid_error, Reason};
report_error(_, Format, Args) ->
?PACK_SERV:error(Format, Args).
get_oid_from_varbind(#varbind{oid = Oid}) -> Oid.
send_pdu(PDU, _MiniMIB, PackServ) ->
?PACK_SERV:send_pdu(PDU, PackServ).
%% Purpose: Unnesting of oids like [myTable, 3, 4, "hej", 45] to
%%          [1,2,3,3,4,104,101,106,45]
purify_oid([A|T], MiniMib) when is_atom(A) ->
Oid2 =
case snmp_mini_mib:oid(MiniMib, A) of
false ->
throw({error, {unknown_aliasname, A}});
Oid ->
lists:flatten([Oid|T])
end,
{ok, verify_pure_oid(Oid2)};
purify_oid(L, _) when is_list(L) ->
{ok, verify_pure_oid(lists:flatten(L))};
purify_oid(X, _) ->
{error, {invalid_oid, X}}.
verify_pure_oid([]) ->
[];
verify_pure_oid([H | T]) when is_integer(H) and (H >= 0) ->
[H | verify_pure_oid(T)];
verify_pure_oid([H | _]) ->
throw({error, {not_pure_oid, H}}).
flatten_oid(XOid, DB) ->
Oid = case XOid of
[A|T] when is_atom(A) ->
[remove_atom(A, DB)|T];
L when is_list(L) ->
XOid;
Shit ->
throw({error,
{"Invalid oid, not a list of integers: ~w", [Shit]}})
end,
check_is_pure_oid(lists:flatten(Oid)).
remove_atom(AliasName, DB) when is_atom(AliasName) ->
case snmp_mini_mib:oid(DB, AliasName) of
false ->
throw({error, {"Unknown aliasname in oid: ~w", [AliasName]}});
Oid ->
Oid
end;
remove_atom(X, _DB) ->
X.
check_is_pure_oid([]) -> [];
check_is_pure_oid([X | T]) when is_integer(X) and (X >= 0) ->
[X | check_is_pure_oid(T)];
check_is_pure_oid([X | _T]) ->
throw({error, {"Invalid oid, it contains a non-integer: ~w", [X]}}).
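%% Sketch: flatten_oid([sysDescr, 0], MiniMIB) resolves the alias through the
%% mini MIB and returns a flat list of non-negative integers (e.g.
%% [1,3,6,1,2,1,1,1,0]); an unknown alias or a non-integer component throws
%% {error, {Format, Data}}.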
get_next_iter_impl(0, PrevPDU, _MiniMIB, _PackServ) ->
PrevPDU;
get_next_iter_impl(N, PrevPDU, MiniMIB, PackServ) ->
Oids = [get_oid_from_varbind(Vb) || Vb <- PrevPDU#pdu.varbinds],
PDU = make_pdu(get_next, Oids, MiniMIB),
send_pdu(PDU, MiniMIB, PackServ),
case receive_response() of
{error, timeout} ->
io:format("(timeout)~n"),
get_next_iter_impl(N, PrevPDU, MiniMIB, PackServ);
{error, _Reason} ->
PrevPDU;
RPDU when is_record(RPDU, pdu) ->
io:format("(~w)", [N]),
echo_pdu(RPDU, MiniMIB),
get_next_iter_impl(N-1, RPDU, MiniMIB, PackServ)
end.
%% Used to resend a PDU. Takes the old PDU and
%% generates a fresh one (with a new requestID).
make_pdu(set, VarsAndValues, MiniMIB) ->
VBs = [var_and_value_to_varbind(VAV, MiniMIB) || VAV <- VarsAndValues],
make_pdu_impl(set, VBs);
make_pdu(bulk, {NonRepeaters, MaxRepetitions, Oids}, MiniMIB) ->
Foids = [flatten_oid(Oid, MiniMIB) || Oid <- Oids],
#pdu{type = 'get-bulk-request',
request_id = make_request_id(),
error_status = NonRepeaters,
error_index = MaxRepetitions,
varbinds = [make_vb(Oid) || Oid <- Foids]};
make_pdu(Operation, Oids, MiniMIB) ->
Foids = [flatten_oid(Oid, MiniMIB) || Oid <- Oids],
make_pdu_impl(Operation, Foids).
make_pdu_impl(get, Oids) ->
#pdu{type = 'get-request',
request_id = make_request_id(),
error_status = noError,
error_index = 0,
varbinds = [make_vb(Oid) || Oid <- Oids]};
make_pdu_impl(get_next, Oids) ->
#pdu{type = 'get-next-request',
request_id = make_request_id(),
error_status = noError,
error_index = 0,
varbinds = [make_vb(Oid) || Oid <- Oids]};
make_pdu_impl(set, Varbinds) ->
#pdu{type = 'set-request',
request_id = make_request_id(),
error_status = noError,
error_index = 0,
varbinds = Varbinds}.
make_discovery_pdu() ->
make_pdu_impl(get, []).
var_and_value_to_varbind({Oid, Type, Value}, MiniMIB) ->
Oid2 = flatten_oid(Oid, MiniMIB),
#varbind{oid = Oid2,
variabletype = char_to_type(Type),
value = Value};
var_and_value_to_varbind({XOid, Value}, MiniMIB) ->
Oid = flatten_oid(XOid, MiniMIB),
#varbind{oid = Oid,
variabletype = snmp_mini_mib:type(MiniMIB, Oid),
value = Value}.
char_to_type(o) ->
'OBJECT IDENTIFIER';
char_to_type(i) ->
'INTEGER';
char_to_type(u) ->
'Unsigned32';
char_to_type(g) -> % Gauge, Gauge32
'Unsigned32';
char_to_type(s) ->
'OCTET STRING'.
make_vb(Oid) ->
#varbind{oid = Oid, variabletype = 'NULL', value = 'NULL'}.
make_request_id() ->
random:uniform(16#FFFFFFF-1).
echo_pdu(PDU, MiniMIB) ->
io:format("~s", [snmp_misc:format_pdu(PDU, MiniMIB)]).
echo_errors({error, Id, {ExpectedFormat, ExpectedData}, {Format, Data}})->
io:format("* Unexpected Behaviour * Id: ~w.~n"
" Expected: " ++ ExpectedFormat ++ "~n"
" Got: " ++ Format ++ "~n",
[Id] ++ ExpectedData ++ Data),
{error, Id, {ExpectedFormat, ExpectedData}, {Format, Data}};
echo_errors(ok) -> ok;
echo_errors({ok, Val}) -> {ok, Val}.
get_response_impl(Id, Vars) ->
case receive_response() of
#pdu{type = 'get-response',
error_status = noError,
error_index = 0,
varbinds = VBs} ->
match_vars(Id, find_pure_oids2(Vars), VBs, []);
#pdu{type = Type2,
request_id = ReqId,
error_status = Err2,
error_index = Index2} ->
{error,
Id,
{"Type: ~w, ErrStat: ~w, Idx: ~w, RequestId: ~w",
['get-response', noError, 0, ReqId]},
{"Type: ~w, ErrStat: ~w, Idx: ~w",
[Type2, Err2, Index2]}};
{error, Reason} ->
format_reason(Id, Reason)
end.
%% Returns: ok | {error, Id, {ExpectedFormat, ExpectedData}, {Format, Data}}
expect_impl(Id, any) ->
io:format("expect_impl(~w, any) -> entry ~n", [Id]),
case receive_response() of
PDU when is_record(PDU, pdu) -> ok;
{error, Reason} -> format_reason(Id, Reason)
end;
expect_impl(Id, return) ->
io:format("expect_impl(~w, return) -> entry ~n", [Id]),
case receive_response() of
PDU when is_record(PDU, pdu) -> {ok, PDU};
{error, Reason} -> format_reason(Id, Reason)
end;
expect_impl(Id, trap) ->
io:format("expect_impl(~w, trap) -> entry ~n", [Id]),
case receive_trap(3500) of
PDU when is_record(PDU, trappdu) -> ok;
{error, Reason} -> format_reason(Id, Reason)
end;
expect_impl(Id, timeout) ->
io:format("expect_impl(~w, timeout) -> entry ~n", [Id]),
receive
X ->
io:format("expect_impl(~w, timeout) -> "
"received unexpected message: ~n~p~n", [Id, X]),
{error, Id, {"Timeout", []}, {"Message ~w", [X]}}
after 3500 ->
ok
end;
expect_impl(Id, Err) when is_atom(Err) ->
io:format("expect_impl(~w, ~w) -> entry ~n", [Id, Err]),
case receive_response() of
#pdu{error_status = Err} ->
ok;
#pdu{request_id = ReqId,
error_status = OtherErr} ->
io:format("expect_impl(~w, ~w) -> "
"received pdu (~w) with unexpected error-status: "
"~n~p~n", [Id, Err, ReqId, OtherErr]),
{error, Id, {"ErrorStatus: ~w, RequestId: ~w", [Err,ReqId]},
{"ErrorStatus: ~w", [OtherErr]}};
{error, Reason} ->
format_reason(Id, Reason)
end;
expect_impl(Id, ExpectedVarbinds) when is_list(ExpectedVarbinds) ->
io:format("expect_impl(~w) -> entry with"
"~n ExpectedVarbinds: ~p~n", [Id, ExpectedVarbinds]),
case receive_response() of
#pdu{type = 'get-response',
error_status = noError,
error_index = 0,
varbinds = VBs} ->
io:format("expect_impl(~w) -> received pdu with"
"~n VBs: ~p~n", [Id, VBs]),
check_vars(Id, find_pure_oids(ExpectedVarbinds), VBs);
#pdu{type = Type2,
request_id = ReqId,
error_status = Err2,
error_index = Index2} ->
io:format("expect_impl(~w) -> received unexpected pdu with"
"~n Type2: ~p"
"~n ReqId: ~p"
"~n Err2: ~p"
"~n Index2: ~p"
"~n", [Id, Type2, ReqId, Err2, Index2]),
{error, Id, {"Type: ~w, ErrStat: ~w, Idx: ~w, RequestId: ~w",
['get-response', noError, 0, ReqId]},
{"Type: ~w, ErrStat: ~w, Idx: ~w", [Type2, Err2, Index2]}};
{error, Reason} ->
format_reason(Id, Reason)
end.
expect_impl(Id, v2trap, ExpectedVarbinds) when is_list(ExpectedVarbinds) ->
io:format("expect_impl(~w, v2trap) -> entry with"
"~n ExpectedVarbinds: ~p~n", [Id, ExpectedVarbinds]),
case receive_response() of
#pdu{type = 'snmpv2-trap',
error_status = noError,
error_index = 0,
varbinds = VBs} ->
io:format("expect_impl(~w, v2trap) -> received pdu with"
"~n VBs: ~p~n", [Id, VBs]),
check_vars(Id, find_pure_oids(ExpectedVarbinds), VBs);
#pdu{type = Type2,
request_id = ReqId,
error_status = Err2,
error_index = Index2} ->
io:format("expect_impl(~w, v2trap) -> received unexpected pdu with"
"~n Type2: ~p"
"~n ReqId: ~p"
"~n Err2: ~p"
"~n Index2: ~p"
"~n", [Id, Type2, ReqId, Err2, Index2]),
{error, Id, {"Type: ~w, ErrStat: ~w, Idx: ~w, RequestId: ~w",
['snmpv2-trap', noError, 0, ReqId]},
{"Type: ~w, ErrStat: ~w, Idx: ~w", [Type2, Err2, Index2]}};
{error, Reason} ->
format_reason(Id, Reason)
end;
expect_impl(Id, report, ExpectedVarbinds) when is_list(ExpectedVarbinds) ->
io:format("expect_impl(~w, report) -> entry with"
"~n ExpectedVarbinds: ~p~n", [Id, ExpectedVarbinds]),
case receive_response() of
#pdu{type = 'report',
error_status = noError,
error_index = 0,
varbinds = VBs} ->
io:format("expect_impl(~w, report) -> received pdu with"
"~n VBs: ~p~n", [Id, VBs]),
check_vars(Id, find_pure_oids(ExpectedVarbinds), VBs);
#pdu{type = Type2,
request_id = ReqId,
error_status = Err2,
error_index = Index2} ->
io:format("expect_impl(~w, report) -> received unexpected pdu with"
"~n Type2: ~p"
"~n ReqId: ~p"
"~n Err2: ~p"
"~n Index2: ~p"
"~n", [Id, Type2, ReqId, Err2, Index2]),
{error, Id, {"Type: ~w, ErrStat: ~w, Idx: ~w, RequestId: ~w",
[report, noError, 0, ReqId]},
{"Type: ~w, ErrStat: ~w, Idx: ~w", [Type2, Err2, Index2]}};
{error, Reason} ->
format_reason(Id, Reason)
end;
expect_impl(Id, {inform, Reply}, ExpectedVarbinds)
when is_list(ExpectedVarbinds) ->
io:format("expect_impl(~w, inform) -> entry with"
"~n Reply: ~p"
"~n ExpectedVarbinds: ~p"
"~n", [Id, Reply, ExpectedVarbinds]),
Resp = receive_response(),
case Resp of
#pdu{type = 'inform-request',
error_status = noError,
error_index = 0,
varbinds = VBs} ->
io:format("expect_impl(~w, inform) -> received pdu with"
"~n VBs: ~p~n", [Id, VBs]),
case check_vars(Id, find_pure_oids(ExpectedVarbinds), VBs) of
ok when (Reply == true) ->
io:format("expect_impl(~w, inform) -> send ok response"
"~n", [Id]),
RespPDU = Resp#pdu{type = 'get-response',
error_status = noError,
error_index = 0},
?MODULE:rpl(RespPDU),
ok;
ok when (element(1, Reply) == error) ->
io:format("expect_impl(~w, inform) -> send error response"
"~n", [Id]),
{error, Status, Index} = Reply,
RespPDU = Resp#pdu{type = 'get-response',
error_status = Status,
error_index = Index},
?MODULE:rpl(RespPDU),
ok;
ok when (Reply == false) ->
io:format("expect_impl(~w, inform) -> no response sent"
"~n", [Id]),
ok;
Else ->
io:format("expect_impl(~w, inform) -> "
"~n Else: ~p"
"~n", [Id, Else]),
Else
end;
#pdu{type = Type2,
request_id = ReqId,
error_status = Err2,
error_index = Index2} ->
io:format("expect_impl(~w, inform) -> received unexpected pdu with"
"~n Type2: ~p"
"~n ReqId: ~p"
"~n Err2: ~p"
"~n Index2: ~p"
"~n", [Id, Type2, ReqId, Err2, Index2]),
{error, Id, {"Type: ~w, ErrStat: ~w, Idx: ~w, RequestId: ~w",
['inform-request', noError, 0, ReqId]},
{"Type: ~w, ErrStat: ~w, Idx: ~w", [Type2, Err2, Index2]}};
{error, Reason} ->
io:format("expect_impl(~w, inform) -> receive failed"
"~n Reason: ~p"
"~n", [Id, Reason]),
format_reason(Id, Reason)
end.
expect_impl(Id, Err, Index, any) ->
io:format("expect_impl(~w, any) -> entry with"
"~n Err: ~p"
"~n Index: ~p"
"~n", [Id, Err, Index]),
case receive_response() of
#pdu{type = 'get-response',
error_status = Err,
error_index = Index} ->
io:format("expect_impl(~w, any) -> received expected pdu"
"~n", [Id]),
ok;
#pdu{type = 'get-response', error_status = Err} when (Index == any) ->
io:format("expect_impl(~w, any) -> received expected pdu (any)"
"~n", [Id]),
ok;
#pdu{type = 'get-response',
request_id = ReqId,
error_status = Err,
error_index = Idx} when is_list(Index) ->
io:format("expect_impl(~w, any) -> received pdu: "
"~n ReqId: ~p"
"~n Err: ~p"
"~n Idx: ~p"
"~n", [Id, ReqId, Err, Idx]),
case lists:member(Idx, Index) of
true ->
ok;
false ->
{error, Id, {"ErrStat: ~w, Idx: ~w, RequestId: ~w",
[Err, Index, ReqId]},
{"ErrStat: ~w, Idx: ~w", [Err, Idx]}}
end;
#pdu{type = Type2,
request_id = ReqId,
error_status = Err2,
error_index = Index2} ->
io:format("expect_impl(~w, any) -> received unexpected pdu: "
"~n Type2: ~p"
"~n ReqId: ~p"
"~n Err2: ~p"
"~n Index2: ~p"
"~n", [Id, Type2, ReqId, Err2, Index2]),
{error, Id, {"Type: ~w, ErrStat: ~w, Idx: ~w, RequestId: ~w",
['get-response', Err, Index, ReqId]},
{"Type: ~w, ErrStat: ~w, Idx: ~w", [Type2, Err2, Index2]}};
{error, Reason} ->
format_reason(Id, Reason)
end;
expect_impl(Id, Err, Index, ExpectedVarbinds) ->
io:format("expect_impl(~w) -> entry with"
"~n Err: ~p"
"~n Index: ~p"
"~n ExpectedVarbinds: ~p"
"~n", [Id, Err, Index, ExpectedVarbinds]),
PureVBs = find_pure_oids(ExpectedVarbinds),
case receive_response() of
#pdu{type = 'get-response',
error_status = Err,
error_index = Index,
varbinds = VBs} ->
check_vars(Id, PureVBs, VBs);
#pdu{type = 'get-response',
error_status = Err,
varbinds = VBs} when (Index == any) ->
check_vars(Id, PureVBs, VBs);
#pdu{type = 'get-response',
request_id = ReqId,
error_status = Err,
error_index = Idx,
varbinds = VBs} when is_list(Index) ->
case lists:member(Idx, Index) of
true ->
check_vars(Id, PureVBs, VBs);
false ->
{error,Id,
{"ErrStat: ~w, Idx: ~w, Varbinds: ~w, RequestId: ~w",
[Err,Index,PureVBs,ReqId]},
{"ErrStat: ~w, Idx: ~w, Varbinds: ~w",
[Err,Idx,VBs]}}
end;
#pdu{type = Type2,
request_id = ReqId,
error_status = Err2,
error_index = Index2,
varbinds = VBs} ->
{error,Id,
{"Type: ~w, ErrStat: ~w, Idx: ~w, Varbinds: ~w, RequestId: ~w",
['get-response',Err,Index,PureVBs,ReqId]},
{"Type: ~w, ErrStat: ~w Idx: ~w Varbinds: ~w",
[Type2,Err2,Index2,VBs]}};
{error, Reason} ->
format_reason(Id, Reason)
end.
expect_impl(Id, trap, Enterp, Generic, Specific, ExpectedVarbinds) ->
PureE = find_pure_oid(Enterp),
case receive_trap(3500) of
#trappdu{enterprise = PureE,
generic_trap = Generic,
specific_trap = Specific,
varbinds = VBs} ->
check_vars(Id, find_pure_oids(ExpectedVarbinds), VBs);
#trappdu{enterprise = Ent2,
generic_trap = G2,
specific_trap = Spec2,
varbinds = VBs} ->
{error, Id,
{"Enterprise: ~w, Generic: ~w, Specific: ~w, Varbinds: ~w",
[PureE, Generic, Specific, ExpectedVarbinds]},
{"Enterprise: ~w, Generic: ~w, Specific: ~w, Varbinds: ~w",
[Ent2, G2, Spec2, VBs]}};
{error, Reason} ->
format_reason(Id, Reason)
end.
format_reason(Id, Reason) ->
{error, Id, {"?", []}, {"~w", [Reason]}}.
%% Args: Id, ExpectedVarbinds, Varbinds
check_vars(_Id,[], []) ->
ok;
check_vars(Id,Vars, []) ->
{error, Id, {"More Varbinds (~w)", [Vars]}, {"Too few", []}};
check_vars(Id,[], Varbinds) ->
{error,Id, {"Fewer Varbinds", []}, {"Too many (~w)", [Varbinds]}};
check_vars(Id,[{_XOid, any} | Vars], [#varbind{oid = _Oid} |Vbs]) ->
check_vars(Id,Vars, Vbs);
check_vars(Id,[{Oid, Val} | Vars], [#varbind{oid = Oid, value = Val} |Vbs]) ->
check_vars(Id,Vars, Vbs);
check_vars(Id,[{Oid, Val} | _], [#varbind{oid = Oid, value = Val2} |_]) ->
{error, Id, {" Varbind: ~w = ~w", [Oid, Val]}, {"Value: ~w", [Val2]}};
check_vars(Id,[{Oid, _Val} | _], [#varbind{oid = Oid2, value = _Val2} |_]) ->
{error, Id, {"Oid: ~w", [Oid]}, {"Oid: ~w", [Oid2]}}.
match_vars(Id, [Oid|T], [#varbind{oid = Oid, value = Value} | Vbs], Res) ->
match_vars(Id, T, Vbs, [Value | Res]);
match_vars(_Id, [], [], Res) ->
{ok, lists:reverse(Res)};
match_vars(Id, [Oid | _], [#varbind{oid = Oid2}], _Res) ->
{error, Id, {" Oid: ~w", [Oid]}, {"Oid2: ~w", [Oid2]}};
match_vars(Id, Vars, [], _Res) ->
{error, Id, {"More Varbinds (~w)", [Vars]}, {"Too few", []}};
match_vars(Id, [], Varbinds, _Res) ->
{error,Id, {"Fewer Varbinds", []}, {"Too many (~w)", [Varbinds]}}.
find_pure_oids([]) -> [];
find_pure_oids([{XOid, Q}|T]) ->
[{find_pure_oid(XOid), Q} | find_pure_oids(T)].
find_pure_oids2([]) -> [];
find_pure_oids2([XOid|T]) ->
[find_pure_oid(XOid) | find_pure_oids2(T)].
find_pure_oid(XOid) ->
case gen_server:call(?MODULE, {find_pure_oid, XOid}, infinity) of
{error, {Format, Data}} ->
ok = io:format(Format, Data),
exit(malformed_oid);
Oid when is_list(Oid) -> Oid
end.
get_value(Opt, Opts, Default) ->
case snmp_misc:assq(Opt,Opts) of
{value, C} -> C;
false -> Default
end.
call(Req) ->
call(Req, infinity).
call(Req, To) ->
gen_server:call(?SERVER, Req, To).
cast(Msg) ->
gen_server:cast(?SERVER, Msg).
sizeOf(L) when is_list(L) ->
length(lists:flatten(L));
sizeOf(B) when is_binary(B) ->
size(B).
d(F,A) -> d(get(debug),F,A).
d(true,F,A) ->
io:format("*** [~s] MGR_DBG *** " ++ F ++ "~n",
[format_timestamp(now())|A]);
d(_,_F,_A) ->
ok.
format_timestamp({_N1, _N2, N3} = Now) ->
{Date, Time} = calendar:now_to_datetime(Now),
{YYYY,MM,DD} = Date,
{Hour,Min,Sec} = Time,
FormatDate =
io_lib:format("~.4w:~.2.0w:~.2.0w ~.2.0w:~.2.0w:~.2.0w.~w",
[YYYY,MM,DD,Hour,Min,Sec,round(N3/1000)]),
lists:flatten(FormatDate).
|
b42c449356d28467cf3a4ba61bc2d4bf4314ec53943ed074c73f8a695b2048c1 | halgari/naiad | transducers.clj | (ns naiad.transducers
(:require [naiad.transducers.ioc :refer [transducer ingest emit if-value]])
(:refer-clojure :exclude [first nth last reduce]))
(defn reduce [rf init]
(transducer
(loop [acc init]
(if-value [v (ingest)]
(let [acc (rf acc v)]
(if (reduced? acc)
(emit acc)
(recur acc)))
(emit acc)))))
(def ^{:doc "A transducer that filters all but the first item in a transduction"}
first
(take 1))
(defn nth
"Creates a transducer that filters all but the nth item of a transduction"
[idx]
(comp (drop idx)
first))
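;; Usage sketch (assuming the standard transducer protocol and that this
;; namespace is required with an alias, e.g. [naiad.transducers :as t]):
;;   (into [] (t/nth 2) [:a :b :c :d]) ;; => [:c]
;; t/first and t/last compose the same way with into/transduce/sequence.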
(def ^{:doc "A transducer that filters all but the last item in a transduction"}
last
(transducer
(loop [have-item? false
item nil]
(if-value [v (ingest)]
(recur true v)
(when have-item?
(emit item))))))
| null | https://raw.githubusercontent.com/halgari/naiad/c47bc6f9d2e8e4222bd7820fa404d9ceebfc8e22/src/naiad/transducers.clj | clojure | (ns naiad.transducers
(:require [naiad.transducers.ioc :refer [transducer ingest emit if-value]])
(:refer-clojure :exclude [first nth last reduce]))
(defn reduce [rf init]
(transducer
(loop [acc init]
(if-value [v (ingest)]
(let [acc (rf acc v)]
(if (reduced? acc)
(emit acc)
(recur acc)))
(emit acc)))))
(def ^{:doc "A transducer that filters all but the first item in a tansduction"}
first
(take 1))
(defn nth
"Creates a transducer that filters all but the nth item of a transduction"
[idx]
(comp (drop idx)
first))
(def ^{:doc "A transducer that filters all but the last item in a transduction"}
last
(transducer
(loop [have-item? false
item nil]
(if-value [v (ingest)]
(recur true v)
(when have-item?
(emit item))))))
|
|
186fb896822ff1f1fbb5959a99297734b3a0179413dfb7722b008b215d8cde7b | funcool/catacumba | handlers.clj | (ns website.handlers
(:require [clojure.java.io :as io]
[hiccup.page :as hc]
[catacumba.core :as ct]
[catacumba.handlers :as hs]
[catacumba.handlers.auth :as auth]
[catacumba.http :as http]))
;; A function that renders the basic html layout
;; for all pages used in that application.
(defn layout
[content]
(hc/html5
[:head
[:meta {:charset "utf-8"}]
[:title "Sample dummy website"]
[:link {:href "/assets/styles.css"
:type "text/css"
:rel "stylesheet"
:media "screen"}]]
[:body content]))
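;; Usage sketch: (layout [:h1 "Hello"]) returns the full HTML5 page as a
;; string, with the shared <head> defined above wrapping the given hiccup
;; content.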
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; Home Page
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; Shows a simple html page. It has different content for anonymous
;; and logged users.
(defn home-page
[context]
(-> (layout [:section {:class "home-page"}
(if-let [user (:identity context)]
[:div
[:p (format "Welcome %s" (:username user))]
[:p [:a {:href "/logout"} "logout"]]]
[:div
[:p "Welcome to the dummy website application."]
[:p [:a {:href "/login"} "Login"]]])])
(http/ok {:content-type "text/html"})))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; Login page
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; A helper function that renders the login page; it is used both for the
;; initial rendering and for re-rendering the login page with errors on
;; POST requests.
(defn- render-login-page
([] (render-login-page []))
([errors]
(layout
[:section {:class "login-page"}
[:p "Login"]
(when (seq errors)
[:div {:class "errors"}
(for [e errors] [:div e])])
[:form {:action "" :method "post"}
[:div {:class "input-wrapper"}
[:input {:type "text" :name "username"
:placeholder "Write your username here..."}]]
[:div {:class "input-wrapper"}
[:input {:type "password" :name "password"
:placeholder "Write your password here..."}]]
[:div {:class "input-wrapper"}
[:input {:type "submit" :value "Submit"}]]]])))
;; A simple function that is responsible for authenticating the incoming
;; credentials on the login POST request. In the current implementation it
;; just checks if the username and password match the builtin "user"
;; representation, but in your implementation this function may access a
;; database or any other source to authenticate. This is just an example.
(defn- authenticate
[username, password]
(when (and (= username "admin")
(= password "123123"))
{:username "Admin"}))
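;; Sketch of the behaviour defined above:
;;   (authenticate "admin" "123123") ;; => {:username "Admin"}
;;   (authenticate "admin" "wrong")  ;; => nil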
;; A handler that simply renders the login page for GET requests.
(defn login-page
[context]
(-> (render-login-page)
(http/ok {:content-type "text/html"})))
;; A handler that clears the session and redirects to the home page.
(defn logout-page
[context]
(let [session (:session context)]
(swap! session dissoc :identity)
(http/found "/")))
;; A handler that handles the POST requests for login page.
(defn login-submit
[context]
(let [form-params (ct/get-formdata context)
query-params (:query-params context)
username (get form-params "username" "")
password (get form-params "password" "")]
(if (or (empty? username) (empty? password))
;; First, validate the input: if either required field
;; is empty, render the login page html with a
;; suitable error message and return it.
(http/ok (render-login-page ["The two fields are mandatory."])
{:content-type "text/html"})
;; Otherwise, try to validate the incoming
;; credentials; if they validate successfully,
;; update the session `:identity` key with the
;; authenticated user object.
;; If validation fails, return the login page
;; rendered with an appropriate error message.
(if-let [user (authenticate username password)]
(let [nexturl (get query-params "next" "/")
session (:session context)]
(swap! session assoc :identity user)
(http/found nexturl))
(http/ok (render-login-page ["User or password are incorrect"])
{:content-type "text/html"})))))
| null | https://raw.githubusercontent.com/funcool/catacumba/a493843176ee8defa2f3c6afa23c720f495d9341/examples/website-ssl/src/website/handlers.clj | clojure | A function that renders the basic html layout
for all pages used in that application.
Shows a simple html page. It has different content for anonymous
and logged users.
Login page
A helper function for render login page, it used also for initial
rendering and render login page with errors on post requests.
A simple function that is responsible for authenticating the incoming
credentials on the login POST request. In the current implementation it just
checks if the username and password match the builtin "user" representation,
but in your implementation this function may access a database or any
other source to authenticate. This is just an example.
A handler that simply renders the login page for GET requests.
A handler that clears the session and redirects to the home page.
A handler that handles the POST requests for login page.
First, validate the input: if either required field
is empty, render the login page html with a
suitable error message and return it.
Otherwise, try to validate the incoming
credentials; if they validate successfully,
update the session `:identity` key
with the authenticated user object.
If validation fails, return a login page
rendered with approapiate error message | (ns website.handlers
(:require [clojure.java.io :as io]
[hiccup.page :as hc]
[catacumba.core :as ct]
[catacumba.handlers :as hs]
[catacumba.handlers.auth :as auth]
[catacumba.http :as http]))
(defn layout
[content]
(hc/html5
[:head
[:meta {:charset "utf-8"}]
[:title "Sample dummy website"]
[:link {:href "/assets/styles.css"
:type "text/css"
:rel "stylesheet"
:media "screen"}]]
[:body content]))
Home Page
(defn home-page
[context]
(-> (layout [:section {:class "home-page"}
(if-let [user (:identity context)]
[:div
[:p (format "Welcome %s" (:username user))]
[:p [:a {:href "/logout"} "logout"]]]
[:div
[:p "Welcome to the dummy website application."]
[:p [:a {:href "/login"} "Login"]]])])
(http/ok {:content-type "text/html"})))
(defn- render-login-page
([] (render-login-page []))
([errors]
(layout
[:section {:class "login-page"}
[:p "Login"]
(when (seq errors)
[:div {:class "errors"}
(for [e errors] [:div e])])
[:form {:action "" :method "post"}
[:div {:class "input-wrapper"}
[:input {:type "text" :name "username"
:placeholder "Write your username here..."}]]
[:div {:class "input-wrapper"}
[:input {:type "password" :name "password"
:placeholder "Write your password here..."}]]
[:div {:class "input-wrapper"}
[:input {:type "submit" :value "Submit"}]]]])))
(defn- authenticate
[username, password]
(when (and (= username "admin")
(= password "123123"))
{:username "Admin"}))
(defn login-page
[context]
(-> (render-login-page)
(http/ok {:content-type "text/html"})))
(defn logout-page
[context]
(let [session (:session context)]
(swap! session dissoc :identity)
(http/found "/")))
(defn login-submit
[context]
(let [form-params (ct/get-formdata context)
query-params (:query-params context)
username (get form-params "username" "")
password (get form-params "password" "")]
(if (or (empty? username) (empty? password))
(http/ok (render-login-page ["The two fields are mandatory."])
{:content-type "text/html"})
(if-let [user (authenticate username password)]
(let [nexturl (get query-params "next" "/")
session (:session context)]
(swap! session assoc :identity user)
(http/found nexturl))
(http/ok (render-login-page ["User or password are incorrect"])
{:content-type "text/html"})))))
|
3c3751c82d1b482c2b1a450a3c7dac45c7ddd2e3423436a0bedbc39841782573 | clojure-interop/google-cloud-clients | StorageRpc$Option.clj | (ns com.google.cloud.storage.spi.v1.StorageRpc$Option
(:refer-clojure :only [require comment defn ->])
(:import [com.google.cloud.storage.spi.v1 StorageRpc$Option]))
(def PREDEFINED_ACL
"Enum Constant.
type: com.google.cloud.storage.spi.v1.StorageRpc$Option"
StorageRpc$Option/PREDEFINED_ACL)
(def PREDEFINED_DEFAULT_OBJECT_ACL
"Enum Constant.
type: com.google.cloud.storage.spi.v1.StorageRpc$Option"
StorageRpc$Option/PREDEFINED_DEFAULT_OBJECT_ACL)
(def IF_METAGENERATION_MATCH
"Enum Constant.
type: com.google.cloud.storage.spi.v1.StorageRpc$Option"
StorageRpc$Option/IF_METAGENERATION_MATCH)
(def IF_METAGENERATION_NOT_MATCH
"Enum Constant.
type: com.google.cloud.storage.spi.v1.StorageRpc$Option"
StorageRpc$Option/IF_METAGENERATION_NOT_MATCH)
(def IF_GENERATION_MATCH
"Enum Constant.
type: com.google.cloud.storage.spi.v1.StorageRpc$Option"
StorageRpc$Option/IF_GENERATION_MATCH)
(def IF_GENERATION_NOT_MATCH
"Enum Constant.
type: com.google.cloud.storage.spi.v1.StorageRpc$Option"
StorageRpc$Option/IF_GENERATION_NOT_MATCH)
(def IF_SOURCE_METAGENERATION_MATCH
"Enum Constant.
type: com.google.cloud.storage.spi.v1.StorageRpc$Option"
StorageRpc$Option/IF_SOURCE_METAGENERATION_MATCH)
(def IF_SOURCE_METAGENERATION_NOT_MATCH
"Enum Constant.
type: com.google.cloud.storage.spi.v1.StorageRpc$Option"
StorageRpc$Option/IF_SOURCE_METAGENERATION_NOT_MATCH)
(def IF_SOURCE_GENERATION_MATCH
"Enum Constant.
type: com.google.cloud.storage.spi.v1.StorageRpc$Option"
StorageRpc$Option/IF_SOURCE_GENERATION_MATCH)
(def IF_SOURCE_GENERATION_NOT_MATCH
"Enum Constant.
type: com.google.cloud.storage.spi.v1.StorageRpc$Option"
StorageRpc$Option/IF_SOURCE_GENERATION_NOT_MATCH)
(def IF_DISABLE_GZIP_CONTENT
"Enum Constant.
type: com.google.cloud.storage.spi.v1.StorageRpc$Option"
StorageRpc$Option/IF_DISABLE_GZIP_CONTENT)
(def PREFIX
"Enum Constant.
type: com.google.cloud.storage.spi.v1.StorageRpc$Option"
StorageRpc$Option/PREFIX)
(def PROJECTION
"Enum Constant.
type: com.google.cloud.storage.spi.v1.StorageRpc$Option"
StorageRpc$Option/PROJECTION)
(def MAX_RESULTS
"Enum Constant.
type: com.google.cloud.storage.spi.v1.StorageRpc$Option"
StorageRpc$Option/MAX_RESULTS)
(def PAGE_TOKEN
"Enum Constant.
type: com.google.cloud.storage.spi.v1.StorageRpc$Option"
StorageRpc$Option/PAGE_TOKEN)
(def DELIMITER
"Enum Constant.
type: com.google.cloud.storage.spi.v1.StorageRpc$Option"
StorageRpc$Option/DELIMITER)
(def VERSIONS
"Enum Constant.
type: com.google.cloud.storage.spi.v1.StorageRpc$Option"
StorageRpc$Option/VERSIONS)
(def FIELDS
"Enum Constant.
type: com.google.cloud.storage.spi.v1.StorageRpc$Option"
StorageRpc$Option/FIELDS)
(def CUSTOMER_SUPPLIED_KEY
"Enum Constant.
type: com.google.cloud.storage.spi.v1.StorageRpc$Option"
StorageRpc$Option/CUSTOMER_SUPPLIED_KEY)
(def USER_PROJECT
"Enum Constant.
type: com.google.cloud.storage.spi.v1.StorageRpc$Option"
StorageRpc$Option/USER_PROJECT)
(def KMS_KEY_NAME
"Enum Constant.
type: com.google.cloud.storage.spi.v1.StorageRpc$Option"
StorageRpc$Option/KMS_KEY_NAME)
(defn *values
"Returns an array containing the constants of this enum type, in
the order they are declared. This method may be used to iterate
over the constants as follows:
for (StorageRpc.Option c : StorageRpc.Option.values())
System.out.println(c);
returns: an array containing the constants of this enum type, in the order they are declared - `com.google.cloud.storage.spi.v1.StorageRpc$Option[]`"
([]
(StorageRpc$Option/values )))
(defn *value-of
"Returns the enum constant of this type with the specified name.
The string must match exactly an identifier used to declare an
enum constant in this type. (Extraneous whitespace characters are
not permitted.)
name - the name of the enum constant to be returned. - `java.lang.String`
returns: the enum constant with the specified name - `com.google.cloud.storage.spi.v1.StorageRpc$Option`
throws: java.lang.IllegalArgumentException - if this enum type has no constant with the specified name"
(^com.google.cloud.storage.spi.v1.StorageRpc$Option [^java.lang.String name]
(StorageRpc$Option/valueOf name)))
(defn value
"returns: `java.lang.String`"
(^java.lang.String [^StorageRpc$Option this]
(-> this (.value))))
| null | https://raw.githubusercontent.com/clojure-interop/google-cloud-clients/80852d0496057c22f9cdc86d6f9ffc0fa3cd7904/com.google.cloud.storage/src/com/google/cloud/storage/spi/v1/StorageRpc%24Option.clj | clojure | (ns com.google.cloud.storage.spi.v1.StorageRpc$Option
(:refer-clojure :only [require comment defn ->])
(:import [com.google.cloud.storage.spi.v1 StorageRpc$Option]))
(def PREDEFINED_ACL
"Enum Constant.
type: com.google.cloud.storage.spi.v1.StorageRpc$Option"
StorageRpc$Option/PREDEFINED_ACL)
(def PREDEFINED_DEFAULT_OBJECT_ACL
"Enum Constant.
type: com.google.cloud.storage.spi.v1.StorageRpc$Option"
StorageRpc$Option/PREDEFINED_DEFAULT_OBJECT_ACL)
(def IF_METAGENERATION_MATCH
"Enum Constant.
type: com.google.cloud.storage.spi.v1.StorageRpc$Option"
StorageRpc$Option/IF_METAGENERATION_MATCH)
(def IF_METAGENERATION_NOT_MATCH
"Enum Constant.
type: com.google.cloud.storage.spi.v1.StorageRpc$Option"
StorageRpc$Option/IF_METAGENERATION_NOT_MATCH)
(def IF_GENERATION_MATCH
"Enum Constant.
type: com.google.cloud.storage.spi.v1.StorageRpc$Option"
StorageRpc$Option/IF_GENERATION_MATCH)
(def IF_GENERATION_NOT_MATCH
"Enum Constant.
type: com.google.cloud.storage.spi.v1.StorageRpc$Option"
StorageRpc$Option/IF_GENERATION_NOT_MATCH)
(def IF_SOURCE_METAGENERATION_MATCH
"Enum Constant.
type: com.google.cloud.storage.spi.v1.StorageRpc$Option"
StorageRpc$Option/IF_SOURCE_METAGENERATION_MATCH)
(def IF_SOURCE_METAGENERATION_NOT_MATCH
"Enum Constant.
type: com.google.cloud.storage.spi.v1.StorageRpc$Option"
StorageRpc$Option/IF_SOURCE_METAGENERATION_NOT_MATCH)
(def IF_SOURCE_GENERATION_MATCH
"Enum Constant.
type: com.google.cloud.storage.spi.v1.StorageRpc$Option"
StorageRpc$Option/IF_SOURCE_GENERATION_MATCH)
(def IF_SOURCE_GENERATION_NOT_MATCH
"Enum Constant.
type: com.google.cloud.storage.spi.v1.StorageRpc$Option"
StorageRpc$Option/IF_SOURCE_GENERATION_NOT_MATCH)
(def IF_DISABLE_GZIP_CONTENT
"Enum Constant.
type: com.google.cloud.storage.spi.v1.StorageRpc$Option"
StorageRpc$Option/IF_DISABLE_GZIP_CONTENT)
(def PREFIX
"Enum Constant.
type: com.google.cloud.storage.spi.v1.StorageRpc$Option"
StorageRpc$Option/PREFIX)
(def PROJECTION
"Enum Constant.
type: com.google.cloud.storage.spi.v1.StorageRpc$Option"
StorageRpc$Option/PROJECTION)
(def MAX_RESULTS
"Enum Constant.
type: com.google.cloud.storage.spi.v1.StorageRpc$Option"
StorageRpc$Option/MAX_RESULTS)
(def PAGE_TOKEN
"Enum Constant.
type: com.google.cloud.storage.spi.v1.StorageRpc$Option"
StorageRpc$Option/PAGE_TOKEN)
(def DELIMITER
"Enum Constant.
type: com.google.cloud.storage.spi.v1.StorageRpc$Option"
StorageRpc$Option/DELIMITER)
(def VERSIONS
"Enum Constant.
type: com.google.cloud.storage.spi.v1.StorageRpc$Option"
StorageRpc$Option/VERSIONS)
(def FIELDS
"Enum Constant.
type: com.google.cloud.storage.spi.v1.StorageRpc$Option"
StorageRpc$Option/FIELDS)
(def CUSTOMER_SUPPLIED_KEY
"Enum Constant.
type: com.google.cloud.storage.spi.v1.StorageRpc$Option"
StorageRpc$Option/CUSTOMER_SUPPLIED_KEY)
(def USER_PROJECT
"Enum Constant.
type: com.google.cloud.storage.spi.v1.StorageRpc$Option"
StorageRpc$Option/USER_PROJECT)
(def KMS_KEY_NAME
"Enum Constant.
type: com.google.cloud.storage.spi.v1.StorageRpc$Option"
StorageRpc$Option/KMS_KEY_NAME)
(defn *values
"Returns an array containing the constants of this enum type, in
the order they are declared. This method may be used to iterate
over the constants as follows:
for (StorageRpc.Option c : StorageRpc.Option.values())
returns: an array containing the constants of this enum type, in the order they are declared - `com.google.cloud.storage.spi.v1.StorageRpc$Option[]`"
([]
(StorageRpc$Option/values )))
(defn *value-of
"Returns the enum constant of this type with the specified name.
The string must match exactly an identifier used to declare an
enum constant in this type. (Extraneous whitespace characters are
not permitted.)
name - the name of the enum constant to be returned. - `java.lang.String`
returns: the enum constant with the specified name - `com.google.cloud.storage.spi.v1.StorageRpc$Option`
throws: java.lang.IllegalArgumentException - if this enum type has no constant with the specified name"
(^com.google.cloud.storage.spi.v1.StorageRpc$Option [^java.lang.String name]
(StorageRpc$Option/valueOf name)))
(defn value
"returns: `java.lang.String`"
(^java.lang.String [^StorageRpc$Option this]
(-> this (.value))))
|
|
cbaa6410c61dd5f350d544591fece0e1e44c29db081ff77e3bee540bb29f7df2 | helium/erlang-dkg | dkg_distributed_SUITE.erl | -module(dkg_distributed_SUITE).
-include_lib("common_test/include/ct.hrl").
-include_lib("kernel/include/inet.hrl").
-export([
init_per_suite/1,
end_per_suite/1,
init_per_testcase/2,
end_per_testcase/2,
all/0
]).
-export([symmetric_test/1]).
%% common test callbacks
all() -> [symmetric_test].
init_per_suite(Config) ->
os:cmd(os:find_executable("epmd")++" -daemon"),
{ok, Hostname} = inet:gethostname(),
case net_kernel:start([list_to_atom("runner@"++Hostname), shortnames]) of
{ok, _} -> ok;
{error, {already_started, _}} -> ok;
{error, {{already_started, _},_}} -> ok
end,
Config.
end_per_suite(Config) ->
%% per suite cleanup, placeholder
Config.
init_per_testcase(TestCase, Config) ->
    %% assuming each testcase will work with 7 nodes for now
NodeNames = [eric, kenny, kyle, ike, stan, randy, butters],
Nodes = dkg_ct_utils:pmap(fun(Node) ->
dkg_ct_utils:start_node(Node, Config, TestCase)
end, NodeNames),
_ = [dkg_ct_utils:connect(Node) || Node <- NodeNames],
N = length(Nodes),
F = 0,
T = 2,
{ok, _} = ct_cover:add_nodes(Nodes),
[{nodes, Nodes}, {n, N}, {f, F}, {t, T} | Config].
end_per_testcase(_TestCase, Config) ->
Nodes = proplists:get_value(nodes, Config),
dkg_ct_utils:pmap(fun(Node) -> catch ct_slave:stop(Node) end, Nodes),
ok.
%% test cases
symmetric_test(Config) ->
Nodes = proplists:get_value(nodes, Config),
N = proplists:get_value(n, Config),
F = proplists:get_value(f, Config),
T = proplists:get_value(t, Config),
run(N, F, T, Nodes),
ok.
run(N, F, T, Nodes) ->
%% load dkg_worker on each node
{Mod, Bin, _} = code:get_object_code(dkg_worker),
_ = dkg_ct_utils:pmap(fun(Node) ->
rpc:call(Node, erlang, load_module, [Mod, Bin])
end, Nodes),
%% start a dkg_worker on each node
Workers = [{Node, rpc:call(Node,
dkg_worker,
start_link,
[I, N, F, T, <<0>>])} || {I, Node} <- dkg_test_utils:enumerate(Nodes)],
ok = global:sync(),
ct:pal("workers ~p", [Workers]),
[ link(W) || {_, {ok, W}} <- Workers ],
%% begin the DKG
[ dkg_worker:start_round(W) || {_ ,{ok, W}} <- Workers ],
    %% wait for all workers to complete
ok = dkg_ct_utils:wait_until(fun() ->
lists:all(fun({_Node, {ok, W}}) ->
dkg_worker:is_done(W)
end, Workers)
end, 60*2, 1000),
_ = [ ct:pal("~p is_done? :~p", [Node, dkg_worker:is_done(W)]) || {Node, {ok, W}} <- Workers],
true = check_status(Workers),
[ unlink(W) || {_, {ok, W}} <- Workers ].
%% helper functions
check_status(Workers) ->
Statuses = [dkg_worker:dkg_status(W) || {_Node, {ok, W}} <- Workers],
Check1 = lists:all(fun(Status) ->
5 == maps:get(echoes_required, Status) andalso
3 == maps:get(readies_required, Status)
end,
Statuses),
CountShares = lists:foldl(fun(Status, Acc) ->
SharesMap = maps:get(shares_map, Status),
CountDone = maps:fold(fun(_ID, Result, Acc2) ->
case maps:get(done, Result, false) of
true -> Acc2 + 1;
false -> Acc2
end
end, 0, SharesMap),
CountDone + Acc
end,
0,
Statuses),
    %% for each worker, we expect i - 1 shares worst case
MinimumExpectedShares = (length(Workers) - 1) * length(Workers),
Check2 = CountShares >= MinimumExpectedShares,
Check1 andalso Check2.
| null | https://raw.githubusercontent.com/helium/erlang-dkg/a22b841ae6cb31b17e547a6f208e93fa35f04b7f/test/dkg_distributed_SUITE.erl | erlang | common test callbacks
per suite cleanup, placeholder
test cases
load dkg_worker on each node
start a dkg_worker on each node
begin the DKG
helper functions
for each worker, we expect i - 1 shares worse case | -module(dkg_distributed_SUITE).
-include_lib("common_test/include/ct.hrl").
-include_lib("kernel/include/inet.hrl").
-export([
init_per_suite/1,
end_per_suite/1,
init_per_testcase/2,
end_per_testcase/2,
all/0
]).
-export([symmetric_test/1]).
all() -> [symmetric_test].
init_per_suite(Config) ->
os:cmd(os:find_executable("epmd")++" -daemon"),
{ok, Hostname} = inet:gethostname(),
case net_kernel:start([list_to_atom("runner@"++Hostname), shortnames]) of
{ok, _} -> ok;
{error, {already_started, _}} -> ok;
{error, {{already_started, _},_}} -> ok
end,
Config.
end_per_suite(Config) ->
Config.
init_per_testcase(TestCase, Config) ->
assuming each testcase will work with 7 nodes for now
NodeNames = [eric, kenny, kyle, ike, stan, randy, butters],
Nodes = dkg_ct_utils:pmap(fun(Node) ->
dkg_ct_utils:start_node(Node, Config, TestCase)
end, NodeNames),
_ = [dkg_ct_utils:connect(Node) || Node <- NodeNames],
N = length(Nodes),
F = 0,
T = 2,
{ok, _} = ct_cover:add_nodes(Nodes),
[{nodes, Nodes}, {n, N}, {f, F}, {t, T} | Config].
end_per_testcase(_TestCase, Config) ->
Nodes = proplists:get_value(nodes, Config),
dkg_ct_utils:pmap(fun(Node) -> catch ct_slave:stop(Node) end, Nodes),
ok.
symmetric_test(Config) ->
Nodes = proplists:get_value(nodes, Config),
N = proplists:get_value(n, Config),
F = proplists:get_value(f, Config),
T = proplists:get_value(t, Config),
run(N, F, T, Nodes),
ok.
run(N, F, T, Nodes) ->
{Mod, Bin, _} = code:get_object_code(dkg_worker),
_ = dkg_ct_utils:pmap(fun(Node) ->
rpc:call(Node, erlang, load_module, [Mod, Bin])
end, Nodes),
Workers = [{Node, rpc:call(Node,
dkg_worker,
start_link,
[I, N, F, T, <<0>>])} || {I, Node} <- dkg_test_utils:enumerate(Nodes)],
ok = global:sync(),
ct:pal("workers ~p", [Workers]),
[ link(W) || {_, {ok, W}} <- Workers ],
[ dkg_worker:start_round(W) || {_ ,{ok, W}} <- Workers ],
wait for to complete
ok = dkg_ct_utils:wait_until(fun() ->
lists:all(fun({_Node, {ok, W}}) ->
dkg_worker:is_done(W)
end, Workers)
end, 60*2, 1000),
_ = [ ct:pal("~p is_done? :~p", [Node, dkg_worker:is_done(W)]) || {Node, {ok, W}} <- Workers],
true = check_status(Workers),
[ unlink(W) || {_, {ok, W}} <- Workers ].
check_status(Workers) ->
Statuses = [dkg_worker:dkg_status(W) || {_Node, {ok, W}} <- Workers],
Check1 = lists:all(fun(Status) ->
5 == maps:get(echoes_required, Status) andalso
3 == maps:get(readies_required, Status)
end,
Statuses),
CountShares = lists:foldl(fun(Status, Acc) ->
SharesMap = maps:get(shares_map, Status),
CountDone = maps:fold(fun(_ID, Result, Acc2) ->
case maps:get(done, Result, false) of
true -> Acc2 + 1;
false -> Acc2
end
end, 0, SharesMap),
CountDone + Acc
end,
0,
Statuses),
MinimumExpectedShares = (length(Workers) - 1) * length(Workers),
Check2 = CountShares >= MinimumExpectedShares,
Check1 andalso Check2.
|
65892346539eb5e89b075a6795730e38cd4ddbe40c6b005ec71263114d9bf5e3 | racket/picturing-programs | map-image.rkt | #lang racket/base
; Spring 2010: started trying to get this to work.
; Late June 2010: Got build-image and map-image working.
;   Added name->color and get-pixel-color.
;   Added build-masked-image and map-masked-image.
; July 6, 2010: added change-to-color
; July 28, 2010: added map3-image and map4-image.  Is change-to-color really useful?
; Dec. 26, 2010: added color=? to export (duh!)
; Dec. 26, 2010: API for bitmaps has changed for 5.1, so I need to rewrite to match it.
; Dec. 28, 2010: added alphas into the "color" type, and provided an implementation
;   of map-image.  He recommends using racket/draw bitmaps rather than 2htdp/image bitmaps.
; May 10, 2011: added build-image/extra and map-image/extra.
; Dec 1, 2011: allowed map-image and map-image/extra to give their
;   function x and y or not, depending on their arity.  This way one
;   can write a function from color to color, and immediately map it
;   onto an image.
; Apr 27, 2012: get-pixel-color has long had a "cache" of one image so it doesn't need
;   to keep re-rendering.  Experimenting with increasing this cache to two images, so we
;   can call get-pixel-color on two images in alternation without thrashing.  The cache
;   itself seems to work, and having the cache size >= the number of images DOES improve
;   performance for a series of get-pixel-color calls rotating among several images (each
;   render seems to take about a ms).
; Apr 28, 2012: added fold-image and fold-image/extra.
(require (except-in racket/draw make-color make-pen)
racket/snip
racket/class
2htdp/image
(only-in htdp/error natural?)
(only-in mrlib/image-core render-image string->color-object/f))
;(require picturing-programs/book-pictures)
;(require mrlib/image-core)
;(require 2htdp/private/image-more)
;(require 2htdp/private/img-err)
;(require scheme/gui)
(require lang/prim)
(provide-primitives real->int
; maybe-color?
name->color
colorize
get-pixel-color
;pixel-visible?
; change-to-color
color=?
; show-cache
)
(provide-higher-order-primitive map-image (f _))
(provide-higher-order-primitive map3-image (rfunc gfunc bfunc _))
(provide-higher-order-primitive map4-image (rfunc gfunc bfunc afunc _))
;(provide-higher-order-primitive map-masked-image (f _))
(provide-higher-order-primitive build-image (_ _ f))
(provide-higher-order-primitive build3-image (_ _ rfunc gfunc bfunc))
(provide-higher-order-primitive build4-image (_ _ rfunc gfunc bfunc afunc))
;(provide-higher-order-primitive build-masked-image (_ _ f))
(provide-higher-order-primitive build-image/extra (_ _ f _))
(provide-higher-order-primitive map-image/extra (f _ _))
(provide-higher-order-primitive fold-image (f _ _))
(provide-higher-order-primitive fold-image/extra (f _ _ _))
(module+ test
(require "book-pictures.rkt" test-engine/racket-tests)
)
; check-procedure-arity : alleged-function nat-num symbol string
; Note: if you invoke these things from a BSL or BSLL program, the syntax checker will
; catch non-procedure arguments before the "(and (procedure? f) ..." test ever sees them,
; but that's no longer true if you invoke them from an ISLL, ASL, or racket program,
; so I'm keeping the test.
(define (check-procedure-arity f n func-name msg)
(unless (and (procedure? f) (procedure-arity-includes? f n))
(error func-name msg)))
(define transparent (make-color 0 0 0 0))
(define (maybe-color? thing)
(or (color? thing)
(eqv? thing #f)
; (image-color? thing) ; handles string & symbol color names
))
(define (broad-color? thing)
(or (maybe-color? thing)
(image-color? thing)))
; color->color% : does the obvious
; Note that color% doesn't have an alpha component, so alpha is lost.
(define (color->color% c)
(if (string? c)
c
(make-object color%
(color-red c)
(color-green c)
(color-blue c))))
; color%->color : does the obvious, with alpha defaulting to full-opaque.
(define (color%->color c)
(make-color (send c red)
(send c green)
(send c blue)))
; name->color : string-or-symbol -> maybe-color
(define (name->color name)
(unless (or (string? name) (symbol? name))
(error 'name->color
(format "Expected a string or symbol, but received ~v" name)))
(let [[result (string->color-object/f
(if (string? name)
name
(symbol->string name)))]]
(if result
(color%->color result)
#f)))
(module+ test
(check-expect (name->color "red") (make-color 255 0 0 255))
(check-expect (name->color "plaid") #f)
;; "grey" is normalized to "gray" by normalize-color-string
(check-expect (name->color "grey") (make-color 190 190 190 255))
  (check-error (name->color 7) "name->color: Expected a string or symbol, but received 7")
)
; colorize : broad-color -> color -- returns #f for unrecognized names
(define (colorize thing)
(cond [(color? thing) thing]
[(eqv? thing #f) transparent]
[(image-color? thing) (name->color thing)]
[else (error 'colorize (format "Expected a color, but received ~v" thing))]))
; colorize-func : (... -> broad-color) -> (... -> color)
(define (colorize-func f)
(compose colorize f))
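; Quick usage sketch (added for illustration): colorize accepts a color struct,
; a color name, or #f, and should always hand back a color struct.
(module+ test
  (check-expect (colorize "red") (make-color 255 0 0 255))
  (check-expect (colorize #f) transparent)
  (check-expect (colorize (make-color 1 2 3 4)) (make-color 1 2 3 4)))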
;; natural? : anything -> boolean
;(define (natural? it)
; (and (integer? it)
; (>= it 0)))
; color=? : broad-color broad-color -> boolean
(define (color=? c1 c2)
(let [[rc1 (colorize c1)]
[rc2 (colorize c2)]]
(unless (color? rc1)
(error 'color=?
(format "Expected a color or color name as first argument, but received ~v" c1)))
(unless (color? rc2)
(error 'color=?
(format "Expected a color or color name as second argument, but received ~v" c2)))
(and (= (color-alpha rc1) (color-alpha rc2)) ; Both alphas MUST be equal.
           (or (= (color-alpha rc1) 0) ; If both are transparent, ignore rgb.
(and (= (color-red rc1) (color-red rc2))
(= (color-green rc1) (color-green rc2))
(= (color-blue rc1) (color-blue rc2)))))))
(module+ test
(check-expect (color=? "red" (make-color 255 0 0)) #t)
(check-expect (color=? (make-color 0 255 0) 'green) #t)
(check-expect (color=? "red" (make-color 255 0 1)) #f)
(check-expect (color=? (make-color 0 255 0 254) 'green) #f)
(check-expect (color=? (make-color 255 0 0 0) (make-color 0 255 0 0)) #t) ; if both alphas are 0...
(check-error (color=? 87 (make-color 87 87 87)) "colorize: Expected a color, but received 87")
(check-error (color=? "red" #t) "colorize: Expected a color, but received #t")
)
(define (real->int num)
(inexact->exact (round num)))
; get-px : x y w h bytes -> color
(define (get-px x y w h bytes)
(define offset (* 4 (+ x (* y w))))
(make-color (bytes-ref bytes (+ offset 1))
(bytes-ref bytes (+ offset 2))
(bytes-ref bytes (+ offset 3))
(bytes-ref bytes offset)))
; set-px! : bytes x y w h color -> void
(define (set-px! bytes x y w h new-color)
(define offset (* 4 (+ x (* y w))))
(bytes-set! bytes offset (color-alpha new-color))
(bytes-set! bytes (+ offset 1) (color-red new-color))
(bytes-set! bytes (+ offset 2) (color-green new-color))
(bytes-set! bytes (+ offset 3) (color-blue new-color)))
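; Round-trip sketch (added for illustration): with the ARGB layout above, the
; pixel at (x,y) in a w-by-h buffer starts at byte 4*(x + y*w), so writing a
; color with set-px! and reading it back with get-px should give the same color.
(module+ test
  (check-expect (let [[buf (make-bytes (* 4 2 2) 0)]]
                  (set-px! buf 1 0 2 2 (make-color 10 20 30 40))
                  (get-px 1 0 2 2 buf))
                (make-color 10 20 30 40)))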
; get-pixel-color : x y image -> color
; This will remember the last CACHE-SIZE images on which it was called.
; Really terrible performance if you call it in alternation
; on CACHE-SIZE+1 different images, but should be OK if you call it
; lots of times on the same image.
; Returns transparent if you ask about a position outside the picture.
(define CACHE-SIZE 3)
(define-struct ib (image bytes) #:transparent)
; A cache is a list of at most CACHE-SIZE ib's.
; search-cache: image cache -> bytes or #f
(define (search-cache pic cache)
(cond [(null? cache) #f]
[(eqv? pic (ib-image (car cache))) (ib-bytes (car cache))]
[else (search-cache pic (cdr cache))]))
; We'll do a simple LRU cache-replacement.
; add-and-drop : ib cache -> cache
; preserves size
(define (add-and-drop new-ib cache)
(cons new-ib (drop-last cache)))
; drop-last : non-empty list -> list
(define (drop-last L)
(cond [(null? L) (error 'drop-last "list is empty")]
[(null? (cdr L)) '()]
[else (cons (car L) (drop-last (cdr L)))]))
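; Tiny sketch of the replacement policy (added for illustration): the newest
; entry should go on the front while the oldest falls off the end.
(module+ test
  (check-expect (add-and-drop 'new '(a b c)) '(new a b)))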
(define cache (build-list CACHE-SIZE (lambda (n) (ib #f #f))))
(define (show-cache) (map ib-image cache)) ; exported temporarily for debugging
(define (get-pixel-color x y pic)
(let* [(w (image-width pic))
(h (image-height pic))
(bytes
(or (search-cache pic cache)
(let* [(bm (make-bitmap w h))
(bmdc (make-object bitmap-dc% bm))
(new-bytes (make-bytes (* 4 w h)))]
(render-image pic bmdc 0 0)
(send bmdc set-bitmap #f)
(send bm get-argb-pixels 0 0 w h new-bytes)
(set! cache (add-and-drop (ib pic new-bytes) cache))
new-bytes)))]
(if (and (<= 0 x (sub1 w))
(<= 0 y (sub1 h)))
(get-px x y w h bytes)
transparent))
)
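; Usage sketch (added for illustration): an interior pixel of a solid rectangle
; should read back as the fill color -- assuming solid shapes render with exact,
; fully opaque pixels, as the map-image tests later in this file also rely on --
; and any out-of-range position reads back as transparent.
(module+ test
  (check-expect (get-pixel-color 3 2 (rectangle 10 5 "solid" "blue"))
                (make-color 0 0 255 255))
  (check-expect (get-pixel-color 50 50 (rectangle 10 5 "solid" "blue"))
                transparent))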
; build-image-internal : nat(width) nat(height) (nat(x) nat(y) -> color) -> image
(define (build-image-internal w h f)
(define bm (make-bitmap w h))
(define bdc (make-object bitmap-dc% bm))
(define bytes (make-bytes (* w h 4)))
(for* ((y (in-range 0 h))
(x (in-range 0 w)))
(set-px! bytes x y w h (f x y))
)
(send bm set-argb-pixels 0 0 w h bytes)
(make-object image-snip% bm))
; build-image : nat(width) nat(height) (nat(x) nat(y) -> broad-color) -> image
(define (build-image w h f)
(unless (natural? w)
(error 'build-image
(format "Expected a natural number as first argument, but received ~v" w)))
(unless (natural? h)
(error 'build-image
(format "Expected a natural number as second argument, but received ~v" h)))
(check-procedure-arity f 2 'build-image "Expected a function with contract num(x) num(y) -> color as third argument")
(build-image-internal w h (colorize-func f)))
(module+ test
(check-expect (build-image 50 30 (lambda (x y) "red"))
(rectangle 50 30 "solid" "red"))
(check-error (build-image "a" 30 (lambda (x y) "red"))
"build-image: Expected a natural number as first argument, but received \"a\"")
(check-error (build-image 50 #f (lambda (x y) "red"))
"build-image: Expected a natural number as second argument, but received #f")
(check-error (build-image 50 30 70)
"build-image: Expected a function with contract num(x) num(y) -> color as third argument")
(check-error (build-image 50 30 add1)
"build-image: Expected a function with contract num(x) num(y) -> color as third argument")
(check-error (build-image 50 30 +)
"colorize: Expected a color, but received 0")
)
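; A less uniform sketch (added for illustration): the pixel function can branch
; on x and y; painting the left half red and the right half blue should match a
; beside of two solid rectangles, the same comparison style used elsewhere here.
(module+ test
  (check-expect (build-image 10 4 (lambda (x y) (if (< x 5) "red" "blue")))
                (beside (rectangle 5 4 "solid" "red") (rectangle 5 4 "solid" "blue"))))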
; build-image/extra : nat(width) nat(height) (nat(x) nat(y) any -> broad-color) any -> image
; Like build-image, but passes a fixed extra argument to every call of the function.
; For students who don't yet know function closures.
(define (build-image/extra w h f extra)
(unless (natural? w)
(error 'build-image/extra
(format "Expected a natural number as first argument, but received ~v" w)))
(unless (natural? h)
(error 'build-image/extra
(format "Expected a natural number as second argument, but received ~v" h)))
(check-procedure-arity f 3 'build-image/extra "Expected a function with contract num(x) num(y) any -> color as third argument")
(build-image-internal w h
(colorize-func (lambda (x y) (f x y extra)))))
(module+ test
(check-expect (build-image/extra 50 30 (lambda (x y dummy) "red") "blue")
(rectangle 50 30 "solid" "red"))
(check-error (build-image/extra "a" 30 (lambda (x y dummy) "red") "blue")
"build-image/extra: Expected a natural number as first argument, but received \"a\"")
(check-error (build-image/extra 50 #f (lambda (x y dummy) "red") "blue")
"build-image/extra: Expected a natural number as second argument, but received #f")
(check-error (build-image/extra 50 30 70 "blue")
"build-image/extra: Expected a function with contract num(x) num(y) any -> color as third argument")
(check-error (build-image/extra 50 30 add1 "blue")
"build-image/extra: Expected a function with contract num(x) num(y) any -> color as third argument")
(check-error (build-image/extra 50 30 + 7)
"colorize: Expected a color, but received 7")
)
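; Sketch (added for illustration): the extra argument is handed unchanged to
; every pixel, so it can carry data such as the fill color itself.
(module+ test
  (check-expect (build-image/extra 8 6 (lambda (x y c) c) "green")
                (rectangle 8 6 "solid" "green")))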
; check-component : anything symbol string -> anything
; returns first argument unaltered if it's an integer in [0-255]
(define (check-component it plaintiff message)
(if (and (integer? it) (>= it 0) (<= it 255))
it
(error plaintiff message)))
; build3-image : nat(width) nat(height) rfunc gfunc bfunc -> image
;    where each of rfunc, gfunc, bfunc is (nat(x) nat(y) -> nat)
(define (build3-image w h rfunc gfunc bfunc)
(unless (natural? w)
(error 'build3-image
(format "Expected a natural number as first argument, but received ~v" w)))
(unless (natural? h)
(error 'build3-image
(format "Expected a natural number as second argument, but received ~v" h)))
(check-procedure-arity rfunc 2 'build3-image "Expected a function with contract num(x) num(y) -> [0-255] as third argument")
(check-procedure-arity gfunc 2 'build3-image "Expected a function with contract num(x) num(y) -> [0-255] as fourth argument")
(check-procedure-arity bfunc 2 'build3-image "Expected a function with contract num(x) num(y) -> [0-255] as fifth argument")
(build-image-internal w h
(lambda (x y)
(make-color (check-component (rfunc x y) 'build3-image "Expected third argument to return integer in range 0-255")
(check-component (gfunc x y) 'build3-image "Expected fourth argument to return integer in range 0-255")
(check-component (bfunc x y) 'build3-image "Expected fifth argument to return integer in range 0-255")
)))
)
(module+ test
(check-expect (build3-image 7 7 (lambda (x y) 0) (lambda (x y) 255) (lambda (x y) 0))
(square 7 "solid" "green"))
(check-error (build3-image 100 100 add1 + +)
"build3-image: Expected a function with contract num(x) num(y) -> [0-255] as third argument")
(check-error (build3-image 100 100 + add1 +)
"build3-image: Expected a function with contract num(x) num(y) -> [0-255] as fourth argument")
(check-error (build3-image 100 100 + + add1)
"build3-image: Expected a function with contract num(x) num(y) -> [0-255] as fifth argument")
(check-error (build3-image 100 100 * + +)
"build3-image: Expected third argument to return integer in range 0-255") ; too big
(check-error (build3-image 100 100 + - +)
"build3-image: Expected fourth argument to return integer in range 0-255") ; too small
(check-error (build3-image 100 100 + + (compose sqrt +))
"build3-image: Expected fifth argument to return integer in range 0-255") ; not an integer
)
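; Sketch (added for illustration): constant channel functions should give a
; solid color; red=255, green=0, blue=0 yields a solid red square.
(module+ test
  (check-expect (build3-image 6 6 (lambda (x y) 255) (lambda (x y) 0) (lambda (x y) 0))
                (square 6 "solid" "red")))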
; build4-image : nat(width) nat(height) rfunc gfunc bfunc afunc -> image
;    where each of rfunc, gfunc, bfunc, afunc is (nat(x) nat(y) -> nat)
(define (build4-image w h rfunc gfunc bfunc afunc)
(unless (natural? w)
(error 'build4-image
(format "Expected a natural number as first argument, but received ~v" w)))
(unless (natural? h)
(error 'build4-image
(format "Expected a natural number as second argument, but received ~v" h)))
(check-procedure-arity rfunc 2 'build4-image "Expected a function with contract num(x) num(y) -> [0-255] as third argument")
(check-procedure-arity gfunc 2 'build4-image "Expected a function with contract num(x) num(y) -> [0-255] as fourth argument")
(check-procedure-arity bfunc 2 'build4-image "Expected a function with contract num(x) num(y) -> [0-255] as fifth argument")
(check-procedure-arity afunc 2 'build4-image "Expected a function with contract num(x) num(y) -> [0-255] as sixth argument")
(build-image-internal w h
(lambda (x y)
(make-color (check-component (rfunc x y) 'build4-image "Expected third argument to return integer in range 0-255")
(check-component (gfunc x y) 'build4-image "Expected fourth argument to return integer in range 0-255")
(check-component (bfunc x y) 'build4-image "Expected fifth argument to return integer in range 0-255")
(check-component (afunc x y) 'build4-image "Expected sixth argument to return integer in range 0-255")
))))
(module+ test
(check-expect (build4-image 5 3
(lambda (x y) 0)
(lambda (x y) 0)
(lambda (x y) 255)
(lambda (x y) 127))
(rectangle 5 3 "solid" (make-color 0 0 255 127)))
(check-error (build4-image 100 100 add1 + + +)
"build4-image: Expected a function with contract num(x) num(y) -> [0-255] as third argument")
(check-error (build4-image 100 100 + add1 + +)
"build4-image: Expected a function with contract num(x) num(y) -> [0-255] as fourth argument")
(check-error (build4-image 100 100 + + add1 +)
"build4-image: Expected a function with contract num(x) num(y) -> [0-255] as fifth argument")
(check-error (build4-image 100 100 + + + add1)
"build4-image: Expected a function with contract num(x) num(y) -> [0-255] as sixth argument")
(check-error (build4-image 100 100 + + + (lambda (x y) "hello world"))
"build4-image: Expected sixth argument to return integer in range 0-255") ; not even a number
)
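; Sketch (added for illustration): the fourth channel function sets the alpha,
; so a constant alpha below 255 should produce a uniformly translucent image.
(module+ test
  (check-expect (build4-image 4 2
                              (lambda (x y) 0)
                              (lambda (x y) 255)
                              (lambda (x y) 0)
                              (lambda (x y) 128))
                (rectangle 4 2 "solid" (make-color 0 255 0 128))))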
; map-image-internal : (int int color -> color) image -> image
(define (map-image-internal f img)
(define w (image-width img))
(define h (image-height img))
(define bm (make-bitmap w h))
(define bdc (make-object bitmap-dc% bm))
(render-image img bdc 0 0)
(send bdc set-bitmap #f)
(define bytes (make-bytes (* w h 4)))
(send bm get-argb-pixels 0 0 w h bytes)
(for* ((y (in-range 0 h))
(x (in-range 0 w)))
(define answer (f x y (get-px x y w h bytes)))
(if (color? answer)
(set-px! bytes x y w h answer)
(error 'map-image "Expected a function that returns a color")))
(send bm set-argb-pixels 0 0 w h bytes)
(make-object image-snip% bm))
; map-image : ([int int] color -> broad-color) image -> image
(define (map-image f img)
(unless (image? img)
(error 'map-image
(format "Expected an image as second argument, but received ~v" img)))
(cond [(procedure-arity-includes? f 3)
(map-image-internal (colorize-func f) img)]
[(procedure-arity-includes? f 1) ; allow f : color->color as a simple case
(map-image-internal (colorize-func (lambda (x y c) (f c))) img)]
[else (error 'map-image "Expected a function of one or three parameters, returning a color, as first argument")]))
(module+ test
(check-expect (map-image (lambda (c) "blue") (rectangle 5 3 "solid" "green"))
(rectangle 5 3 "solid" "blue"))
(check-expect (map-image (lambda (x y c) (if (< x 5) "blue" "green")) (rectangle 10 3 "solid" "red"))
(beside (rectangle 5 3 "solid" "blue") (rectangle 5 3 "solid" "green")))
(check-error (map-image (lambda (c) "blue") "green")
"map-image: Expected an image as second argument, but received \"green\"")
(check-error (map-image (lambda (c) 0) (rectangle 5 3 "solid" "green"))
"colorize: Expected a color, but received 0")
)
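; Sketch (added for illustration): a one-argument function is applied to each
; pixel's color; inverting every channel of a solid white image should give a
; solid black one.
(module+ test
  (check-expect (map-image (lambda (c) (make-color (- 255 (color-red c))
                                                   (- 255 (color-green c))
                                                   (- 255 (color-blue c))))
                           (rectangle 6 4 "solid" "white"))
                (rectangle 6 4 "solid" "black")))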
; map-image/extra : ([int int] color X -> broad-color) image X -> image
; Like map-image, but passes a fixed extra argument to every call of the function.
; For students who don't yet know function closures.
(define (map-image/extra f img extra)
(unless (image? img)
(error 'map-image/extra
(format "Expected an image as second argument, but received ~v" img)))
(cond [(procedure-arity-includes? f 4)
(map-image-internal (colorize-func (lambda (x y c) (f x y c extra))) img)]
[(procedure-arity-includes? f 2)
(map-image-internal (colorize-func (lambda (x y c) (f c extra))) img)]
[else (error 'map-image/extra "Expected a function taking two or four parameters, returning a color, as first argument")]))
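; Sketch (added for illustration): the extra argument can act as a parameter of
; the pixel function, here a brightness threshold applied to the red channel.
(module+ test
  (check-expect (map-image/extra (lambda (c threshold)
                                   (if (>= (color-red c) threshold) "white" "black"))
                                 (rectangle 5 4 "solid" "red")
                                 128)
                (rectangle 5 4 "solid" "white")))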
; The version for use before students have seen structs:
; map3-image :
;    (int(x) int(y) int(r) int(g) int(b) -> int(r))
;    (int(x) int(y) int(r) int(g) int(b) -> int(g))
;    (int(x) int(y) int(r) int(g) int(b) -> int(b))
; image -> image
; Note: by default, preserves alpha values from old image.
(define (map3-image rfunc gfunc bfunc pic)
(check-procedure-arity rfunc 5 'map3-image "Expected a function with contract num(x) num(y) num(r) num(g) num(b) -> [0-255] as first argument")
(check-procedure-arity gfunc 5 'map3-image "Expected a function with contract num(x) num(y) num(r) num(g) num(b) -> [0-255] as second argument")
(check-procedure-arity bfunc 5 'map3-image "Expected a function with contract num(x) num(y) num(r) num(g) num(b) -> [0-255] as third argument")
(unless (image? pic)
(error 'map3-image
(format "Expected an image as fourth argument, but received ~v" pic)))
(map-image-internal
(lambda (x y c)
(define r (color-red c))
(define g (color-green c))
(define b (color-blue c))
(make-color (check-component (rfunc x y r g b) 'map3-image "Expected first argument to return integer in range 0-255")
(check-component (gfunc x y r g b) 'map3-image "Expected second argument to return integer in range 0-255")
(check-component (bfunc x y r g b) 'map3-image "Expected third argument to return integer in range 0-255")
(color-alpha c)))
pic))
(module+ test
(check-error (map3-image add1 + + pic:bloch)
"map3-image: Expected a function with contract num(x) num(y) num(r) num(g) num(b) -> [0-255] as first argument")
(check-error (map3-image + add1 + pic:bloch)
"map3-image: Expected a function with contract num(x) num(y) num(r) num(g) num(b) -> [0-255] as second argument")
(check-error (map3-image + + add1 pic:bloch)
"map3-image: Expected a function with contract num(x) num(y) num(r) num(g) num(b) -> [0-255] as third argument")
(check-error (map3-image + + + 17)
"map3-image: Expected an image as fourth argument, but received 17")
(check-error (map3-image - max max (rectangle 5 3 "solid" "blue")) ; too small
"map3-image: Expected first argument to return integer in range 0-255")
(check-error (map3-image max - max (rectangle 5 3 "solid" "blue"))
"map3-image: Expected second argument to return integer in range 0-255")
(check-error (map3-image max max - (rectangle 5 3 "solid" "blue"))
"map3-image: Expected third argument to return integer in range 0-255")
(check-error (map3-image + max max (rectangle 5 3 "solid" "blue"))
"map3-image: Expected first argument to return integer in range 0-255") ; too big
)
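; Sketch (added for illustration): swapping the red and green channel functions
; should turn a solid red image into a solid green one, leaving blue and alpha alone.
(module+ test
  (check-expect (map3-image (lambda (x y r g b) g)
                            (lambda (x y r g b) r)
                            (lambda (x y r g b) b)
                            (rectangle 5 3 "solid" "red"))
                (rectangle 5 3 "solid" "green")))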
; map4-image :
;    (int(x) int(y) int(r) int(g) int(b) int(a) -> int(r))
;    (int(x) int(y) int(r) int(g) int(b) int(a) -> int(g))
;    (int(x) int(y) int(r) int(g) int(b) int(a) -> int(b))
;    (int(x) int(y) int(r) int(g) int(b) int(a) -> int(a))
; image -> image
(define (map4-image rfunc gfunc bfunc afunc pic)
(check-procedure-arity rfunc 6 'map4-image "Expected a function with contract num(x) num(y) num(r) num(g) num(b) num(a) -> [0-255] as first argument")
(check-procedure-arity gfunc 6 'map4-image "Expected a function with contract num(x) num(y) num(r) num(g) num(b) num(a) -> [0-255] as second argument")
(check-procedure-arity bfunc 6 'map4-image "Expected a function with contract num(x) num(y) num(r) num(g) num(b) num(a) -> [0-255] as third argument")
(check-procedure-arity afunc 6 'map4-image "Expected a function with contract num(x) num(y) num(r) num(g) num(b) num(a) -> [0-255] as fourth argument")
(unless (image? pic)
(error 'map4-image
"Expected an image as fifth argument, but received ~v" pic))
(map-image-internal
(lambda (x y c)
(define r (color-red c))
(define g (color-green c))
(define b (color-blue c))
(define a (color-alpha c))
(make-color (check-component (rfunc x y r g b a) 'map4-image "Expected first argument to return integer in range 0-255")
(check-component (gfunc x y r g b a) 'map4-image "Expected second argument to return integer in range 0-255")
(check-component (bfunc x y r g b a) 'map4-image "Expected third argument to return integer in range 0-255")
(check-component (afunc x y r g b a) 'map4-image "Expected fourth argument to return integer in range 0-255")
))
pic))
(module+ test
(check-error (map4-image add1 + + + pic:bloch)
"map4-image: Expected a function with contract num(x) num(y) num(r) num(g) num(b) num(a) -> [0-255] as first argument")
(check-error (map4-image + add1 + + pic:bloch)
"map4-image: Expected a function with contract num(x) num(y) num(r) num(g) num(b) num(a) -> [0-255] as second argument")
(check-error (map4-image + + add1 + pic:bloch)
"map4-image: Expected a function with contract num(x) num(y) num(r) num(g) num(b) num(a) -> [0-255] as third argument")
(check-error (map4-image + + + add1 pic:bloch)
"map4-image: Expected a function with contract num(x) num(y) num(r) num(g) num(b) num(a) -> [0-255] as fourth argument")
(check-error (map4-image + + + + 17)
"map4-image: Expected an image as fifth argument, but received 17")
)
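; Sketch (added for illustration): the fourth function controls the output alpha,
; so returning a constant below 255 should make an opaque image uniformly translucent.
(module+ test
  (check-expect (map4-image (lambda (x y r g b a) r)
                            (lambda (x y r g b a) g)
                            (lambda (x y r g b a) b)
                            (lambda (x y r g b a) 100)
                            (rectangle 5 3 "solid" "blue"))
                (rectangle 5 3 "solid" (make-color 0 0 255 100))))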
; fold-image : ([x y] c X -> X) X image -> X
; fold-image-internal : ([x y] color X -> X) X image -> X
(define (fold-image-internal f init img)
(define w (image-width img))
(define h (image-height img))
(define bm (make-bitmap w h))
(define bdc (make-object bitmap-dc% bm))
(render-image img bdc 0 0)
(send bdc set-bitmap #f)
(define bytes (make-bytes (* w h 4)))
(send bm get-argb-pixels 0 0 w h bytes)
(define answer init)
(for* ((y (in-range 0 h))
(x (in-range 0 w)))
(set! answer (f x y (get-px x y w h bytes) answer)))
answer)
(define (fold-image f init img)
(unless (image? img)
(error 'fold-image
(format "Expected an image as third argument, but received ~v" img)))
(cond [(procedure-arity-includes? f 4)
(fold-image-internal f init img)]
[(procedure-arity-includes? f 2) ; allow f : color X->X as a simple case
(fold-image-internal (lambda (x y c old-value) (f c old-value)) init img)]
[else (error 'fold-image "Expected a function of two or four parameters as first argument")]))
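; Sketch (added for illustration): with a two-argument function, fold-image just
; accumulates over every pixel's color; counting the pixels of a 6-by-4 image
; should give 24.
(module+ test
  (check-expect (fold-image (lambda (c so-far) (add1 so-far)) 0
                            (rectangle 6 4 "solid" "red"))
                24))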
; fold-image/extra : ([x y] c X Y -> X) X image Y -> X
(define (fold-image/extra f init img extra)
(unless (image? img)
(error 'fold-image/extra
(format "Expected an image as third argument, but received ~v" img)))
(cond [(procedure-arity-includes? f 5)
(fold-image-internal (lambda (x y c old-value) (f x y c old-value extra)) init img)]
[(procedure-arity-includes? f 3)
(fold-image-internal (lambda (x y c old-value) (f c old-value extra)) init img)]
[else (error 'fold-image/extra "Expected a function taking three or five parameters as first argument")]
))
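; Sketch (added for illustration): the extra argument is threaded into every
; step; treating it as a per-pixel weight, a 4-by-3 image folded with weight 2
; should yield 24.
(module+ test
  (check-expect (fold-image/extra (lambda (c so-far weight) (+ so-far weight)) 0
                                  (rectangle 4 3 "solid" "red")
                                  2)
                24))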
(module+ test
; more checks
  ; (check-error (map-image (lambda (c) c) pic:bloch)
; "No, this should NOT produce an error.")
(test)
) ; end of test module
| null | https://raw.githubusercontent.com/racket/picturing-programs/9061b6c6f93a2aeba58a785a75c324225e12a47c/picturing-programs/private/map-image.rkt | racket | Added name->color and get-pixel-color.
Added build-masked-image and map-masked-image.
can write a function from color to color, and immediately map it
onto an image.
itself seems to work, and having the cache size >= the number of images DOES improve
performance for a series of get-pixel-color calls rotating among several images (each
(require picturing-programs/book-pictures)
(require scheme/gui)
maybe-color?
pixel-visible?
change-to-color
show-cache
(provide-higher-order-primitive map-masked-image (f _))
(provide-higher-order-primitive build-masked-image (_ _ f))
catch non-procedure arguments before the "(and (procedure? f) ..." test ever sees them,
but that's no longer true if you invoke them from an ISLL, ASL, or racket program,
so I'm keeping the test.
(image-color? thing) ; handles string & symbol color names
color->color% : does the obvious
Note that color% doesn't have an alpha component, so alpha is lost.
color%->color : does the obvious, with alpha defaulting to full-opaque.
name->color : string-or-symbol -> maybe-color
"grey" is normalized to "gray" by normalize-color-string
colorize : broad-color -> color -- returns #f for unrecognized names
colorize-func : (... -> broad-color) -> (... -> color)
natural? : anything -> boolean
(define (natural? it)
(and (integer? it)
(>= it 0)))
color=? : broad-color broad-color -> boolean
Both alphas MUST be equal.
if both alphas are 0...
get-px : x y w h bytes -> color
set-px! : bytes x y w h color -> void
get-pixel-color : x y image -> color
This will remember the last CACHE-SIZE images on which it was called.
Really terrible performance if you call it in alternation
lots of times on the same image.
Returns transparent if you ask about a position outside the picture.
search-cache: image cache -> bytes or #f
add-and-drop : ib cache -> cache
preserves size
drop-last : non-empty list -> list
exported temporarily for debugging
Like build-image, but passes a fixed extra argument to every call of the function.
For students who don't yet know function closures.
check-component : anything symbol string -> anything
too big
too small
not an integer
not even a number
map-image-internal : (int int color -> color) image -> image
map-image : ([int int] color -> broad-color) image -> image
allow f : color->color as a simple case
Like map-image, but passes a fixed extra argument to every call of the function.
For students who don't yet know function closures.
The version for use before students have seen structs:
map3-image :
image -> image
Note: by default, preserves alpha values from old image.
too small
too big
map4-image :
image -> image
fold-image : ([x y] c X -> X) X image -> X
allow f : color X->X as a simple case
fold-image/extra : ([x y] c X Y -> X) X image Y -> X
more checks
"No, this should NOT produce an error.")
end of test module | #lang racket/base
Spring 2010 : started trying to get this to work .
Late June 2010 : Got build - image and map - image working .
July 6 , 2010 : added change - to - color
July 28 , 2010 : added map3 - image and - image . Is change - to - color really useful ?
Dec. 26 , 2010 : added color= ? to export ( duh ! )
Dec. 26 , 2010 : API for bitmaps has changed for 5.1 , so I need to rewrite to match it .
Dec. 28 , 2010 : added alphas into the " color " type , and provided an implementation
of map - image . He recommends using racket / draw bitmaps rather than 2htdp / image bitmaps .
May 10 , 2011 : added build - image / extra and map - image / extra .
Dec 1 , 2011 : allowed map - image and map - image / extra to give their
function x and y or not , depending on their arity . This way one
Apr 27 , 2012 : get - pixel - color has long had a " cache " of one image so it does n't need
to keep re - rendering . Experimenting with increasing this cache to two images , so we
can call get - pixel - color on two images in alternation without thrashing . The cache
render seems to take about a ms ) .
Apr 28 , 2012 : added fold - image and fold - image / extra .
(require (except-in racket/draw make-color make-pen)
racket/snip
racket/class
2htdp/image
(only-in htdp/error natural?)
(only-in mrlib/image-core render-image string->color-object/f))
( require / image - core )
( require 2htdp / private / image - more )
( require 2htdp / private / img - err )
(require lang/prim)
(provide-primitives real->int
name->color
colorize
get-pixel-color
color=?
)
(provide-higher-order-primitive map-image (f _))
(provide-higher-order-primitive map3-image (rfunc gfunc bfunc _))
(provide-higher-order-primitive map4-image (rfunc gfunc bfunc afunc _))
(provide-higher-order-primitive build-image (_ _ f))
(provide-higher-order-primitive build3-image (_ _ rfunc gfunc bfunc))
(provide-higher-order-primitive build4-image (_ _ rfunc gfunc bfunc afunc))
(provide-higher-order-primitive build-image/extra (_ _ f _))
(provide-higher-order-primitive map-image/extra (f _ _))
(provide-higher-order-primitive fold-image (f _ _))
(provide-higher-order-primitive fold-image/extra (f _ _ _))
(module+ test
(require "book-pictures.rkt" test-engine/racket-tests)
)
check - procedure - arity : alleged - function nat - num symbol string
Note : if you invoke these things from a BSL or BSLL program , the syntax checker will
(define (check-procedure-arity f n func-name msg)
(unless (and (procedure? f) (procedure-arity-includes? f n))
(error func-name msg)))
(define transparent (make-color 0 0 0 0))
(define (maybe-color? thing)
(or (color? thing)
(eqv? thing #f)
))
(define (broad-color? thing)
(or (maybe-color? thing)
(image-color? thing)))
(define (color->color% c)
(if (string? c)
c
(make-object color%
(color-red c)
(color-green c)
(color-blue c))))
(define (color%->color c)
(make-color (send c red)
(send c green)
(send c blue)))
(define (name->color name)
(unless (or (string? name) (symbol? name))
(error 'name->color
(format "Expected a string or symbol, but received ~v" name)))
(let [[result (string->color-object/f
(if (string? name)
name
(symbol->string name)))]]
(if result
(color%->color result)
#f)))
(module+ test
(check-expect (name->color "red") (make-color 255 0 0 255))
(check-expect (name->color "plaid") #f)
(check-expect (name->color "grey") (make-color 190 190 190 255))
(check-error (name->color 7 "name->color: Expected a string or symbol, but received 7"))
)
(define (colorize thing)
(cond [(color? thing) thing]
[(eqv? thing #f) transparent]
[(image-color? thing) (name->color thing)]
[else (error 'colorize (format "Expected a color, but received ~v" thing))]))
(define (colorize-func f)
(compose colorize f))
(define (color=? c1 c2)
(let [[rc1 (colorize c1)]
[rc2 (colorize c2)]]
(unless (color? rc1)
(error 'color=?
(format "Expected a color or color name as first argument, but received ~v" c1)))
(unless (color? rc2)
(error 'color=?
(format "Expected a color or color name as second argument, but received ~v" c2)))
If both are transparent , ignore rgb .
(and (= (color-red rc1) (color-red rc2))
(= (color-green rc1) (color-green rc2))
(= (color-blue rc1) (color-blue rc2)))))))
(module+ test
(check-expect (color=? "red" (make-color 255 0 0)) #t)
(check-expect (color=? (make-color 0 255 0) 'green) #t)
(check-expect (color=? "red" (make-color 255 0 1)) #f)
(check-expect (color=? (make-color 0 255 0 254) 'green) #f)
(check-error (color=? 87 (make-color 87 87 87)) "colorize: Expected a color, but received 87")
(check-error (color=? "red" #t) "colorize: Expected a color, but received #t")
)
(define (real->int num)
(inexact->exact (round num)))
(define (get-px x y w h bytes)
(define offset (* 4 (+ x (* y w))))
(make-color (bytes-ref bytes (+ offset 1))
(bytes-ref bytes (+ offset 2))
(bytes-ref bytes (+ offset 3))
(bytes-ref bytes offset)))
(define (set-px! bytes x y w h new-color)
(define offset (* 4 (+ x (* y w))))
(bytes-set! bytes offset (color-alpha new-color))
(bytes-set! bytes (+ offset 1) (color-red new-color))
(bytes-set! bytes (+ offset 2) (color-green new-color))
(bytes-set! bytes (+ offset 3) (color-blue new-color)))
on CACHE - SIZE+1 different images , but should be OK if you call it
(define CACHE-SIZE 3)
(define-struct ib (image bytes) #:transparent)
A cache is a list of at most CACHE - SIZE ib 's .
(define (search-cache pic cache)
(cond [(null? cache) #f]
[(eqv? pic (ib-image (car cache))) (ib-bytes (car cache))]
[else (search-cache pic (cdr cache))]))
We 'll do a simple LRU cache - replacement .
(define (add-and-drop new-ib cache)
(cons new-ib (drop-last cache)))
(define (drop-last L)
(cond [(null? L) (error 'drop-last "list is empty")]
[(null? (cdr L)) '()]
[else (cons (car L) (drop-last (cdr L)))]))
(define cache (build-list CACHE-SIZE (lambda (n) (ib #f #f))))
(define (get-pixel-color x y pic)
(let* [(w (image-width pic))
(h (image-height pic))
(bytes
(or (search-cache pic cache)
(let* [(bm (make-bitmap w h))
(bmdc (make-object bitmap-dc% bm))
(new-bytes (make-bytes (* 4 w h)))]
(render-image pic bmdc 0 0)
(send bmdc set-bitmap #f)
(send bm get-argb-pixels 0 0 w h new-bytes)
(set! cache (add-and-drop (ib pic new-bytes) cache))
new-bytes)))]
(if (and (<= 0 x (sub1 w))
(<= 0 y (sub1 h)))
(get-px x y w h bytes)
transparent))
)
build - image - internal : ) ) ( > color ) - > image
(define (build-image-internal w h f)
(define bm (make-bitmap w h))
(define bdc (make-object bitmap-dc% bm))
(define bytes (make-bytes (* w h 4)))
(for* ((y (in-range 0 h))
(x (in-range 0 w)))
(set-px! bytes x y w h (f x y))
)
(send bm set-argb-pixels 0 0 w h bytes)
(make-object image-snip% bm))
build - image : ) ) ( > broad - color ) - > image
(define (build-image w h f)
(unless (natural? w)
(error 'build-image
(format "Expected a natural number as first argument, but received ~v" w)))
(unless (natural? h)
(error 'build-image
(format "Expected a natural number as second argument, but received ~v" h)))
(check-procedure-arity f 2 'build-image "Expected a function with contract num(x) num(y) -> color as third argument")
(build-image-internal w h (colorize-func f)))
(module+ test
(check-expect (build-image 50 30 (lambda (x y) "red"))
(rectangle 50 30 "solid" "red"))
(check-error (build-image "a" 30 (lambda (x y) "red"))
"build-image: Expected a natural number as first argument, but received \"a\"")
(check-error (build-image 50 #f (lambda (x y) "red"))
"build-image: Expected a natural number as second argument, but received #f")
(check-error (build-image 50 30 70)
"build-image: Expected a function with contract num(x) num(y) -> color as third argument")
(check-error (build-image 50 30 add1)
"build-image: Expected a function with contract num(x) num(y) -> color as third argument")
(check-error (build-image 50 30 +)
"colorize: Expected a color, but received 0")
)
build - image / extra : ) ) ( any - > broad - color ) any - > image
(define (build-image/extra w h f extra)
(unless (natural? w)
(error 'build-image/extra
(format "Expected a natural number as first argument, but received ~v" w)))
(unless (natural? h)
(error 'build-image/extra
(format "Expected a natural number as second argument, but received ~v" h)))
(check-procedure-arity f 3 'build-image/extra "Expected a function with contract num(x) num(y) any -> color as third argument")
(build-image-internal w h
(colorize-func (lambda (x y) (f x y extra)))))
(module+ test
(check-expect (build-image/extra 50 30 (lambda (x y dummy) "red") "blue")
(rectangle 50 30 "solid" "red"))
(check-error (build-image/extra "a" 30 (lambda (x y dummy) "red") "blue")
"build-image/extra: Expected a natural number as first argument, but received \"a\"")
(check-error (build-image/extra 50 #f (lambda (x y dummy) "red") "blue")
"build-image/extra: Expected a natural number as second argument, but received #f")
(check-error (build-image/extra 50 30 70 "blue")
"build-image/extra: Expected a function with contract num(x) num(y) any -> color as third argument")
(check-error (build-image/extra 50 30 add1 "blue")
"build-image/extra: Expected a function with contract num(x) num(y) any -> color as third argument")
(check-error (build-image/extra 50 30 + 7)
"colorize: Expected a color, but received 7")
)
returns first argument unaltered if it 's an integer in [ 0 - 255 ]
(define (check-component it plaintiff message)
(if (and (integer? it) (>= it 0) (<= it 255))
it
(error plaintiff message)))
build3 - image : nat(width ) nat(height ) rfunc gfunc bfunc - > image
where each of rfunc , gfunc , is ( nat(x ) nat(y ) - > nat )
(define (build3-image w h rfunc gfunc bfunc)
(unless (natural? w)
(error 'build3-image
(format "Expected a natural number as first argument, but received ~v" w)))
(unless (natural? h)
(error 'build3-image
(format "Expected a natural number as second argument, but received ~v" h)))
(check-procedure-arity rfunc 2 'build3-image "Expected a function with contract num(x) num(y) -> [0-255] as third argument")
(check-procedure-arity gfunc 2 'build3-image "Expected a function with contract num(x) num(y) -> [0-255] as fourth argument")
(check-procedure-arity bfunc 2 'build3-image "Expected a function with contract num(x) num(y) -> [0-255] as fifth argument")
(build-image-internal w h
(lambda (x y)
(make-color (check-component (rfunc x y) 'build3-image "Expected third argument to return integer in range 0-255")
(check-component (gfunc x y) 'build3-image "Expected fourth argument to return integer in range 0-255")
(check-component (bfunc x y) 'build3-image "Expected fifth argument to return integer in range 0-255")
)))
)
(module+ test
(check-expect (build3-image 7 7 (lambda (x y) 0) (lambda (x y) 255) (lambda (x y) 0))
(square 7 "solid" "green"))
(check-error (build3-image 100 100 add1 + +)
"build3-image: Expected a function with contract num(x) num(y) -> [0-255] as third argument")
(check-error (build3-image 100 100 + add1 +)
"build3-image: Expected a function with contract num(x) num(y) -> [0-255] as fourth argument")
(check-error (build3-image 100 100 + + add1)
"build3-image: Expected a function with contract num(x) num(y) -> [0-255] as fifth argument")
(check-error (build3-image 100 100 * + +)
(check-error (build3-image 100 100 + - +)
(check-error (build3-image 100 100 + + (compose sqrt +))
)
build4 - image : nat(width ) nat(height ) rfunc gfunc bfunc afunc - > image
where each of rfunc , gfunc , bfunc , is ( nat(x ) nat(y ) - > nat )
(define (build4-image w h rfunc gfunc bfunc afunc)
(unless (natural? w)
(error 'build4-image
(format "Expected a natural number as first argument, but received ~v" w)))
(unless (natural? h)
(error 'build4-image
(format "Expected a natural number as second argument, but received ~v" h)))
(check-procedure-arity rfunc 2 'build4-image "Expected a function with contract num(x) num(y) -> [0-255] as third argument")
(check-procedure-arity gfunc 2 'build4-image "Expected a function with contract num(x) num(y) -> [0-255] as fourth argument")
(check-procedure-arity bfunc 2 'build4-image "Expected a function with contract num(x) num(y) -> [0-255] as fifth argument")
(check-procedure-arity afunc 2 'build4-image "Expected a function with contract num(x) num(y) -> [0-255] as sixth argument")
(build-image-internal w h
(lambda (x y)
(make-color (check-component (rfunc x y) 'build4-image "Expected third argument to return integer in range 0-255")
(check-component (gfunc x y) 'build4-image "Expected fourth argument to return integer in range 0-255")
(check-component (bfunc x y) 'build4-image "Expected fifth argument to return integer in range 0-255")
(check-component (afunc x y) 'build4-image "Expected sixth argument to return integer in range 0-255")
))))
(module+ test
(check-expect (build4-image 5 3
(lambda (x y) 0)
(lambda (x y) 0)
(lambda (x y) 255)
(lambda (x y) 127))
(rectangle 5 3 "solid" (make-color 0 0 255 127)))
(check-error (build4-image 100 100 add1 + + +)
"build4-image: Expected a function with contract num(x) num(y) -> [0-255] as third argument")
(check-error (build4-image 100 100 + add1 + +)
"build4-image: Expected a function with contract num(x) num(y) -> [0-255] as fourth argument")
(check-error (build4-image 100 100 + + add1 +)
"build4-image: Expected a function with contract num(x) num(y) -> [0-255] as fifth argument")
(check-error (build4-image 100 100 + + + add1)
"build4-image: Expected a function with contract num(x) num(y) -> [0-255] as sixth argument")
(check-error (build4-image 100 100 + + + (lambda (x y) "hello world"))
)
(define (map-image-internal f img)
(define w (image-width img))
(define h (image-height img))
(define bm (make-bitmap w h))
(define bdc (make-object bitmap-dc% bm))
(render-image img bdc 0 0)
(send bdc set-bitmap #f)
(define bytes (make-bytes (* w h 4)))
(send bm get-argb-pixels 0 0 w h bytes)
(for* ((y (in-range 0 h))
(x (in-range 0 w)))
(define answer (f x y (get-px x y w h bytes)))
(if (color? answer)
(set-px! bytes x y w h answer)
(error 'map-image "Expected a function that returns a color")))
(send bm set-argb-pixels 0 0 w h bytes)
(make-object image-snip% bm))
(define (map-image f img)
(unless (image? img)
(error 'map-image
(format "Expected an image as second argument, but received ~v" img)))
(cond [(procedure-arity-includes? f 3)
(map-image-internal (colorize-func f) img)]
(map-image-internal (colorize-func (lambda (x y c) (f c))) img)]
[else (error 'map-image "Expected a function of one or three parameters, returning a color, as first argument")]))
(module+ test
(check-expect (map-image (lambda (c) "blue") (rectangle 5 3 "solid" "green"))
(rectangle 5 3 "solid" "blue"))
(check-expect (map-image (lambda (x y c) (if (< x 5) "blue" "green")) (rectangle 10 3 "solid" "red"))
(beside (rectangle 5 3 "solid" "blue") (rectangle 5 3 "solid" "green")))
(check-error (map-image (lambda (c) "blue") "green")
"map-image: Expected an image as second argument, but received \"green\"")
(check-error (map-image (lambda (c) 0) (rectangle 5 3 "solid" "green"))
"colorize: Expected a color, but received 0")
)
map - image / extra : ( color X - > broad - color ) image X - > image
(define (map-image/extra f img extra)
(unless (image? img)
(error 'map-image/extra
(format "Expected an image as second argument, but received ~v" img)))
(cond [(procedure-arity-includes? f 4)
(map-image-internal (colorize-func (lambda (x y c) (f x y c extra))) img)]
[(procedure-arity-includes? f 2)
(map-image-internal (colorize-func (lambda (x y c) (f c extra))) img)]
[else (error 'map-image/extra "Expected a function taking two or four parameters, returning a color, as first argument")]))
( int(x ) int(y ) int(r ) int(g ) int(b ) - > int(r ) )
( int(x ) int(y ) int(r ) int(g ) int(b ) - > int(g ) )
( int(x ) int(y ) int(r ) int(g ) int(b ) - > int(b ) )
(define (map3-image rfunc gfunc bfunc pic)
(check-procedure-arity rfunc 5 'map3-image "Expected a function with contract num(x) num(y) num(r) num(g) num(b) -> [0-255] as first argument")
(check-procedure-arity gfunc 5 'map3-image "Expected a function with contract num(x) num(y) num(r) num(g) num(b) -> [0-255] as second argument")
(check-procedure-arity bfunc 5 'map3-image "Expected a function with contract num(x) num(y) num(r) num(g) num(b) -> [0-255] as third argument")
(unless (image? pic)
(error 'map3-image
(format "Expected an image as fourth argument, but received ~v" pic)))
(map-image-internal
(lambda (x y c)
(define r (color-red c))
(define g (color-green c))
(define b (color-blue c))
(make-color (check-component (rfunc x y r g b) 'map3-image "Expected first argument to return integer in range 0-255")
(check-component (gfunc x y r g b) 'map3-image "Expected second argument to return integer in range 0-255")
(check-component (bfunc x y r g b) 'map3-image "Expected third argument to return integer in range 0-255")
(color-alpha c)))
pic))
(module+ test
(check-error (map3-image add1 + + pic:bloch)
"map3-image: Expected a function with contract num(x) num(y) num(r) num(g) num(b) -> [0-255] as first argument")
(check-error (map3-image + add1 + pic:bloch)
"map3-image: Expected a function with contract num(x) num(y) num(r) num(g) num(b) -> [0-255] as second argument")
(check-error (map3-image + + add1 pic:bloch)
"map3-image: Expected a function with contract num(x) num(y) num(r) num(g) num(b) -> [0-255] as third argument")
(check-error (map3-image + + + 17)
"map3-image: Expected an image as fourth argument, but received 17")
(check-error (map3-image + max max (rectangle 5 3 "solid" "blue"))
"map3-image: Expected first argument to return integer in range 0-255")
(check-error (map3-image max - max (rectangle 5 3 "solid" "blue"))
"map3-image: Expected second argument to return integer in range 0-255")
(check-error (map3-image max max - (rectangle 5 3 "solid" "blue"))
"map3-image: Expected third argument to return integer in range 0-255")
)
; (int(x) int(y) int(r) int(g) int(b) int(a) -> int(r))
; (int(x) int(y) int(r) int(g) int(b) int(a) -> int(g))
; (int(x) int(y) int(r) int(g) int(b) int(a) -> int(b))
; (int(x) int(y) int(r) int(g) int(b) int(a) -> int(a))
(define (map4-image rfunc gfunc bfunc afunc pic)
(check-procedure-arity rfunc 6 'map4-image "Expected a function with contract num(x) num(y) num(r) num(g) num(b) num(a) -> [0-255] as first argument")
(check-procedure-arity gfunc 6 'map4-image "Expected a function with contract num(x) num(y) num(r) num(g) num(b) num(a) -> [0-255] as second argument")
(check-procedure-arity bfunc 6 'map4-image "Expected a function with contract num(x) num(y) num(r) num(g) num(b) num(a) -> [0-255] as third argument")
(check-procedure-arity afunc 6 'map4-image "Expected a function with contract num(x) num(y) num(r) num(g) num(b) num(a) -> [0-255] as fourth argument")
(unless (image? pic)
(error 'map4-image
"Expected an image as fifth argument, but received ~v" pic))
(map-image-internal
(lambda (x y c)
(define r (color-red c))
(define g (color-green c))
(define b (color-blue c))
(define a (color-alpha c))
(make-color (check-component (rfunc x y r g b a) 'map4-image "Expected first argument to return integer in range 0-255")
(check-component (gfunc x y r g b a) 'map4-image "Expected second argument to return integer in range 0-255")
(check-component (bfunc x y r g b a) 'map4-image "Expected third argument to return integer in range 0-255")
(check-component (afunc x y r g b a) 'map4-image "Expected fourth argument to return integer in range 0-255")
))
pic))
(module+ test
(check-error (map4-image add1 + + + pic:bloch)
"map4-image: Expected a function with contract num(x) num(y) num(r) num(g) num(b) num(a) -> [0-255] as first argument")
(check-error (map4-image + add1 + + pic:bloch)
"map4-image: Expected a function with contract num(x) num(y) num(r) num(g) num(b) num(a) -> [0-255] as second argument")
(check-error (map4-image + + add1 + pic:bloch)
"map4-image: Expected a function with contract num(x) num(y) num(r) num(g) num(b) num(a) -> [0-255] as third argument")
(check-error (map4-image + + + add1 pic:bloch)
"map4-image: Expected a function with contract num(x) num(y) num(r) num(g) num(b) num(a) -> [0-255] as fourth argument")
(check-error (map4-image + + + + 17)
"map4-image: Expected an image as fifth argument, but received 17")
)
; fold-image-internal : ([] color X -> X) X image -> image
(define (fold-image-internal f init img)
(define w (image-width img))
(define h (image-height img))
(define bm (make-bitmap w h))
(define bdc (make-object bitmap-dc% bm))
(render-image img bdc 0 0)
(send bdc set-bitmap #f)
(define bytes (make-bytes (* w h 4)))
(send bm get-argb-pixels 0 0 w h bytes)
(define answer init)
(for* ((y (in-range 0 h))
(x (in-range 0 w)))
(set! answer (f x y (get-px x y w h bytes) answer)))
answer)
(define (fold-image f init img)
(unless (image? img)
(error 'fold-image
(format "Expected an image as third argument, but received ~v" img)))
(cond [(procedure-arity-includes? f 4)
(fold-image-internal f init img)]
[(procedure-arity-includes? f 2)
 (fold-image-internal (lambda (x y c old-value) (f c old-value)) init img)]
[else (error 'fold-image "Expected a function of two or four parameters as first argument")]))
(define (fold-image/extra f init img extra)
(unless (image? img)
(error 'fold-image/extra
(format "Expected an image as third argument, but received ~v" img)))
(cond [(procedure-arity-includes? f 5)
(fold-image-internal (lambda (x y c old-value) (f x y c old-value extra)) init img)]
[(procedure-arity-includes? f 3)
(fold-image-internal (lambda (x y c old-value) (f c old-value extra)) init img)]
[else (error 'fold-image/extra "Expected a function taking three or five parameters as first argument")]
))
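; Illustrative sketch (not in the original file): one way to call fold-image/extra,
; threading a numeric threshold through as the "extra" argument in order to count
; bright-red pixels.  The image name some-picture is a placeholder.
;
; (fold-image/extra
;  (lambda (c count threshold)
;    (if (> (color-red c) threshold) (add1 count) count))
;  0
;  some-picture
;  200)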
(module+ test
; (check-error (map-image (lambda (c) c) pic:bloch)
(test))
|
1fd71ab41b10286b37023a9a9c1aa4dea37c69b04b41e045bf48ef455d9b4999 | INRIA/zelus | unsafe.ml | (***********************************************************************)
(* *)
(* *)
(* Zelus, a synchronous language for hybrid systems *)
(* *)
(* (c) 2020 Paris (see the file) *)
(* *)
(* Copyright Institut National de Recherche en Informatique et en *)
(* Automatique. All rights reserved. This file is distributed under *)
(* the terms of the INRIA Non-Commercial License Agreement (see the *)
(* LICENSE file). *)
(* *)
(* *********************************************************************)
(* safe/unsafe expressions and equations. *)
(* A computation is safe when it is combinatorial, that is, it *)
(* has no side effect, total and no state *)
open Zelus
open Zident
open Deftypes
open Zaux
(** An expression or equation is unsafe if it contains an unsafe operation. *)
let rec exp { e_desc = desc } =
match desc with
| Eapp(_, e, e_list) ->
(* look if (e e1...en) is combinatorial *)
(not (Ztypes.is_combinatorial (List.length e_list) e.e_typ))
|| (exp e) || (List.exists exp e_list)
| Erecord_access(e, _) | Etypeconstraint(e, _) -> exp e
| Erecord(f_e_list) ->
List.exists (fun (_, e) -> exp e) f_e_list
| Erecord_with(e, f_e_list) ->
exp e || List.exists (fun (_, e) -> exp e) f_e_list
| Eseq(e1, e2) -> (exp e1) || (exp e2)
| Elocal _ | Elast _ | Econst _ | Econstr0 _
| Eglobal _ | Eperiod _ | Eop _ -> false
| Elet _ | Eblock _ -> true
| Econstr1(_, e_list) | Etuple(e_list) -> List.exists exp e_list
| Epresent _ | Ematch _ -> assert false
let rec equation { eq_desc = desc } =
match desc with
| EQeq(_, e) | EQinit(_, e) | EQder(_, e, None, []) | EQpluseq(_, e) -> exp e
| EQmatch(_, e, m_h_list) ->
exp e
|| List.exists
(fun { m_body = b_eq_list } -> block_eq_list b_eq_list) m_h_list
| EQreset(eq_list, e) ->
exp e || List.exists equation eq_list
| EQand(eq_list)
| EQbefore(eq_list) -> List.exists equation eq_list
| EQforall
{ for_index = i_list; for_init = init_list; for_body = b_eq_list } ->
let index { desc = desc } =
match desc with
| Einput(_, e) -> exp e
| Eoutput _ -> false
| Eindex(_, e1, e2) -> exp e1 || exp e2 in
let init { desc = desc } =
match desc with
| Einit_last(_, e) -> exp e in
List.exists index i_list ||
List.exists init init_list ||
block_eq_list b_eq_list
| EQder _ | EQnext _ | EQautomaton _
| EQpresent _ | EQemit _ | EQblock _ -> assert false
and block_eq_list { b_locals = l_list; b_body = eq_list } =
(List.exists (fun { l_eq = eq_list } -> List.exists equation eq_list) l_list)
|| List.exists equation eq_list
| null | https://raw.githubusercontent.com/INRIA/zelus/685428574b0f9100ad5a41bbaa416cd7a2506d5e/compiler/rewrite/unsafe.ml | ocaml | *********************************************************************
Zelus, a synchronous language for hybrid systems
Copyright Institut National de Recherche en Informatique et en
LICENSE file).
********************************************************************
safe/unsafe expressions and equations.
A computation is safe when it is combinatorial, that is, it
has no side effect, total and no state
* An expression or equation is unsafe if it contains an unsafe operation.
look if (e e1...en) is combinatorial | ( c ) 2020 Paris ( see the file )
Automatique . All rights reserved . This file is distributed under
the terms of the INRIA Non - Commercial License Agreement ( see the
open Zelus
open Zident
open Deftypes
open Zaux
let rec exp { e_desc = desc } =
match desc with
| Eapp(_, e, e_list) ->
(not (Ztypes.is_combinatorial (List.length e_list) e.e_typ))
|| (exp e) || (List.exists exp e_list)
| Erecord_access(e, _) | Etypeconstraint(e, _) -> exp e
| Erecord(f_e_list) ->
List.exists (fun (_, e) -> exp e) f_e_list
| Erecord_with(e, f_e_list) ->
exp e || List.exists (fun (_, e) -> exp e) f_e_list
| Eseq(e1, e2) -> (exp e1) || (exp e2)
| Elocal _ | Elast _ | Econst _ | Econstr0 _
| Eglobal _ | Eperiod _ | Eop _ -> false
| Elet _ | Eblock _ -> true
| Econstr1(_, e_list) | Etuple(e_list) -> List.exists exp e_list
| Epresent _ | Ematch _ -> assert false
let rec equation { eq_desc = desc } =
match desc with
| EQeq(_, e) | EQinit(_, e) | EQder(_, e, None, []) | EQpluseq(_, e) -> exp e
| EQmatch(_, e, m_h_list) ->
exp e
|| List.exists
(fun { m_body = b_eq_list } -> block_eq_list b_eq_list) m_h_list
| EQreset(eq_list, e) ->
exp e || List.exists equation eq_list
| EQand(eq_list)
| EQbefore(eq_list) -> List.exists equation eq_list
| EQforall
{ for_index = i_list; for_init = init_list; for_body = b_eq_list } ->
let index { desc = desc } =
match desc with
| Einput(_, e) -> exp e
| Eoutput _ -> false
| Eindex(_, e1, e2) -> exp e1 || exp e2 in
let init { desc = desc } =
match desc with
| Einit_last(_, e) -> exp e in
List.exists index i_list ||
List.exists init init_list ||
block_eq_list b_eq_list
| EQder _ | EQnext _ | EQautomaton _
| EQpresent _ | EQemit _ | EQblock _ -> assert false
and block_eq_list { b_locals = l_list; b_body = eq_list } =
(List.exists (fun { l_eq = eq_list } -> List.exists equation eq_list) l_list)
|| List.exists equation eq_list
|
3279c9df40593cbde284c8bf92b4e268f53551a3c21b9877d8728d33530812ed | scalaris-team/scalaris | benchmark_SUITE.erl | 2008 , 2011 Zuse Institute Berlin
% Licensed under the Apache License, Version 2.0 (the "License");
% you may not use this file except in compliance with the License.
% You may obtain a copy of the License at
%
%    http://www.apache.org/licenses/LICENSE-2.0
%
% Unless required by applicable law or agreed to in writing, software
% distributed under the License is distributed on an "AS IS" BASIS,
% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
% See the License for the specific language governing permissions and
% limitations under the License.
%% @author < >
%% @doc Runs the basic benchmarks from src/bench.erl
%% The results are stored in several files in the main directory, so
%% that the buildbot can fetch the data from there.
%% @end
%% @version $Id$
-module(benchmark_SUITE).
-author('').
-vsn('$Id$').
-compile(export_all).
-include("unittest.hrl").
all() ->
[run_increment_1_1000, run_increment_10_100,
run_read_1_100000, run_read_10_10000].
suite() -> [ {timetrap, {seconds, 120}} ].
init_per_suite(Config) ->
{priv_dir, PrivDir} = lists:keyfind(priv_dir, 1, Config),
unittest_helper:make_ring(4, [{config, [{log_path, PrivDir}]}]),
Config.
end_per_suite(_Config) ->
ok.
run_increment_1_1000(_Config) ->
Threads = 1,
Iterations = 10000,
Start = os:timestamp(),
{ok, _} = bench:increment(Threads, Iterations),
Stop = os:timestamp(),
RunTime = erlang:max(1, timer:now_diff(Stop, Start)),
write_result("result_increment_1_10000.txt", Threads * Iterations / RunTime * 1000000.0),
ok.
run_increment_10_100(_Config) ->
Threads = 10,
Iterations = 1000,
Start = os:timestamp(),
{ok, _} = bench:increment(Threads, Iterations),
Stop = os:timestamp(),
RunTime = erlang:max(1, timer:now_diff(Stop, Start)),
write_result("result_increment_10_1000.txt", Threads * Iterations / RunTime * 1000000.0),
ok.
run_read_1_100000(_Config) ->
Threads = 1,
Iterations = 100000,
Start = os:timestamp(),
{ok, _} = bench:quorum_read(Threads, Iterations),
Stop = os:timestamp(),
RunTime = erlang:max(1, timer:now_diff(Stop, Start)),
write_result("result_read_1_100000.txt", Threads * Iterations / RunTime * 1000000.0),
ok.
run_read_10_10000(_Config) ->
Threads = 10,
Iterations = 10000,
Start = os:timestamp(),
{ok, _} = bench:quorum_read(Threads, Iterations),
Stop = os:timestamp(),
RunTime = erlang:max(1, timer:now_diff(Stop, Start)),
write_result("result_read_10_10000.txt", Threads * Iterations / RunTime * 1000000.0),
ok.
-spec write_result(Filename::string(), Result::term()) -> ok.
write_result(Filename, Result) ->
% make_ring switched to the bin sub-dir...go to top-level:
{ok, F} = file:open("../" ++ Filename, [write]),
io:fwrite(F, "~p~n", [Result]),
_ = file:close(F),
ok.
| null | https://raw.githubusercontent.com/scalaris-team/scalaris/feb894d54e642bb3530e709e730156b0ecc1635f/test/benchmark_SUITE.erl | erlang | you may not use this file except in compliance with the License.
You may obtain a copy of the License at
-2.0
Unless required by applicable law or agreed to in writing, software
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
@doc Runs the basic benchmarks from src/bench.erl
The results are stored in several files in the main directory, so
that the buildbot can fetch the data from there.
@end
@version $Id$
make_ring switched to the bin sub-dir...go to top-level: | 2008 , 2011 Zuse Institute Berlin
Licensed under the Apache License , Version 2.0 ( the " License " ) ;
distributed under the License is distributed on an " AS IS " BASIS ,
@author < >
-module(benchmark_SUITE).
-author('').
-vsn('$Id$').
-compile(export_all).
-include("unittest.hrl").
all() ->
[run_increment_1_1000, run_increment_10_100,
run_read_1_100000, run_read_10_10000].
suite() -> [ {timetrap, {seconds, 120}} ].
init_per_suite(Config) ->
{priv_dir, PrivDir} = lists:keyfind(priv_dir, 1, Config),
unittest_helper:make_ring(4, [{config, [{log_path, PrivDir}]}]),
Config.
end_per_suite(_Config) ->
ok.
run_increment_1_1000(_Config) ->
Threads = 1,
Iterations = 10000,
Start = os:timestamp(),
{ok, _} = bench:increment(Threads, Iterations),
Stop = os:timestamp(),
RunTime = erlang:max(1, timer:now_diff(Stop, Start)),
write_result("result_increment_1_10000.txt", Threads * Iterations / RunTime * 1000000.0),
ok.
run_increment_10_100(_Config) ->
Threads = 10,
Iterations = 1000,
Start = os:timestamp(),
{ok, _} = bench:increment(Threads, Iterations),
Stop = os:timestamp(),
RunTime = erlang:max(1, timer:now_diff(Stop, Start)),
write_result("result_increment_10_1000.txt", Threads * Iterations / RunTime * 1000000.0),
ok.
run_read_1_100000(_Config) ->
Threads = 1,
Iterations = 100000,
Start = os:timestamp(),
{ok, _} = bench:quorum_read(Threads, Iterations),
Stop = os:timestamp(),
RunTime = erlang:max(1, timer:now_diff(Stop, Start)),
write_result("result_read_1_100000.txt", Threads * Iterations / RunTime * 1000000.0),
ok.
run_read_10_10000(_Config) ->
Threads = 10,
Iterations = 10000,
Start = os:timestamp(),
{ok, _} = bench:quorum_read(Threads, Iterations),
Stop = os:timestamp(),
RunTime = erlang:max(1, timer:now_diff(Stop, Start)),
write_result("result_read_10_10000.txt", Threads * Iterations / RunTime * 1000000.0),
ok.
-spec write_result(Filename::string(), Result::term()) -> ok.
write_result(Filename, Result) ->
{ok, F} = file:open("../" ++ Filename, [write]),
io:fwrite(F, "~p~n", [Result]),
_ = file:close(F),
ok.
|
00fe754277ca674605bde04ae6060c4ec51a9dac63228f3fde5805e911e88475 | berberman/arch-hs | Compat.hs | {-# LANGUAGE CPP #-}
{-# LANGUAGE PatternSynonyms #-}
module Distribution.ArchHs.Compat
( pattern PkgFlag,
PkgFlag,
licenseFile,
)
where
import Data.Maybe (listToMaybe)
import Distribution.Types.ConfVar
import Distribution.Types.Flag
import Distribution.Types.PackageDescription (PackageDescription, licenseFiles)
#if MIN_VERSION_Cabal(3,6,0)
import Distribution.Utils.Path (getSymbolicPath)
#endif
pattern PkgFlag :: FlagName -> ConfVar
{-# COMPLETE PkgFlag #-}
#if MIN_VERSION_Cabal(3,4,0)
type PkgFlag = PackageFlag
pattern PkgFlag x = PackageFlag x
#else
type PkgFlag = Flag
pattern PkgFlag x = Flag x
#endif
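-- Illustrative sketch (not part of the original module): the PkgFlag pattern lets
-- callers match flag conditions in a ConfVar without caring whether the underlying
-- Cabal version names the constructor Flag or PackageFlag.  The flag name "example"
-- and the helper name below are made up for the example.
--
--   isExampleFlag :: ConfVar -> Bool
--   isExampleFlag (PkgFlag f) = f == mkFlagName "example"
--   isExampleFlag _           = False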
licenseFile :: PackageDescription -> Maybe FilePath
#if MIN_VERSION_Cabal(3,6,0)
licenseFile = fmap getSymbolicPath . listToMaybe . licenseFiles
#else
licenseFile = listToMaybe . licenseFiles
#endif
| null | https://raw.githubusercontent.com/berberman/arch-hs/ae448df37e203a294e1a4814b5ec171f04f6a7fb/src/Distribution/ArchHs/Compat.hs | haskell | # COMPLETE PkgFlag # | # LANGUAGE CPP #
# LANGUAGE PatternSynonyms #
module Distribution.ArchHs.Compat
( pattern PkgFlag,
PkgFlag,
licenseFile,
)
where
import Data.Maybe (listToMaybe)
import Distribution.Types.ConfVar
import Distribution.Types.Flag
import Distribution.Types.PackageDescription (PackageDescription, licenseFiles)
#if MIN_VERSION_Cabal(3,6,0)
import Distribution.Utils.Path (getSymbolicPath)
#endif
pattern PkgFlag :: FlagName -> ConfVar
#if MIN_VERSION_Cabal(3,4,0)
type PkgFlag = PackageFlag
pattern PkgFlag x = PackageFlag x
#else
type PkgFlag = Flag
pattern PkgFlag x = Flag x
#endif
licenseFile :: PackageDescription -> Maybe FilePath
#if MIN_VERSION_Cabal(3,6,0)
licenseFile = fmap getSymbolicPath . listToMaybe . licenseFiles
#else
licenseFile = listToMaybe . licenseFiles
#endif
|
f7858092f5c1994749c8099a890d93ffe409fc74b2313827564ab5a314ca235a | CRogers/obc | gTree.mli | (**************************************************************************)
(* Lablgtk *)
(* *)
(* This program is free software; you can redistribute it *)
(* and/or modify it under the terms of the GNU Library General *)
(* Public License as published by the Free Software Foundation *)
(* version 2, with the exception described in file COPYING which *)
(* comes with the library. *)
(* *)
(* This program is distributed in the hope that it will be useful, *)
(* but WITHOUT ANY WARRANTY; without even the implied warranty of *)
(* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the *)
(* GNU Library General Public License for more details. *)
(* *)
(* You should have received a copy of the GNU Library General *)
(* Public License along with this program; if not, write to the *)
(* Free Software Foundation, Inc., 59 Temple Place, Suite 330, *)
(* Boston, MA 02111-1307 USA *)
(* *)
(* *)
(**************************************************************************)
(* $Id: gTree.mli 1523 2010-07-25 12:42:26Z garrigue $ *)
open Gobject
open Gtk
open GObj
open GContainer
(** Tree and list widgets
    @gtkdoc gtk TreeWidget *)

(** {3 New GtkTreeView/Model framework} *)
type 'a column = {index: int; conv: 'a data_conv; creator: int}
class column_list :
object
method add : 'a data_conv -> 'a column
method id : int
method types : g_type list
method lock : unit -> unit
end
class row_reference : Gtk.row_reference -> model:[> `treemodel ] obj ->
object
method as_ref : Gtk.row_reference
method iter : tree_iter
method path : tree_path
method valid : bool
end
(** {4 Models} *)
(** @gtkdoc gtk GtkTreeModel *)
class model_signals : [> `treemodel] obj ->
object ('a)
method after : 'a
method row_changed :
callback:(tree_path -> tree_iter -> unit) -> GtkSignal.id
method row_deleted : callback:(tree_path -> unit) -> GtkSignal.id
method row_has_child_toggled :
callback:(tree_path -> tree_iter -> unit) -> GtkSignal.id
method row_inserted :
callback:(tree_path -> tree_iter -> unit) -> GtkSignal.id
method rows_reordered :
callback:(tree_path -> tree_iter -> unit) -> GtkSignal.id
end
val model_ids : (int,int) Hashtbl.t
(** @gtkdoc gtk GtkTreeModel *)
class model : ([> `treemodel] as 'a) obj ->
object
val obj : 'a obj
val id : int
method as_model : Gtk.tree_model
method misc : gobject_ops
method coerce : model
method flags : GtkEnums.tree_model_flags list
method n_columns : int
method get_column_type : int -> Gobject.g_type
method get_iter : tree_path -> tree_iter
method get_path : tree_iter -> tree_path
method get_row_reference : tree_path -> row_reference
method get : row:tree_iter -> column:'b column -> 'b
method get_iter_first : tree_iter option
method iter_next : tree_iter -> bool
method iter_has_child : tree_iter -> bool
method iter_n_children : tree_iter option -> int
method iter_children : ?nth:int -> tree_iter option -> tree_iter
(** @raise Invalid_argument if arguments do not designate a valid node *)
method iter_parent : tree_iter -> tree_iter option
method foreach : (tree_path -> tree_iter -> bool) -> unit
method row_changed : tree_path -> tree_iter -> unit
end
(** @gtkdoc gtk GtkTreeSortable *)
class tree_sortable_signals : ([> `treesortable|`treemodel] as 'a) obj ->
object
inherit model_signals
method sort_column_changed : callback:(unit -> unit) -> GtkSignal.id
end
(** @gtkdoc gtk GtkTreeSortable *)
class tree_sortable : ([> `treesortable|`treemodel] as 'a) obj ->
object
inherit model
val obj : 'a obj
method connect : tree_sortable_signals
method sort_column_changed : unit -> unit
method get_sort_column_id : (int * Gtk.Tags.sort_type) option
method set_sort_column_id : int -> Gtk.Tags.sort_type -> unit
method set_sort_func : int -> (model -> Gtk.tree_iter -> Gtk.tree_iter -> int) -> unit
method set_default_sort_func : (model -> Gtk.tree_iter -> Gtk.tree_iter -> int) -> unit
method has_default_sort_func : bool
end
(** Special value for the [#set_sort_column_id] method of {!GTree.tree_sortable}. *)
val default_sort_column_id : int
val unsorted_sort_column_id : int
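(* Illustrative sketch (not part of the original interface): typical use of the
   [tree_sortable] methods above.  [store] (a [list_store]) and [name_col]
   (a [string column]) are placeholder names.

     let by_name (m : model) a b =
       compare (m#get ~row:a ~column:name_col) (m#get ~row:b ~column:name_col)
     in
     store#set_sort_func 0 by_name;
     store#set_sort_column_id 0 `ASCENDING
*)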
(** @gtkdoc gtk GtkTreeStore *)
class tree_store : Gtk.tree_store ->
object
inherit tree_sortable
val obj : Gtk.tree_store
method append : ?parent:tree_iter -> unit -> tree_iter
method clear : unit -> unit
method insert : ?parent:tree_iter -> int -> tree_iter
method insert_after : ?parent:tree_iter -> tree_iter -> tree_iter
method insert_before : ?parent:tree_iter -> tree_iter -> tree_iter
method is_ancestor : iter:tree_iter -> descendant:tree_iter -> bool
method iter_depth : tree_iter -> int
(* @since GTK 2.2 *)
method prepend : ?parent:tree_iter -> unit -> tree_iter
method remove : tree_iter -> bool
method set : row:tree_iter -> column:'a column -> 'a -> unit
(* @since GTK 2.2 *)
end
(** @gtkdoc gtk GtkTreeStore *)
val tree_store : column_list -> tree_store
(** @gtkdoc gtk GtkListStore *)
class list_store : Gtk.list_store ->
object
inherit tree_sortable
val obj : Gtk.list_store
method append : unit -> tree_iter
method clear : unit -> unit
method insert : int -> tree_iter
method insert_after : tree_iter -> tree_iter
method insert_before : tree_iter -> tree_iter
(* @since GTK 2.2 *)
method prepend : unit -> tree_iter
method remove : tree_iter -> bool
method set : row:tree_iter -> column:'a column -> 'a -> unit
(* @since GTK 2.2 *)
end
(** @gtkdoc gtk GtkListStore *)
val list_store : column_list -> list_store
(** Convenience function to map a caml list into a {!GTree.list_store} with a single column *)
val store_of_list : 'a Gobject.data_conv -> 'a list -> list_store * 'a column
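(* Illustrative sketch (not part of the original interface): building a one-column
   store from a list and displaying it in a {!GTree.view}.  All value names are
   placeholders.

     let store, col = GTree.store_of_list Gobject.Data.string ["foo"; "bar"] in
     let view = GTree.view ~model:store () in
     let vc =
       GTree.view_column ~title:"Name"
         ~renderer:(GTree.cell_renderer_text [], ["text", col]) ()
     in
     ignore (view#append_column vc)
*)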
(** @gtkdoc gtk GtkTreeModelSort *)
class model_sort : Gtk.tree_model_sort ->
object
inherit tree_sortable
val obj : Gtk.tree_model_sort
method model : model
method convert_child_path_to_path : Gtk.tree_path -> Gtk.tree_path
method convert_child_iter_to_iter : Gtk.tree_iter -> Gtk.tree_iter
method convert_path_to_child_path : Gtk.tree_path -> Gtk.tree_path
method convert_iter_to_child_iter : Gtk.tree_iter -> Gtk.tree_iter
method reset_default_sort_func : unit -> unit
(* @since GTK 2.2 *)
end
(** @gtkdoc gtk GtkTreeModelSort *)
val model_sort : #model -> model_sort
(** @since GTK 2.4
    @gtkdoc gtk GtkTreeModelFilter *)
class model_filter : Gtk.tree_model_filter ->
object
inherit model
val obj : Gtk.tree_model_filter
method connect : model_signals
method child_model : model
method virtual_root : Gtk.tree_path
method set_visible_func : (model -> Gtk.tree_iter -> bool) -> unit
method set_visible_column : bool column -> unit
method convert_child_path_to_path : Gtk.tree_path -> Gtk.tree_path
method convert_child_iter_to_iter : Gtk.tree_iter -> Gtk.tree_iter
method convert_path_to_child_path : Gtk.tree_path -> Gtk.tree_path
method convert_iter_to_child_iter : Gtk.tree_iter -> Gtk.tree_iter
method refilter : unit -> unit
end
(** @since GTK 2.4
    @gtkdoc gtk GtkTreeModelFilter *)
val model_filter : ?virtual_root:Gtk.tree_path -> #model -> model_filter
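(* Illustrative sketch (not part of the original interface): hiding rows of an
   existing store through a filter.  [store] and [name_col] are placeholders for a
   previously created model and one of its string columns.

     let filtered = GTree.model_filter store in
     filtered#set_visible_func
       (fun m row -> m#get ~row ~column:name_col <> "");
     filtered#refilter ()
*)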
module Path : sig
val create : int list -> Gtk.tree_path
val copy : Gtk.tree_path -> Gtk.tree_path
val get_indices : Gtk.tree_path -> int array
val from_string : string -> Gtk.tree_path
val to_string : Gtk.tree_path -> string
val get_depth : Gtk.tree_path -> int
val is_ancestor : Gtk.tree_path -> Gtk.tree_path -> bool
(** {5 Mutating functions} *)
val append_index : Gtk.tree_path -> int -> unit
val prepend_index : Gtk.tree_path -> int -> unit
val next : Gtk.tree_path -> unit
val prev : Gtk.tree_path -> bool
val up : Gtk.tree_path -> bool
val down : Gtk.tree_path -> unit
end
(** {4 Selection} *)
(** @gtkdoc gtk GtkTreeSelection *)
class selection_signals : tree_selection ->
object ('a)
method after : 'a
method changed : callback:(unit -> unit) -> GtkSignal.id
end
(** The selection object for {!GTree.view}
@gtkdoc gtk GtkTreeSelection *)
class selection :
Gtk.tree_selection ->
object
val obj : Gtk.tree_selection
method connect : selection_signals
method misc : gobject_ops
(* @since GTK 2.2 *)
method get_mode : Tags.selection_mode
method get_selected_rows : tree_path list
method iter_is_selected : tree_iter -> bool
method path_is_selected : tree_path -> bool
method select_all : unit -> unit
method select_iter : tree_iter -> unit
method select_path : tree_path -> unit
method select_range : tree_path -> tree_path -> unit
method set_mode : Tags.selection_mode -> unit
method set_select_function : (tree_path -> bool -> bool) -> unit
method unselect_all : unit -> unit
method unselect_iter : tree_iter -> unit
method unselect_path : tree_path -> unit
(* @since GTK 2.2 *)
end
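(* Illustrative sketch (not part of the original interface): reacting to selection
   changes on a view [v] (placeholder name) by printing the first selected row.

     ignore (v#selection#connect#changed ~callback:(fun () ->
       match v#selection#get_selected_rows with
       | [] -> ()
       | path :: _ -> print_endline (GTree.Path.to_string path)))
*)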
(** {4 Views} *)
class type cell_renderer = object
method as_renderer : Gtk.cell_renderer obj
end
(** @since GTK 2.4
    @gtkdoc gtk GtkCellLayout *)
class cell_layout : ([> Gtk.cell_layout] as 'a) Gtk.obj ->
object
method pack :
?expand:bool ->
?from:Tags.pack_type -> #cell_renderer -> unit
(** @param expand default value is [false]
@param from default value is [`START] *)
method reorder : #cell_renderer -> int -> unit
method clear : unit -> unit
method add_attribute : #cell_renderer -> string -> 'b column -> unit
method clear_attributes : #cell_renderer -> unit
method set_cell_data_func : #cell_renderer -> (model -> Gtk.tree_iter -> unit) -> unit
method unset_cell_data_func : #cell_renderer -> unit
end
(** @gtkdoc gtk GtkTreeViewColumn *)
class view_column_signals : [> `gtk | `treeviewcolumn] obj ->
object
inherit GObj.gtkobj_signals
method clicked : callback:(unit -> unit) -> GtkSignal.id
end
(** A visible column in a {!GTree.view} widget
    @gtkdoc gtk GtkTreeViewColumn *)
class view_column : tree_view_column obj ->
object
inherit GObj.gtkobj
inherit cell_layout
val obj : tree_view_column obj
method as_column : Gtk.tree_view_column obj
method misc : GObj.gobject_ops
method alignment : float
method clickable : bool
method connect : view_column_signals
method fixed_width : int
method get_sort_column_id : int
method max_width : int
method min_width : int
method reorderable : bool
method resizable : bool
method set_alignment : float -> unit
method set_clickable : bool -> unit
method set_fixed_width : int -> unit
method set_max_width : int -> unit
method set_min_width : int -> unit
method set_reorderable : bool -> unit
method set_resizable : bool -> unit
method set_sizing : Tags.tree_view_column_sizing -> unit
method set_sort_column_id : int -> unit
method set_sort_indicator : bool -> unit
method set_sort_order : Tags.sort_type -> unit
method set_title : string -> unit
method set_visible : bool -> unit
method set_widget : widget option -> unit
method sizing : Tags.tree_view_column_sizing
method sort_indicator : bool
method sort_order : Tags.sort_type
method title : string
method visible : bool
method widget : widget option
method width : int
end
(** @gtkdoc gtk GtkTreeViewColumn *)
val view_column :
?title:string ->
?renderer:(#cell_renderer * (string * 'a column) list) ->
unit -> view_column
(** @gtkdoc gtk GtkTreeView *)
class view_signals : [> tree_view] obj ->
object ('a)
inherit GContainer.container_signals
method columns_changed : callback:(unit -> unit) -> GtkSignal.id
method cursor_changed : callback:(unit -> unit) -> GtkSignal.id
method expand_collapse_cursor_row :
callback:(logical:bool -> expand:bool -> all:bool -> bool) ->
GtkSignal.id
method move_cursor :
callback:(Tags.movement_step -> int -> bool) -> GtkSignal.id
method row_activated :
callback:(tree_path -> view_column -> unit) -> GtkSignal.id
method row_collapsed :
callback:(tree_iter -> tree_path -> unit) -> GtkSignal.id
method row_expanded :
callback:(tree_iter -> tree_path -> unit) -> GtkSignal.id
method select_all : callback:(unit -> bool) -> GtkSignal.id
method select_cursor_parent : callback:(unit -> bool) -> GtkSignal.id
method select_cursor_row :
callback:(start_editing:bool -> bool) -> GtkSignal.id
method set_scroll_adjustments :
callback:(GData.adjustment option -> GData.adjustment option -> unit) ->
GtkSignal.id
method start_interactive_search : callback:(unit -> bool) -> GtkSignal.id
method test_collapse_row :
callback:(tree_iter -> tree_path -> bool) -> GtkSignal.id
method test_expand_row :
callback:(tree_iter -> tree_path -> bool) -> GtkSignal.id
method toggle_cursor_row : callback:(unit -> bool) -> GtkSignal.id
method unselect_all : callback:(unit -> bool) -> GtkSignal.id
end
(** A widget for displaying both trees and lists
    @gtkdoc gtk GtkTreeView *)
class view : tree_view obj ->
object
inherit GContainer.container
val obj : tree_view obj
method as_tree_view : Gtk.tree_view Gtk.obj
method connect : view_signals
method append_column : view_column -> int
method collapse_all : unit -> unit
method collapse_row : tree_path -> unit
method enable_search : bool
method event : GObj.event_ops
method expand_all : unit -> unit
method expand_row : ?all:bool -> tree_path -> unit
(** @param all default value is [false] *)
(* @since GTK 2.2 *)
method expander_column : view_column option
method fixed_height_mode : bool
method get_column : int -> view_column
method get_cursor : unit -> tree_path option * view_column option
method get_path_at_pos :
x:int -> y:int -> (tree_path * view_column * int * int) option
method get_cell_area :
?path:tree_path -> ?col:view_column -> unit -> Gdk.Rectangle.t
method get_visible_range : unit -> (tree_path * tree_path) option
method hadjustment : GData.adjustment
method headers_visible : bool
method insert_column : view_column -> int -> int
method model : model
method move_column : view_column -> after:view_column -> int
method remove_column : view_column -> int
method reorderable : bool
method row_activated : tree_path -> view_column -> unit
method row_expanded : tree_path -> bool
method rules_hint : bool
method scroll_to_cell :
?align:float * float -> tree_path -> view_column -> unit
method scroll_to_point : int -> int -> unit
method search_column : int
method selection : selection
method set_cursor :
?cell:#cell_renderer ->
?edit:bool -> tree_path -> view_column -> unit
(** @since GTK 2.2 *)
(** @param edit default value is [false] *)
method set_enable_search : bool -> unit
method set_expander_column : view_column option -> unit
method set_fixed_height_mode : bool -> unit
method set_hadjustment : GData.adjustment -> unit
method set_headers_clickable : bool -> unit
method set_headers_visible : bool -> unit
method set_model : model option -> unit
method set_reorderable : bool -> unit
method set_rules_hint : bool -> unit
method set_search_column : int -> unit
method set_tooltip_column : int -> unit
method set_vadjustment : GData.adjustment -> unit
method tooltip_column : int
method vadjustment : GData.adjustment
(* @since GTK 2.6 *)
end
(** @gtkdoc gtk GtkTreeView *)
val view :
?model:#model ->
?hadjustment:GData.adjustment ->
?vadjustment:GData.adjustment ->
?enable_search:bool ->
?fixed_height_mode:bool ->
?headers_clickable:bool ->
?headers_visible:bool ->
?reorderable:bool ->
?rules_hint:bool ->
?search_column:int ->
?tooltip_column:int ->
?border_width:int -> ?width:int -> ?height:int ->
?packing:(widget -> unit) -> ?show:bool -> unit -> view
(** @param enable_search default value is [true]
@param fixed_height_mode default value is [false]
@param headers_clickable default value is [false]
@param headers_visible default value is [true]
@param reorderable default value is [false]
@param rules_hint default value is [false] *)
(** {4 Cell Renderers} *)
type cell_properties =
[ `CELL_BACKGROUND of string
| `CELL_BACKGROUND_GDK of Gdk.color
| `CELL_BACKGROUND_SET of bool
| `HEIGHT of int
| `IS_EXPANDED of bool
| `IS_EXPANDER of bool
| `MODE of Tags.cell_renderer_mode
| `VISIBLE of bool
| `WIDTH of int
| `XALIGN of float
| `XPAD of int
| `YALIGN of float
| `YPAD of int ]
type cell_properties_pixbuf =
[ cell_properties
| `PIXBUF of GdkPixbuf.pixbuf
| `PIXBUF_EXPANDER_CLOSED of GdkPixbuf.pixbuf
| `PIXBUF_EXPANDER_OPEN of GdkPixbuf.pixbuf
| `STOCK_DETAIL of string
| `STOCK_ID of string
| `STOCK_SIZE of Tags.icon_size ]
type cell_properties_text =
[ cell_properties
| `BACKGROUND of string
| `BACKGROUND_GDK of Gdk.color
| `BACKGROUND_SET of bool
| `EDITABLE of bool
| `FAMILY of string
| `FONT of string
| `FONT_DESC of Pango.font_description
| `FOREGROUND of string
| `FOREGROUND_GDK of Gdk.color
| `FOREGROUND_SET of bool
| `MARKUP of string
| `RISE of int
| `SCALE of Pango.Tags.scale
| `SINGLE_PARAGRAPH_MODE of bool
| `SIZE of int
| `SIZE_POINTS of float
| `STRETCH of Pango.Tags.stretch
| `STRIKETHROUGH of bool
| `STYLE of Pango.Tags.style
| `TEXT of string
| `UNDERLINE of Pango.Tags.underline
| `VARIANT of Pango.Tags.variant
| `WEIGHT of Pango.Tags.weight ]
type cell_properties_toggle =
[ cell_properties
| `ACTIVATABLE of bool
| `ACTIVE of bool
| `INCONSISTENT of bool
| `RADIO of bool ]
type cell_properties_progress =
[ cell_properties
| `VALUE of int
| `TEXT of string option ]
type cell_properties_combo =
[ cell_properties_text
| `MODEL of model option
| `TEXT_COLUMN of string column
| `HAS_ENTRY of bool ]
type cell_properties_accel =
[ cell_properties_text
| `KEY of Gdk.keysym
| `ACCEL_MODE of GtkEnums.cell_renderer_accel_mode
| `MODS of GdkEnums.modifier list
| `KEYCODE of int ]
(** @gtkdoc gtk GtkCellRenderer *)
class type ['a, 'b] cell_renderer_skel =
object
inherit GObj.gtkobj
val obj : 'a obj
method as_renderer : Gtk.cell_renderer obj
method get_property : ('a, 'c) property -> 'c
method set_properties : 'b list -> unit
end
(** @gtkdoc gtk GtkCellRenderer *)
class virtual ['a, 'b] cell_renderer_impl : ([>Gtk.cell_renderer] as 'a) obj ->
object
inherit ['a,'b] cell_renderer_skel
method private virtual param : 'b -> 'a param
end
(** @gtkdoc gtk GtkCellRendererPixbuf *)
class cell_renderer_pixbuf : Gtk.cell_renderer_pixbuf obj ->
object
inherit[Gtk.cell_renderer_pixbuf,cell_properties_pixbuf] cell_renderer_skel
method connect : GObj.gtkobj_signals_impl
end
(** @gtkdoc gtk GtkCellRendererText *)
class cell_renderer_text_signals : ([>Gtk.cell_renderer_text] as 'a) obj ->
object
inherit GObj.gtkobj_signals
val obj : 'a obj
method edited : callback:(Gtk.tree_path -> string -> unit) -> GtkSignal.id
end
(** @gtkdoc gtk GtkCellRendererText *)
class cell_renderer_text : Gtk.cell_renderer_text obj ->
object
inherit [Gtk.cell_renderer_text,cell_properties_text] cell_renderer_skel
method connect : cell_renderer_text_signals
method set_fixed_height_from_font : int -> unit
end
(** @gtkdoc gtk GtkCellRendererToggle *)
class cell_renderer_toggle_signals : Gtk.cell_renderer_toggle obj ->
object
inherit GObj.gtkobj_signals
method toggled : callback:(Gtk.tree_path -> unit) -> GtkSignal.id
end
(** @gtkdoc gtk GtkCellRendererToggle *)
class cell_renderer_toggle : Gtk.cell_renderer_toggle obj ->
object
inherit[Gtk.cell_renderer_toggle,cell_properties_toggle] cell_renderer_skel
method connect : cell_renderer_toggle_signals
end
(** @since GTK 2.6
    @gtkdoc gtk GtkCellRendererProgress *)
class cell_renderer_progress : Gtk.cell_renderer_progress obj ->
object
inherit[Gtk.cell_renderer_progress,cell_properties_progress] cell_renderer_skel
method connect : GObj.gtkobj_signals_impl
end
(** @since GTK 2.6
    @gtkdoc gtk GtkCellRendererCombo *)
class cell_renderer_combo_signals : ([>Gtk.cell_renderer_combo] as 'a) obj ->
object
inherit cell_renderer_text_signals
val obj : 'a obj
method changed :
callback:(Gtk.tree_path -> Gtk.tree_iter -> unit) -> GtkSignal.id
end
(** @since GTK 2.6
    @gtkdoc gtk GtkCellRendererCombo *)
class cell_renderer_combo : Gtk.cell_renderer_combo obj ->
object
inherit[Gtk.cell_renderer_combo,cell_properties_combo] cell_renderer_skel
method connect : cell_renderer_combo_signals
method set_fixed_height_from_font : int -> unit
end
(** @since GTK 2.10
    @gtkdoc gtk GtkCellRendererText *)
class cell_renderer_accel_signals : Gtk.cell_renderer_accel obj ->
object
inherit GObj.gtkobj_signals
method edited : callback:(Gtk.tree_path -> string -> unit) -> GtkSignal.id
method accel_edited :
callback:(tree_path -> accel_key:int -> accel_mods:int
-> hardware_keycode:int -> unit)
-> GtkSignal.id
method accel_cleared : callback:(tree_path -> unit) -> GtkSignal.id
end
(** @since GTK 2.10
    @gtkdoc gtk GtkCellRendererAccel *)
class cell_renderer_accel : Gtk.cell_renderer_accel obj ->
object
inherit[Gtk.cell_renderer_accel,cell_properties_accel] cell_renderer_skel
method connect : cell_renderer_accel_signals
end
(** @gtkdoc gtk GtkCellRendererPixbuf *)
val cell_renderer_pixbuf : cell_properties_pixbuf list -> cell_renderer_pixbuf
(** @gtkdoc gtk GtkCellRendererText *)
val cell_renderer_text : cell_properties_text list -> cell_renderer_text
(** @gtkdoc gtk GtkCellRendererToggle *)
val cell_renderer_toggle : cell_properties_toggle list -> cell_renderer_toggle
(** @since GTK 2.6
    @gtkdoc gtk GtkCellRendererProgress *)
val cell_renderer_progress : cell_properties_progress list -> cell_renderer_progress
(** @since GTK 2.6
    @gtkdoc gtk GtkCellRendererCombo *)
val cell_renderer_combo : cell_properties_combo list -> cell_renderer_combo
(** @since GTK 2.10
    @gtkdoc gtk GtkCellRendererAccel *)
val cell_renderer_accel : cell_properties_accel list -> cell_renderer_accel
(** {3 GtkIconView} *)
(** @gtkdoc gtk GtkIconView
    @since GTK 2.6 *)
class icon_view_signals : [> Gtk.icon_view] Gtk.obj ->
object
inherit GContainer.container_signals
method item_activated : callback:(Gtk.tree_path -> unit) -> GtkSignal.id
method selection_changed : callback:(unit -> unit) -> GtkSignal.id
end
(** A widget which displays a list of icons in a grid
    @gtkdoc gtk GtkIconView
    @since GTK 2.6 *)
class icon_view :
([> Gtk.icon_view] as 'a) Gtk.obj ->
object
inherit GContainer.container
val obj : 'a Gtk.obj
method connect : icon_view_signals
method event : GObj.event_ops
(** Properties *)
method model : model
method set_model : model option -> unit
method set_markup_column : string column -> unit
method set_pixbuf_column : GdkPixbuf.pixbuf column -> unit
method set_text_column : string column -> unit
method orientation : GtkEnums.orientation
method set_orientation : GtkEnums.orientation -> unit
method selection_mode : GtkEnums.selection_mode
method set_selection_mode : GtkEnums.selection_mode -> unit
method column_spacing : int
method set_column_spacing : int -> unit
method item_width : int
method set_item_width : int -> unit
method margin : int
method set_margin : int -> unit
method columns : int
method set_columns : int -> unit
method row_spacing : int
method set_row_spacing : int -> unit
method spacing : int
method set_spacing : int -> unit
method get_path_at_pos : int -> int -> Gtk.tree_path
method selected_foreach : (Gtk.tree_path -> unit) -> unit
method get_selected_items : Gtk.tree_path list
method path_is_selected : Gtk.tree_path -> bool
method select_path : Gtk.tree_path -> unit
method unselect_path : Gtk.tree_path -> unit
method select_all : unit -> unit
method unselect_all : unit -> unit
method item_activated : Gtk.tree_path -> unit
end
(** A widget which displays a list of icons in a grid
    @gtkdoc gtk GtkIconView
    @since GTK 2.6 *)
val icon_view :
?model:#model ->
?columns:int ->
?orientation:GtkEnums.orientation ->
?selection_mode:GtkEnums.selection_mode ->
?border_width:int ->
?width:int ->
?height:int ->
?packing:(GObj.widget -> unit) ->
?show:bool ->
unit -> icon_view
class type virtual ['obj,'row,'a,'b,'c] custom_tree_model_type =
object
inherit model
val obj : 'obj
method connect : model_signals
(** Signal emitters *)
method custom_row_changed : Gtk.tree_path -> 'row -> unit
method custom_row_deleted : Gtk.tree_path -> unit
method custom_row_has_child_toggled :
Gtk.tree_path -> 'row -> unit
method custom_row_inserted : Gtk.tree_path -> 'row -> unit
method custom_rows_reordered :
Gtk.tree_path -> 'row option -> int array -> unit
(** Override these to implement a cache of rows *)
method custom_unref_node : 'row -> unit
method custom_ref_node : 'row -> unit
method custom_flags : GtkEnums.tree_model_flags list
(** Functions of the custom model. They must act exactly as described in the
    documentation of Gtk, or else Gtk may emit fatal errors. *)
method virtual custom_get_iter : Gtk.tree_path -> 'row option
method virtual custom_get_path : 'row -> Gtk.tree_path
method virtual custom_value : Gobject.g_type -> 'row -> column:int -> Gobject.basic
(** [custom_value typ row] is the value to set in [row] for column [column].
    It must be of the type [typ], i.e. the type declared for column [column]. *)
method virtual custom_iter_children : 'row option -> 'row option
method virtual custom_iter_has_child : 'row -> bool
method virtual custom_iter_n_children : 'row option -> int
method virtual custom_iter_next : 'row -> 'row option
method virtual custom_iter_nth_child : 'row option -> int -> 'row option
method virtual custom_iter_parent : 'row -> 'row option
method virtual custom_decode_iter : 'a -> 'b -> 'c -> 'row
method virtual custom_encode_iter : 'row -> 'a * 'b * 'c
(** For internal use only. Do not override these methods. *)
method custom_n_columns : int
method custom_get_column_type : int -> Gobject.g_type
method custom_get_value :
'row -> int -> Gobject.g_value -> unit
end
(** A base class to inherit from to make a custom tree model. *)
class virtual ['row,'a,'b,'c] custom_tree_model :
column_list -> [Gtk.tree_model_custom,'row,'a,'b,'c] custom_tree_model_type
| null | https://raw.githubusercontent.com/CRogers/obc/49064db244e0c9d2ec2a83420c8d0ee917b54196/lablgtk/gTree.mli | ocaml | ************************************************************************
Lablgtk
This program is free software; you can redistribute it
comes with the library.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
************************************************************************
* @gtkdoc gtk GtkTreeModel
* @gtkdoc gtk GtkTreeModel
* @raise Invalid_argument if arguments do not designate a valid node
* @gtkdoc gtk GtkTreeSortable
* @gtkdoc gtk GtkTreeSortable
* Special value for the [#set_sort_column_id] method of {!GTree.tree_sortable}.
* @gtkdoc gtk GtkTreeStore
* @gtkdoc gtk GtkTreeStore
* @gtkdoc gtk GtkListStore
* @gtkdoc gtk GtkListStore
* Convenience function to map a caml list into a {!GTree.list_store} with a single column
* @gtkdoc gtk GtkTreeModelSort
* @gtkdoc gtk GtkTreeModelSort
* {4 Selection}
* @gtkdoc gtk GtkTreeSelection
* The selection object for {!GTree.view}
@gtkdoc gtk GtkTreeSelection
* @param expand default value is [false]
@param from default value is [`START]
* @gtkdoc gtk GtkTreeViewColumn
* @gtkdoc gtk GtkTreeViewColumn
* @gtkdoc gtk GtkTreeView
* @param all default value is [false]
* @param edit default value is [false]
* @gtkdoc gtk GtkTreeView
* @param enable_search default value is [true]
@param fixed_height_mode default value is [false]
@param headers_clickable default value is [false]
@param headers_visible default value is [true]
@param reorderable default value is [false]
@param rules_hint default value is [false]
* {4 Cell Renderers}
* @gtkdoc gtk GtkCellRenderer
* @gtkdoc gtk GtkCellRenderer
* @gtkdoc gtk GtkCellRendererPixbuf
* @gtkdoc gtk GtkCellRendererText
* @gtkdoc gtk GtkCellRendererText
* @gtkdoc gtk GtkCellRendererToggle
* @gtkdoc gtk GtkCellRendererToggle
* @gtkdoc gtk GtkCellRendererPixbuf
* @gtkdoc gtk GtkCellRendererText
* @gtkdoc gtk GtkCellRendererToggle
* Properties
* Signal emitters
* Override these to implement a cache of rows
* For internal use only. Do not override these methods.
* A base class to inherit from to make a custom tree model. | and/or modify it under the terms of the GNU Library General
Public License as published by the Free Software Foundation
version 2 , with the exception described in file COPYING which
GNU Library General Public License for more details .
You should have received a copy of the GNU Library General
Public License along with this program ; if not , write to the
Free Software Foundation , Inc. , 59 Temple Place , Suite 330 ,
Boston , MA 02111 - 1307 USA
$ I d : gTree.mli 1523 2010 - 07 - 25 12:42:26Z garrigue $
open Gobject
open Gtk
open GObj
open GContainer
* Tree and list widgets
@gtkdoc gtk TreeWidget
@gtkdoc gtk TreeWidget *)
* { 3 New GtkTreeView / Model framework }
type 'a column = {index: int; conv: 'a data_conv; creator: int}
class column_list :
object
method add : 'a data_conv -> 'a column
method id : int
method types : g_type list
method lock : unit -> unit
end
class row_reference : Gtk.row_reference -> model:[> `treemodel ] obj ->
object
method as_ref : Gtk.row_reference
method iter : tree_iter
method path : tree_path
method valid : bool
end
* { 4 Models }
class model_signals : [> `treemodel] obj ->
object ('a)
method after : 'a
method row_changed :
callback:(tree_path -> tree_iter -> unit) -> GtkSignal.id
method row_deleted : callback:(tree_path -> unit) -> GtkSignal.id
method row_has_child_toggled :
callback:(tree_path -> tree_iter -> unit) -> GtkSignal.id
method row_inserted :
callback:(tree_path -> tree_iter -> unit) -> GtkSignal.id
method rows_reordered :
callback:(tree_path -> tree_iter -> unit) -> GtkSignal.id
end
val model_ids : (int,int) Hashtbl.t
class model : ([> `treemodel] as 'a) obj ->
object
val obj : 'a obj
val id : int
method as_model : Gtk.tree_model
method misc : gobject_ops
method coerce : model
method flags : GtkEnums.tree_model_flags list
method n_columns : int
method get_column_type : int -> Gobject.g_type
method get_iter : tree_path -> tree_iter
method get_path : tree_iter -> tree_path
method get_row_reference : tree_path -> row_reference
method get : row:tree_iter -> column:'b column -> 'b
method get_iter_first : tree_iter option
method iter_next : tree_iter -> bool
method iter_has_child : tree_iter -> bool
method iter_n_children : tree_iter option -> int
method iter_children : ?nth:int -> tree_iter option -> tree_iter
method iter_parent : tree_iter -> tree_iter option
method foreach : (tree_path -> tree_iter -> bool) -> unit
method row_changed : tree_path -> tree_iter -> unit
end
class tree_sortable_signals : ([> `treesortable|`treemodel] as 'a) obj ->
object
inherit model_signals
method sort_column_changed : callback:(unit -> unit) -> GtkSignal.id
end
class tree_sortable : ([> `treesortable|`treemodel] as 'a) obj ->
object
inherit model
val obj : 'a obj
method connect : tree_sortable_signals
method sort_column_changed : unit -> unit
method get_sort_column_id : (int * Gtk.Tags.sort_type) option
method set_sort_column_id : int -> Gtk.Tags.sort_type -> unit
method set_sort_func : int -> (model -> Gtk.tree_iter -> Gtk.tree_iter -> int) -> unit
method set_default_sort_func : (model -> Gtk.tree_iter -> Gtk.tree_iter -> int) -> unit
method has_default_sort_func : bool
end
val default_sort_column_id : int
val unsorted_sort_column_id : int
class tree_store : Gtk.tree_store ->
object
inherit tree_sortable
val obj : Gtk.tree_store
method append : ?parent:tree_iter -> unit -> tree_iter
method clear : unit -> unit
method insert : ?parent:tree_iter -> int -> tree_iter
method insert_after : ?parent:tree_iter -> tree_iter -> tree_iter
method insert_before : ?parent:tree_iter -> tree_iter -> tree_iter
method is_ancestor : iter:tree_iter -> descendant:tree_iter -> bool
method iter_depth : tree_iter -> int
* @since GTK 2.2
* @since GTK 2.2
* @since GTK 2.2
method prepend : ?parent:tree_iter -> unit -> tree_iter
method remove : tree_iter -> bool
method set : row:tree_iter -> column:'a column -> 'a -> unit
* @since GTK 2.2
end
val tree_store : column_list -> tree_store
class list_store : Gtk.list_store ->
object
inherit tree_sortable
val obj : Gtk.list_store
method append : unit -> tree_iter
method clear : unit -> unit
method insert : int -> tree_iter
method insert_after : tree_iter -> tree_iter
method insert_before : tree_iter -> tree_iter
* @since GTK 2.2
* @since GTK 2.2
* @since GTK 2.2
method prepend : unit -> tree_iter
method remove : tree_iter -> bool
method set : row:tree_iter -> column:'a column -> 'a -> unit
* @since GTK 2.2
end
val list_store : column_list -> list_store
val store_of_list : 'a Gobject.data_conv -> 'a list -> list_store * 'a column
class model_sort : Gtk.tree_model_sort ->
object
inherit tree_sortable
val obj : Gtk.tree_model_sort
method model : model
method convert_child_path_to_path : Gtk.tree_path -> Gtk.tree_path
method convert_child_iter_to_iter : Gtk.tree_iter -> Gtk.tree_iter
method convert_path_to_child_path : Gtk.tree_path -> Gtk.tree_path
method convert_iter_to_child_iter : Gtk.tree_iter -> Gtk.tree_iter
method reset_default_sort_func : unit -> unit
* @since GTK 2.2
end
val model_sort : #model -> model_sort
* @since GTK 2.4
@gtkdoc gtk GtkTreeModelFilter
@gtkdoc gtk GtkTreeModelFilter *)
class model_filter : Gtk.tree_model_filter ->
object
inherit model
val obj : Gtk.tree_model_filter
method connect : model_signals
method child_model : model
method virtual_root : Gtk.tree_path
method set_visible_func : (model -> Gtk.tree_iter -> bool) -> unit
method set_visible_column : bool column -> unit
method convert_child_path_to_path : Gtk.tree_path -> Gtk.tree_path
method convert_child_iter_to_iter : Gtk.tree_iter -> Gtk.tree_iter
method convert_path_to_child_path : Gtk.tree_path -> Gtk.tree_path
method convert_iter_to_child_iter : Gtk.tree_iter -> Gtk.tree_iter
method refilter : unit -> unit
end
* @since GTK 2.4
@gtkdoc gtk GtkTreeModelFilter
@gtkdoc gtk GtkTreeModelFilter *)
val model_filter : ?virtual_root:Gtk.tree_path -> #model -> model_filter
module Path : sig
val create : int list -> Gtk.tree_path
val copy : Gtk.tree_path -> Gtk.tree_path
val get_indices : Gtk.tree_path -> int array
val from_string : string -> Gtk.tree_path
val to_string : Gtk.tree_path -> string
val get_depth : Gtk.tree_path -> int
val is_ancestor : Gtk.tree_path -> Gtk.tree_path -> bool
* { 5 Mutating functions }
val append_index : Gtk.tree_path -> int -> unit
val prepend_index : Gtk.tree_path -> int -> unit
val next : Gtk.tree_path -> unit
val prev : Gtk.tree_path -> bool
val up : Gtk.tree_path -> bool
val down : Gtk.tree_path -> unit
end
class selection_signals : tree_selection ->
object ('a)
method after : 'a
method changed : callback:(unit -> unit) -> GtkSignal.id
end
class selection :
Gtk.tree_selection ->
object
val obj : Gtk.tree_selection
method connect : selection_signals
method misc : gobject_ops
* @since GTK 2.2
method get_mode : Tags.selection_mode
method get_selected_rows : tree_path list
method iter_is_selected : tree_iter -> bool
method path_is_selected : tree_path -> bool
method select_all : unit -> unit
method select_iter : tree_iter -> unit
method select_path : tree_path -> unit
method select_range : tree_path -> tree_path -> unit
method set_mode : Tags.selection_mode -> unit
method set_select_function : (tree_path -> bool -> bool) -> unit
method unselect_all : unit -> unit
method unselect_iter : tree_iter -> unit
method unselect_path : tree_path -> unit
* @since GTK 2.2
end
* { 4 Views }
class type cell_renderer = object
method as_renderer : Gtk.cell_renderer obj
end
* @since GTK 2.4
@gtkdoc gtk GtkCellLayout
@gtkdoc gtk GtkCellLayout *)
class cell_layout : ([> Gtk.cell_layout] as 'a) Gtk.obj ->
object
method pack :
?expand:bool ->
?from:Tags.pack_type -> #cell_renderer -> unit
method reorder : #cell_renderer -> int -> unit
method clear : unit -> unit
method add_attribute : #cell_renderer -> string -> 'b column -> unit
method clear_attributes : #cell_renderer -> unit
method set_cell_data_func : #cell_renderer -> (model -> Gtk.tree_iter -> unit) -> unit
method unset_cell_data_func : #cell_renderer -> unit
end
class view_column_signals : [> `gtk | `treeviewcolumn] obj ->
object
inherit GObj.gtkobj_signals
method clicked : callback:(unit -> unit) -> GtkSignal.id
end
* A visible column in a { ! GTree.view } widget
@gtkdoc gtk GtkTreeViewColumn
@gtkdoc gtk GtkTreeViewColumn *)
class view_column : tree_view_column obj ->
object
inherit GObj.gtkobj
inherit cell_layout
val obj : tree_view_column obj
method as_column : Gtk.tree_view_column obj
method misc : GObj.gobject_ops
method alignment : float
method clickable : bool
method connect : view_column_signals
method fixed_width : int
method get_sort_column_id : int
method max_width : int
method min_width : int
method reorderable : bool
method resizable : bool
method set_alignment : float -> unit
method set_clickable : bool -> unit
method set_fixed_width : int -> unit
method set_max_width : int -> unit
method set_min_width : int -> unit
method set_reorderable : bool -> unit
method set_resizable : bool -> unit
method set_sizing : Tags.tree_view_column_sizing -> unit
method set_sort_column_id : int -> unit
method set_sort_indicator : bool -> unit
method set_sort_order : Tags.sort_type -> unit
method set_title : string -> unit
method set_visible : bool -> unit
method set_widget : widget option -> unit
method sizing : Tags.tree_view_column_sizing
method sort_indicator : bool
method sort_order : Tags.sort_type
method title : string
method visible : bool
method widget : widget option
method width : int
end
val view_column :
?title:string ->
?renderer:(#cell_renderer * (string * 'a column) list) ->
unit -> view_column
class view_signals : [> tree_view] obj ->
object ('a)
inherit GContainer.container_signals
method columns_changed : callback:(unit -> unit) -> GtkSignal.id
method cursor_changed : callback:(unit -> unit) -> GtkSignal.id
method expand_collapse_cursor_row :
callback:(logical:bool -> expand:bool -> all:bool -> bool) ->
GtkSignal.id
method move_cursor :
callback:(Tags.movement_step -> int -> bool) -> GtkSignal.id
method row_activated :
callback:(tree_path -> view_column -> unit) -> GtkSignal.id
method row_collapsed :
callback:(tree_iter -> tree_path -> unit) -> GtkSignal.id
method row_expanded :
callback:(tree_iter -> tree_path -> unit) -> GtkSignal.id
method select_all : callback:(unit -> bool) -> GtkSignal.id
method select_cursor_parent : callback:(unit -> bool) -> GtkSignal.id
method select_cursor_row :
callback:(start_editing:bool -> bool) -> GtkSignal.id
method set_scroll_adjustments :
callback:(GData.adjustment option -> GData.adjustment option -> unit) ->
GtkSignal.id
method start_interactive_search : callback:(unit -> bool) -> GtkSignal.id
method test_collapse_row :
callback:(tree_iter -> tree_path -> bool) -> GtkSignal.id
method test_expand_row :
callback:(tree_iter -> tree_path -> bool) -> GtkSignal.id
method toggle_cursor_row : callback:(unit -> bool) -> GtkSignal.id
method unselect_all : callback:(unit -> bool) -> GtkSignal.id
end
* A widget for displaying both trees and lists
@gtkdoc gtk GtkTreeView
@gtkdoc gtk GtkTreeView *)
class view : tree_view obj ->
object
inherit GContainer.container
val obj : tree_view obj
method as_tree_view : Gtk.tree_view Gtk.obj
method connect : view_signals
method append_column : view_column -> int
method collapse_all : unit -> unit
method collapse_row : tree_path -> unit
method enable_search : bool
method event : GObj.event_ops
method expand_all : unit -> unit
method expand_row : ?all:bool -> tree_path -> unit
* @since GTK 2.2
method expander_column : view_column option
method fixed_height_mode : bool
method get_column : int -> view_column
method get_cursor : unit -> tree_path option * view_column option
method get_path_at_pos :
x:int -> y:int -> (tree_path * view_column * int * int) option
method get_cell_area :
?path:tree_path -> ?col:view_column -> unit -> Gdk.Rectangle.t
method get_visible_range : unit -> (tree_path * tree_path) option
method hadjustment : GData.adjustment
method headers_visible : bool
method insert_column : view_column -> int -> int
method model : model
method move_column : view_column -> after:view_column -> int
method remove_column : view_column -> int
method reorderable : bool
method row_activated : tree_path -> view_column -> unit
method row_expanded : tree_path -> bool
method rules_hint : bool
method scroll_to_cell :
?align:float * float -> tree_path -> view_column -> unit
method scroll_to_point : int -> int -> unit
method search_column : int
method selection : selection
method set_cursor :
?cell:#cell_renderer ->
?edit:bool -> tree_path -> view_column -> unit (** @since GTK 2.2 *)
method set_enable_search : bool -> unit
method set_expander_column : view_column option -> unit
method set_fixed_height_mode : bool -> unit
method set_hadjustment : GData.adjustment -> unit
method set_headers_clickable : bool -> unit
method set_headers_visible : bool -> unit
method set_model : model option -> unit
method set_reorderable : bool -> unit
method set_rules_hint : bool -> unit
method set_search_column : int -> unit
method set_tooltip_column : int -> unit
method set_vadjustment : GData.adjustment -> unit
method tooltip_column : int
method vadjustment : GData.adjustment
end
val view :
?model:#model ->
?hadjustment:GData.adjustment ->
?vadjustment:GData.adjustment ->
?enable_search:bool ->
?fixed_height_mode:bool ->
?headers_clickable:bool ->
?headers_visible:bool ->
?reorderable:bool ->
?rules_hint:bool ->
?search_column:int ->
?tooltip_column:int ->
?border_width:int -> ?width:int -> ?height:int ->
?packing:(widget -> unit) -> ?show:bool -> unit -> view
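(* Usage sketch (illustrative only; [cols], [col_name] and [container] are
   hypothetical values defined elsewhere): a list store displayed in a view
   with one text column and a row-activation handler.
   let store = GTree.list_store cols in
   let view = GTree.view ~model:store ~packing:container#add () in
   let col =
     GTree.view_column ~title:"Name"
       ~renderer:(GTree.cell_renderer_text [], ["text", col_name]) () in
   ignore (view#append_column col) ;
   ignore (view#connect#row_activated
             ~callback:(fun _path _col -> prerr_endline "row activated"))
*)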
type cell_properties =
[ `CELL_BACKGROUND of string
| `CELL_BACKGROUND_GDK of Gdk.color
| `CELL_BACKGROUND_SET of bool
| `HEIGHT of int
| `IS_EXPANDED of bool
| `IS_EXPANDER of bool
| `MODE of Tags.cell_renderer_mode
| `VISIBLE of bool
| `WIDTH of int
| `XALIGN of float
| `XPAD of int
| `YALIGN of float
| `YPAD of int ]
type cell_properties_pixbuf =
[ cell_properties
| `PIXBUF of GdkPixbuf.pixbuf
| `PIXBUF_EXPANDER_CLOSED of GdkPixbuf.pixbuf
| `PIXBUF_EXPANDER_OPEN of GdkPixbuf.pixbuf
| `STOCK_DETAIL of string
| `STOCK_ID of string
| `STOCK_SIZE of Tags.icon_size ]
type cell_properties_text =
[ cell_properties
| `BACKGROUND of string
| `BACKGROUND_GDK of Gdk.color
| `BACKGROUND_SET of bool
| `EDITABLE of bool
| `FAMILY of string
| `FONT of string
| `FONT_DESC of Pango.font_description
| `FOREGROUND of string
| `FOREGROUND_GDK of Gdk.color
| `FOREGROUND_SET of bool
| `MARKUP of string
| `RISE of int
| `SCALE of Pango.Tags.scale
| `SINGLE_PARAGRAPH_MODE of bool
| `SIZE of int
| `SIZE_POINTS of float
| `STRETCH of Pango.Tags.stretch
| `STRIKETHROUGH of bool
| `STYLE of Pango.Tags.style
| `TEXT of string
| `UNDERLINE of Pango.Tags.underline
| `VARIANT of Pango.Tags.variant
| `WEIGHT of Pango.Tags.weight ]
type cell_properties_toggle =
[ cell_properties
| `ACTIVATABLE of bool
| `ACTIVE of bool
| `INCONSISTENT of bool
| `RADIO of bool ]
type cell_properties_progress =
[ cell_properties
| `VALUE of int
| `TEXT of string option ]
type cell_properties_combo =
[ cell_properties_text
| `MODEL of model option
| `TEXT_COLUMN of string column
| `HAS_ENTRY of bool ]
type cell_properties_accel =
[ cell_properties_text
| `KEY of Gdk.keysym
| `ACCEL_MODE of GtkEnums.cell_renderer_accel_mode
| `MODS of GdkEnums.modifier list
| `KEYCODE of int ]
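(* Usage sketch (illustrative only): renderer properties are plain polymorphic
   variants, so a text renderer can be created and later updated like this.
   let r = GTree.cell_renderer_text [`EDITABLE true; `FOREGROUND "grey"] in
   r#set_properties [`WEIGHT `BOLD; `FOREGROUND_SET true]
*)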
class type ['a, 'b] cell_renderer_skel =
object
inherit GObj.gtkobj
val obj : 'a obj
method as_renderer : Gtk.cell_renderer obj
method get_property : ('a, 'c) property -> 'c
method set_properties : 'b list -> unit
end
class virtual ['a, 'b] cell_renderer_impl : ([>Gtk.cell_renderer] as 'a) obj ->
object
inherit ['a,'b] cell_renderer_skel
method private virtual param : 'b -> 'a param
end
class cell_renderer_pixbuf : Gtk.cell_renderer_pixbuf obj ->
object
inherit[Gtk.cell_renderer_pixbuf,cell_properties_pixbuf] cell_renderer_skel
method connect : GObj.gtkobj_signals_impl
end
class cell_renderer_text_signals : ([>Gtk.cell_renderer_text] as 'a) obj ->
object
inherit GObj.gtkobj_signals
val obj : 'a obj
method edited : callback:(Gtk.tree_path -> string -> unit) -> GtkSignal.id
end
class cell_renderer_text : Gtk.cell_renderer_text obj ->
object
inherit [Gtk.cell_renderer_text,cell_properties_text] cell_renderer_skel
method connect : cell_renderer_text_signals
method set_fixed_height_from_font : int -> unit
end
class cell_renderer_toggle_signals : Gtk.cell_renderer_toggle obj ->
object
inherit GObj.gtkobj_signals
method toggled : callback:(Gtk.tree_path -> unit) -> GtkSignal.id
end
class cell_renderer_toggle : Gtk.cell_renderer_toggle obj ->
object
inherit[Gtk.cell_renderer_toggle,cell_properties_toggle] cell_renderer_skel
method connect : cell_renderer_toggle_signals
end
(** @since GTK 2.6
    @gtkdoc gtk GtkCellRendererProgress *)
class cell_renderer_progress : Gtk.cell_renderer_progress obj ->
object
inherit[Gtk.cell_renderer_progress,cell_properties_progress] cell_renderer_skel
method connect : GObj.gtkobj_signals_impl
end
(** @since GTK 2.6
    @gtkdoc gtk GtkCellRendererCombo *)
class cell_renderer_combo_signals : ([>Gtk.cell_renderer_combo] as 'a) obj ->
object
inherit cell_renderer_text_signals
val obj : 'a obj
method changed :
callback:(Gtk.tree_path -> Gtk.tree_iter -> unit) -> GtkSignal.id
end
(** @since GTK 2.6
    @gtkdoc gtk GtkCellRendererCombo *)
class cell_renderer_combo : Gtk.cell_renderer_combo obj ->
object
inherit[Gtk.cell_renderer_combo,cell_properties_combo] cell_renderer_skel
method connect : cell_renderer_combo_signals
method set_fixed_height_from_font : int -> unit
end
(** @since GTK 2.10
    @gtkdoc gtk GtkCellRendererText *)
class cell_renderer_accel_signals : Gtk.cell_renderer_accel obj ->
object
inherit GObj.gtkobj_signals
method edited : callback:(Gtk.tree_path -> string -> unit) -> GtkSignal.id
method accel_edited :
callback:(tree_path -> accel_key:int -> accel_mods:int
-> hardware_keycode:int -> unit)
-> GtkSignal.id
method accel_cleared : callback:(tree_path -> unit) -> GtkSignal.id
end
(** @since GTK 2.10
    @gtkdoc gtk GtkCellRendererAccel *)
class cell_renderer_accel : Gtk.cell_renderer_accel obj ->
object
inherit[Gtk.cell_renderer_accel,cell_properties_accel] cell_renderer_skel
method connect : cell_renderer_accel_signals
end
val cell_renderer_pixbuf : cell_properties_pixbuf list -> cell_renderer_pixbuf
val cell_renderer_text : cell_properties_text list -> cell_renderer_text
val cell_renderer_toggle : cell_properties_toggle list -> cell_renderer_toggle
(** @since GTK 2.6
    @gtkdoc gtk GtkCellRendererProgress *)
val cell_renderer_progress : cell_properties_progress list -> cell_renderer_progress
(** @since GTK 2.6
    @gtkdoc gtk GtkCellRendererCombo *)
val cell_renderer_combo : cell_properties_combo list -> cell_renderer_combo
(** @since GTK 2.10
    @gtkdoc gtk GtkCellRendererAccel *)
val cell_renderer_accel : cell_properties_accel list -> cell_renderer_accel
(** {3 GtkIconView} *)

(** @gtkdoc gtk GtkIconView
    @since GTK 2.6 *)
class icon_view_signals : [> Gtk.icon_view] Gtk.obj ->
object
inherit GContainer.container_signals
method item_activated : callback:(Gtk.tree_path -> unit) -> GtkSignal.id
method selection_changed : callback:(unit -> unit) -> GtkSignal.id
end
(** A widget which displays a list of icons in a grid
    @gtkdoc gtk GtkIconView
    @since GTK 2.6 *)
class icon_view :
([> Gtk.icon_view] as 'a) Gtk.obj ->
object
inherit GContainer.container
val obj : 'a Gtk.obj
method connect : icon_view_signals
method event : GObj.event_ops
method model : model
method set_model : model option -> unit
method set_markup_column : string column -> unit
method set_pixbuf_column : GdkPixbuf.pixbuf column -> unit
method set_text_column : string column -> unit
method orientation : GtkEnums.orientation
method set_orientation : GtkEnums.orientation -> unit
method selection_mode : GtkEnums.selection_mode
method set_selection_mode : GtkEnums.selection_mode -> unit
method column_spacing : int
method set_column_spacing : int -> unit
method item_width : int
method set_item_width : int -> unit
method margin : int
method set_margin : int -> unit
method columns : int
method set_columns : int -> unit
method row_spacing : int
method set_row_spacing : int -> unit
method spacing : int
method set_spacing : int -> unit
method get_path_at_pos : int -> int -> Gtk.tree_path
method selected_foreach : (Gtk.tree_path -> unit) -> unit
method get_selected_items : Gtk.tree_path list
method path_is_selected : Gtk.tree_path -> bool
method select_path : Gtk.tree_path -> unit
method unselect_path : Gtk.tree_path -> unit
method select_all : unit -> unit
method unselect_all : unit -> unit
method item_activated : Gtk.tree_path -> unit
end
(** A widget which displays a list of icons in a grid
    @gtkdoc gtk GtkIconView
    @since GTK 2.6 *)
val icon_view :
?model:#model ->
?columns:int ->
?orientation:GtkEnums.orientation ->
?selection_mode:GtkEnums.selection_mode ->
?border_width:int ->
?width:int ->
?height:int ->
?packing:(GObj.widget -> unit) ->
?show:bool ->
unit -> icon_view
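(* Usage sketch (illustrative only; [store], [pixbuf_col], [name_col] and
   [container] are hypothetical values defined elsewhere): an icon view over
   a list store.
   let iv =
     GTree.icon_view ~model:store ~selection_mode:`MULTIPLE
       ~packing:container#add () in
   iv#set_pixbuf_column pixbuf_col ;
   iv#set_text_column name_col ;
   iv#select_path (GTree.Path.create [0])
*)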
class type virtual ['obj,'row,'a,'b,'c] custom_tree_model_type =
object
inherit model
val obj : 'obj
method connect : model_signals
method custom_row_changed : Gtk.tree_path -> 'row -> unit
method custom_row_deleted : Gtk.tree_path -> unit
method custom_row_has_child_toggled :
Gtk.tree_path -> 'row -> unit
method custom_row_inserted : Gtk.tree_path -> 'row -> unit
method custom_rows_reordered :
Gtk.tree_path -> 'row option -> int array -> unit
method custom_unref_node : 'row -> unit
method custom_ref_node : 'row -> unit
method custom_flags : GtkEnums.tree_model_flags list
(** Functions of the custom model. They must act exactly as described in the
    documentation of Gtk, or else Gtk may emit fatal errors. *)
method virtual custom_get_iter : Gtk.tree_path -> 'row option
method virtual custom_get_path : 'row -> Gtk.tree_path
method virtual custom_value : Gobject.g_type -> 'row -> column:int -> Gobject.basic
(** [custom_value typ row] is the value to set in [row] for column [column].
    It must be of the type [typ], i.e. the type declared for column
    [column]. *)
method virtual custom_iter_children : 'row option -> 'row option
method virtual custom_iter_has_child : 'row -> bool
method virtual custom_iter_n_children : 'row option -> int
method virtual custom_iter_next : 'row -> 'row option
method virtual custom_iter_nth_child : 'row option -> int -> 'row option
method virtual custom_iter_parent : 'row -> 'row option
method virtual custom_decode_iter : 'a -> 'b -> 'c -> 'row
method virtual custom_encode_iter : 'row -> 'a * 'b * 'c
method custom_n_columns : int
method custom_get_column_type : int -> Gobject.g_type
method custom_get_value :
'row -> int -> Gobject.g_value -> unit
end
class virtual ['row,'a,'b,'c] custom_tree_model :
column_list -> [Gtk.tree_model_custom,'row,'a,'b,'c] custom_tree_model_type
|
0ddaca39d97bee8809aa0df275cbcbfe6310219fd361ff8e171688926c287be7 | Decentralized-Pictures/T4L3NT | client_proto_stresstest_commands.ml | (*****************************************************************************)
(* *)
(* Open Source License *)
(* Copyright (c) 2021 Nomadic Labs < > *)
(* *)
(* Permission is hereby granted, free of charge, to any person obtaining a *)
(* copy of this software and associated documentation files (the "Software"),*)
(* to deal in the Software without restriction, including without limitation *)
(* the rights to use, copy, modify, merge, publish, distribute, sublicense, *)
(* and/or sell copies of the Software, and to permit persons to whom the *)
(* Software is furnished to do so, subject to the following conditions: *)
(* *)
(* The above copyright notice and this permission notice shall be included *)
(* in all copies or substantial portions of the Software. *)
(* *)
(* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR *)
(* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, *)
(* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL *)
(* THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER*)
(* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING *)
(* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER *)
(* DEALINGS IN THE SOFTWARE. *)
(* *)
(*****************************************************************************)
open Protocol
open Alpha_context
type transfer_strategy =
| Fixed_amount of {mutez : Tez.t} (** Amount to transfer *)
| Evaporation of {fraction : float}
      (** Maximum fraction of current wealth to transfer.
          Minimum amount is 1 mutez regardless of total wealth. *)
type parameters = {
seed : int;
fresh_probability : float;
(** Per-transfer probability that the destination will be fresh *)
tps : float; (** Transaction per seconds target *)
strategy : transfer_strategy;
fee_mutez : Tez.t; (** fees for each transfer, in mutez *)
gas_limit : Gas.Arith.integral; (** gas limit per operation *)
storage_limit : Z.t; (** storage limit per operation *)
account_creation_storage : Z.t;
(** upper bound on bytes consumed when creating a tz1 account *)
total_transfers : int option;
(** total number of transfers to perform; unbounded if None *)
single_op_per_pkh_per_block : bool;
(** if true, a single operation will be injected by pkh by block to
improve the chance for the injected operations to be included in the
next block *)
}
type origin = Explicit | Wallet_pkh | Wallet_alias of string
type source = {
pkh : public_key_hash;
pk : public_key;
sk : Signature.secret_key;
}
type input_source =
| Explicit of source
| Wallet_alias of string
| Wallet_pkh of public_key_hash
type source_origin = {source : source; origin : origin}
type transfer = {
src : source;
dst : public_key_hash;
fee : Tez.t;
amount : Tez.t;
counter : Z.t option;
fresh_dst : bool;
}
type state = {
current_head_on_start : Block_hash.t;
counters : (Block_hash.t * Z.t) Signature.Public_key_hash.Table.t;
mutable pool : source_origin list;
mutable pool_size : int;
(** [Some l] if [single_op_per_pkh_per_block] is true *)
mutable shuffled_pool : source list option;
mutable revealed : Signature.Public_key_hash.Set.t;
mutable last_block : Block_hash.t;
new_block_condition : unit Lwt_condition.t;
injected_operations : Operation_hash.t list Block_hash.Table.t;
}
let verbose = ref false
let debug = ref false
let debug_msg msg = if !debug then msg () else Lwt.return_unit
let default_parameters =
{
seed = 0x533D;
fresh_probability = 0.001;
tps = 5.0;
strategy = Fixed_amount {mutez = Tez.one};
fee_mutez = Tez.of_mutez_exn 2_000L;
gas_limit = Gas.Arith.integral_of_int_exn 1_600;
(* [gas_limit] corresponds to a slight overapproximation of the
gas needed to inject an operation. It was obtained by simulating
the operation using the client. *)
storage_limit = Z.zero;
account_creation_storage = Z.of_int 300;
(* [account_creation_storage] corresponds to a slight overapproximation
of the storage consumed when allocating a new implicit account.
It was obtained by simulating the operation using the client. *)
total_transfers = None;
single_op_per_pkh_per_block = false;
}
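(* Illustrative sketch (hypothetical values): callers can derive a profile
   from [default_parameters] with a record update, e.g. a more aggressive run:
   let sustained =
     {default_parameters with tps = 50.0; strategy = Evaporation {fraction = 0.5}}
*)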
let input_source_encoding =
let open Data_encoding in
union
[
case
~title:"explicit"
(Tag 0)
(obj3
(req "pkh" Signature.Public_key_hash.encoding)
(req "pk" Signature.Public_key.encoding)
(req "sk" Signature.Secret_key.encoding))
(function Explicit {pkh; pk; sk} -> Some (pkh, pk, sk) | _ -> None)
(fun (pkh, pk, sk) -> Explicit {pkh; pk; sk});
case
~title:"alias"
(Tag 1)
(obj1 (req "alias" Data_encoding.string))
(function Wallet_alias alias -> Some alias | _ -> None)
(fun alias -> Wallet_alias alias);
case
~title:"pkh"
(Tag 2)
(obj1 (req "pkh" Signature.Public_key_hash.encoding))
(function Wallet_pkh pkh -> Some pkh | _ -> None)
(fun pkh -> Wallet_pkh pkh);
]
let input_source_list_encoding = Data_encoding.list input_source_encoding
let injected_operations_encoding =
let open Data_encoding in
list
(obj2
(req "block_hash_when_injected" Block_hash.encoding)
(req "operation_hashes" (list Operation_hash.encoding)))
let parse_strategy s =
match String.split ~limit:1 ':' s with
| ["fixed"; parameter] -> (
match int_of_string parameter with
| exception _ -> Error "invalid integer literal"
| mutez when mutez <= 0 -> Error "negative amount"
| mutez -> (
match Tez.of_mutez (Int64.of_int mutez) with
| None -> Error "invalid mutez"
| Some mutez -> Ok (Fixed_amount {mutez})))
| ["evaporation"; parameter] -> (
match float_of_string parameter with
| exception _ -> Error "invalid float literal"
| fraction when fraction < 0.0 || fraction > 1.0 ->
Error "invalid evaporation rate"
| fraction -> Ok (Evaporation {fraction}))
| _ -> Error "invalid argument"
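(* Illustrative examples of the accepted syntax (hypothetical inputs):
   parse_strategy "fixed:1000000"    yields Ok (Fixed_amount {mutez = 1 tez})
   parse_strategy "evaporation:0.5"  yields Ok (Evaporation {fraction = 0.5})
   parse_strategy "fixed:-3"         yields Error "negative amount" *)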
(** This command uses two different data structures for sources:
    - The in-output files one,
    - The normalized one.

    The data structure used for in-output files does not directly contain the
    data required to forge operations. For efficiency purposes, the sources
    are converted into a normalized data structure that contains all the
    required data to forge operations and the format originally used to be
    able to revert this conversion. *)
(** [normalize_source cctxt src] converts [src] from in-output data structure
    to normalized one. If the conversion fails, [None] is returned and a
    warning message is printed in [cctxt].

    Only unencrypted and encrypted sources from the wallet of [cctxt] are
    supported. *)
let normalize_source cctxt =
let sk_of_sk_uri sk_uri =
match
Signature.Secret_key.of_b58check
(Uri.path (sk_uri : Client_keys.sk_uri :> Uri.t))
with
| Ok sk -> Lwt.return_some sk
| Error _ -> (
Tezos_signer_backends.Encrypted.decrypt cctxt sk_uri >>= function
| Error _ -> Lwt.return_none
| Ok sk -> Lwt.return_some sk)
in
let key_from_alias alias =
let warning msg alias =
cctxt#warning msg alias >>= fun () -> Lwt.return_none
in
(Client_keys.alias_keys cctxt alias >>= function
| Error _ | Ok None -> warning "Alias \"%s\" not found in the wallet" alias
| Ok (Some (_, None, _)) | Ok (Some (_, _, None)) ->
warning
"Alias \"%s\" does not contain public or secret key and could not \
be used for stresstest"
alias
| Ok (Some (pkh, Some pk, Some sk_uri)) -> (
sk_of_sk_uri sk_uri >>= function
| None ->
warning
"Cannot extract the secret key form the alias \"%s\" of the \
wallet"
alias
| Some sk ->
Lwt.return_some
{source = {pkh; pk; sk}; origin = Wallet_alias alias}))
>>= function
| None -> warning "Source given as alias \"%s\" ignored" alias
| key -> Lwt.return key
in
let key_from_wallet pkh =
let warning msg pkh =
cctxt#warning msg Signature.Public_key_hash.pp pkh >>= fun () ->
Lwt.return_none
in
(Client_keys.get_key cctxt pkh >>= function
| Error _ -> warning "Pkh \"%a\" not found in the wallet" pkh
| Ok (alias, pk, sk_uri) -> (
sk_of_sk_uri sk_uri >>= function
| None ->
cctxt#warning
"Cannot extract the secret key form the pkh \"%a\" (alias: \
\"%s\") of the wallet"
Signature.Public_key_hash.pp
pkh
alias
>>= fun () -> Lwt.return_none
| Some sk ->
Lwt.return_some {source = {pkh; pk; sk}; origin = Wallet_pkh}))
>>= function
| None -> warning "Source given as pkh \"%a\" ignored" pkh
| key -> Lwt.return key
in
function
| Explicit source -> Lwt.return_some {source; origin = Explicit}
| Wallet_alias alias -> key_from_alias alias
| Wallet_pkh pkh -> key_from_wallet pkh
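(* Illustrative note (hypothetical alias): [normalize_source cctxt
   (Wallet_alias "bootstrap1")] resolves the alias through the wallet and
   yields [Some {source; origin = Wallet_alias "bootstrap1"}] when both the
   public and secret keys are available, and [None] (after printing a
   warning) otherwise. *)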
(** [unnormalize_source src_org] converts [src_org] from normalized data
structure to in-output one. *)
let unnormalize_source src_org =
match src_org.origin with
| Explicit -> Explicit src_org.source
| Wallet_pkh -> Wallet_pkh src_org.source.pkh
| Wallet_alias alias -> Wallet_alias alias
(** Samples from [state.pool]. Used to generate the destination of a
transfer, and its source only when [state.shuffled_pool] is [None]
meaning that [--single-op-per-pkh-per-block] is not set. *)
let sample_any_source_from_pool state rng_state =
let idx = Random.State.int rng_state state.pool_size in
match List.nth state.pool idx with
| None -> assert false
| Some src_org -> Lwt.return src_org.source
(** Generates the source of a transfer. If [state.shuffled_pool] has a
value (meaning that [--single-op-per-pkh-per-block] is active) then
it is sampled from there, otherwise from [state.pool]. *)
let rec sample_source_from_pool state rng_state
(cctxt : Protocol_client_context.full) =
match state.shuffled_pool with
| None -> sample_any_source_from_pool state rng_state
| Some (source :: l) ->
state.shuffled_pool <- Some l ;
debug_msg (fun () ->
cctxt#message
"sample_transfer: %d unused sources for the block next to %a"
(List.length l)
Block_hash.pp
state.last_block)
>>= fun () -> Lwt.return source
| Some [] ->
cctxt#message
"all available sources have been used for block next to %a"
Block_hash.pp
state.last_block
>>= fun () ->
Lwt_condition.wait state.new_block_condition >>= fun () ->
sample_source_from_pool state rng_state cctxt
let random_seed rng_state =
Bytes.init 32 (fun _ -> Char.chr (Random.State.int rng_state 256))
let generate_fresh_source pool rng_state =
let seed = random_seed rng_state in
let (pkh, pk, sk) = Signature.generate_key ~seed () in
let fresh = {source = {pkh; pk; sk}; origin = Explicit} in
pool.pool <- fresh :: pool.pool ;
pool.pool_size <- pool.pool_size + 1 ;
fresh.source
(** [on_new_head cctxt f] calls [f head] each time there is a new head
    received by the streamed RPC /monitor/heads/main *)
let on_new_head (cctxt : Protocol_client_context.full) f =
Shell_services.Monitor.heads cctxt `Main >>=? fun (heads_stream, stopper) ->
Lwt_stream.iter_s f heads_stream >>= fun () ->
stopper () ;
return_unit
(* We perform rejection sampling of valid sources.
We could maintain a local cache of existing contracts with sufficient balance. *)
let rec sample_transfer (cctxt : Protocol_client_context.full) chain block
(parameters : parameters) (state : state) rng_state =
sample_source_from_pool state rng_state cctxt >>= fun src ->
Alpha_services.Contract.balance
cctxt
(chain, block)
(Contract.implicit_contract src.pkh)
>>=? fun tez ->
if Tez.(tez = zero) then
debug_msg (fun () ->
cctxt#message
"sample_transfer: invalid balance %a"
Signature.Public_key_hash.pp
src.pkh)
>>= fun () ->
    (* Sampled source has zero balance: the transfer that created that
       address was not included yet. Retry *)
sample_transfer cctxt chain block parameters state rng_state
else
let fresh =
Random.State.float rng_state 1.0 < parameters.fresh_probability
in
(if fresh then Lwt.return (generate_fresh_source state rng_state)
else sample_any_source_from_pool state rng_state)
>>= fun dest ->
let amount =
match parameters.strategy with
| Fixed_amount {mutez} -> mutez
| Evaporation {fraction} ->
let mutez = Int64.to_float (Tez.to_mutez tez) in
let max_fraction = Int64.of_float (mutez *. fraction) in
let amount =
if max_fraction = 0L then 1L
else max 1L (Random.State.int64 rng_state max_fraction)
in
Tez.of_mutez_exn amount
in
let fee = parameters.fee_mutez in
return {src; dst = dest.pkh; fee; amount; counter = None; fresh_dst = fresh}
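(* Worked example (illustrative only): with [Evaporation {fraction = 0.01}]
   and a sampled source holding 2 tez (2_000_000 mutez),
   max_fraction = Int64.of_float (2_000_000. *. 0.01) = 20_000L,
   so the transferred amount is a random value between 1 and 19_999 mutez. *)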
let inject_contents (cctxt : Protocol_client_context.full) chain branch sk
contents =
let bytes =
Data_encoding.Binary.to_bytes_exn
Operation.unsigned_encoding
({branch}, Contents_list contents)
in
let signature =
Some (Signature.sign ~watermark:Signature.Generic_operation sk bytes)
in
let op : _ Operation.t =
{shell = {branch}; protocol_data = {contents; signature}}
in
let bytes =
Data_encoding.Binary.to_bytes_exn Operation.encoding (Operation.pack op)
in
Shell_services.Injection.operation cctxt ~chain bytes
(* counter _must_ be set before calling this function *)
let manager_op_of_transfer parameters
{src; dst; fee; amount; counter; fresh_dst} =
let source = src.pkh in
let gas_limit = parameters.gas_limit in
let storage_limit =
if fresh_dst then
Z.add parameters.account_creation_storage parameters.storage_limit
else parameters.storage_limit
in
let operation =
let parameters =
let open Tezos_micheline in
Script.lazy_expr
@@ Micheline.strip_locations
(Prim (0, Michelson_v1_primitives.D_Unit, [], []))
in
let entrypoint = "default" in
let destination = Contract.implicit_contract dst in
Transaction {amount; parameters; entrypoint; destination}
in
match counter with
| None -> assert false
| Some counter ->
Manager_operation
{source; fee; counter; operation; gas_limit; storage_limit}
let cost_of_manager_operation = Gas.Arith.integral_of_int_exn 1_000
let inject_transfer (cctxt : Protocol_client_context.full) parameters state
rng_state chain block transfer =
Alpha_services.Contract.counter cctxt (chain, block) transfer.src.pkh
>>=? fun pcounter ->
Shell_services.Blocks.hash cctxt ~chain ~block () >>=? fun branch ->
(* If there is a new block refresh the fresh_pool *)
if not (Block_hash.equal branch state.last_block) then (
state.last_block <- branch ;
if Option.is_some state.shuffled_pool then
state.shuffled_pool <-
Some
(List.shuffle
~rng_state
(List.map (fun src_org -> src_org.source) state.pool))) ;
let freshest_counter =
match
Signature.Public_key_hash.Table.find state.counters transfer.src.pkh
with
| None ->
        (* This is the first operation we inject for this pkh: the counter
           given by the RPC _must_ be the freshest one. *)
pcounter
| Some (previous_branch, previous_counter) ->
if Block_hash.equal branch previous_branch then
          (* We already injected an operation on top of this block: the one
             stored locally is the freshest one. *)
previous_counter
else
          (* It seems the block changed since we last injected an operation:
             this invalidates the previously stored counter. We return the
             counter given by the RPC. *)
pcounter
in
(if Signature.Public_key_hash.Set.mem transfer.src.pkh state.revealed then
return true
else (
(* Either the [manager_key] RPC tells us the key is already
revealed, or we immediately inject a reveal operation: in any
case the key is revealed in the end. *)
state.revealed <-
Signature.Public_key_hash.Set.add transfer.src.pkh state.revealed ;
Alpha_services.Contract.manager_key cctxt (chain, block) transfer.src.pkh
>>=? fun pk_opt -> return (Option.is_some pk_opt)))
>>=? fun already_revealed ->
(if not already_revealed then (
let reveal_counter = Z.succ freshest_counter in
let transf_counter = Z.succ reveal_counter in
let reveal =
Manager_operation
{
source = transfer.src.pkh;
fee = Tez.zero;
counter = reveal_counter;
gas_limit = cost_of_manager_operation;
storage_limit = Z.zero;
operation = Reveal transfer.src.pk;
}
in
let manager_op =
manager_op_of_transfer
parameters
{transfer with counter = Some transf_counter}
in
let list = Cons (reveal, Single manager_op) in
Signature.Public_key_hash.Table.remove state.counters transfer.src.pkh ;
Signature.Public_key_hash.Table.add
state.counters
transfer.src.pkh
(branch, transf_counter) ;
(if !verbose then
cctxt#message
"injecting reveal+transfer from %a (counters=%a,%a) to %a"
Signature.Public_key_hash.pp
transfer.src.pkh
Z.pp_print
reveal_counter
Z.pp_print
transf_counter
Signature.Public_key_hash.pp
transfer.dst
else Lwt.return_unit)
>>= fun () ->
      (* NB: regardless of our best efforts to keep track of counters,
         injection can fail with "counter in the future" if a block switch
         happens in between the moment we get the branch and the moment we
         inject, and the new block does not include all the operations we
         injected. *)
inject_contents cctxt chain branch transfer.src.sk list)
else
let transf_counter = Z.succ freshest_counter in
let manager_op =
manager_op_of_transfer
parameters
{transfer with counter = Some transf_counter}
in
let list = Single manager_op in
Signature.Public_key_hash.Table.remove state.counters transfer.src.pkh ;
Signature.Public_key_hash.Table.add
state.counters
transfer.src.pkh
(branch, transf_counter) ;
(if !verbose then
cctxt#message
"injecting transfer from %a (counter=%a) to %a"
Signature.Public_key_hash.pp
transfer.src.pkh
Z.pp_print
transf_counter
Signature.Public_key_hash.pp
transfer.dst
else Lwt.return_unit)
>>= fun () ->
(* See comment above. *)
inject_contents cctxt chain branch transfer.src.sk list)
>>= function
| Ok op_hash ->
debug_msg (fun () ->
cctxt#message
"inject_transfer: op injected %a"
Operation_hash.pp
op_hash)
>>= fun () ->
let ops =
Option.value
~default:[]
(Block_hash.Table.find state.injected_operations branch)
in
Block_hash.Table.replace state.injected_operations branch (op_hash :: ops) ;
return_unit
| Error e ->
debug_msg (fun () ->
cctxt#message
"inject_transfer: error, op not injected: %a"
Error_monad.pp_print_trace
e)
>>= fun () -> return_unit
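(* Illustrative note: for a source whose on-chain counter is [n], an
   unrevealed key leads to a reveal+transfer batch using counters [n+1] and
   [n+2]; [(branch, n+2)] is then cached in [state.counters], so a further
   transfer injected on the same branch uses [n+3] without trusting the
   possibly stale RPC value. *)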
let save_injected_operations (cctxt : Protocol_client_context.full) state =
let json =
Data_encoding.Json.construct
injected_operations_encoding
(Block_hash.Table.fold
(fun k v acc -> (k, v) :: acc)
state.injected_operations
[])
in
let path =
Filename.temp_file "client-stresstest-injected_operations-" ".json"
in
cctxt#message "writing injected operations in file %s" path >>= fun () ->
Lwt_utils_unix.Json.write_file path json >>= function
| Error e ->
cctxt#message
"could not write injected operations json file: %a"
Error_monad.pp_print_trace
e
| Ok _ -> Lwt.return_unit
let stat_on_exit (cctxt : Protocol_client_context.full) state =
let ratio_injected_included_op () =
Shell_services.Blocks.hash cctxt () >>=? fun current_head_on_exit ->
let inter_cardinal s1 s2 =
Operation_hash.Set.cardinal
(Operation_hash.Set.inter
(Operation_hash.Set.of_list s1)
(Operation_hash.Set.of_list s2))
in
let get_included_ops older_block =
let rec get_included_ops block acc_included_ops =
if block = older_block then return acc_included_ops
else
Shell_services.Chain.Blocks.Operation_hashes.operation_hashes_in_pass
cctxt
~chain:`Main
~block:(`Hash (block, 0))
3
>>=? fun included_ops ->
Shell_services.Blocks.list
cctxt
~chain:`Main
~heads:[block]
~length:2
()
>>=? function
| [[current; predecessor]] when current = block ->
get_included_ops
predecessor
(List.append acc_included_ops included_ops)
| _ -> cctxt#error "Error while computing stats: invalid block list"
in
get_included_ops current_head_on_exit []
in
let injected_ops =
Block_hash.Table.fold
(fun k l acc ->
(* The operations injected during the last block are ignored because
they should not be currently included. *)
if current_head_on_exit <> k then List.append acc l else acc)
state.injected_operations
[]
in
get_included_ops state.current_head_on_start >>=? fun included_ops ->
let included_ops_count = inter_cardinal injected_ops included_ops in
debug_msg (fun () ->
cctxt#message
"injected : %a\nincluded: %a"
(Format.pp_print_list Operation_hash.pp)
injected_ops
(Format.pp_print_list Operation_hash.pp)
included_ops)
>>= fun () ->
let injected_ops_count = List.length injected_ops in
cctxt#message
"%s of the injected operations have been included (%d injected, %d \
included). Note that the operations injected during the last block are \
ignored because they should not be currently included."
(if Int.equal injected_ops_count 0 then "N/A"
else Format.sprintf "%d%%" (included_ops_count * 100 / injected_ops_count))
injected_ops_count
included_ops_count
>>= fun () -> return_unit
in
ratio_injected_included_op ()
let launch (cctxt : Protocol_client_context.full) (parameters : parameters)
state rng_state save_pool_callback =
let injected = ref 0 in
let dt = 1. /. parameters.tps in
let terminated () =
match parameters.total_transfers with
| None -> false
| Some bound -> bound <= !injected
in
let rec loop () =
if terminated () then
save_pool_callback () >>= fun () ->
save_injected_operations cctxt state >>= fun () ->
stat_on_exit cctxt state
else
let start = Mtime_clock.elapsed () in
debug_msg (fun () -> cctxt#message "launch.loop: invoke sample_transfer")
>>= fun () ->
sample_transfer cctxt cctxt#chain cctxt#block parameters state rng_state
>>=? fun transfer ->
debug_msg (fun () -> cctxt#message "launch.loop: invoke inject_transfer")
>>= fun () ->
inject_transfer
cctxt
parameters
state
rng_state
cctxt#chain
cctxt#block
transfer
>>=? fun () ->
incr injected ;
let stop = Mtime_clock.elapsed () in
let elapsed = Mtime.Span.(to_s stop -. to_s start) in
let remaining = dt -. elapsed in
(if remaining <= 0.0 then
cctxt#warning
"warning: tps target could not be reached, consider using a lower \
value for --tps"
else Lwt_unix.sleep remaining)
>>= loop
in
(* True, if and only if [single_op_per_pkh_per_block] is true. *)
if Option.is_some state.shuffled_pool then
dont_wait
(fun () ->
on_new_head cctxt (fun (block, _) ->
if not (Block_hash.equal block state.last_block) then (
state.last_block <- block ;
state.shuffled_pool <-
Some
(List.shuffle
~rng_state
(List.map (fun src_org -> src_org.source) state.pool))) ;
Lwt_condition.broadcast state.new_block_condition () ;
Lwt.return_unit))
(fun trace ->
ignore
(cctxt#error
"an error while getting the new head has been returned: %a"
Error_monad.pp_print_trace
trace))
(fun exn ->
ignore
(cctxt#error
"an exception while getting the new head has been raised: %s"
(Printexc.to_string exn))) ;
loop ()
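(* Worked example (illustrative only): with [--tps 5] the loop budgets
   dt = 1. /. 5. = 0.2 s per transfer; if sampling and injecting took 0.15 s
   it sleeps for the remaining 0.05 s, and it only emits the "tps target
   could not be reached" warning when the budget is already exceeded. *)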
let group =
Clic.{name = "stresstest"; title = "Commands for stress-testing the network"}
type pool_source =
| From_string of {json : Ezjsonm.value}
| From_file of {path : string; json : Ezjsonm.value}
let json_of_pool_source = function
| From_string {json} | From_file {json; _} -> json
let json_file_or_text_parameter =
Clic.parameter (fun _ p ->
match String.split ~limit:1 ':' p with
| ["text"; text] -> return (From_string {json = Ezjsonm.from_string text})
| ["file"; path] ->
Lwt_utils_unix.Json.read_file path >|=? fun json ->
From_file {path; json}
| _ -> (
if Sys.file_exists p then
Lwt_utils_unix.Json.read_file p >|=? fun json ->
From_file {path = p; json}
else
try return (From_string {json = Ezjsonm.from_string p})
with Ezjsonm.Parse_error _ ->
failwith "Neither an existing file nor valid JSON: '%s'" p))
let seed_arg =
let open Clic in
arg
~long:"seed"
~placeholder:"int"
~doc:"random seed"
(parameter (fun (cctxt : Protocol_client_context.full) s ->
match int_of_string s with
| exception _ ->
cctxt#error
"While parsing --seed: could not convert argument to int"
| i -> return i))
let tps_arg =
let open Clic in
arg
~long:"tps"
~placeholder:"float"
~doc:"transactions per seconds target"
(parameter (fun (cctxt : Protocol_client_context.full) s ->
match float_of_string s with
| exception _ ->
cctxt#error
"While parsing --tps: could not convert argument to float"
| f when f < 0.0 ->
cctxt#error "While parsing --tps: negative argument"
| f -> return f))
let fresh_probability_arg =
let open Clic in
arg
~long:"fresh-probability"
~placeholder:"float in [0;1]"
~doc:
(Format.sprintf
"Probability for each transaction's destination to be a fresh \
account. The default value is %g. This new account may then be used \
as source or destination of subsequent transactions, just like the \
accounts that were initially provided to the command. Note that when \
[--single-op-per-pkh-per-block] is set, the new account will not be \
used as source until the head changes."
default_parameters.fresh_probability)
(parameter (fun (cctxt : Protocol_client_context.full) s ->
match float_of_string s with
| exception _ ->
cctxt#error
"While parsing --fresh-probability: could not convert argument \
to float"
| f when f < 0.0 || f > 1.0 ->
cctxt#error "While parsing --fresh-probability: invalid argument"
| f -> return f))
let strategy_arg =
let open Clic in
arg
~long:"strategy"
~placeholder:"fixed:mutez | evaporation:[0;1]"
~doc:"wealth redistribution strategy"
(parameter (fun (cctxt : Protocol_client_context.full) s ->
match parse_strategy s with
| Error msg -> cctxt#error "While parsing --strategy: %s" msg
| Ok strategy -> return strategy))
let gas_limit_arg =
let open Clic in
let gas_limit_kind =
parameter (fun _ s ->
try
let v = Z.of_string s in
return (Gas.Arith.integral_exn v)
with _ -> failwith "invalid gas limit (must be a positive number)")
in
arg
~long:"gas-limit"
~short:'G'
~placeholder:"amount"
~doc:
(Format.asprintf
"Set the gas limit of the transaction instead of using the default \
value of %a"
Gas.Arith.pp_integral
default_parameters.gas_limit)
gas_limit_kind
let storage_limit_arg =
let open Clic in
let storage_limit_kind =
parameter (fun _ s ->
try
let v = Z.of_string s in
assert (Compare.Z.(v >= Z.zero)) ;
return v
with _ ->
failwith "invalid storage limit (must be a positive number of bytes)")
in
arg
~long:"storage-limit"
~short:'S'
~placeholder:"amount"
~doc:
(Format.asprintf
"Set the storage limit of the transaction instead of using the \
default value of %a"
Z.pp_print
default_parameters.storage_limit)
storage_limit_kind
let transfers_arg =
let open Clic in
arg
~long:"transfers"
~placeholder:"integer"
~doc:"total number of transfers to perform, unbounded if not specified"
(parameter (fun (cctxt : Protocol_client_context.full) s ->
match int_of_string s with
| exception _ ->
cctxt#error "While parsing --transfers: invalid integer literal"
| i when i <= 0 ->
cctxt#error "While parsing --transfers: negative integer"
| i -> return i))
let single_op_per_pkh_per_block_arg =
Clic.switch
~long:"single-op-per-pkh-per-block"
~doc:
"ensure that the operations are not rejected by limiting the injection \
to 1 operation per public_key_hash per block."
()
let verbose_arg =
Clic.switch
~long:"verbose"
~doc:"Display detailed logs of the injected operations"
()
let debug_arg = Clic.switch ~long:"debug" ~doc:"Display debug logs" ()
let set_option opt f x = Option.fold ~none:x ~some:(f x) opt
let save_pool_callback (cctxt : Protocol_client_context.full) pool_source state
=
let json =
Data_encoding.Json.construct
input_source_list_encoding
(List.map unnormalize_source state.pool)
in
let catch_write_error = function
| Error e ->
cctxt#message
"could not write back json file: %a"
Error_monad.pp_print_trace
e
| Ok () -> Lwt.return_unit
in
match pool_source with
| From_string _ ->
      (* If the initial pool was given directly as json, save pool to
         a temp file. *)
let path = Filename.temp_file "client-stresstest-pool-" ".json" in
cctxt#message "writing back address pool in file %s" path >>= fun () ->
Lwt_utils_unix.Json.write_file path json >>= catch_write_error
| From_file {path; _} ->
(* If the pool specification was a json file, save pool to
the same file. *)
cctxt#message "writing back address pool in file %s" path >>= fun () ->
Lwt_utils_unix.Json.write_file path json >>= catch_write_error
let generate_random_transactions =
let open Clic in
command
~group
~desc:"Generate random transactions"
(args11
seed_arg
tps_arg
fresh_probability_arg
strategy_arg
Client_proto_args.fee_arg
gas_limit_arg
storage_limit_arg
transfers_arg
single_op_per_pkh_per_block_arg
verbose_arg
debug_arg)
(prefixes ["stresstest"; "transfer"; "using"]
@@ param
~name:"sources.json"
~desc:
{|List of accounts from which to perform transfers in JSON format. The input JSON must be an array of objects of the form {"pkh":"<pkh>","pk":"<pk>","sk":"<sk>"} or {"alias":"<alias from wallet>"} or {"pkh":"<pkh from wallet>"} with the pkh, pk and sk encoded in B58 form."|}
json_file_or_text_parameter
@@ stop)
(fun ( seed,
tps,
freshp,
strat,
fee,
gas_limit,
storage_limit,
transfers,
single_op_per_pkh_per_block,
verbose_flag,
debug_flag )
sources_json
(cctxt : Protocol_client_context.full) ->
verbose := verbose_flag ;
debug := debug_flag ;
let parameters =
default_parameters
|> set_option seed (fun parameter seed -> {parameter with seed})
|> set_option tps (fun parameter tps -> {parameter with tps})
|> set_option freshp (fun parameter fresh_probability ->
{parameter with fresh_probability})
|> set_option strat (fun parameter strategy ->
{parameter with strategy})
|> set_option fee (fun parameter fee_mutez ->
{parameter with fee_mutez})
|> set_option gas_limit (fun parameter gas_limit ->
{parameter with gas_limit})
|> set_option storage_limit (fun parameter storage_limit ->
{parameter with storage_limit})
|> set_option transfers (fun parameter transfers ->
{parameter with total_transfers = Some transfers})
|> fun parameter -> {parameter with single_op_per_pkh_per_block}
in
match
Data_encoding.Json.destruct
input_source_list_encoding
(json_of_pool_source sources_json)
with
| exception _ -> cctxt#error "Could not decode list of sources"
| [] -> cctxt#error "It is required to provide sources"
| sources ->
List.filter_map_p (normalize_source cctxt) sources >>= fun sources ->
let counters = Signature.Public_key_hash.Table.create 1023 in
let rng_state = Random.State.make [|parameters.seed|] in
Shell_services.Blocks.hash cctxt () >>=? fun current_head_on_start ->
let state =
{
current_head_on_start;
counters;
pool = sources;
pool_size = List.length sources;
shuffled_pool =
(if parameters.single_op_per_pkh_per_block then
Some
(List.shuffle
~rng_state
(List.map (fun src_org -> src_org.source) sources))
else None);
revealed = Signature.Public_key_hash.Set.empty;
last_block = current_head_on_start;
new_block_condition = Lwt_condition.create ();
injected_operations = Block_hash.Table.create 1023;
}
in
let exit_callback_id =
Lwt_exit.register_clean_up_callback ~loc:__LOC__ (fun _retcode ->
stat_on_exit cctxt state >>= function
| Ok () -> Lwt.return_unit
| Error e ->
cctxt#message "Error: %a" Error_monad.pp_print_trace e)
in
let save_pool () = save_pool_callback cctxt sources_json state in
(* Register a callback for saving the pool when the tool is interrupted
through ctrl-c *)
let exit_callback_id =
Lwt_exit.register_clean_up_callback
~loc:__LOC__
~after:[exit_callback_id]
(fun _retcode -> save_pool ())
in
let save_injected_operations () =
save_injected_operations cctxt state
in
ignore
(Lwt_exit.register_clean_up_callback
~loc:__LOC__
~after:[exit_callback_id]
(fun _retcode -> save_injected_operations ())) ;
launch cctxt parameters state rng_state save_pool)
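(* Illustrative example (values shortened): a minimal sources file accepted by
   the command above, mixing the three supported forms:
   [ {"alias": "bootstrap1"},
     {"pkh": "tz1..."},
     {"pkh": "tz1...", "pk": "edpk...", "sk": "edsk..."} ] *)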
let commands network () =
match network with
| Some `Mainnet -> []
| Some `Testnet | None -> [generate_random_transactions]
| null | https://raw.githubusercontent.com/Decentralized-Pictures/T4L3NT/6d4d3edb2d73575384282ad5a633518cba3d29e3/src/proto_012_Psithaca/lib_client_commands/client_proto_stresstest_commands.ml | ocaml | ***************************************************************************
Open Source License
Permission is hereby granted, free of charge, to any person obtaining a
copy of this software and associated documentation files (the "Software"),
the rights to use, copy, modify, merge, publish, distribute, sublicense,
Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included
in all copies or substantial portions of the Software.
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
DEALINGS IN THE SOFTWARE.
***************************************************************************
* Amount to transfer
* Per-transfer probability that the destination will be fresh
* Transaction per seconds target
* fees for each transfer, in mutez
* gas limit per operation
* storage limit per operation
* upper bound on bytes consumed when creating a tz1 account
* total number of transfers to perform; unbounded if None
* if true, a single operation will be injected by pkh by block to
improve the chance for the injected operations to be included in the
next block
* [Some l] if [single_op_per_pkh_per_block] is true
[gas_limit] corresponds to a slight overapproximation of the
gas needed to inject an operation. It was obtained by simulating
the operation using the client.
[account_creation_storage] corresponds to a slight overapproximation
of the storage consumed when allocating a new implicit account.
It was obtained by simulating the operation using the client.
* [unnormalize_source src_org] converts [src_org] from normalized data
structure to in-output one.
* Samples from [state.pool]. Used to generate the destination of a
transfer, and its source only when [state.shuffled_pool] is [None]
meaning that [--single-op-per-pkh-per-block] is not set.
* Generates the source of a transfer. If [state.shuffled_pool] has a
value (meaning that [--single-op-per-pkh-per-block] is active) then
it is sampled from there, otherwise from [state.pool].
We perform rejection sampling of valid sources.
We could maintain a local cache of existing contracts with sufficient balance.
counter _must_ be set before calling this function
If there is a new block refresh the fresh_pool
Either the [manager_key] RPC tells us the key is already
revealed, or we immediately inject a reveal operation: in any
case the key is revealed in the end.
See comment above.
The operations injected during the last block are ignored because
they should not be currently included.
True, if and only if [single_op_per_pkh_per_block] is true.
If the pool specification was a json file, save pool to
the same file.
Register a callback for saving the pool when the tool is interrupted
through ctrl-c | Copyright ( c ) 2021 Nomadic Labs < >
to deal in the Software without restriction , including without limitation
and/or sell copies of the Software , and to permit persons to whom the
THE SOFTWARE IS PROVIDED " AS IS " , WITHOUT WARRANTY OF ANY KIND , EXPRESS OR
LIABILITY , WHETHER IN AN ACTION OF CONTRACT , TORT OR OTHERWISE , ARISING
open Protocol
open Alpha_context
type transfer_strategy =
| Evaporation of {fraction : float}
* Maximum fraction of current wealth to transfer .
Minimum amount is 1 mutez regardless of total wealth .
Minimum amount is 1 mutez regardless of total wealth. *)
type parameters = {
seed : int;
fresh_probability : float;
strategy : transfer_strategy;
account_creation_storage : Z.t;
total_transfers : int option;
single_op_per_pkh_per_block : bool;
}
type origin = Explicit | Wallet_pkh | Wallet_alias of string
type source = {
pkh : public_key_hash;
pk : public_key;
sk : Signature.secret_key;
}
type input_source =
| Explicit of source
| Wallet_alias of string
| Wallet_pkh of public_key_hash
type source_origin = {source : source; origin : origin}
type transfer = {
src : source;
dst : public_key_hash;
fee : Tez.t;
amount : Tez.t;
counter : Z.t option;
fresh_dst : bool;
}
type state = {
current_head_on_start : Block_hash.t;
counters : (Block_hash.t * Z.t) Signature.Public_key_hash.Table.t;
mutable pool : source_origin list;
mutable pool_size : int;
mutable shuffled_pool : source list option;
mutable revealed : Signature.Public_key_hash.Set.t;
mutable last_block : Block_hash.t;
new_block_condition : unit Lwt_condition.t;
injected_operations : Operation_hash.t list Block_hash.Table.t;
}
let verbose = ref false
let debug = ref false
let debug_msg msg = if !debug then msg () else Lwt.return_unit
let default_parameters =
{
seed = 0x533D;
fresh_probability = 0.001;
tps = 5.0;
strategy = Fixed_amount {mutez = Tez.one};
fee_mutez = Tez.of_mutez_exn 2_000L;
gas_limit = Gas.Arith.integral_of_int_exn 1_600;
storage_limit = Z.zero;
account_creation_storage = Z.of_int 300;
total_transfers = None;
single_op_per_pkh_per_block = false;
}
let input_source_encoding =
let open Data_encoding in
union
[
case
~title:"explicit"
(Tag 0)
(obj3
(req "pkh" Signature.Public_key_hash.encoding)
(req "pk" Signature.Public_key.encoding)
(req "sk" Signature.Secret_key.encoding))
(function Explicit {pkh; pk; sk} -> Some (pkh, pk, sk) | _ -> None)
(fun (pkh, pk, sk) -> Explicit {pkh; pk; sk});
case
~title:"alias"
(Tag 1)
(obj1 (req "alias" Data_encoding.string))
(function Wallet_alias alias -> Some alias | _ -> None)
(fun alias -> Wallet_alias alias);
case
~title:"pkh"
(Tag 2)
(obj1 (req "pkh" Signature.Public_key_hash.encoding))
(function Wallet_pkh pkh -> Some pkh | _ -> None)
(fun pkh -> Wallet_pkh pkh);
]
let input_source_list_encoding = Data_encoding.list input_source_encoding
let injected_operations_encoding =
let open Data_encoding in
list
(obj2
(req "block_hash_when_injected" Block_hash.encoding)
(req "operation_hashes" (list Operation_hash.encoding)))
let parse_strategy s =
match String.split ~limit:1 ':' s with
| ["fixed"; parameter] -> (
match int_of_string parameter with
| exception _ -> Error "invalid integer literal"
| mutez when mutez <= 0 -> Error "negative amount"
| mutez -> (
match Tez.of_mutez (Int64.of_int mutez) with
| None -> Error "invalid mutez"
| Some mutez -> Ok (Fixed_amount {mutez})))
| ["evaporation"; parameter] -> (
match float_of_string parameter with
| exception _ -> Error "invalid float literal"
| fraction when fraction < 0.0 || fraction > 1.0 ->
Error "invalid evaporation rate"
| fraction -> Ok (Evaporation {fraction}))
| _ -> Error "invalid argument"
* This command uses two different data structures for sources :
- The in - output files one ,
- The normalized one .
The data structure used for in - output files does not directly contain the
data required to forge operations . For efficiency purposes , the sources are
converted into a normalized data structure that contains all the required
data to forge operations and the format originally used to be able to
revert this conversion .
- The in-output files one,
- The normalized one.
The data structure used for in-output files does not directly contain the
data required to forge operations. For efficiency purposes, the sources are
converted into a normalized data structure that contains all the required
data to forge operations and the format originally used to be able to
revert this conversion. *)
* [ normalize_source cctxt src ] converts [ src ] from in - output data structure
to normalized one . If the conversion fails , [ None ] is returned and a
warning message is printed in [ cctxt ] .
Only unencrypted and encrypted sources from the wallet of [ cctxt ] are
supported .
to normalized one. If the conversion fails, [None] is returned and a
warning message is printed in [cctxt].
Only unencrypted and encrypted sources from the wallet of [cctxt] are
supported. *)
let normalize_source cctxt =
let sk_of_sk_uri sk_uri =
match
Signature.Secret_key.of_b58check
(Uri.path (sk_uri : Client_keys.sk_uri :> Uri.t))
with
| Ok sk -> Lwt.return_some sk
| Error _ -> (
Tezos_signer_backends.Encrypted.decrypt cctxt sk_uri >>= function
| Error _ -> Lwt.return_none
| Ok sk -> Lwt.return_some sk)
in
let key_from_alias alias =
let warning msg alias =
cctxt#warning msg alias >>= fun () -> Lwt.return_none
in
(Client_keys.alias_keys cctxt alias >>= function
| Error _ | Ok None -> warning "Alias \"%s\" not found in the wallet" alias
| Ok (Some (_, None, _)) | Ok (Some (_, _, None)) ->
warning
"Alias \"%s\" does not contain public or secret key and could not \
be used for stresstest"
alias
| Ok (Some (pkh, Some pk, Some sk_uri)) -> (
sk_of_sk_uri sk_uri >>= function
| None ->
warning
"Cannot extract the secret key form the alias \"%s\" of the \
wallet"
alias
| Some sk ->
Lwt.return_some
{source = {pkh; pk; sk}; origin = Wallet_alias alias}))
>>= function
| None -> warning "Source given as alias \"%s\" ignored" alias
| key -> Lwt.return key
in
let key_from_wallet pkh =
let warning msg pkh =
cctxt#warning msg Signature.Public_key_hash.pp pkh >>= fun () ->
Lwt.return_none
in
(Client_keys.get_key cctxt pkh >>= function
| Error _ -> warning "Pkh \"%a\" not found in the wallet" pkh
| Ok (alias, pk, sk_uri) -> (
sk_of_sk_uri sk_uri >>= function
| None ->
cctxt#warning
"Cannot extract the secret key form the pkh \"%a\" (alias: \
\"%s\") of the wallet"
Signature.Public_key_hash.pp
pkh
alias
>>= fun () -> Lwt.return_none
| Some sk ->
Lwt.return_some {source = {pkh; pk; sk}; origin = Wallet_pkh}))
>>= function
| None -> warning "Source given as pkh \"%a\" ignored" pkh
| key -> Lwt.return key
in
function
| Explicit source -> Lwt.return_some {source; origin = Explicit}
| Wallet_alias alias -> key_from_alias alias
| Wallet_pkh pkh -> key_from_wallet pkh
let unnormalize_source src_org =
match src_org.origin with
| Explicit -> Explicit src_org.source
| Wallet_pkh -> Wallet_pkh src_org.source.pkh
| Wallet_alias alias -> Wallet_alias alias
let sample_any_source_from_pool state rng_state =
let idx = Random.State.int rng_state state.pool_size in
match List.nth state.pool idx with
| None -> assert false
| Some src_org -> Lwt.return src_org.source
let rec sample_source_from_pool state rng_state
(cctxt : Protocol_client_context.full) =
match state.shuffled_pool with
| None -> sample_any_source_from_pool state rng_state
| Some (source :: l) ->
state.shuffled_pool <- Some l ;
debug_msg (fun () ->
cctxt#message
"sample_transfer: %d unused sources for the block next to %a"
(List.length l)
Block_hash.pp
state.last_block)
>>= fun () -> Lwt.return source
| Some [] ->
cctxt#message
"all available sources have been used for block next to %a"
Block_hash.pp
state.last_block
>>= fun () ->
Lwt_condition.wait state.new_block_condition >>= fun () ->
sample_source_from_pool state rng_state cctxt
let random_seed rng_state =
Bytes.init 32 (fun _ -> Char.chr (Random.State.int rng_state 256))
let generate_fresh_source pool rng_state =
let seed = random_seed rng_state in
let (pkh, pk, sk) = Signature.generate_key ~seed () in
let fresh = {source = {pkh; pk; sk}; origin = Explicit} in
pool.pool <- fresh :: pool.pool ;
pool.pool_size <- pool.pool_size + 1 ;
fresh.source
[ cctxt f ] calls [ f head ] each time there is a new head
received by the streamed RPC /monitor / heads / main
received by the streamed RPC /monitor/heads/main *)
let on_new_head (cctxt : Protocol_client_context.full) f =
Shell_services.Monitor.heads cctxt `Main >>=? fun (heads_stream, stopper) ->
Lwt_stream.iter_s f heads_stream >>= fun () ->
stopper () ;
return_unit
let rec sample_transfer (cctxt : Protocol_client_context.full) chain block
(parameters : parameters) (state : state) rng_state =
sample_source_from_pool state rng_state cctxt >>= fun src ->
Alpha_services.Contract.balance
cctxt
(chain, block)
(Contract.implicit_contract src.pkh)
>>=? fun tez ->
if Tez.(tez = zero) then
debug_msg (fun () ->
cctxt#message
"sample_transfer: invalid balance %a"
Signature.Public_key_hash.pp
src.pkh)
>>= fun () ->
Sampled source has zero balance : the transfer that created that
address was not included yet . Retry
address was not included yet. Retry *)
sample_transfer cctxt chain block parameters state rng_state
else
let fresh =
Random.State.float rng_state 1.0 < parameters.fresh_probability
in
(if fresh then Lwt.return (generate_fresh_source state rng_state)
else sample_any_source_from_pool state rng_state)
>>= fun dest ->
let amount =
match parameters.strategy with
| Fixed_amount {mutez} -> mutez
| Evaporation {fraction} ->
let mutez = Int64.to_float (Tez.to_mutez tez) in
let max_fraction = Int64.of_float (mutez *. fraction) in
let amount =
if max_fraction = 0L then 1L
else max 1L (Random.State.int64 rng_state max_fraction)
in
Tez.of_mutez_exn amount
in
let fee = parameters.fee_mutez in
return {src; dst = dest.pkh; fee; amount; counter = None; fresh_dst = fresh}
let inject_contents (cctxt : Protocol_client_context.full) chain branch sk
contents =
let bytes =
Data_encoding.Binary.to_bytes_exn
Operation.unsigned_encoding
({branch}, Contents_list contents)
in
let signature =
Some (Signature.sign ~watermark:Signature.Generic_operation sk bytes)
in
let op : _ Operation.t =
{shell = {branch}; protocol_data = {contents; signature}}
in
let bytes =
Data_encoding.Binary.to_bytes_exn Operation.encoding (Operation.pack op)
in
Shell_services.Injection.operation cctxt ~chain bytes
let manager_op_of_transfer parameters
{src; dst; fee; amount; counter; fresh_dst} =
let source = src.pkh in
let gas_limit = parameters.gas_limit in
let storage_limit =
if fresh_dst then
Z.add parameters.account_creation_storage parameters.storage_limit
else parameters.storage_limit
in
let operation =
let parameters =
let open Tezos_micheline in
Script.lazy_expr
@@ Micheline.strip_locations
(Prim (0, Michelson_v1_primitives.D_Unit, [], []))
in
let entrypoint = "default" in
let destination = Contract.implicit_contract dst in
Transaction {amount; parameters; entrypoint; destination}
in
match counter with
| None -> assert false
| Some counter ->
Manager_operation
{source; fee; counter; operation; gas_limit; storage_limit}
let cost_of_manager_operation = Gas.Arith.integral_of_int_exn 1_000
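(* Inject a single transfer: fetch the current counter and branch, keep the
   locally tracked counter fresh, prepend a reveal operation when the source
   key has not been revealed yet, and record the injected operation hash per
   branch. *)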
let inject_transfer (cctxt : Protocol_client_context.full) parameters state
rng_state chain block transfer =
Alpha_services.Contract.counter cctxt (chain, block) transfer.src.pkh
>>=? fun pcounter ->
Shell_services.Blocks.hash cctxt ~chain ~block () >>=? fun branch ->
if not (Block_hash.equal branch state.last_block) then (
state.last_block <- branch ;
if Option.is_some state.shuffled_pool then
state.shuffled_pool <-
Some
(List.shuffle
~rng_state
(List.map (fun src_org -> src_org.source) state.pool))) ;
let freshest_counter =
match
Signature.Public_key_hash.Table.find state.counters transfer.src.pkh
with
| None ->
        (* This is the first operation we inject for this pkh: the counter
           given by the RPC _must_ be the freshest one. *)
pcounter
| Some (previous_branch, previous_counter) ->
if Block_hash.equal branch previous_branch then
          (* We already injected an operation on top of this block: the one
             stored locally is the freshest one. *)
previous_counter
else
          (* It seems the block changed since we last injected an operation:
             this invalidates the previously stored counter. We return the
             counter given by the RPC. *)
pcounter
in
(if Signature.Public_key_hash.Set.mem transfer.src.pkh state.revealed then
return true
else (
state.revealed <-
Signature.Public_key_hash.Set.add transfer.src.pkh state.revealed ;
Alpha_services.Contract.manager_key cctxt (chain, block) transfer.src.pkh
>>=? fun pk_opt -> return (Option.is_some pk_opt)))
>>=? fun already_revealed ->
(if not already_revealed then (
let reveal_counter = Z.succ freshest_counter in
let transf_counter = Z.succ reveal_counter in
let reveal =
Manager_operation
{
source = transfer.src.pkh;
fee = Tez.zero;
counter = reveal_counter;
gas_limit = cost_of_manager_operation;
storage_limit = Z.zero;
operation = Reveal transfer.src.pk;
}
in
let manager_op =
manager_op_of_transfer
parameters
{transfer with counter = Some transf_counter}
in
let list = Cons (reveal, Single manager_op) in
Signature.Public_key_hash.Table.remove state.counters transfer.src.pkh ;
Signature.Public_key_hash.Table.add
state.counters
transfer.src.pkh
(branch, transf_counter) ;
(if !verbose then
cctxt#message
"injecting reveal+transfer from %a (counters=%a,%a) to %a"
Signature.Public_key_hash.pp
transfer.src.pkh
Z.pp_print
reveal_counter
Z.pp_print
transf_counter
Signature.Public_key_hash.pp
transfer.dst
else Lwt.return_unit)
>>= fun () ->
     (* NB: regardless of our best efforts to keep track of counters,
        injection can fail with "counter in the future" if a block switch
        happens in between the moment we get the branch and the moment we
        inject, and the new block does not include all the operations we
        injected. *)
inject_contents cctxt chain branch transfer.src.sk list)
else
let transf_counter = Z.succ freshest_counter in
let manager_op =
manager_op_of_transfer
parameters
{transfer with counter = Some transf_counter}
in
let list = Single manager_op in
Signature.Public_key_hash.Table.remove state.counters transfer.src.pkh ;
Signature.Public_key_hash.Table.add
state.counters
transfer.src.pkh
(branch, transf_counter) ;
(if !verbose then
cctxt#message
"injecting transfer from %a (counter=%a) to %a"
Signature.Public_key_hash.pp
transfer.src.pkh
Z.pp_print
transf_counter
Signature.Public_key_hash.pp
transfer.dst
else Lwt.return_unit)
>>= fun () ->
inject_contents cctxt chain branch transfer.src.sk list)
>>= function
| Ok op_hash ->
debug_msg (fun () ->
cctxt#message
"inject_transfer: op injected %a"
Operation_hash.pp
op_hash)
>>= fun () ->
let ops =
Option.value
~default:[]
(Block_hash.Table.find state.injected_operations branch)
in
Block_hash.Table.replace state.injected_operations branch (op_hash :: ops) ;
return_unit
| Error e ->
debug_msg (fun () ->
cctxt#message
"inject_transfer: error, op not injected: %a"
Error_monad.pp_print_trace
e)
>>= fun () -> return_unit
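(* Dump the hashes of the injected operations, grouped by branch, to a
   temporary JSON file. *)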
let save_injected_operations (cctxt : Protocol_client_context.full) state =
let json =
Data_encoding.Json.construct
injected_operations_encoding
(Block_hash.Table.fold
(fun k v acc -> (k, v) :: acc)
state.injected_operations
[])
in
let path =
Filename.temp_file "client-stresstest-injected_operations-" ".json"
in
cctxt#message "writing injected operations in file %s" path >>= fun () ->
Lwt_utils_unix.Json.write_file path json >>= function
| Error e ->
cctxt#message
"could not write injected operations json file: %a"
Error_monad.pp_print_trace
e
| Ok _ -> Lwt.return_unit
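(* On exit, report how many of the injected operations ended up included in
   the chain between the head at startup and the current head. *)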
let stat_on_exit (cctxt : Protocol_client_context.full) state =
let ratio_injected_included_op () =
Shell_services.Blocks.hash cctxt () >>=? fun current_head_on_exit ->
let inter_cardinal s1 s2 =
Operation_hash.Set.cardinal
(Operation_hash.Set.inter
(Operation_hash.Set.of_list s1)
(Operation_hash.Set.of_list s2))
in
let get_included_ops older_block =
let rec get_included_ops block acc_included_ops =
if block = older_block then return acc_included_ops
else
Shell_services.Chain.Blocks.Operation_hashes.operation_hashes_in_pass
cctxt
~chain:`Main
~block:(`Hash (block, 0))
3
>>=? fun included_ops ->
Shell_services.Blocks.list
cctxt
~chain:`Main
~heads:[block]
~length:2
()
>>=? function
| [[current; predecessor]] when current = block ->
get_included_ops
predecessor
(List.append acc_included_ops included_ops)
| _ -> cctxt#error "Error while computing stats: invalid block list"
in
get_included_ops current_head_on_exit []
in
let injected_ops =
Block_hash.Table.fold
(fun k l acc ->
if current_head_on_exit <> k then List.append acc l else acc)
state.injected_operations
[]
in
get_included_ops state.current_head_on_start >>=? fun included_ops ->
let included_ops_count = inter_cardinal injected_ops included_ops in
debug_msg (fun () ->
cctxt#message
"injected : %a\nincluded: %a"
(Format.pp_print_list Operation_hash.pp)
injected_ops
(Format.pp_print_list Operation_hash.pp)
included_ops)
>>= fun () ->
let injected_ops_count = List.length injected_ops in
cctxt#message
"%s of the injected operations have been included (%d injected, %d \
included). Note that the operations injected during the last block are \
ignored because they should not be currently included."
(if Int.equal injected_ops_count 0 then "N/A"
else Format.sprintf "%d%%" (included_ops_count * 100 / injected_ops_count))
injected_ops_count
included_ops_count
>>= fun () -> return_unit
in
ratio_injected_included_op ()
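(* Main injection loop: sample and inject transfers at the target rate until
   the optional bound on the number of transfers is reached. When
   single-op-per-pkh-per-block is active, a background monitor reshuffles the
   source pool on every new head. *)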
let launch (cctxt : Protocol_client_context.full) (parameters : parameters)
state rng_state save_pool_callback =
let injected = ref 0 in
let dt = 1. /. parameters.tps in
let terminated () =
match parameters.total_transfers with
| None -> false
| Some bound -> bound <= !injected
in
let rec loop () =
if terminated () then
save_pool_callback () >>= fun () ->
save_injected_operations cctxt state >>= fun () ->
stat_on_exit cctxt state
else
let start = Mtime_clock.elapsed () in
debug_msg (fun () -> cctxt#message "launch.loop: invoke sample_transfer")
>>= fun () ->
sample_transfer cctxt cctxt#chain cctxt#block parameters state rng_state
>>=? fun transfer ->
debug_msg (fun () -> cctxt#message "launch.loop: invoke inject_transfer")
>>= fun () ->
inject_transfer
cctxt
parameters
state
rng_state
cctxt#chain
cctxt#block
transfer
>>=? fun () ->
incr injected ;
let stop = Mtime_clock.elapsed () in
let elapsed = Mtime.Span.(to_s stop -. to_s start) in
let remaining = dt -. elapsed in
(if remaining <= 0.0 then
cctxt#warning
"warning: tps target could not be reached, consider using a lower \
value for --tps"
else Lwt_unix.sleep remaining)
>>= loop
in
if Option.is_some state.shuffled_pool then
dont_wait
(fun () ->
on_new_head cctxt (fun (block, _) ->
if not (Block_hash.equal block state.last_block) then (
state.last_block <- block ;
state.shuffled_pool <-
Some
(List.shuffle
~rng_state
(List.map (fun src_org -> src_org.source) state.pool))) ;
Lwt_condition.broadcast state.new_block_condition () ;
Lwt.return_unit))
(fun trace ->
ignore
(cctxt#error
"an error while getting the new head has been returned: %a"
Error_monad.pp_print_trace
trace))
(fun exn ->
ignore
(cctxt#error
"an exception while getting the new head has been raised: %s"
(Printexc.to_string exn))) ;
loop ()
let group =
Clic.{name = "stresstest"; title = "Commands for stress-testing the network"}
type pool_source =
| From_string of {json : Ezjsonm.value}
| From_file of {path : string; json : Ezjsonm.value}
let json_of_pool_source = function
| From_string {json} | From_file {json; _} -> json
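(* Accept the source list either as "text:<json>", "file:<path>", a path to
   an existing file, or raw inline JSON. *)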
let json_file_or_text_parameter =
Clic.parameter (fun _ p ->
match String.split ~limit:1 ':' p with
| ["text"; text] -> return (From_string {json = Ezjsonm.from_string text})
| ["file"; path] ->
Lwt_utils_unix.Json.read_file path >|=? fun json ->
From_file {path; json}
| _ -> (
if Sys.file_exists p then
Lwt_utils_unix.Json.read_file p >|=? fun json ->
From_file {path = p; json}
else
try return (From_string {json = Ezjsonm.from_string p})
with Ezjsonm.Parse_error _ ->
failwith "Neither an existing file nor valid JSON: '%s'" p))
let seed_arg =
let open Clic in
arg
~long:"seed"
~placeholder:"int"
~doc:"random seed"
(parameter (fun (cctxt : Protocol_client_context.full) s ->
match int_of_string s with
| exception _ ->
cctxt#error
"While parsing --seed: could not convert argument to int"
| i -> return i))
let tps_arg =
let open Clic in
arg
~long:"tps"
~placeholder:"float"
    ~doc:"transactions per second target"
(parameter (fun (cctxt : Protocol_client_context.full) s ->
match float_of_string s with
| exception _ ->
cctxt#error
"While parsing --tps: could not convert argument to float"
| f when f < 0.0 ->
cctxt#error "While parsing --tps: negative argument"
| f -> return f))
let fresh_probability_arg =
let open Clic in
arg
~long:"fresh-probability"
~placeholder:"float in [0;1]"
~doc:
(Format.sprintf
"Probability for each transaction's destination to be a fresh \
account. The default value is %g. This new account may then be used \
as source or destination of subsequent transactions, just like the \
accounts that were initially provided to the command. Note that when \
[--single-op-per-pkh-per-block] is set, the new account will not be \
used as source until the head changes."
default_parameters.fresh_probability)
(parameter (fun (cctxt : Protocol_client_context.full) s ->
match float_of_string s with
| exception _ ->
cctxt#error
"While parsing --fresh-probability: could not convert argument \
to float"
| f when f < 0.0 || f > 1.0 ->
cctxt#error "While parsing --fresh-probability: invalid argument"
| f -> return f))
let strategy_arg =
let open Clic in
arg
~long:"strategy"
~placeholder:"fixed:mutez | evaporation:[0;1]"
~doc:"wealth redistribution strategy"
(parameter (fun (cctxt : Protocol_client_context.full) s ->
match parse_strategy s with
| Error msg -> cctxt#error "While parsing --strategy: %s" msg
| Ok strategy -> return strategy))
let gas_limit_arg =
let open Clic in
let gas_limit_kind =
parameter (fun _ s ->
try
let v = Z.of_string s in
return (Gas.Arith.integral_exn v)
with _ -> failwith "invalid gas limit (must be a positive number)")
in
arg
~long:"gas-limit"
~short:'G'
~placeholder:"amount"
~doc:
(Format.asprintf
"Set the gas limit of the transaction instead of using the default \
value of %a"
Gas.Arith.pp_integral
default_parameters.gas_limit)
gas_limit_kind
let storage_limit_arg =
let open Clic in
let storage_limit_kind =
parameter (fun _ s ->
try
let v = Z.of_string s in
assert (Compare.Z.(v >= Z.zero)) ;
return v
with _ ->
failwith "invalid storage limit (must be a positive number of bytes)")
in
arg
~long:"storage-limit"
~short:'S'
~placeholder:"amount"
~doc:
(Format.asprintf
"Set the storage limit of the transaction instead of using the \
default value of %a"
Z.pp_print
default_parameters.storage_limit)
storage_limit_kind
let transfers_arg =
let open Clic in
arg
~long:"transfers"
~placeholder:"integer"
~doc:"total number of transfers to perform, unbounded if not specified"
(parameter (fun (cctxt : Protocol_client_context.full) s ->
match int_of_string s with
| exception _ ->
cctxt#error "While parsing --transfers: invalid integer literal"
| i when i <= 0 ->
cctxt#error "While parsing --transfers: negative integer"
| i -> return i))
let single_op_per_pkh_per_block_arg =
Clic.switch
~long:"single-op-per-pkh-per-block"
~doc:
"ensure that the operations are not rejected by limiting the injection \
to 1 operation per public_key_hash per block."
()
let verbose_arg =
Clic.switch
~long:"verbose"
~doc:"Display detailed logs of the injected operations"
()
let debug_arg = Clic.switch ~long:"debug" ~doc:"Display debug logs" ()
let set_option opt f x = Option.fold ~none:x ~some:(f x) opt
let save_pool_callback (cctxt : Protocol_client_context.full) pool_source state
=
let json =
Data_encoding.Json.construct
input_source_list_encoding
(List.map unnormalize_source state.pool)
in
let catch_write_error = function
| Error e ->
cctxt#message
"could not write back json file: %a"
Error_monad.pp_print_trace
e
| Ok () -> Lwt.return_unit
in
match pool_source with
| From_string _ ->
      (* If the initial pool was given directly as json, save pool to
         a temp file. *)
let path = Filename.temp_file "client-stresstest-pool-" ".json" in
cctxt#message "writing back address pool in file %s" path >>= fun () ->
Lwt_utils_unix.Json.write_file path json >>= catch_write_error
| From_file {path; _} ->
cctxt#message "writing back address pool in file %s" path >>= fun () ->
Lwt_utils_unix.Json.write_file path json >>= catch_write_error
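(* The [stresstest transfer using <sources.json>] command: build the
   parameters from the CLI arguments, normalize the sources, then start the
   injection loop, registering clean-up callbacks that save statistics, the
   address pool and the injected operations on exit. *)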
let generate_random_transactions =
let open Clic in
command
~group
~desc:"Generate random transactions"
(args11
seed_arg
tps_arg
fresh_probability_arg
strategy_arg
Client_proto_args.fee_arg
gas_limit_arg
storage_limit_arg
transfers_arg
single_op_per_pkh_per_block_arg
verbose_arg
debug_arg)
(prefixes ["stresstest"; "transfer"; "using"]
@@ param
~name:"sources.json"
~desc:
{|List of accounts from which to perform transfers in JSON format. The input JSON must be an array of objects of the form {"pkh":"<pkh>","pk":"<pk>","sk":"<sk>"} or {"alias":"<alias from wallet>"} or {"pkh":"<pkh from wallet>"} with the pkh, pk and sk encoded in B58 form."|}
json_file_or_text_parameter
@@ stop)
(fun ( seed,
tps,
freshp,
strat,
fee,
gas_limit,
storage_limit,
transfers,
single_op_per_pkh_per_block,
verbose_flag,
debug_flag )
sources_json
(cctxt : Protocol_client_context.full) ->
verbose := verbose_flag ;
debug := debug_flag ;
let parameters =
default_parameters
|> set_option seed (fun parameter seed -> {parameter with seed})
|> set_option tps (fun parameter tps -> {parameter with tps})
|> set_option freshp (fun parameter fresh_probability ->
{parameter with fresh_probability})
|> set_option strat (fun parameter strategy ->
{parameter with strategy})
|> set_option fee (fun parameter fee_mutez ->
{parameter with fee_mutez})
|> set_option gas_limit (fun parameter gas_limit ->
{parameter with gas_limit})
|> set_option storage_limit (fun parameter storage_limit ->
{parameter with storage_limit})
|> set_option transfers (fun parameter transfers ->
{parameter with total_transfers = Some transfers})
|> fun parameter -> {parameter with single_op_per_pkh_per_block}
in
match
Data_encoding.Json.destruct
input_source_list_encoding
(json_of_pool_source sources_json)
with
| exception _ -> cctxt#error "Could not decode list of sources"
| [] -> cctxt#error "It is required to provide sources"
| sources ->
List.filter_map_p (normalize_source cctxt) sources >>= fun sources ->
let counters = Signature.Public_key_hash.Table.create 1023 in
let rng_state = Random.State.make [|parameters.seed|] in
Shell_services.Blocks.hash cctxt () >>=? fun current_head_on_start ->
let state =
{
current_head_on_start;
counters;
pool = sources;
pool_size = List.length sources;
shuffled_pool =
(if parameters.single_op_per_pkh_per_block then
Some
(List.shuffle
~rng_state
(List.map (fun src_org -> src_org.source) sources))
else None);
revealed = Signature.Public_key_hash.Set.empty;
last_block = current_head_on_start;
new_block_condition = Lwt_condition.create ();
injected_operations = Block_hash.Table.create 1023;
}
in
let exit_callback_id =
Lwt_exit.register_clean_up_callback ~loc:__LOC__ (fun _retcode ->
stat_on_exit cctxt state >>= function
| Ok () -> Lwt.return_unit
| Error e ->
cctxt#message "Error: %a" Error_monad.pp_print_trace e)
in
let save_pool () = save_pool_callback cctxt sources_json state in
let exit_callback_id =
Lwt_exit.register_clean_up_callback
~loc:__LOC__
~after:[exit_callback_id]
(fun _retcode -> save_pool ())
in
let save_injected_operations () =
save_injected_operations cctxt state
in
ignore
(Lwt_exit.register_clean_up_callback
~loc:__LOC__
~after:[exit_callback_id]
(fun _retcode -> save_injected_operations ())) ;
launch cctxt parameters state rng_state save_pool)
let commands network () =
match network with
| Some `Mainnet -> []
| Some `Testnet | None -> [generate_random_transactions]
|
03a1c812bb9ccb3d23d760730c7ab95dfcd12944b517fbfeee3f2d4e4f93c307 | robert-strandh/SICL | binary-lcm-defmethods.lisp | (cl:in-package #:sicl-arithmetic)
(defmethod binary-lcm ((x integer) (y integer))
(if (or (zerop x) (zerop y))
0
;; Rather than the obvious (/ (abs (* a b)) (gcd a b)), we divide the
      ;; GCD out first, so that we don't need to compute a large intermediate
;; which we then immediately shrink.
;; TODO?: Could use an exact division algorithm for the truncation.
(let ((x (abs x)) (y (abs y)))
(multiple-value-bind (high low) (if (> x y) (values x y) (values y x))
(* (truncate high (gcd x y)) low)))))
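;; Example: (binary-lcm 4 6) => 12 and (binary-lcm 0 7) => 0.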
| null | https://raw.githubusercontent.com/robert-strandh/SICL/8822ce17afe352923e0a08c79b010c4ef73d2011/Code/Arithmetic/binary-lcm-defmethods.lisp | lisp | Rather than the obvious (/ (abs (* a b)) (gcd a b)), we divide the
which we then immediately shrink.
TODO?: Could use an exact division algorithm for the truncation. | (cl:in-package #:sicl-arithmetic)
(defmethod binary-lcm ((x integer) (y integer))
(if (or (zerop x) (zerop y))
0
(let ((x (abs x)) (y (abs y)))
(multiple-value-bind (high low) (if (> x y) (values x y) (values y x))
(* (truncate high (gcd x y)) low)))))
|
639c233eac5799fa818853e2e55aeb03fa2d2d520b7002a9f6b4a98335bfd26c | jwiegley/notes | Reflection1.hs | module Main where
import Test.QuickCheck.Arbitrary
import Test.QuickCheck.Gen
data Foo = Foo [Int] [String]
deriving Show
instance Arbitrary Foo where
arbitrary = do
xs <- listOf chooseAny
len <- choose (1, 100)
ys <- vectorOf len (shuffle "Hello, world")
return $ Foo xs ys
main :: IO ()
main = print =<< generate (arbitrary :: Gen Foo)
| null | https://raw.githubusercontent.com/jwiegley/notes/24574b02bfd869845faa1521854f90e4e8bf5e9a/haskell/Reflection1.hs | haskell | module Main where
import Test.QuickCheck.Arbitrary
import Test.QuickCheck.Gen
data Foo = Foo [Int] [String]
deriving Show
instance Arbitrary Foo where
arbitrary = do
xs <- listOf chooseAny
len <- choose (1, 100)
ys <- vectorOf len (shuffle "Hello, world")
return $ Foo xs ys
main :: IO ()
main = print =<< generate (arbitrary :: Gen Foo)
|
|
ea299eff1cb704874606f8052e4657ced42931e020c64bf637c14906860647dc | scrintal/heroicons-reagent | arrow_left_circle.cljs | (ns com.scrintal.heroicons.mini.arrow-left-circle)
(defn render []
[:svg {:xmlns ""
:viewBox "0 0 20 20"
:fill "currentColor"
:aria-hidden "true"}
[:g {:clipPath "url(#clip0_9_2121)"}
[:path {:fillRule "evenodd"
:d "M10 18a8 8 0 100-16 8 8 0 000 16zm3.25-7.25a.75.75 0 000-1.5H8.66l2.1-1.95a.75.75 0 10-1.02-1.1l-3.5 3.25a.75.75 0 000 1.1l3.5 3.25a.75.75 0 001.02-1.1l-2.1-1.95h4.59z"
:clipRule "evenodd"}]]
[:defs
[:clipPath {:id "clip0_9_2121"}
[:path {:d "M0 0h20v20H0z"}]]]]) | null | https://raw.githubusercontent.com/scrintal/heroicons-reagent/572f51d2466697ec4d38813663ee2588960365b6/src/com/scrintal/heroicons/mini/arrow_left_circle.cljs | clojure | (ns com.scrintal.heroicons.mini.arrow-left-circle)
(defn render []
[:svg {:xmlns ""
:viewBox "0 0 20 20"
:fill "currentColor"
:aria-hidden "true"}
[:g {:clipPath "url(#clip0_9_2121)"}
[:path {:fillRule "evenodd"
:d "M10 18a8 8 0 100-16 8 8 0 000 16zm3.25-7.25a.75.75 0 000-1.5H8.66l2.1-1.95a.75.75 0 10-1.02-1.1l-3.5 3.25a.75.75 0 000 1.1l3.5 3.25a.75.75 0 001.02-1.1l-2.1-1.95h4.59z"
:clipRule "evenodd"}]]
[:defs
[:clipPath {:id "clip0_9_2121"}
[:path {:d "M0 0h20v20H0z"}]]]]) |
|
26c3ab321d552dad5217b5b6724acc5c5d790b3d13be15b8df2a072617a8dc51 | crategus/cl-cffi-gtk | gtk.paper-size.lisp | ;;; ----------------------------------------------------------------------------
;;; gtk.paper-size.lisp
;;;
;;; The documentation of this file is taken from the GTK+ 3 Reference Manual
;;; Version 3.24 and modified to document the Lisp binding to the GTK+ library.
;;; See <>. The API documentation of the Lisp binding is
;;; available from < -cffi-gtk/ >.
;;;
;;; Copyright (C) 2009 - 2011
;;; Copyright (C) 2011 - 2021
;;;
;;; This program is free software: you can redistribute it and/or modify
;;; it under the terms of the GNU Lesser General Public License for Lisp
;;; as published by the Free Software Foundation, either version 3 of the
;;; License, or (at your option) any later version and with a preamble to
;;; the GNU Lesser General Public License that clarifies the terms for use
;;; with Lisp programs and is referred as the LLGPL.
;;;
;;; This program is distributed in the hope that it will be useful,
;;; but WITHOUT ANY WARRANTY; without even the implied warranty of
;;; MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
;;; GNU Lesser General Public License for more details.
;;;
;;; You should have received a copy of the GNU Lesser General Public
;;; License along with this program and the preamble to the Gnu Lesser
;;; General Public License. If not, see </>
;;; and <>.
;;; ----------------------------------------------------------------------------
;;;
;;; GtkPaperSize
;;;
;;; Support for named paper sizes
;;;
;;; Types and Values
;;;
;;; GtkUnit
;;; GtkPaperSize
;;;
;;; GTK_UNIT_PIXEL
;;;
;;; GTK_PAPER_NAME_A3
;;; GTK_PAPER_NAME_A4
;;; GTK_PAPER_NAME_A5
;;; GTK_PAPER_NAME_B5
;;; GTK_PAPER_NAME_LETTER
;;; GTK_PAPER_NAME_EXECUTIVE
;;; GTK_PAPER_NAME_LEGAL
;;;
;;; Functions
;;;
;;; gtk_paper_size_new
;;; gtk_paper_size_new_from_ppd
;;; gtk_paper_size_new_from_ipp
;;; gtk_paper_size_new_custom
;;;
;;; gtk_paper_size_copy
;;; gtk_paper_size_free
;;; gtk_paper_size_is_equal
;;;
;;; gtk_paper_size_get_paper_sizes
;;; gtk_paper_size_get_name
;;; gtk_paper_size_get_display_name
;;; gtk_paper_size_get_ppd_name
;;; gtk_paper_size_get_width
;;; gtk_paper_size_get_height
;;;
;;; gtk_paper_size_is_ipp
;;; gtk_paper_size_is_custom
;;;
;;; gtk_paper_size_set_size
;;; gtk_paper_size_get_default_top_margin
;;; gtk_paper_size_get_default_bottom_margin
;;; gtk_paper_size_get_default_left_margin
;;; gtk_paper_size_get_default_right_margin
;;; gtk_paper_size_get_default
;;;
;;; gtk_paper_size_new_from_key_file
;;; gtk_paper_size_new_from_gvariant
;;; gtk_paper_size_to_key_file
;;; gtk_paper_size_to_gvariant
;;;
;;; Object Hierarchy
;;;
;;; GBoxed
;;; ╰── GtkPaperSize
;;; ----------------------------------------------------------------------------
(in-package :gtk)
;;; ----------------------------------------------------------------------------
;;; enum GtkUnit
;;; ----------------------------------------------------------------------------
(define-g-enum "GtkUnit" gtk-unit
(:export t
:type-initializer "gtk_unit_get_type")
(:none 0)
(:pixel 0) ; alias for :none
(:points 1)
(:inch 2)
(:mm 3))
#+cl-cffi-gtk-documentation
(setf (gethash 'gtk-unit atdoc:*symbol-name-alias*)
"Enum"
(gethash 'gtk-unit atdoc:*external-symbols*)
"@version{2021-3-17}
@short{Enumeration for dimenstions of paper sizes.}
@begin{pre}
(define-g-enum \"GtkUnit\" gtk-unit
(:export t
:type-initializer \"gtk_unit_get_type\")
(:none 0)
(:points 1)
(:inch 2)
(:mm 3))
@end{pre}
@begin[code]{table}
@entry[:none]{No units.}
@entry[:points]{Dimensions in points.}
@entry[:inch]{Dimensions in inches.}
@entry[:mm]{Dimensions in millimeters.}
@end{table}
@see-class{gtk-paper-size}")
;;; ----------------------------------------------------------------------------
;;; GtkPaperSize
;;; ----------------------------------------------------------------------------
(glib-init::at-init () (foreign-funcall "gtk_paper_size_get_type" g-size))
(define-g-boxed-opaque gtk-paper-size "GtkPaperSize"
:alloc (%gtk-paper-size-new (null-pointer)))
#+cl-cffi-gtk-documentation
(setf (gethash 'gtk-paper-size atdoc:*class-name-alias*)
"Boxed CStruct"
(documentation 'gtk-paper-size 'type)
"@version{2021-3-17}
@begin{short}
The @sym{gtk-paper-size} instance handles paper sizes.
@end{short}
It uses the standard called \"PWG 5101.1-2002 PWG: Standard for Media
Standardized Names\" to name the paper sizes and to get the data for the page
sizes. In addition to standard paper sizes, the @sym{gtk-paper-size} structure
allows to construct custom paper sizes with arbitrary dimensions.
The @sym{gtk-paper-size} structure stores not only the dimensions (width and
height) of a paper size and its name, it also provides default print margins.
@see-class{gtk-page-setup}")
(export 'gtk-paper-size)
;;; ----------------------------------------------------------------------------
;;; GTK_PAPER_NAME_A3
;;;
;;; #define GTK_PAPER_NAME_A3 "iso_a3"
;;;
;;; Name for the A3 paper size.
;;; ----------------------------------------------------------------------------
;;; ----------------------------------------------------------------------------
;;; GTK_PAPER_NAME_A4
;;;
;;; #define GTK_PAPER_NAME_A4 "iso_a4"
;;;
;;; Name for the A4 paper size.
;;; ----------------------------------------------------------------------------
;;; ----------------------------------------------------------------------------
;;; GTK_PAPER_NAME_A5
;;;
;;; #define GTK_PAPER_NAME_A5 "iso_a5"
;;;
;;; Name for the A5 paper size.
;;; ----------------------------------------------------------------------------
;;; ----------------------------------------------------------------------------
;;; GTK_PAPER_NAME_B5
;;;
;;; #define GTK_PAPER_NAME_B5 "iso_b5"
;;;
;;; Name for the B5 paper size.
;;; ----------------------------------------------------------------------------
;;; ----------------------------------------------------------------------------
;;; GTK_PAPER_NAME_LETTER
;;;
;;; #define GTK_PAPER_NAME_LETTER "na_letter"
;;;
;;; Name for the Letter paper size.
;;; ----------------------------------------------------------------------------
;;; ----------------------------------------------------------------------------
;;; GTK_PAPER_NAME_EXECUTIVE
;;;
;;; #define GTK_PAPER_NAME_EXECUTIVE "na_executive"
;;;
;;; Name for the Executive paper size.
;;; ----------------------------------------------------------------------------
;;; ----------------------------------------------------------------------------
;;;
;;; #define GTK_PAPER_NAME_LEGAL "na_legal"
;;;
;;; Name for the Legal paper size.
;;; ----------------------------------------------------------------------------
;;; ----------------------------------------------------------------------------
;;; gtk_paper_size_new ()
;;; ----------------------------------------------------------------------------
(defcfun ("gtk_paper_size_new" %gtk-paper-size-new)
(g-boxed-foreign gtk-paper-size)
(name :string))
(defun gtk-paper-size-new (&optional name)
#+cl-cffi-gtk-documentation
"@version{2021-3-17}
@argument[name]{a string with the paper size name, or @code{nil}}
@return{A new @class{gtk-paper-size} instance.}
@begin{short}
Creates a new @class{gtk-paper-size} instance by parsing a PWG 5101.1-2002
paper name.
@end{short}
If @arg{name} is @code{nil}, the default paper size is returned, see the
function @fun{gtk-paper-size-default}.
@see-class{gtk-paper-size}
@see-function{gtk-paper-size-default}"
(%gtk-paper-size-new (if name name (null-pointer))))
(export 'gtk-paper-size-new)
;;; ----------------------------------------------------------------------------
;;; gtk_paper_size_new_from_ppd ()
;;; ----------------------------------------------------------------------------
(defcfun ("gtk_paper_size_new_from_ppd" %gtk-paper-size-new-from-ppd)
(g-boxed-foreign gtk-paper-size)
(name :string)
(displayname :string)
(width :double)
(height :double))
(defun gtk-paper-size-new-from-ppd (name &optional (displayname "")
(width 0.0d0)
(height 0.0d0))
#+cl-cffi-gtk-documentation
"@version{2021-3-17}
@argument[name]{a string with the PPD paper name}
@argument[displayname]{a string with the corresponding human readable name}
@argument[width]{a double float with the paper width, in points}
@argument[height]{a double float with the paper height in points}
@begin{return}
A new @class{gtk-paper-size} instance.
@end{return}
@begin{short}
Creates a new @class{gtk-paper-size} instance by using PPD information.
@end{short}
If @arg{name} is not a recognized PPD paper name, @arg{displayname},
@arg{width} and @arg{height} are used to construct a custom
@class{gtk-paper-size} instance.
@see-class{gtk-paper-size}"
(%gtk-paper-size-new-from-ppd name
displayname
(coerce width 'double-float)
(coerce height 'double-float)))
(export 'gtk-paper-size-new-from-ppd)
;;; ----------------------------------------------------------------------------
;;; gtk_paper_size_new_from_ipp ()
;;; ----------------------------------------------------------------------------
(defcfun ("gtk_paper_size_new_from_ipp" %gtk-paper-size-new-from-ipp)
(g-boxed-foreign gtk-paper-size)
(name :string)
(width :double)
(height :double))
(defun gtk-paper-size-new-from-ipp (name &optional (width 0.0d0) (height 0.0d0))
#+cl-cffi-gtk-documentation
"@version{2021-3-17}
@argument[name]{a string with the IPP paper name}
@argument[width]{a double float with the paper width, in points}
@argument[height]{a double float with the paper height in points}
@begin{return}
A new @class{gtk-paper-size} instance.
@end{return}
@begin{short}
Creates a new @class{gtk-paper-size} instance by using PPD information.
@end{short}
If @arg{name} is not a recognized IPP paper name, @arg{width} and @arg{height}
are used to construct a custom @class{gtk-paper-size} instance.
@see-class{gtk-paper-size}"
(%gtk-paper-size-new-from-ipp name
(coerce width 'double-float)
(coerce height 'double-float)))
(export 'gtk-paper-size-new-from-ipp)
;;; ----------------------------------------------------------------------------
;;; gtk_paper_size_new_custom ()
;;; ----------------------------------------------------------------------------
(defcfun ("gtk_paper_size_new_custom" %gtk-paper-size-new-custom)
(g-boxed-foreign gtk-paper-size)
(name :string)
(displayname :string)
(width :double)
(height :double)
(unit gtk-unit))
(defun gtk-paper-size-new-custom (name displayname width height unit)
#+cl-cffi-gtk-documentation
"@version{2021-3-17}
@argument[name]{a string with the paper name}
@argument[displayname]{a string with the human readable name}
@argument[width]{a double float with the paper width, in units of @arg{unit}}
@argument[height]{a double float with the paper height, in units of
@arg{unit}}
@argument[unit]{a @symbol{gtk-unit} value for @arg{width} and @arg{height},
not @code{:none}}
@return{A new @class{gtk-paper-size} instance.}
@begin{short}
Creates a new @class{gtk-paper-size} instance with the given parameters.
@end{short}
@see-class{gtk-paper-size}
@see-symbol{gtk-unit}"
(%gtk-paper-size-new-custom name
displayname
(coerce width 'double-float)
(coerce height 'double-float)
unit))
(export 'gtk-paper-size-new-custom)
;;; ----------------------------------------------------------------------------
;;; gtk_paper_size_copy ()
;;; ----------------------------------------------------------------------------
(defcfun ("gtk_paper_size_copy" gtk-paper-size-copy)
(g-boxed-foreign gtk-paper-size)
#+cl-cffi-gtk-documentation
"@version{2021-3-17}
@argument[size]{a @class{gtk-paper-size} instance}
@return{A copy of @arg{size}.}
@begin{short}
Copies an existing @class{gtk-paper-size} instance.
@end{short}
@see-class{gtk-paper-size}"
(size (g-boxed-foreign gtk-paper-size)))
(export 'gtk-paper-size-copy)
;;; ----------------------------------------------------------------------------
;;; gtk_paper_size_free ()
;;; ----------------------------------------------------------------------------
;; not exported
(defcfun ("gtk_paper_size_free" %gtk-paper-size-free) :void
#+cl-cffi-gtk-documentation
"@version{2013-11-13}
@argument[size]{a @class{gtk-paper-size} structure}
@begin{short}
Free the given @class{gtk-paper-size} structure.
@end{short}
@see-class{gtk-paper-size}"
(size (g-boxed-foreign gtk-paper-size)))
;;; ----------------------------------------------------------------------------
;;; gtk_paper_size_is_equal ()
;;; ----------------------------------------------------------------------------
(defcfun ("gtk_paper_size_is_equal" gtk-paper-size-is-equal) :boolean
#+cl-cffi-gtk-documentation
"@version{2021-3-17}
@argument[size1]{a @class{gtk-paper-size} instance}
@argument[size2]{another @class{gtk-paper-size} instance}
@begin{return}
@em{True}, if @arg{size1} and @arg{size2} represent the same paper size.
@end{return}
@begin{short}
Compares two @class{gtk-paper-size} instances.
@end{short}
@see-class{gtk-paper-size}"
(size1 (g-boxed-foreign gtk-paper-size))
(size2 (g-boxed-foreign gtk-paper-size)))
(export 'gtk-paper-size-is-equal)
;;; ----------------------------------------------------------------------------
;;; gtk_paper_size_get_paper_sizes () -> gtk-paper-size-paper-sizes
;;; ----------------------------------------------------------------------------
(defcfun ("gtk_paper_size_get_paper_sizes" gtk-paper-size-paper-sizes)
(g-list (g-boxed-foreign gtk-paper-size))
#+cl-cffi-gtk-documentation
"@version{2021-3-17}
@argument[custom]{a boolean whether to include custom paper sizes as defined
in the page setup dialog}
@begin{return}
A list of @class{gtk-paper-size} instances.
@end{return}
@begin{short}
Creates a list of known paper sizes.
@end{short}
@see-class{gtk-paper-size}"
(custom :boolean))
(export 'gtk-paper-size-paper-sizes)
;;; ----------------------------------------------------------------------------
;;; gtk_paper_size_get_name () -> gtk-paper-size-name
;;; ----------------------------------------------------------------------------
(defcfun ("gtk_paper_size_get_name" gtk-paper-size-name) :string
#+cl-cffi-gtk-documentation
"@version{2021-3-17}
@argument[size]{a @class{gtk-paper-size} instance}
@return{A string with the name of the paper size.}
@begin{short}
Gets the name of the paper size.
@end{short}
@see-class{gtk-paper-size}"
(size (g-boxed-foreign gtk-paper-size)))
(export 'gtk-paper-size-name)
;;; ----------------------------------------------------------------------------
;;; gtk_paper_size_get_display_name () -> gtk-paper-size-display-name
;;; ----------------------------------------------------------------------------
(defcfun ("gtk_paper_size_get_display_name" gtk-paper-size-display-name) :string
#+cl-cffi-gtk-documentation
"@version{2021-3-17}
@argument[size]{a @class{gtk-paper-size} instance}
@return{A string with the human readable name of the paper size.}
@begin{short}
Gets the human readable name of the paper size.
@end{short}
@see-class{gtk-paper-size}"
(size (g-boxed-foreign gtk-paper-size)))
(export 'gtk-paper-size-display-name)
;;; ----------------------------------------------------------------------------
;;; gtk_paper_size_get_ppd_name () -> gtk-paper-size-ppd-name
;;; ----------------------------------------------------------------------------
(defcfun ("gtk_paper_size_get_ppd_name" gtk-paper-size-ppd-name) :string
#+cl-cffi-gtk-documentation
"@version{2021-3-17}
@argument[size]{a @class{gtk-paper-size} instance}
@return{A string with the PPD name of the paper size.}
@begin{short}
Gets the PPD name of the paper size, which may be @code{nil}.
@end{short}
@see-class{gtk-paper-size}"
(size (g-boxed-foreign gtk-paper-size)))
(export 'gtk-paper-size-ppd-name)
;;; ----------------------------------------------------------------------------
;;; gtk_paper_size_get_width () -> gtk-paper-size-width
;;; ----------------------------------------------------------------------------
(defcfun ("gtk_paper_size_get_width" gtk-paper-size-width) :double
#+cl-cffi-gtk-documentation
"@version{2021-3-17}
@argument[size]{a @class{gtk-paper-size} instance}
@argument[unit]{a @symbol{gtk-unit} value for the return value,
not @code{:none}}
@return{A double float with the paper width.}
@begin{short}
Gets the paper width of the paper size, in units of @arg{unit}.
@end{short}
@see-class{gtk-paper-size}
@see-symbol{gtk-unit}
@see-function{gtk-paper-size-height}"
(size (g-boxed-foreign gtk-paper-size))
(unit gtk-unit))
(export 'gtk-paper-size-width)
;;; ----------------------------------------------------------------------------
;;; gtk_paper_size_get_height () -> gtk-paper-size-height
;;; ----------------------------------------------------------------------------
(defcfun ("gtk_paper_size_get_height" gtk-paper-size-height) :double
#+cl-cffi-gtk-documentation
"@version{2021-3-17}
@argument[size]{a @class{gtk-paper-size} instance}
@argument[unit]{a @symbol{gtk-unit} value for the return value,
not @code{:none}}
@return{A double float with the paper height.}
@begin{short}
Gets the paper height of the paper size, in units of @arg{unit}.
@end{short}
@see-class{gtk-paper-size}
@see-symbol{gtk-unit}
@see-function{gtk-paper-size-width}"
(size (g-boxed-foreign gtk-paper-size))
(unit gtk-unit))
(export 'gtk-paper-size-height)
;;; ----------------------------------------------------------------------------
;;; gtk_paper_size_is_ipp ()
;;; ----------------------------------------------------------------------------
(defcfun ("gtk_paper_size_is_ipp" gtk-paper-size-is-ipp) :boolean
#+cl-cffi-gtk-documentation
"@version{2021-3-17}
@argument[size]{a @class{gtk-paper-size} instance}
@return{A boolean whether the paper size is an IPP paper size.}
@begin{short}
Returns @em{true} if the paper size is an IPP standard paper size.
@end{short}
@see-class{gtk-paper-size}"
(size (g-boxed-foreign gtk-paper-size)))
(export 'gtk-paper-size-is-ipp)
;;; ----------------------------------------------------------------------------
;;; gtk_paper_size_is_custom ()
;;; ----------------------------------------------------------------------------
(defcfun ("gtk_paper_size_is_custom" gtk-paper-size-is-custom) :boolean
#+cl-cffi-gtk-documentation
"@version{2021-3-17}
@argument[size]{a @class{gtk-paper-size} instance}
@return{A boolean whether @arg{size} is a custom paper size.}
@short{Returns @em{true} if @arg{size} is not a standard paper size.}
@see-class{gtk-paper-size}"
(size (g-boxed-foreign gtk-paper-size)))
(export 'gtk-paper-size-is-custom)
;;; ----------------------------------------------------------------------------
;;; gtk_paper_size_set_size ()
;;; ----------------------------------------------------------------------------
(defcfun ("gtk_paper_size_set_size" %gtk-paper-size-set-size) :void
(size (g-boxed-foreign gtk-paper-size))
(width :double)
(height :double)
(unit gtk-unit))
(defun gtk-paper-size-set-size (size width height unit)
#+cl-cffi-gtk-documentation
"@version{2021-3-17}
@argument[size]{a custom @class{gtk-paper-size} instance}
@argument[width]{a double float with the new width in units of @arg{unit}}
@argument[height]{a double float with the new height in units of @arg{unit}}
@argument[unit]{the @symbol{gtk-unit} value for @arg{width} and @arg{height}}
@begin{short}
Changes the dimensions of a paper size to @arg{width} x @arg{height}.
@end{short}
@see-class{gtk-paper-size}
@see-symbol{gtk-unit}"
(%gtk-paper-size-set-size size
(coerce width 'double-float)
(coerce height 'double-float)
unit))
(export 'gtk-paper-size-set-size)
;;; ----------------------------------------------------------------------------
;;; gtk_paper_size_get_default_top_margin ()
;;; -> gtk-paper-size-default-top-margin
;;; ----------------------------------------------------------------------------
(defcfun ("gtk_paper_size_get_default_top_margin"
gtk-paper-size-default-top-margin) :double
#+cl-cffi-gtk-documentation
"@version{2021-3-17}
@argument[size]{a @class{gtk-paper-size} instance}
@argument[unit]{a @symbol{gtk-unit} value for the return value,
not @code{:none}}
@return{A double float with the default top margin.}
@begin{short}
Gets the default top margin for the paper size.
@end{short}
@see-class{gtk-paper-size}
@see-symbol{gtk-unit}
@see-function{gtk-paper-size-default-bottom-margin}"
(size (g-boxed-foreign gtk-paper-size))
(unit gtk-unit))
(export 'gtk-paper-size-default-top-margin)
;;; ----------------------------------------------------------------------------
;;; gtk_paper_size_get_default_bottom_margin ()
;;; -> gtk-paper-size-default-bottom-margin
;;; ----------------------------------------------------------------------------
(defcfun ("gtk_paper_size_get_default_bottom_margin"
gtk-paper-size-default-bottom-margin) :double
#+cl-cffi-gtk-documentation
"@version{2021-3-17}
@argument[size]{a @class{gtk-paper-size} instance}
@argument[unit]{a @symbol{gtk-unit} value for the return value,
not @code{:none}}
@return{A double float with the default bottom margin.}
@begin{short}
Gets the default bottom margin for the paper size.
@end{short}
@see-class{gtk-paper-size}
@see-symbol{gtk-unit}
@see-function{gtk-paper-size-default-top-margin}"
(size (g-boxed-foreign gtk-paper-size))
(unit gtk-unit))
(export 'gtk-paper-size-default-bottom-margin)
;;; ----------------------------------------------------------------------------
;;; gtk_paper_size_get_default_left_margin ()
;;; -> gtk-paper-size-default-left-margin
;;; ----------------------------------------------------------------------------
(defcfun ("gtk_paper_size_get_default_left_margin"
gtk-paper-size-default-left-margin) :double
#+cl-cffi-gtk-documentation
"@version{2021-3-17}
@argument[size]{a @class{gtk-paper-size} instance}
@argument[unit]{a @symbol{gtk-unit} value for the return value,
not @code{:none}}
@return{A double float with the default left margin.}
@begin{short}
Gets the default left margin for the paper size.
@end{short}
@see-class{gtk-paper-size}
@see-symbol{gtk-unit}
@see-function{gtk-paper-size-default-right-margin}"
(size (g-boxed-foreign gtk-paper-size))
(unit gtk-unit))
(export 'gtk-paper-size-default-left-margin)
;;; ----------------------------------------------------------------------------
;;; gtk_paper_size_get_default_right_margin ()
;;; -> gtk-paper-size-default-right-margin
;;; ----------------------------------------------------------------------------
(defcfun ("gtk_paper_size_get_default_right_margin"
gtk-paper-size-default-right-margin) :double
#+cl-cffi-gtk-documentation
"@version{2021-3-17}
@argument[size]{a @class{gtk-paper-size} instance}
@argument[unit]{a @symbol{gtk-unit} value for the return value,
not @code{:none}}
@return{A double float with the default right margin.}
@begin{short}
Gets the default right margin for the paper size.
@end{short}
@see-class{gtk-paper-size}
@see-symbol{gtk-unit}
@see-function{gtk-paper-size-default-left-margin}"
(size (g-boxed-foreign gtk-paper-size))
(unit gtk-unit))
(export 'gtk-paper-size-default-right-margin)
;;; ----------------------------------------------------------------------------
;;; gtk_paper_size_get_default () -> gtk-paper-size-default
;;; ----------------------------------------------------------------------------
(defcfun ("gtk_paper_size_get_default" gtk-paper-size-default)
(:string :free-from-foreign nil)
#+cl-cffi-gtk-documentation
"@version{2021-3-17}
@begin{return}
A string with the name of the default paper size.
@end{return}
@begin{short}
Returns the name of the default paper size, which depends on the current
locale.
@end{short}
@begin[Example]{dictionary}
@begin{pre}
(gtk-paper-size-default)
=> \"iso_a4\"
@end{pre}
@end{dictionary}
@see-class{gtk-paper-size}")
(export 'gtk-paper-size-default)
;;; ----------------------------------------------------------------------------
;;; gtk_paper_size_new_from_key_file ()
;;; ----------------------------------------------------------------------------
(defcfun ("gtk_paper_size_new_from_key_file" %gtk-paper-size-new-from-key-file)
(g-boxed-foreign gtk-paper-size)
(keyfile (:pointer (:struct g-key-file)))
(groupname :string)
(err :pointer))
(defun gtk-paper-size-new-from-key-file (keyfile groupname)
#+cl-cffi-gtk-documentation
"@version{2021-3-17}
@argument[keyfile]{the @type{g-key-file} instance to retrieve the paper size
from}
@argument[groupname]{a string with the name of the group in the key file to
read, or @code{nil} to read the first group}
@begin{return}
A new @class{gtk-paper-size} instance with the restored paper size, or
@code{nil} if an error occurred.
@end{return}
@begin{short}
Reads a paper size from the group @arg{groupname} in the key file
@arg{keyfile}.
@end{short}
@see-class{gtk-paper-size}
@see-type{g-key-file}"
(with-g-error (err)
(%gtk-paper-size-new-from-key-file keyfile groupname err)))
(export 'gtk-paper-size-new-from-key-file)
;;; ----------------------------------------------------------------------------
;;; gtk_paper_size_new_from_gvariant ()
;;; ----------------------------------------------------------------------------
(defcfun ("gtk_paper_size_new_from_gvariant" gtk-paper-size-new-from-gvariant)
(g-boxed-foreign gtk-paper-size)
#+cl-cffi-gtk-documentation
"@version{2021-3-17}
@argument[value]{a @code{a{sv@}} @type{g-variant} instance}
@return{A @class{gtk-paper-size} instance.}
@begin{short}
Deserialize a paper size from a @code{a{sv@}} variant in the format
produced by the function @fun{gtk-paper-size-to-gvariant}.
@end{short}
Since 3.22
@see-class{gtk-paper-size}
@see-type{g-variant}
@see-function{gtk-paper-size-to-gvariant}"
(value (:pointer (:struct g-variant))))
(export 'gtk-paper-size-new-from-gvariant)
;;; ----------------------------------------------------------------------------
;;; gtk_paper_size_to_key_file ()
;;; ----------------------------------------------------------------------------
(defcfun ("gtk_paper_size_to_key_file" gtk-paper-size-to-key-file) :void
#+cl-cffi-gtk-documentation
"@version{2021-3-17}
@argument[size]{a @class{gtk-paper-size} instance}
@argument[keyfile]{the @type{g-key-file} instance to save the paper size to}
@argument[groupname]{a string with the group name to add the settings to in
@arg{keyfile}}
@begin{short}
This function adds the paper size from @arg{size} to @arg{keyfile}.
@end{short}
@see-class{gtk-paper-size}
@see-type{g-key-file}"
(size (g-boxed-foreign gtk-paper-size))
(keyfile (:pointer (:struct g-key-file)))
(groupname :string))
(export 'gtk-paper-size-to-key-file)
;;; ----------------------------------------------------------------------------
;;; gtk_paper_size_to_gvariant ()
;;; ----------------------------------------------------------------------------
#+gtk-3-22
(defcfun ("gtk_paper_size_to_gvariant" gtk-paper-size-to-gvariant)
(:pointer (:struct g-variant))
#+cl-cffi-gtk-documentation
"@version{2021-3-17}
@argument[size]{a @class{gtk-paper-size} instance}
@return{A new @type{g-variant} instance.}
@begin{short}
Serialize a paper size to a @code{a{sv@}} variant instance.
@end{short}
Since 3.22
@begin[Example]{dictionary}
@begin{pre}
(gtk-paper-size-to-gvariant (gtk-paper-size-new))
=> #.(SB-SYS:INT-SAP #X00F02070)
(g-variant-print * nil)
=> \"{'PPDName': <'A4'>, 'DisplayName': <'A4'>, 'Width': <210.0>, 'Height': <297.0>@}\"
@end{pre}
@end{dictionary}
@see-class{gtk-paper-size}
@see-type{g-variant}"
(size (g-boxed-foreign gtk-paper-size)))
#+gtk-3-22
(export 'gtk-paper-size-to-gvariant)
;;; --- End of file gtk.paper-size.lisp ----------------------------------------
| null | https://raw.githubusercontent.com/crategus/cl-cffi-gtk/b613a266a5f8e7f477b66a33d4df84fbed3dc7bc/gtk/gtk.paper-size.lisp | lisp | ----------------------------------------------------------------------------
gtk.paper-size.lisp
See <>. The API documentation of the Lisp binding is
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU Lesser General Public License for Lisp
License, or (at your option) any later version and with a preamble to
with Lisp programs and is referred as the LLGPL.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
General Public License. If not, see </>
and <>.
----------------------------------------------------------------------------
GtkPaperSize
Support for named paper sizes
Types and Values
GtkUnit
GtkPaperSize
GTK_UNIT_PIXEL
GTK_PAPER_NAME_A3
GTK_PAPER_NAME_A4
GTK_PAPER_NAME_A5
GTK_PAPER_NAME_B5
GTK_PAPER_NAME_EXECUTIVE
Functions
gtk_paper_size_new
gtk_paper_size_new_from_ppd
gtk_paper_size_new_from_ipp
gtk_paper_size_new_custom
gtk_paper_size_copy
gtk_paper_size_free
gtk_paper_size_is_equal
gtk_paper_size_get_paper_sizes
gtk_paper_size_get_name
gtk_paper_size_get_display_name
gtk_paper_size_get_ppd_name
gtk_paper_size_get_width
gtk_paper_size_get_height
gtk_paper_size_is_ipp
gtk_paper_size_is_custom
gtk_paper_size_set_size
gtk_paper_size_get_default_top_margin
gtk_paper_size_get_default_bottom_margin
gtk_paper_size_get_default_left_margin
gtk_paper_size_get_default_right_margin
gtk_paper_size_get_default
gtk_paper_size_new_from_key_file
gtk_paper_size_new_from_gvariant
gtk_paper_size_to_key_file
gtk_paper_size_to_gvariant
Object Hierarchy
GBoxed
╰── GtkPaperSize
----------------------------------------------------------------------------
----------------------------------------------------------------------------
----------------------------------------------------------------------------
alias for :none
----------------------------------------------------------------------------
GtkPaperSize
----------------------------------------------------------------------------
----------------------------------------------------------------------------
GTK_PAPER_NAME_A3
#define GTK_PAPER_NAME_A3 "iso_a3"
Name for the A4 paper size.
----------------------------------------------------------------------------
----------------------------------------------------------------------------
GTK_PAPER_NAME_A4
Name for the A4 paper size.
----------------------------------------------------------------------------
----------------------------------------------------------------------------
GTK_PAPER_NAME_A5
#define GTK_PAPER_NAME_A5 "iso_a5"
----------------------------------------------------------------------------
----------------------------------------------------------------------------
GTK_PAPER_NAME_B5
----------------------------------------------------------------------------
----------------------------------------------------------------------------
Name for the Letter paper size.
----------------------------------------------------------------------------
----------------------------------------------------------------------------
GTK_PAPER_NAME_EXECUTIVE
#define GTK_PAPER_NAME_EXECUTIVE "na_executive"
Name for the Executive paper size.
----------------------------------------------------------------------------
----------------------------------------------------------------------------
Name for the Legal paper size.
----------------------------------------------------------------------------
----------------------------------------------------------------------------
----------------------------------------------------------------------------
----------------------------------------------------------------------------
gtk_paper_size_new_from_ppd ()
----------------------------------------------------------------------------
----------------------------------------------------------------------------
gtk_paper_size_new_from_ipp ()
----------------------------------------------------------------------------
----------------------------------------------------------------------------
gtk_paper_size_new_custom ()
----------------------------------------------------------------------------
----------------------------------------------------------------------------
gtk_paper_size_copy ()
----------------------------------------------------------------------------
----------------------------------------------------------------------------
gtk_paper_size_free ()
----------------------------------------------------------------------------
not exported
----------------------------------------------------------------------------
gtk_paper_size_is_equal ()
----------------------------------------------------------------------------
----------------------------------------------------------------------------
gtk_paper_size_get_paper_sizes () -> gtk-paper-size-paper-sizes
----------------------------------------------------------------------------
----------------------------------------------------------------------------
gtk_paper_size_get_name () -> gtk-paper-size-name
----------------------------------------------------------------------------
----------------------------------------------------------------------------
gtk_paper_size_get_display_name () -> gtk-paper-size-display-name
----------------------------------------------------------------------------
----------------------------------------------------------------------------
gtk_paper_size_get_ppd_name () -> gtk-paper-size-ppd-name
----------------------------------------------------------------------------
----------------------------------------------------------------------------
gtk_paper_size_get_width () -> gtk-paper-size-width
----------------------------------------------------------------------------
----------------------------------------------------------------------------
gtk_paper_size_get_height () -> gtk-paper-size-height
----------------------------------------------------------------------------
----------------------------------------------------------------------------
gtk_paper_size_is_ipp ()
----------------------------------------------------------------------------
----------------------------------------------------------------------------
gtk_paper_size_is_custom ()
----------------------------------------------------------------------------
----------------------------------------------------------------------------
gtk_paper_size_set_size ()
----------------------------------------------------------------------------
----------------------------------------------------------------------------
gtk_paper_size_get_default_top_margin ()
-> gtk-paper-size-default-top-margin
----------------------------------------------------------------------------
----------------------------------------------------------------------------
gtk_paper_size_get_default_bottom_margin ()
-> gtk-paper-size-default-bottom-margin
----------------------------------------------------------------------------
----------------------------------------------------------------------------
gtk_paper_size_get_default_left_margin ()
-> gtk-paper-size-default-left-margin
----------------------------------------------------------------------------
----------------------------------------------------------------------------
gtk_paper_size_get_default_right_margin ()
-> gtk-paper-size-default-right-margin
----------------------------------------------------------------------------
----------------------------------------------------------------------------
gtk_paper_size_get_default () -> gtk-paper-size-default
----------------------------------------------------------------------------
----------------------------------------------------------------------------
----------------------------------------------------------------------------
----------------------------------------------------------------------------
gtk_paper_size_new_from_gvariant ()
----------------------------------------------------------------------------
----------------------------------------------------------------------------
gtk_paper_size_to_key_file ()
----------------------------------------------------------------------------
----------------------------------------------------------------------------
gtk_paper_size_to_gvariant ()
----------------------------------------------------------------------------
--- End of file gtk.paper-size.lisp ---------------------------------------- | The documentation of this file is taken from the GTK+ 3 Reference Manual
Version 3.24 and modified to document the Lisp binding to the GTK+ library,
available from <http://www.crategus.com/books/cl-cffi-gtk/>.
Copyright (C) 2009 - 2011
Copyright (C) 2011 - 2021
as published by the Free Software Foundation, either version 3 of the
the GNU Lesser General Public License that clarifies the terms for use
GNU Lesser General Public License for more details.
You should have received a copy of the GNU Lesser General Public
License along with this program and the preamble to the Gnu Lesser
GTK_PAPER_NAME_LETTER
GTK_PAPER_NAME_LEGAL
(in-package :gtk)
;;; enum GtkUnit
(define-g-enum "GtkUnit" gtk-unit
(:export t
:type-initializer "gtk_unit_get_type")
(:none 0)
(:points 1)
(:inch 2)
(:mm 3))
#+cl-cffi-gtk-documentation
(setf (gethash 'gtk-unit atdoc:*symbol-name-alias*)
"Enum"
(gethash 'gtk-unit atdoc:*external-symbols*)
"@version{2021-3-17}
@short{Enumeration for dimenstions of paper sizes.}
@begin{pre}
(define-g-enum \"GtkUnit\" gtk-unit
(:export t
:type-initializer \"gtk_unit_get_type\")
(:none 0)
(:points 1)
(:inch 2)
(:mm 3))
@end{pre}
@begin[code]{table}
@entry[:none]{No units.}
@entry[:points]{Dimensions in points.}
@entry[:inch]{Dimensions in inches.}
@entry[:mm]{Dimensions in millimeters.}
@end{table}
@see-class{gtk-paper-size}")
(glib-init::at-init () (foreign-funcall "gtk_paper_size_get_type" g-size))
(define-g-boxed-opaque gtk-paper-size "GtkPaperSize"
:alloc (%gtk-paper-size-new (null-pointer)))
#+cl-cffi-gtk-documentation
(setf (gethash 'gtk-paper-size atdoc:*class-name-alias*)
"Boxed CStruct"
(documentation 'gtk-paper-size 'type)
"@version{2021-3-17}
@begin{short}
The @sym{gtk-paper-size} instance handles paper sizes.
@end{short}
It uses the standard called \"PWG 5101.1-2002 PWG: Standard for Media
Standardized Names\" to name the paper sizes and to get the data for the page
sizes. In addition to standard paper sizes, the @sym{gtk-paper-size} structure
allows to construct custom paper sizes with arbitrary dimensions.
The @sym{gtk-paper-size} structure stores not only the dimensions (width and
height) of a paper size and its name, it also provides default print margins.
@see-class{gtk-page-setup}")
(export 'gtk-paper-size)
;;; GTK_PAPER_NAME_A4 "iso_a4"
;;; Name for the A5 paper size.
;;; Name for the B5 paper size.
;;; GTK_PAPER_NAME_LETTER "na_letter"
;;; GTK_PAPER_NAME_LEGAL "na_legal"
;;; gtk_paper_size_new ()
(defcfun ("gtk_paper_size_new" %gtk-paper-size-new)
(g-boxed-foreign gtk-paper-size)
(name :string))
(defun gtk-paper-size-new (&optional name)
#+cl-cffi-gtk-documentation
"@version{2021-3-17}
@argument[name]{a string with the paper size name, or @code{nil}}
@return{A new @class{gtk-paper-size} instance.}
@begin{short}
Creates a new @class{gtk-paper-size} instance by parsing a PWG 5101.1-2002
paper name.
@end{short}
If @arg{name} is @code{nil}, the default paper size is returned, see the
function @fun{gtk-paper-size-default}.
@see-class{gtk-paper-size}
@see-function{gtk-paper-size-default}"
(%gtk-paper-size-new (if name name (null-pointer))))
(export 'gtk-paper-size-new)
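;; A minimal usage sketch, assuming the GTK libraries are loaded; it uses the
;; accessors defined further below:
;; (let ((size (gtk-paper-size-new "iso_a4")))
;;   (list (gtk-paper-size-name size)
;;         (gtk-paper-size-width size :mm)
;;         (gtk-paper-size-height size :mm)))
;; => ("iso_a4" 210.0d0 297.0d0)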
(defcfun ("gtk_paper_size_new_from_ppd" %gtk-paper-size-new-from-ppd)
(g-boxed-foreign gtk-paper-size)
(name :string)
(displayname :string)
(width :double)
(height :double))
(defun gtk-paper-size-new-from-ppd (name &optional (displayname "")
(width 0.0d0)
(height 0.0d0))
#+cl-cffi-gtk-documentation
"@version{2021-3-17}
@argument[name]{a string with the PPD paper name}
@argument[displayname]{a string with the corresponding human readable name}
@argument[width]{a double float with the paper width, in points}
@argument[height]{a double float with the paper height in points}
@begin{return}
A new @class{gtk-paper-size} instance.
@end{return}
@begin{short}
Creates a new @class{gtk-paper-size} instance by using PPD information.
@end{short}
If @arg{name} is not a recognized PPD paper name, @arg{displayname},
@arg{width} and @arg{height} are used to construct a custom
@class{gtk-paper-size} instance.
@see-class{gtk-paper-size}"
(%gtk-paper-size-new-from-ppd name
displayname
(coerce width 'double-float)
(coerce height 'double-float)))
(export 'gtk-paper-size-new-from-ppd)
(defcfun ("gtk_paper_size_new_from_ipp" %gtk-paper-size-new-from-ipp)
(g-boxed-foreign gtk-paper-size)
(name :string)
(width :double)
(height :double))
(defun gtk-paper-size-new-from-ipp (name &optional (width 0.0d0) (height 0.0d0))
#+cl-cffi-gtk-documentation
"@version{2021-3-17}
@argument[name]{a string with the IPP paper name}
@argument[width]{a double float with the paper width, in points}
@argument[height]{a double float with the paper height in points}
@begin{return}
A new @class{gtk-paper-size} instance.
@end{return}
@begin{short}
Creates a new @class{gtk-paper-size} instance by using IPP information.
@end{short}
If @arg{name} is not a recognized IPP paper name, @arg{width} and @arg{height}
are used to construct a custom @class{gtk-paper-size} instance.
@see-class{gtk-paper-size}"
(%gtk-paper-size-new-from-ipp name
(coerce width 'double-float)
(coerce height 'double-float)))
(export 'gtk-paper-size-new-from-ipp)
(defcfun ("gtk_paper_size_new_custom" %gtk-paper-size-new-custom)
(g-boxed-foreign gtk-paper-size)
(name :string)
(displayname :string)
(width :double)
(height :double)
(unit gtk-unit))
(defun gtk-paper-size-new-custom (name displayname width height unit)
#+cl-cffi-gtk-documentation
"@version{2021-3-17}
@argument[name]{a string with the paper name}
@argument[displayname]{a string with the human readable name}
@argument[width]{a double float with the paper width, in units of @arg{unit}}
@argument[height]{a double float with the paper height, in units of
@arg{unit}}
@argument[unit]{a @symbol{gtk-unit} value for @arg{width} and @arg{height},
not @code{:none}}
@return{A new @class{gtk-paper-size} instance.}
@begin{short}
Creates a new @class{gtk-paper-size} instance with the given parameters.
@end{short}
@see-class{gtk-paper-size}
@see-symbol{gtk-unit}"
(%gtk-paper-size-new-custom name
displayname
(coerce width 'double-float)
(coerce height 'double-float)
unit))
(export 'gtk-paper-size-new-custom)
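;; Sketch of a custom size; the names "my-paper" and "My Paper" are arbitrary
;; examples:
;; (let ((size (gtk-paper-size-new-custom "my-paper" "My Paper" 100 200 :mm)))
;;   (gtk-paper-size-is-custom size))
;; => T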
(defcfun ("gtk_paper_size_copy" gtk-paper-size-copy)
(g-boxed-foreign gtk-paper-size)
#+cl-cffi-gtk-documentation
"@version{2021-3-17}
@argument[size]{a @class{gtk-paper-size} instance}
@return{A copy of @arg{size}.}
@begin{short}
Copies an existing @class{gtk-paper-size} instance.
@end{short}
@see-class{gtk-paper-size}"
(size (g-boxed-foreign gtk-paper-size)))
(export 'gtk-paper-size-copy)
(defcfun ("gtk_paper_size_free" %gtk-paper-size-free) :void
#+cl-cffi-gtk-documentation
"@version{2013-11-13}
@argument[size]{a @class{gtk-paper-size} structure}
@begin{short}
Free the given @class{gtk-paper-size} structure.
@end{short}
@see-class{gtk-paper-size}"
(size (g-boxed-foreign gtk-paper-size)))
(defcfun ("gtk_paper_size_is_equal" gtk-paper-size-is-equal) :boolean
#+cl-cffi-gtk-documentation
"@version{2021-3-17}
@argument[size1]{a @class{gtk-paper-size} instance}
@argument[size2]{another @class{gtk-paper-size} instance}
@begin{return}
@em{True}, if @arg{size1} and @arg{size2} represent the same paper size.
@end{return}
@begin{short}
Compares two @class{gtk-paper-size} instances.
@end{short}
@see-class{gtk-paper-size}"
(size1 (g-boxed-foreign gtk-paper-size))
(size2 (g-boxed-foreign gtk-paper-size)))
(export 'gtk-paper-size-is-equal)
(defcfun ("gtk_paper_size_get_paper_sizes" gtk-paper-size-paper-sizes)
(g-list (g-boxed-foreign gtk-paper-size))
#+cl-cffi-gtk-documentation
"@version{2021-3-17}
@argument[custom]{a boolean whether to include custom paper sizes as defined
in the page setup dialog}
@begin{return}
A list of @class{gtk-paper-size} instances.
@end{return}
@begin{short}
Creates a list of known paper sizes.
@end{short}
@see-class{gtk-paper-size}"
(custom :boolean))
(export 'gtk-paper-size-paper-sizes)
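;; Sketch, assuming the G-List return value converts to a Lisp list as usual in
;; this binding; the display names of the known sizes can then be collected with
;; (mapcar #'gtk-paper-size-display-name (gtk-paper-size-paper-sizes nil))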
(defcfun ("gtk_paper_size_get_name" gtk-paper-size-name) :string
#+cl-cffi-gtk-documentation
"@version{2021-3-17}
@argument[size]{a @class{gtk-paper-size} instance}
@return{A string with the name of the paper size.}
@begin{short}
Gets the name of the paper size.
@end{short}
@see-class{gtk-paper-size}"
(size (g-boxed-foreign gtk-paper-size)))
(export 'gtk-paper-size-name)
(defcfun ("gtk_paper_size_get_display_name" gtk-paper-size-display-name) :string
#+cl-cffi-gtk-documentation
"@version{2021-3-17}
@argument[size]{a @class{gtk-paper-size} instance}
@return{A string with the human readable name of the paper size.}
@begin{short}
Gets the human readable name of the paper size.
@end{short}
@see-class{gtk-paper-size}"
(size (g-boxed-foreign gtk-paper-size)))
(export 'gtk-paper-size-display-name)
(defcfun ("gtk_paper_size_get_ppd_name" gtk-paper-size-ppd-name) :string
#+cl-cffi-gtk-documentation
"@version{2021-3-17}
@argument[size]{a @class{gtk-paper-size} instance}
@return{A string with the PPD name of the paper size.}
@begin{short}
Gets the PPD name of the paper size, which may be @code{nil}.
@end{short}
@see-class{gtk-paper-size}"
(size (g-boxed-foreign gtk-paper-size)))
(export 'gtk-paper-size-ppd-name)
(defcfun ("gtk_paper_size_get_width" gtk-paper-size-width) :double
#+cl-cffi-gtk-documentation
"@version{2021-3-17}
@argument[size]{a @class{gtk-paper-size} instance}
@argument[unit]{a @symbol{gtk-unit} value for the return value,
not @code{:none}}
@return{A double float with the paper width.}
@begin{short}
Gets the paper width of the paper size, in units of @arg{unit}.
@end{short}
@see-class{gtk-paper-size}
@see-symbol{gtk-unit}
@see-function{gtk-paper-size-height}"
(size (g-boxed-foreign gtk-paper-size))
(unit gtk-unit))
(export 'gtk-paper-size-width)
(defcfun ("gtk_paper_size_get_height" gtk-paper-size-height) :double
#+cl-cffi-gtk-documentation
"@version{2021-3-17}
@argument[size]{a @class{gtk-paper-size} instance}
@argument[unit]{a @symbol{gtk-unit} value for the return value,
not @code{:none}}
@return{A double float with the paper height.}
@begin{short}
Gets the paper height of the paper size, in units of @arg{unit}.
@end{short}
@see-class{gtk-paper-size}
@see-symbol{gtk-unit}
@see-function{gtk-paper-size-width}"
(size (g-boxed-foreign gtk-paper-size))
(unit gtk-unit))
(export 'gtk-paper-size-height)
(defcfun ("gtk_paper_size_is_ipp" gtk-paper-size-is-ipp) :boolean
#+cl-cffi-gtk-documentation
"@version{2021-3-17}
@argument[size]{a @class{gtk-paper-size} instance}
@return{A boolean whether the paper size is an IPP paper size.}
@begin{short}
Returns @em{true} if the paper size is an IPP standard paper size.
@end{short}
@see-class{gtk-paper-size}"
(size (g-boxed-foreign gtk-paper-size)))
(export 'gtk-paper-size-is-ipp)
(defcfun ("gtk_paper_size_is_custom" gtk-paper-size-is-custom) :boolean
#+cl-cffi-gtk-documentation
"@version{2021-3-17}
@argument[size]{a @class{gtk-paper-size} instance}
@return{A boolean whether @arg{size} is a custom paper size.}
@short{Returns @em{true} if @arg{size} is not a standard paper size.}
@see-class{gtk-paper-size}"
(size (g-boxed-foreign gtk-paper-size)))
(export 'gtk-paper-size-is-custom)
(defcfun ("gtk_paper_size_set_size" %gtk-paper-size-set-size) :void
(size (g-boxed-foreign gtk-paper-size))
(width :double)
(height :double)
(unit gtk-unit))
(defun gtk-paper-size-set-size (size width height unit)
#+cl-cffi-gtk-documentation
"@version{2021-3-17}
@argument[size]{a custom @class{gtk-paper-size} instance}
@argument[width]{a double float with the new width in units of @arg{unit}}
@argument[height]{a double float with the new height in units of @arg{unit}}
@argument[unit]{the @symbol{gtk-unit} value for @arg{width} and @arg{height}}
@begin{short}
Changes the dimensions of a paper size to @arg{width} x @arg{height}.
@end{short}
@see-class{gtk-paper-size}
@see-symbol{gtk-unit}"
(%gtk-paper-size-set-size size
(coerce width 'double-float)
(coerce height 'double-float)
unit))
(export 'gtk-paper-size-set-size)
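;; Sketch, resizing a custom paper size after creation (arbitrary example names):
;; (let ((size (gtk-paper-size-new-custom "my-paper" "My Paper" 100 200 :mm)))
;;   (gtk-paper-size-set-size size 210 297 :mm)
;;   (list (gtk-paper-size-width size :mm)
;;         (gtk-paper-size-height size :mm)))
;; => (210.0d0 297.0d0)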
(defcfun ("gtk_paper_size_get_default_top_margin"
gtk-paper-size-default-top-margin) :double
#+cl-cffi-gtk-documentation
"@version{2021-3-17}
@argument[size]{a @class{gtk-paper-size} instance}
@argument[unit]{a @symbol{gtk-unit} value for the return value,
not @code{:none}}
@return{A double float with the default top margin.}
@begin{short}
Gets the default top margin for the paper size.
@end{short}
@see-class{gtk-paper-size}
@see-symbol{gtk-unit}
@see-function{gtk-paper-size-default-bottom-margin}"
(size (g-boxed-foreign gtk-paper-size))
(unit gtk-unit))
(export 'gtk-paper-size-default-top-margin)
(defcfun ("gtk_paper_size_get_default_bottom_margin"
gtk-paper-size-default-bottom-margin) :double
#+cl-cffi-gtk-documentation
"@version{2021-3-17}
@argument[size]{a @class{gtk-paper-size} instance}
@argument[unit]{a @symbol{gtk-unit} value for the return value,
not @code{:none}}
@return{A double float with the default bottom margin.}
@begin{short}
Gets the default bottom margin for the paper size.
@end{short}
@see-class{gtk-paper-size}
@see-symbol{gtk-unit}
@see-function{gtk-paper-size-default-top-margin}"
(size (g-boxed-foreign gtk-paper-size))
(unit gtk-unit))
(export 'gtk-paper-size-default-bottom-margin)
(defcfun ("gtk_paper_size_get_default_left_margin"
gtk-paper-size-default-left-margin) :double
#+cl-cffi-gtk-documentation
"@version{2021-3-17}
@argument[size]{a @class{gtk-paper-size} instance}
@argument[unit]{a @symbol{gtk-unit} value for the return value,
not @code{:none}}
@return{A double float with the default left margin.}
@begin{short}
Gets the default left margin for the paper size.
@end{short}
@see-class{gtk-paper-size}
@see-symbol{gtk-unit}
@see-function{gtk-paper-size-default-right-margin}"
(size (g-boxed-foreign gtk-paper-size))
(unit gtk-unit))
(export 'gtk-paper-size-default-left-margin)
(defcfun ("gtk_paper_size_get_default_right_margin"
gtk-paper-size-default-right-margin) :double
#+cl-cffi-gtk-documentation
"@version{2021-3-17}
@argument[size]{a @class{gtk-paper-size} instance}
@argument[unit]{a @symbol{gtk-unit} value for the return value,
not @code{:none}}
@return{A double float with the default right margin.}
@begin{short}
Gets the default right margin for the paper size.
@end{short}
@see-class{gtk-paper-size}
@see-symbol{gtk-unit}
@see-function{gtk-paper-size-default-left-margin}"
(size (g-boxed-foreign gtk-paper-size))
(unit gtk-unit))
(export 'gtk-paper-size-default-right-margin)
(defcfun ("gtk_paper_size_get_default" gtk-paper-size-default)
(:string :free-from-foreign nil)
#+cl-cffi-gtk-documentation
"@version{2021-3-17}
@begin{return}
A string with the name of the default paper size.
@end{return}
@begin{short}
Returns the name of the default paper size, which depends on the current
locale.
@end{short}
@begin[Example]{dictionary}
@begin{pre}
(gtk-paper-size-default)
=> \"iso_a4\"
@end{pre}
@end{dictionary}
@see-class{gtk-paper-size}")
(export 'gtk-paper-size-default)
;;; gtk_paper_size_new_from_key_file ()
(defcfun ("gtk_paper_size_new_from_key_file" %gtk-paper-size-new-from-key-file)
(g-boxed-foreign gtk-paper-size)
(keyfile (:pointer (:struct g-key-file)))
(groupname :string)
(err :pointer))
(defun gtk-paper-size-new-from-key-file (keyfile groupname)
#+cl-cffi-gtk-documentation
"@version{2021-3-17}
@argument[keyfile]{the @type{g-key-file} instance to retrieve the paper size
from}
@argument[groupname]{a string with the name of the group in the key file to
read, or @code{nil} to read the first group}
@begin{return}
A new @class{gtk-paper-size} instance with the restored paper size, or
@code{nil} if an error occurred.
@end{return}
@begin{short}
Reads a paper size from the group @arg{groupname} in the key file
@arg{keyfile}.
@end{short}
@see-class{gtk-paper-size}
@see-type{g-key-file}"
(with-g-error (err)
(%gtk-paper-size-new-from-key-file keyfile groupname err)))
(export 'gtk-paper-size-new-from-key-file)
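;; Round-trip sketch, assuming the GLib key-file API (g-key-file-new) from the
;; accompanying GLib binding; the group name "Paper" is an arbitrary example:
;; (let ((keyfile (g-key-file-new))
;;       (size (gtk-paper-size-new "iso_a4")))
;;   (gtk-paper-size-to-key-file size keyfile "Paper")
;;   (gtk-paper-size-name (gtk-paper-size-new-from-key-file keyfile "Paper")))
;; => "iso_a4"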
(defcfun ("gtk_paper_size_new_from_gvariant" gtk-paper-size-new-from-gvariant)
(g-boxed-foreign gtk-paper-size)
#+cl-cffi-gtk-documentation
"@version{2021-3-17}
@argument[value]{a @code{a{sv@}} @type{g-variant} instance}
@return{A @class{gtk-paper-size} instance.}
@begin{short}
Deserialize a paper size from a @code{a{sv@}} variant in the format
produced by the function @fun{gtk-paper-size-to-gvariant}.
@end{short}
Since 3.22
@see-class{gtk-paper-size}
@see-type{g-variant}
@see-function{gtk-paper-size-to-gvariant}"
(value (:pointer (:struct g-variant))))
(export 'gtk-paper-size-new-from-gvariant)
(defcfun ("gtk_paper_size_to_key_file" gtk-paper-size-to-key-file) :void
#+cl-cffi-gtk-documentation
"@version{2021-3-17}
@argument[size]{a @class{gtk-paper-size} instance}
@argument[keyfile]{the @type{g-key-file} instance to save the paper size to}
@argument[groupname]{a string with the group name to add the settings to in
@arg{keyfile}}
@begin{short}
This function adds the paper size from @arg{size} to @arg{keyfile}.
@end{short}
@see-class{gtk-paper-size}
@see-type{g-key-file}"
(size (g-boxed-foreign gtk-paper-size))
(keyfile (:pointer (:struct g-key-file)))
(groupname :string))
(export 'gtk-paper-size-to-key-file)
#+gtk-3-22
(defcfun ("gtk_paper_size_to_gvariant" gtk-paper-size-to-gvariant)
(:pointer (:struct g-variant))
#+cl-cffi-gtk-documentation
"@version{2021-3-17}
@argument[size]{a @class{gtk-paper-size} instance}
@return{A new @type{g-variant} instance.}
@begin{short}
Serialize a paper size to a @code{a{sv@}} variant instance.
@end{short}
Since 3.22
@begin[Example]{dictionary}
@begin{pre}
(gtk-paper-size-to-gvariant (gtk-paper-size-new))
=> #.(SB-SYS:INT-SAP #X00F02070)
(g-variant-print * nil)
=> \"{'PPDName': <'A4'>, 'DisplayName': <'A4'>, 'Width': <210.0>, 'Height': <297.0>@}\"
@end{pre}
@end{dictionary}
@see-class{gtk-paper-size}
@see-type{g-variant}"
(size (g-boxed-foreign gtk-paper-size)))
#+gtk-3-22
(export 'gtk-paper-size-to-gvariant)
|
ee229ff94cb768224878d36392e15e8acc4073bb0667ec9ed373b9aacaa2d117 | gregr/racket-misc | dkanren-interp.rkt | #lang racket/base
(provide
evalo
)
(require
"dkanren.rkt"
)
(define (letrec-eval-term program)
`(let ((closure-tag ',(gensym "#%closure"))
(prim-tag ',(gensym "#%primitive"))
(empty-env '()))
(let ((initial-env
`((cons . (val . (,prim-tag . cons)))
(car . (val . (,prim-tag . car)))
(cdr . (val . (,prim-tag . cdr)))
(null? . (val . (,prim-tag . null?)))
(pair? . (val . (,prim-tag . pair?)))
(symbol? . (val . (,prim-tag . symbol?)))
(not . (val . (,prim-tag . not)))
(equal? . (val . (,prim-tag . equal?)))
(list . (val . (,closure-tag (lambda x x) ,empty-env)))
. ,empty-env))
(closure-tag? (lambda (v) (equal? v closure-tag)))
(prim-tag? (lambda (v) (equal? v prim-tag))))
(letrec
((applicable-tag? (lambda (v) (or (closure-tag? v) (prim-tag? v))))
(quotable? (lambda (v)
(match/lazy v
((? symbol?) (not (applicable-tag? v)))
(`(,a . ,d) (and (quotable? a) (quotable? d)))
(_ #t))))
(not-in-params? (lambda (ps sym)
(match/lazy ps
('() #t)
(`(,a . ,d)
(and (not (equal? a sym))
(not-in-params? d sym))))))
(param-list? (lambda (x)
(match/lazy x
('() #t)
(`(,(? symbol? a) . ,d)
(and (param-list? d) (not-in-params? d a)))
(_ #f))))
(params? (lambda (x)
(match/lazy x
((? param-list?) #t)
(x (symbol? x)))))
(in-env? (lambda (env sym)
(match/lazy env
('() #f)
(`((,a . ,_) . ,d)
(or (equal? a sym) (in-env? d sym))))))
(extend-env*
(lambda (params args env)
(match `(,params . ,args)
(`(() . ()) env)
(`((,x . ,dx*) . (,a . ,da*))
(extend-env* dx* da* `((,x . (val . ,a)) . ,env))))))
(lookup
(lambda (env sym)
(match env
(`((,y . ,b) . ,rest)
(if (equal? sym y)
(match b
(`(val . ,v) v)
(`(rec . ,lam-expr) `(,closure-tag ,lam-expr ,env)))
(lookup rest sym))))))
(term?
(lambda (term env)
(letrec
((term1? (lambda (v) (term? v env)))
(terms? (lambda (ts env)
(match/lazy ts
('() #t)
(`(,t . ,ts)
(and (term? t env) (terms? ts env)))))))
(match/lazy term
(#t #t)
(#f #t)
((number) #t)
((symbol sym) (in-env? env sym))
(`(,(? term1?) . ,rands) (terms? rands env))
(`(quote ,datum) (quotable? datum))
(`(if ,c ,t ,f) (and (term1? c) (term1? t) (term1? f)))
(`(lambda ,params ,body)
(and (params? params)
(let ((res
(match params
((and (not (symbol)) params)
(extend-env* params params env))
(sym `((,sym . (val . ,sym)) . ,env)))))
(term? body res))))
(`(letrec
((,p-name ,(and `(lambda ,params ,body) lam-expr)))
,letrec-body)
(and (params? params)
(let ((res `((,p-name
. (rec . (lambda ,params ,body)))
. ,env)))
(and (term? lam-expr res)
(term? letrec-body res)))))
(_ #f)))))
(eval-prim
(lambda (prim-id args)
(match `(,prim-id . ,args)
(`(cons ,a ,d) `(,a . ,d))
(`(car (,(and (not (? applicable-tag?)) a) . ,d)) a)
(`(cdr (,(and (not (? applicable-tag?)) a) . ,d)) d)
(`(null? ()) #t)
(`(null? ,_) #f)
(`(pair? (,(not (? applicable-tag?)) . ,_)) #t)
(`(pair? ,_) #f)
(`(symbol? ,(symbol)) #t)
(`(symbol? ,_) #f)
(`(number? ,(number)) #t)
(`(number? ,_) #f)
(`(not #f) #t)
(`(not #t) #f)
(`(equal? ,v1 ,v1) #t)
(`(equal? ,_ ,_) #f))))
(eval-term-list
(lambda (terms env)
(match terms
('() '())
(`(,term . ,terms)
`(,(eval-term term env) . ,(eval-term-list terms env))))))
(eval-term
(lambda (term env)
(let ((bound? (lambda (sym) (in-env? env sym)))
(term1? (lambda (v) (term? v env))))
(match term
((symbol sym) (lookup env sym))
(#t #t)
(#f #f)
((number num) num)
(`(,(and 'quote (not (? bound?))) ,(? quotable? datum))
datum)
((and `(,op . ,_) operation)
(match operation
(`(,(or (not (symbol)) (? bound?))
. ,rands)
(let ((op (eval-term op env))
(a* (eval-term-list rands env)))
(match op
(`(,(? prim-tag?) . ,prim-id)
(eval-prim prim-id a*))
(`(,(? closure-tag?) (lambda ,x ,body) ,env^)
(let ((res (match x
((and (not (symbol)) params)
(extend-env* params a* env^))
(sym `((,sym . (val . ,a*))
. ,env^)))))
(eval-term body res))))))
(`(if ,condition ,alt-true ,alt-false)
(if (eval-term condition env)
(eval-term alt-true env)
(eval-term alt-false env)))
(`(lambda ,params ,body)
`(,closure-tag (lambda ,params ,body) ,env))
(`(letrec ((,p-name (lambda ,params ,body)))
,letrec-body)
(eval-term
letrec-body
`((,p-name . (rec . (lambda ,params ,body)))
. ,env))))))))))
(let ((program ',program))
(let ((_ (match/lazy (term? program initial-env) (#t #t))))
(eval-term program initial-env)))))))
(define (evalo program result)
(let ((tm (letrec-eval-term program)))
(dk-evalo tm result)))
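;; Usage sketch, mirroring the tests below: because evalo is a relation, the
;; same definition both evaluates programs and synthesizes missing fragments.
;; (run* (q) (evalo `(cons 1 ',q) '(1 2 3)))  ; => '(((2 3)))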
(module+ test
(require
racket/pretty
rackunit
)
(define-syntax test
(syntax-rules ()
((_ name expr expected)
(let ((actual expr))
(when (not (equal? actual expected))
(display name)
(newline)
(pretty-print actual)
(newline))
(check-equal? actual expected)))))
(define (letrec-append body)
`(letrec ((append
(lambda (xs ys)
(if (null? xs) ys (cons (car xs) (append (cdr xs) ys))))))
,body))
(test "evalo-1"
(run* (q)
(evalo `'(1 2 ,q 4 5) '(1 2 3 4 5)))
'((3)))
(test "evalo-append-0"
(run* (q)
(evalo (letrec-append
'(list (append '() '())
(append '(foo) '(bar))
(append '(1 2) '(3 4))))
q))
'(((() (foo bar) (1 2 3 4)))))
(test "evalo-append-1"
(run* (q)
(evalo (letrec-append `(append '(1 2 3) '(4 5))) q))
'(((1 2 3 4 5))))
(test "evalo-append-2"
(run* (q)
(evalo (letrec-append `(append '(1 2 3) ',q)) '(1 2 3 4 5)))
'(((4 5))))
(test "evalo-append-3"
(run* (q)
(evalo (letrec-append `(append ',q '(4 5))) '(1 2 3 4 5)))
'(((1 2 3))))
(test "evalo-append-4"
(run* (q r)
(evalo (letrec-append `(append ',q ',r)) '(1 2 3 4 5)))
'((() (1 2 3 4 5))
((1) (2 3 4 5))
((1 2) (3 4 5))
((1 2 3) (4 5))
((1 2 3 4) (5))
((1 2 3 4 5) ())))
(test "evalo-append-synthesis-1"
(run 1 (q)
(evalo `(letrec
((append (lambda (xs ys)
(if (null? xs)
ys
(cons (car ,q) (append (cdr xs) ys))))))
(append '(1 2) '(3 4)))
'(1 2 3 4))
)
'((xs)))
(test "evalo-append-synthesis-2"
(run 1 (q)
(evalo `(letrec
((append (lambda (xs ys)
(if (null? xs)
ys
(cons (car xs) (,q (cdr xs) ys))))))
(append '(1 2) '(3 4)))
'(1 2 3 4))
)
'((append)))
(test "evalo-append-synthesis-3"
(run 1 (q)
(evalo `(letrec
((append (lambda (xs ys)
(if (,q xs)
ys
(cons (car xs) (append (cdr xs) ys))))))
(append '(1 2) '(3 4)))
'(1 2 3 4))
)
'((null?)))
;; TODO: run higher order interpreters in the relational interpreter instead.
This wo n't work directly due to dKanren 's first - order restriction .
( define
' ( letrec
;((eval-expr
;(lambda (expr env)
;(match expr
;(`(quote ,datum) datum)
;(`(lambda (,(? symbol? x)) ,body)
;(lambda (a)
;(eval-expr body (lambda (y)
;(if (equal? y x) a (env y))))))
;((? symbol? x) (env x))
;(`(cons ,e1 ,e2) (cons (eval-expr e1 env) (eval-expr e2 env)))
;(`(,rator ,rand) ((eval-expr rator env)
;(eval-expr rand env)))))))
;(list
;(eval-expr '((lambda (y) y) 'g1) 'initial-env)
( eval - expr ' ( ( ( lambda ( z ) z ) ( lambda ( v ) v ) ) ' ) ' initial - env )
;(eval-expr '(((lambda (a) (a a)) (lambda (b) b)) 'g3) 'initial-env)
;(eval-expr '(((lambda (c) (lambda (d) c)) 'g4) 'g5) 'initial-env)
;(eval-expr '(((lambda (f) (lambda (v1) (f (f v1)))) (lambda (e) e)) 'g6) 'initial-env)
;(eval-expr '((lambda (g) ((g g) g)) (lambda (i) (lambda (j) 'g7))) 'initial-env))))
;(test-eval ex-eval-expr '(g1 g2 g3 g4 g6 g7))
( define dneg
' ( letrec
;((eval-expr
;(lambda (expr env)
;(match expr
;(`(,(not (not 'quote)) ,datum) datum)
;(`(lambda (,(? symbol? x)) ,body)
;(lambda (a)
;(eval-expr body (lambda (y)
;(if (equal? y x) a (env y))))))
;((symbol x) (env x))
;(`(cons ,e1 ,e2) (cons (eval-expr e1 env) (eval-expr e2 env)))
;(`(,rator ,rand) ((eval-expr rator env)
;(eval-expr rand env)))))))
;(list
;(eval-expr '((lambda (y) y) 'g1) 'initial-env)
( eval - expr ' ( ( ( lambda ( z ) z ) ( lambda ( v ) v ) ) ' ) ' initial - env )
;(eval-expr '(((lambda (a) (a a)) (lambda (b) b)) 'g3) 'initial-env)
;(eval-expr '(((lambda (c) (lambda (d) c)) 'g4) 'g5) 'initial-env)
;(eval-expr '(((lambda (f) (lambda (v1) (f (f v1)))) (lambda (e) e)) 'g6) 'initial-env)
;(eval-expr '((lambda (g) ((g g) g)) (lambda (i) (lambda (j) 'g7))) 'initial-env))))
;(test-eval ex-eval-expr-dneg '(g1 g2 g3 g4 g6 g7))
)
| null | https://raw.githubusercontent.com/gregr/racket-misc/0a5c9d4875288795e209d06982b82848c989d08b/dkanren-interp.rkt | racket | TODO: run higher order interpreters in the relational interpreter instead.
((eval-expr
(lambda (expr env)
(match expr
(`(quote ,datum) datum)
(`(lambda (,(? symbol? x)) ,body)
(lambda (a)
(eval-expr body (lambda (y)
(if (equal? y x) a (env y))))))
((? symbol? x) (env x))
(`(cons ,e1 ,e2) (cons (eval-expr e1 env) (eval-expr e2 env)))
(`(,rator ,rand) ((eval-expr rator env)
(eval-expr rand env)))))))
(list
(eval-expr '((lambda (y) y) 'g1) 'initial-env)
(eval-expr '(((lambda (a) (a a)) (lambda (b) b)) 'g3) 'initial-env)
(eval-expr '(((lambda (c) (lambda (d) c)) 'g4) 'g5) 'initial-env)
(eval-expr '(((lambda (f) (lambda (v1) (f (f v1)))) (lambda (e) e)) 'g6) 'initial-env)
(eval-expr '((lambda (g) ((g g) g)) (lambda (i) (lambda (j) 'g7))) 'initial-env))))
(test-eval ex-eval-expr '(g1 g2 g3 g4 g6 g7))
((eval-expr
(lambda (expr env)
(match expr
(`(,(not (not 'quote)) ,datum) datum)
(`(lambda (,(? symbol? x)) ,body)
(lambda (a)
(eval-expr body (lambda (y)
(if (equal? y x) a (env y))))))
((symbol x) (env x))
(`(cons ,e1 ,e2) (cons (eval-expr e1 env) (eval-expr e2 env)))
(`(,rator ,rand) ((eval-expr rator env)
(eval-expr rand env)))))))
(list
(eval-expr '((lambda (y) y) 'g1) 'initial-env)
(eval-expr '(((lambda (a) (a a)) (lambda (b) b)) 'g3) 'initial-env)
(eval-expr '(((lambda (c) (lambda (d) c)) 'g4) 'g5) 'initial-env)
(eval-expr '(((lambda (f) (lambda (v1) (f (f v1)))) (lambda (e) e)) 'g6) 'initial-env)
(eval-expr '((lambda (g) ((g g) g)) (lambda (i) (lambda (j) 'g7))) 'initial-env))))
(test-eval ex-eval-expr-dneg '(g1 g2 g3 g4 g6 g7)) | #lang racket/base
(provide
evalo
)
(require
"dkanren.rkt"
)
(define (letrec-eval-term program)
`(let ((closure-tag ',(gensym "#%closure"))
(prim-tag ',(gensym "#%primitive"))
(empty-env '()))
(let ((initial-env
`((cons . (val . (,prim-tag . cons)))
(car . (val . (,prim-tag . car)))
(cdr . (val . (,prim-tag . cdr)))
(null? . (val . (,prim-tag . null?)))
(pair? . (val . (,prim-tag . pair?)))
(symbol? . (val . (,prim-tag . symbol?)))
(not . (val . (,prim-tag . not)))
(equal? . (val . (,prim-tag . equal?)))
(list . (val . (,closure-tag (lambda x x) ,empty-env)))
. ,empty-env))
(closure-tag? (lambda (v) (equal? v closure-tag)))
(prim-tag? (lambda (v) (equal? v prim-tag))))
(letrec
((applicable-tag? (lambda (v) (or (closure-tag? v) (prim-tag? v))))
(quotable? (lambda (v)
(match/lazy v
((? symbol?) (not (applicable-tag? v)))
(`(,a . ,d) (and (quotable? a) (quotable? d)))
(_ #t))))
(not-in-params? (lambda (ps sym)
(match/lazy ps
('() #t)
(`(,a . ,d)
(and (not (equal? a sym))
(not-in-params? d sym))))))
(param-list? (lambda (x)
(match/lazy x
('() #t)
(`(,(? symbol? a) . ,d)
(and (param-list? d) (not-in-params? d a)))
(_ #f))))
(params? (lambda (x)
(match/lazy x
((? param-list?) #t)
(x (symbol? x)))))
(in-env? (lambda (env sym)
(match/lazy env
('() #f)
(`((,a . ,_) . ,d)
(or (equal? a sym) (in-env? d sym))))))
(extend-env*
(lambda (params args env)
(match `(,params . ,args)
(`(() . ()) env)
(`((,x . ,dx*) . (,a . ,da*))
(extend-env* dx* da* `((,x . (val . ,a)) . ,env))))))
(lookup
(lambda (env sym)
(match env
(`((,y . ,b) . ,rest)
(if (equal? sym y)
(match b
(`(val . ,v) v)
(`(rec . ,lam-expr) `(,closure-tag ,lam-expr ,env)))
(lookup rest sym))))))
(term?
(lambda (term env)
(letrec
((term1? (lambda (v) (term? v env)))
(terms? (lambda (ts env)
(match/lazy ts
('() #t)
(`(,t . ,ts)
(and (term? t env) (terms? ts env)))))))
(match/lazy term
(#t #t)
(#f #t)
((number) #t)
((symbol sym) (in-env? env sym))
(`(,(? term1?) . ,rands) (terms? rands env))
(`(quote ,datum) (quotable? datum))
(`(if ,c ,t ,f) (and (term1? c) (term1? t) (term1? f)))
(`(lambda ,params ,body)
(and (params? params)
(let ((res
(match params
((and (not (symbol)) params)
(extend-env* params params env))
(sym `((,sym . (val . ,sym)) . ,env)))))
(term? body res))))
(`(letrec
((,p-name ,(and `(lambda ,params ,body) lam-expr)))
,letrec-body)
(and (params? params)
(let ((res `((,p-name
. (rec . (lambda ,params ,body)))
. ,env)))
(and (term? lam-expr res)
(term? letrec-body res)))))
(_ #f)))))
(eval-prim
(lambda (prim-id args)
(match `(,prim-id . ,args)
(`(cons ,a ,d) `(,a . ,d))
(`(car (,(and (not (? applicable-tag?)) a) . ,d)) a)
(`(cdr (,(and (not (? applicable-tag?)) a) . ,d)) d)
(`(null? ()) #t)
(`(null? ,_) #f)
(`(pair? (,(not (? applicable-tag?)) . ,_)) #t)
(`(pair? ,_) #f)
(`(symbol? ,(symbol)) #t)
(`(symbol? ,_) #f)
(`(number? ,(number)) #t)
(`(number? ,_) #f)
(`(not #f) #t)
(`(not #t) #f)
(`(equal? ,v1 ,v1) #t)
(`(equal? ,_ ,_) #f))))
(eval-term-list
(lambda (terms env)
(match terms
('() '())
(`(,term . ,terms)
`(,(eval-term term env) . ,(eval-term-list terms env))))))
(eval-term
(lambda (term env)
(let ((bound? (lambda (sym) (in-env? env sym)))
(term1? (lambda (v) (term? v env))))
(match term
((symbol sym) (lookup env sym))
(#t #t)
(#f #f)
((number num) num)
(`(,(and 'quote (not (? bound?))) ,(? quotable? datum))
datum)
((and `(,op . ,_) operation)
(match operation
(`(,(or (not (symbol)) (? bound?))
. ,rands)
(let ((op (eval-term op env))
(a* (eval-term-list rands env)))
(match op
(`(,(? prim-tag?) . ,prim-id)
(eval-prim prim-id a*))
(`(,(? closure-tag?) (lambda ,x ,body) ,env^)
(let ((res (match x
((and (not (symbol)) params)
(extend-env* params a* env^))
(sym `((,sym . (val . ,a*))
. ,env^)))))
(eval-term body res))))))
(`(if ,condition ,alt-true ,alt-false)
(if (eval-term condition env)
(eval-term alt-true env)
(eval-term alt-false env)))
(`(lambda ,params ,body)
`(,closure-tag (lambda ,params ,body) ,env))
(`(letrec ((,p-name (lambda ,params ,body)))
,letrec-body)
(eval-term
letrec-body
`((,p-name . (rec . (lambda ,params ,body)))
. ,env))))))))))
(let ((program ',program))
(let ((_ (match/lazy (term? program initial-env) (#t #t))))
(eval-term program initial-env)))))))
(define (evalo program result)
(let ((tm (letrec-eval-term program)))
(dk-evalo tm result)))
(module+ test
(require
racket/pretty
rackunit
)
(define-syntax test
(syntax-rules ()
((_ name expr expected)
(let ((actual expr))
(when (not (equal? actual expected))
(display name)
(newline)
(pretty-print actual)
(newline))
(check-equal? actual expected)))))
(define (letrec-append body)
`(letrec ((append
(lambda (xs ys)
(if (null? xs) ys (cons (car xs) (append (cdr xs) ys))))))
,body))
(test "evalo-1"
(run* (q)
(evalo `'(1 2 ,q 4 5) '(1 2 3 4 5)))
'((3)))
(test "evalo-append-0"
(run* (q)
(evalo (letrec-append
'(list (append '() '())
(append '(foo) '(bar))
(append '(1 2) '(3 4))))
q))
'(((() (foo bar) (1 2 3 4)))))
(test "evalo-append-1"
(run* (q)
(evalo (letrec-append `(append '(1 2 3) '(4 5))) q))
'(((1 2 3 4 5))))
(test "evalo-append-2"
(run* (q)
(evalo (letrec-append `(append '(1 2 3) ',q)) '(1 2 3 4 5)))
'(((4 5))))
(test "evalo-append-3"
(run* (q)
(evalo (letrec-append `(append ',q '(4 5))) '(1 2 3 4 5)))
'(((1 2 3))))
(test "evalo-append-4"
(run* (q r)
(evalo (letrec-append `(append ',q ',r)) '(1 2 3 4 5)))
'((() (1 2 3 4 5))
((1) (2 3 4 5))
((1 2) (3 4 5))
((1 2 3) (4 5))
((1 2 3 4) (5))
((1 2 3 4 5) ())))
(test "evalo-append-synthesis-1"
(run 1 (q)
(evalo `(letrec
((append (lambda (xs ys)
(if (null? xs)
ys
(cons (car ,q) (append (cdr xs) ys))))))
(append '(1 2) '(3 4)))
'(1 2 3 4))
)
'((xs)))
(test "evalo-append-synthesis-2"
(run 1 (q)
(evalo `(letrec
((append (lambda (xs ys)
(if (null? xs)
ys
(cons (car xs) (,q (cdr xs) ys))))))
(append '(1 2) '(3 4)))
'(1 2 3 4))
)
'((append)))
(test "evalo-append-synthesis-3"
(run 1 (q)
(evalo `(letrec
((append (lambda (xs ys)
(if (,q xs)
ys
(cons (car xs) (append (cdr xs) ys))))))
(append '(1 2) '(3 4)))
'(1 2 3 4))
)
'((null?)))
;; This won't work directly due to dKanren's first-order restriction.
)
|
d7f48f940d14d81dd68389d5d6bcb0b2288fb1f679a878628065bbc51b8156b5 | takikawa/racket-ppa | no-gui.rkt | #lang racket/base
(require typed/untyped-utils)
;; ===================================================================================================
;; General exports
(require "private/utils-and-no-gui.rkt")
(provide (all-from-out "private/utils-and-no-gui.rkt"))
;; ===================================================================================================
Nonrenderers
(require "private/common/nonrenderer.rkt")
(provide
x-ticks
y-ticks
z-ticks
invisible-rect
invisible-rect3d)
;; ===================================================================================================
;; 2D exports
(require (rename-in "private/no-gui/plot2d.rkt"
[plot/dc typed-plot/dc]
[plot-bitmap typed-plot-bitmap]
[plot-pict typed-plot-pict])
(rename-in "private/no-gui/plot2d-untyped.rkt"
[plot/dc untyped-plot/dc]
[plot-bitmap untyped-plot-bitmap]
[plot-pict untyped-plot-pict]))
(define-typed/untyped-identifier plot/dc
typed-plot/dc
untyped-plot/dc)
(define-typed/untyped-identifier plot-bitmap
typed-plot-bitmap
untyped-plot-bitmap)
(define-typed/untyped-identifier plot-pict
typed-plot-pict
untyped-plot-pict)
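;; define-typed/untyped-identifier makes each of these names resolve to the
;; typed implementation inside Typed Racket modules and to the untyped one
;; elsewhere; a minimal untyped use would be, e.g.,
;; (plot-bitmap (function sin -3 3))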
(provide
plot/dc
plot-bitmap
plot-pict
plot-file)
(require "private/plot2d/point.rkt")
(provide
points
vector-field
error-bars
candlesticks)
(require "private/plot2d/color-field.rkt")
(provide
color-field)
(require "private/plot2d/arrows.rkt")
(provide
arrows)
(require "private/plot2d/line.rkt")
(provide
lines
parametric
polar
hrule
vrule
function
inverse
density)
(require "private/plot2d/interval.rkt")
(provide
lines-interval
parametric-interval
polar-interval
function-interval
inverse-interval
violin)
(require "private/plot2d/box-and-whisker.rkt")
(provide box-and-whisker)
(require "private/plot2d/contour.rkt")
(provide
isoline
contours
contour-intervals)
(require "private/plot2d/rectangle.rkt")
(provide
rectangles
area-histogram
discrete-histogram
stacked-histogram)
(require "private/plot2d/decoration.rkt")
(provide
x-axis
y-axis
axes
polar-axes
x-tick-lines
y-tick-lines
tick-grid
point-label
point-pict
parametric-label
parametric-pict
polar-label
polar-pict
function-label
function-pict
inverse-label
inverse-pict)
;; ===================================================================================================
;; 3D exports
(require (rename-in "private/no-gui/plot3d.rkt"
[plot3d/dc typed-plot3d/dc])
"private/no-gui/plot3d-untyped.rkt")
(define-typed/untyped-identifier plot3d/dc
typed-plot3d/dc
untyped-plot3d/dc)
(provide
plot3d/dc
plot3d-bitmap
plot3d-pict
plot3d-file)
(require "private/plot3d/surface.rkt")
(provide
surface3d)
(require "private/plot3d/contour.rkt")
(provide
isoline3d
contours3d
contour-intervals3d)
(require "private/plot3d/arrows.rkt")
(provide
arrows3d)
(require "private/plot3d/line.rkt")
(provide
lines3d
parametric3d)
(require "private/plot3d/point.rkt")
(provide
points3d
vector-field3d)
(require "private/plot3d/isosurface.rkt")
(provide
isosurface3d
isosurfaces3d
polar3d)
(require "private/plot3d/param-surf.rkt")
(provide
polygons3d
parametric-surface3d)
(require "private/plot3d/rectangle.rkt")
(provide
rectangles3d
discrete-histogram3d
stacked-histogram3d)
(require "private/plot3d/decoration.rkt")
(provide
point-label3d)
;; ===================================================================================================
;; Deprecated functions
(require "private/deprecated/deprecated.rkt")
(provide
mix
line
contour
shade
surface)
| null | https://raw.githubusercontent.com/takikawa/racket-ppa/caff086a1cd48208815cec2a22645a3091c11d4c/share/pkgs/plot-lib/plot/no-gui.rkt | racket | ===================================================================================================
General exports
===================================================================================================
===================================================================================================
2D exports
===================================================================================================
3D exports
===================================================================================================
Deprecated functions | #lang racket/base
(require typed/untyped-utils)
(require "private/utils-and-no-gui.rkt")
(provide (all-from-out "private/utils-and-no-gui.rkt"))
Nonrenderers
(require "private/common/nonrenderer.rkt")
(provide
x-ticks
y-ticks
z-ticks
invisible-rect
invisible-rect3d)
(require (rename-in "private/no-gui/plot2d.rkt"
[plot/dc typed-plot/dc]
[plot-bitmap typed-plot-bitmap]
[plot-pict typed-plot-pict])
(rename-in "private/no-gui/plot2d-untyped.rkt"
[plot/dc untyped-plot/dc]
[plot-bitmap untyped-plot-bitmap]
[plot-pict untyped-plot-pict]))
(define-typed/untyped-identifier plot/dc
typed-plot/dc
untyped-plot/dc)
(define-typed/untyped-identifier plot-bitmap
typed-plot-bitmap
untyped-plot-bitmap)
(define-typed/untyped-identifier plot-pict
typed-plot-pict
untyped-plot-pict)
(provide
plot/dc
plot-bitmap
plot-pict
plot-file)
(require "private/plot2d/point.rkt")
(provide
points
vector-field
error-bars
candlesticks)
(require "private/plot2d/color-field.rkt")
(provide
color-field)
(require "private/plot2d/arrows.rkt")
(provide
arrows)
(require "private/plot2d/line.rkt")
(provide
lines
parametric
polar
hrule
vrule
function
inverse
density)
(require "private/plot2d/interval.rkt")
(provide
lines-interval
parametric-interval
polar-interval
function-interval
inverse-interval
violin)
(require "private/plot2d/box-and-whisker.rkt")
(provide box-and-whisker)
(require "private/plot2d/contour.rkt")
(provide
isoline
contours
contour-intervals)
(require "private/plot2d/rectangle.rkt")
(provide
rectangles
area-histogram
discrete-histogram
stacked-histogram)
(require "private/plot2d/decoration.rkt")
(provide
x-axis
y-axis
axes
polar-axes
x-tick-lines
y-tick-lines
tick-grid
point-label
point-pict
parametric-label
parametric-pict
polar-label
polar-pict
function-label
function-pict
inverse-label
inverse-pict)
(require (rename-in "private/no-gui/plot3d.rkt"
[plot3d/dc typed-plot3d/dc])
"private/no-gui/plot3d-untyped.rkt")
(define-typed/untyped-identifier plot3d/dc
typed-plot3d/dc
untyped-plot3d/dc)
(provide
plot3d/dc
plot3d-bitmap
plot3d-pict
plot3d-file)
(require "private/plot3d/surface.rkt")
(provide
surface3d)
(require "private/plot3d/contour.rkt")
(provide
isoline3d
contours3d
contour-intervals3d)
(require "private/plot3d/arrows.rkt")
(provide
arrows3d)
(require "private/plot3d/line.rkt")
(provide
lines3d
parametric3d)
(require "private/plot3d/point.rkt")
(provide
points3d
vector-field3d)
(require "private/plot3d/isosurface.rkt")
(provide
isosurface3d
isosurfaces3d
polar3d)
(require "private/plot3d/param-surf.rkt")
(provide
polygons3d
parametric-surface3d)
(require "private/plot3d/rectangle.rkt")
(provide
rectangles3d
discrete-histogram3d
stacked-histogram3d)
(require "private/plot3d/decoration.rkt")
(provide
point-label3d)
(require "private/deprecated/deprecated.rkt")
(provide
mix
line
contour
shade
surface)
|
73290faef9f8ba3e0597dec20111d5ed5eb4defb0b633eb4ccaa01e3e7315217 | PLTools/GT | test809cool.ml | open GT
module Location = struct
(* from Printast module *)
let fmt_position with_name f l =
let open Format in
let open Lexing in
let fname = if with_name then l.pos_fname else "" in
if l.pos_lnum = -1
then fprintf f "%s[%d]" fname l.pos_cnum
else fprintf f "%s[%d,%d+%d]" fname l.pos_lnum l.pos_bol
(l.pos_cnum - l.pos_bol)
type t = [%import: Location.t]
let fmt_location f loc =
let open Format in
let p_2nd_name = loc.loc_start.pos_fname <> loc.loc_end.pos_fname in
fprintf f "(%a..%a)" (fmt_position true) loc.loc_start
(fmt_position p_2nd_name) loc.loc_end;
if loc.loc_ghost then fprintf f " ghost"
class virtual ['inh,'self,'syn] t_t =
object method virtual do_t : 'inh -> t -> 'syn end
let gcata_t tr inh subj = tr#do_t inh subj
class ['self] html_t_t _fself = object
inherit [unit,'self,View.viewer] t_t
method do_t () _ = HTML.string "<noloc>"
end
let html_t () subj = GT.transform_gc gcata_t (new html_t_t) () subj
class ['self] fmt_t_t fself = object
inherit [Format.formatter, 'self, unit] t_t
method do_t = fmt_location
end
let fmt_t fmt subj = GT.transform_gc gcata_t (new fmt_t_t) fmt subj
let t =
{ GT.gcata = gcata_t
; GT.fix = (fun eta -> transform_gc gcata_t eta)
; GT.plugins = (object method html = html_t () method fmt = fmt_t end)
}
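(* Usage sketch: the typeinfo record above exposes the custom transformations,
   so a location [loc : t], assumed to be supplied by the caller, can be
   rendered with [Format.asprintf "%a" fmt_t loc] or turned into HTML with
   [html_t () loc]. *)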
type 'a loc = [%import: 'a Location.loc]
[@@deriving gt ~options:{ fmt; html }]
end
module Longident = struct
type t = [%import: Longident.t] [@@deriving gt ~options:{ fmt; html }]
end
module Asttypes = struct
type rec_flag = [%import: Asttypes.rec_flag] [@@deriving gt ~options:{ fmt; html }]
type direction_flag = [%import: Asttypes.direction_flag] [@@deriving gt ~options:{ fmt; html }]
type private_flag = [%import: Asttypes.private_flag] [@@deriving gt ~options:{ fmt; html }]
type mutable_flag = [%import: Asttypes.mutable_flag] [@@deriving gt ~options:{ fmt; html }]
type virtual_flag = [%import: Asttypes.virtual_flag] [@@deriving gt ~options:{ fmt; html }]
type override_flag = [%import: Asttypes.override_flag] [@@deriving gt ~options:{ fmt; html }]
type closed_flag = [%import: Asttypes.closed_flag] [@@deriving gt ~options:{ fmt; html }]
type label = string [@@deriving gt ~options:{ fmt; html }]
type arg_label = [%import: Asttypes.arg_label] [@@deriving gt ~options:{ fmt; html }]
type 'a loc = [%import: 'a Asttypes.loc] [@@deriving gt ~options:{ fmt; html }]
type variance = [%import: Asttypes.variance] [@@deriving gt ~options:{ fmt; html }]
end
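(* Sketch: each [@@deriving gt ...] clause above generates a typeinfo value
   named after its type, whose plugins object carries the requested
   transformations; following the conventions of module Location, a value can
   be printed with something like
   [Asttypes.rec_flag.GT.plugins#fmt Format.std_formatter Asttypes.Recursive]. *)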
open Asttypes
type constant = [%import: Parsetree.constant] [@@deriving gt ~options:{ fmt; html }]
(*
type attribute = [%import: Parsetree.attribute]
and extension = [%import: Parsetree.extension]
and attributes = [%import: Parsetree.attributes]
and payload = [%import: Parsetree.payload]
and core_type = [%import: Parsetree.core_type]
and core_type_desc = [%import: Parsetree.core_type_desc]
and package_type = [%import: Parsetree.package_type]
and row_field = [%import: Parsetree.row_field]
and object_field = [%import: Parsetree.object_field]
and structure = [%import: Parsetree.structure]
and structure_item = [%import: Parsetree.structure_item]
and structure_item_desc = [%import: Parsetree.structure_item_desc]
and value_binding = [%import: Parsetree.value_binding]
and value_description = [%import: Parsetree.value_description]
and type_declaration = [%import: Parsetree.type_declaration]
and type_extension = [%import: Parsetree.type_extension]
and module_binding = [%import: Parsetree.module_binding]
and module_type_declaration = [%import: Parsetree.module_type_declaration]
and open_description = [%import: Parsetree.open_description]
and class_type_declaration = [%import: Parsetree.class_type_declaration]
and class_type = [%import: Parsetree.class_type]
and class_type_desc = [%import: Parsetree.class_type_desc]
and class_signature = [%import: Parsetree.class_signature]
and class_type_field = [%import: Parsetree.class_type_field]
and class_type_field_desc = [%import: Parsetree.class_type_field_desc]
and include_declaration = [%import: Parsetree.include_declaration]
and 'a include_infos = [%import: 'a Parsetree.include_infos]
and module_expr = [%import: Parsetree.module_expr]
and module_expr_desc = [%import: Parsetree.module_expr_desc]
and module_type = [%import: Parsetree.module_type]
and module_type_desc = [%import: Parsetree.module_type_desc]
and class_declaration = [%import: Parsetree.class_declaration]
and 'a class_infos = [%import: 'a Parsetree.class_infos]
and class_expr = [%import: Parsetree.class_expr]
and class_expr_desc = [%import: Parsetree.class_expr_desc]
and class_structure = [%import: Parsetree.class_structure]
and class_field = [%import: Parsetree.class_field]
and class_field_desc = [%import: Parsetree.class_field_desc]
and class_field_kind = [%import: Parsetree.class_field_kind]
and type_kind = [%import: Parsetree.type_kind]
and constructor_declaration = [%import: Parsetree.constructor_declaration]
and constructor_arguments = [%import: Parsetree.constructor_arguments]
and label_declaration = [%import: Parsetree.label_declaration]
and with_constraint = [%import: Parsetree.with_constraint]
and signature = [%import: Parsetree.signature]
and signature_item = [%import: Parsetree.signature_item]
and signature_item_desc = [%import: Parsetree.signature_item_desc]
and module_declaration = [%import: Parsetree.module_declaration]
and include_description = [%import: Parsetree.include_description]
and class_description = [%import: Parsetree.class_description]
and pattern = [%import: Parsetree.pattern]
and pattern_desc = [%import: Parsetree.pattern_desc]
and expression = [%import: Parsetree.expression]
and expression_desc = [%import: Parsetree.expression_desc]
and extension_constructor = [%import: Parsetree.extension_constructor]
and extension_constructor_kind = [%import: Parsetree.extension_constructor_kind]
and case = [%import: Parsetree.case]
[@@deriving gt ~options:{ fmt; }]
*)
type attribute = (string Asttypes.loc * payload)
and extension = (string Asttypes.loc * payload)
and attributes = attribute list
and payload =
| PStr of int
| PSig of signature
| PTyp of core_type
| PPat of pattern * expression option
and core_type =
{
ptyp_desc: core_type_desc ;
ptyp_loc: Location.t ;
ptyp_attributes: attributes }
and core_type_desc =
| Ptyp_any
| Ptyp_var of string
| Ptyp_arrow of Asttypes.arg_label * core_type * core_type
| Ptyp_tuple of core_type list
| Ptyp_constr of Longident.t Asttypes.loc * core_type list
| Ptyp_object of object_field list * Asttypes.closed_flag
| Ptyp_class of Longident.t Asttypes.loc * core_type list
| Ptyp_alias of core_type * string
| Ptyp_variant of row_field list * Asttypes.closed_flag * Asttypes.label
list option
| Ptyp_poly of string Asttypes.loc list * core_type
| Ptyp_package of package_type
| Ptyp_extension of extension
and package_type =
(Longident.t Asttypes.loc * (Longident.t Asttypes.loc * core_type) list)
and row_field =
| Rtag of Asttypes.label Asttypes.loc * attributes * bool * core_type list
| Rinherit of core_type
and object_field =
| Otag of Asttypes.label Asttypes.loc * attributes * core_type
| Oinherit of core_type
and structure = structure_item list
and structure_item =
{
pstr_desc: structure_item_desc ;
pstr_loc: Location.t }
and structure_item_desc =
| Pstr_eval of expression * attributes
| Pstr_value of Asttypes.rec_flag * value_binding list
| Pstr_primitive of value_description
| Pstr_type of Asttypes.rec_flag * type_declaration list
| Pstr_typext of type_extension
| Pstr_exception of extension_constructor
| Pstr_module of module_binding
| Pstr_recmodule of module_binding list
| Pstr_modtype of module_type_declaration
| Pstr_open of open_description
| Pstr_class of class_declaration list
| Pstr_class_type of class_type_declaration list
| Pstr_include of include_declaration
| Pstr_attribute of attribute
| Pstr_extension of extension * attributes
and value_binding =
{
pvb_pat: pattern ;
pvb_expr: expression ;
pvb_attributes: attributes ;
pvb_loc: Location.t }
and value_description =
{
pval_name: string Asttypes.loc ;
pval_type: core_type ;
pval_prim: string list ;
pval_attributes: attributes ;
pval_loc: Location.t }
and type_declaration =
{
ptype_name: string Asttypes.loc ;
ptype_params: (core_type * Asttypes.variance) list ;
ptype_cstrs: (core_type * core_type * Location.t) list ;
ptype_kind: type_kind ;
ptype_private: Asttypes.private_flag ;
ptype_manifest: core_type option ;
ptype_attributes: attributes ;
ptype_loc: Location.t }
and type_extension =
{
ptyext_path: Longident.t Asttypes.loc ;
ptyext_params: (core_type * Asttypes.variance) list ;
ptyext_constructors: extension_constructor list ;
ptyext_private: Asttypes.private_flag ;
ptyext_attributes: attributes }
and module_binding =
{
pmb_name: string Asttypes.loc ;
pmb_expr: module_expr ;
pmb_attributes: attributes ;
pmb_loc: Location.t }
and module_type_declaration =
{
pmtd_name: string Asttypes.loc ;
pmtd_type: module_type option ;
pmtd_attributes: attributes ;
pmtd_loc: Location.t }
and open_description =
{
popen_lid: Longident.t Asttypes.loc ;
popen_override: Asttypes.override_flag ;
popen_loc: Location.t ;
popen_attributes: attributes }
and class_type_declaration = class_type class_infos
and class_type =
{
pcty_desc: class_type_desc ;
pcty_loc: Location.t ;
pcty_attributes: attributes }
and class_type_desc =
| Pcty_constr of Longident.t Asttypes.loc * core_type list
| Pcty_signature of class_signature
| Pcty_arrow of Asttypes.arg_label * core_type * class_type
| Pcty_extension of extension
| Pcty_open of Asttypes.override_flag * Longident.t Asttypes.loc *
class_type
and class_signature =
{
pcsig_self: core_type ;
pcsig_fields: class_type_field list }
and class_type_field =
{
pctf_desc: class_type_field_desc ;
pctf_loc: Location.t ;
pctf_attributes: attributes }
and class_type_field_desc =
| Pctf_inherit of class_type
| Pctf_val of (Asttypes.label Asttypes.loc *
Asttypes.mutable_flag *
Asttypes.virtual_flag *
core_type
)
(* | Pctf_method of (Asttypes.label Asttypes.loc * Asttypes.private_flag *
* Asttypes.virtual_flag * core_type)
* | Pctf_constraint of (core_type * core_type)
* | Pctf_attribute of attribute
* | Pctf_extension of extension *)
and include_declaration = module_expr include_infos
and 'a include_infos =
{
pincl_mod: 'a ;
pincl_loc: Location.t ;
pincl_attributes: attributes }
and module_expr =
{
pmod_desc: module_expr_desc ;
pmod_loc: Location.t ;
pmod_attributes: attributes }
and module_expr_desc =
| Pmod_ident of Longident.t Asttypes.loc
| Pmod_structure of structure
| Pmod_functor of string Asttypes.loc * module_type option * module_expr
| Pmod_apply of module_expr * module_expr
| Pmod_constraint of module_expr * module_type
| Pmod_unpack of expression
| Pmod_extension of extension
and module_type =
{
pmty_desc: module_type_desc ;
pmty_loc: Location.t ;
pmty_attributes: attributes }
and module_type_desc =
| Pmty_ident of Longident.t Asttypes.loc
| Pmty_signature of signature
| Pmty_functor of string Asttypes.loc * module_type option * module_type
| Pmty_with of module_type * with_constraint list
| Pmty_typeof of module_expr
| Pmty_extension of extension
| Pmty_alias of Longident.t Asttypes.loc
and class_declaration = class_expr class_infos
and 'a class_infos =
{
pci_virt: Asttypes.virtual_flag ;
pci_params: (core_type * Asttypes.variance) list ;
pci_name: string Asttypes.loc ;
pci_expr: 'a ;
pci_loc: Location.t ;
pci_attributes: attributes }
and class_expr =
{
pcl_desc: class_expr_desc ;
pcl_loc: Location.t ;
pcl_attributes: attributes }
and class_expr_desc =
| Pcl_constr of Longident.t Asttypes.loc * core_type list
| Pcl_structure of class_structure
| Pcl_fun of Asttypes.arg_label * expression option * pattern * class_expr
| Pcl_apply of class_expr * (Asttypes.arg_label * expression) list
| Pcl_let of Asttypes.rec_flag * value_binding list * class_expr
| Pcl_constraint of class_expr * class_type
| Pcl_extension of extension
| Pcl_open of Asttypes.override_flag * Longident.t Asttypes.loc *
class_expr
and class_structure =
{
pcstr_self: pattern ;
pcstr_fields: class_field list }
and class_field =
{
pcf_desc: class_field_desc ;
pcf_loc: Location.t ;
pcf_attributes: attributes }
and class_field_desc =
| Pcf_inherit of Asttypes.override_flag * class_expr * string Asttypes.loc
option
| Pcf_val of (Asttypes.label Asttypes.loc * Asttypes.mutable_flag *
class_field_kind)
| Pcf_method of (Asttypes.label Asttypes.loc * Asttypes.private_flag *
class_field_kind)
| Pcf_constraint of (core_type * core_type)
| Pcf_initializer of expression
| Pcf_attribute of attribute
| Pcf_extension of extension
and class_field_kind =
| Cfk_virtual of core_type
| Cfk_concrete of Asttypes.override_flag * expression
and type_kind =
| Ptype_abstract
| Ptype_variant of constructor_declaration list
| Ptype_record of label_declaration list
| Ptype_open
and constructor_declaration =
{
pcd_name: string Asttypes.loc ;
pcd_args: constructor_arguments ;
pcd_res: core_type option ;
pcd_loc: Location.t ;
pcd_attributes: attributes }
and constructor_arguments =
| Pcstr_tuple of core_type list
| Pcstr_record of label_declaration list
and label_declaration =
{
pld_name: string Asttypes.loc ;
pld_mutable: Asttypes.mutable_flag ;
pld_type: core_type ;
pld_loc: Location.t ;
pld_attributes: attributes }
and with_constraint =
| Pwith_type of Longident.t Asttypes.loc * type_declaration
| Pwith_module of Longident.t Asttypes.loc * Longident.t Asttypes.loc
| Pwith_typesubst of Longident.t Asttypes.loc * type_declaration
| Pwith_modsubst of Longident.t Asttypes.loc * Longident.t Asttypes.loc
and signature = signature_item list
and signature_item =
{
psig_desc: signature_item_desc ;
psig_loc: Location.t }
and signature_item_desc =
| Psig_value of value_description
| Psig_type of Asttypes.rec_flag * type_declaration list
| Psig_typext of type_extension
| Psig_exception of extension_constructor
| Psig_module of module_declaration
| Psig_recmodule of module_declaration list
| Psig_modtype of module_type_declaration
| Psig_open of open_description
| Psig_include of include_description
| Psig_class of class_description list
| Psig_class_type of class_type_declaration list
| Psig_attribute of attribute
| Psig_extension of extension * attributes
and module_declaration =
{
pmd_name: string Asttypes.loc ;
pmd_type: module_type ;
pmd_attributes: attributes ;
pmd_loc: Location.t }
and include_description = module_type include_infos
and class_description = class_type class_infos
and pattern =
{
ppat_desc: pattern_desc ;
ppat_loc: Location.t ;
ppat_attributes: attributes }
and pattern_desc =
| Ppat_any
| Ppat_var of string Asttypes.loc
| Ppat_alias of pattern * string Asttypes.loc
| Ppat_constant of constant
| Ppat_interval of constant * constant
| Ppat_tuple of pattern list
| Ppat_construct of Longident.t Asttypes.loc * pattern option
| Ppat_variant of Asttypes.label * pattern option
| Ppat_record of (Longident.t Asttypes.loc * pattern) list *
Asttypes.closed_flag
| Ppat_array of pattern list
| Ppat_or of pattern * pattern
| Ppat_constraint of pattern * core_type
| Ppat_type of Longident.t Asttypes.loc
| Ppat_lazy of pattern
| Ppat_unpack of string Asttypes.loc
| Ppat_exception of pattern
| Ppat_extension of extension
| Ppat_open of Longident.t Asttypes.loc * pattern
and expression =
{
pexp_desc: expression_desc ;
pexp_loc: Location.t ;
pexp_attributes: attributes }
and expression_desc =
| Pexp_ident of Longident.t Asttypes.loc
| Pexp_constant of constant
| Pexp_let of Asttypes.rec_flag * value_binding list * expression
| Pexp_function of case list
| Pexp_fun of Asttypes.arg_label * expression option * pattern * expression
| Pexp_apply of expression * (Asttypes.arg_label * expression) list
| Pexp_match of expression * case list
| Pexp_try of expression * case list
| Pexp_tuple of expression list
| Pexp_construct of Longident.t Asttypes.loc * expression option
| Pexp_variant of Asttypes.label * expression option
| Pexp_record of (Longident.t Asttypes.loc * expression) list * expression
option
| Pexp_field of expression * Longident.t Asttypes.loc
| Pexp_setfield of expression * Longident.t Asttypes.loc * expression
| Pexp_array of expression list
| Pexp_ifthenelse of expression * expression * expression option
| Pexp_sequence of expression * expression
| Pexp_while of expression * expression
| Pexp_for of pattern * expression * expression * Asttypes.direction_flag *
expression
| Pexp_constraint of expression * core_type
| Pexp_coerce of expression * core_type option * core_type
| Pexp_send of expression * Asttypes.label Asttypes.loc
| Pexp_new of Longident.t Asttypes.loc
| Pexp_setinstvar of Asttypes.label Asttypes.loc * expression
| Pexp_override of (Asttypes.label Asttypes.loc * expression) list
| Pexp_letmodule of string Asttypes.loc * module_expr * expression
| Pexp_letexception of extension_constructor * expression
| Pexp_assert of expression
| Pexp_lazy of expression
| Pexp_poly of expression * core_type option
| Pexp_object of class_structure
| Pexp_newtype of string Asttypes.loc * expression
| Pexp_pack of module_expr
| Pexp_open of Asttypes.override_flag * Longident.t Asttypes.loc *
expression
| Pexp_extension of extension
| Pexp_unreachable
and extension_constructor =
{
pext_name: string Asttypes.loc ;
pext_kind: extension_constructor_kind ;
pext_loc: Location.t ;
pext_attributes: attributes }
and extension_constructor_kind =
| Pext_decl of constructor_arguments * core_type option
| Pext_rebind of Longident.t Asttypes.loc
and case =
{
pc_lhs: pattern ;
pc_guard: expression option ;
pc_rhs: expression }
[@@deriving gt ~options:{ fmt; }]
| null | https://raw.githubusercontent.com/PLTools/GT/62d1a424a3336f2317ba67e447a9ff09d179b583/regression/test809cool.ml | ocaml | from Printast module
| Pctf_method of (Asttypes.label Asttypes.loc * Asttypes.private_flag *
* Asttypes.virtual_flag * core_type)
* | Pctf_constraint of (core_type * core_type)
* | Pctf_attribute of attribute
* | Pctf_extension of extension | open GT
module Location = struct
let fmt_position with_name f l =
let open Format in
let open Lexing in
let fname = if with_name then l.pos_fname else "" in
if l.pos_lnum = -1
then fprintf f "%s[%d]" fname l.pos_cnum
else fprintf f "%s[%d,%d+%d]" fname l.pos_lnum l.pos_bol
(l.pos_cnum - l.pos_bol)
type t = [%import: Location.t]
let fmt_location f loc =
let open Format in
let p_2nd_name = loc.loc_start.pos_fname <> loc.loc_end.pos_fname in
fprintf f "(%a..%a)" (fmt_position true) loc.loc_start
(fmt_position p_2nd_name) loc.loc_end;
if loc.loc_ghost then fprintf f " ghost";
class virtual ['inh,'self,'syn] t_t =
object method virtual do_t : 'inh -> t -> 'syn end
let gcata_t tr inh subj = tr#do_t inh subj
class ['self] html_t_t _fself = object
inherit [unit,'self,View.viewer] t_t
method do_t () _ = HTML.string "<noloc>"
end
let html_t () subj = GT.transform_gc gcata_t (new html_t_t) () subj
class ['self] fmt_t_t fself = object
inherit [Format.formatter, 'self, unit] t_t
method do_t = fmt_location
end
let fmt_t fmt subj = GT.transform_gc gcata_t (new fmt_t_t) fmt subj
let t =
{ GT.gcata = gcata_t
; GT.fix = (fun eta -> transform_gc gcata_t eta)
; GT.plugins = (object method html = html_t () method fmt = fmt_t end)
}
type 'a loc = [%import: 'a Location.loc]
[@@deriving gt ~options:{ fmt; html }]
end
module Longident = struct
type t = [%import: Longident.t] [@@deriving gt ~options:{ fmt; html }]
end
module Asttypes = struct
type rec_flag = [%import: Asttypes.rec_flag] [@@deriving gt ~options:{ fmt; html }]
type direction_flag = [%import: Asttypes.direction_flag] [@@deriving gt ~options:{ fmt; html }]
type private_flag = [%import: Asttypes.private_flag] [@@deriving gt ~options:{ fmt; html }]
type mutable_flag = [%import: Asttypes.mutable_flag] [@@deriving gt ~options:{ fmt; html }]
type virtual_flag = [%import: Asttypes.virtual_flag] [@@deriving gt ~options:{ fmt; html }]
type override_flag = [%import: Asttypes.override_flag] [@@deriving gt ~options:{ fmt; html }]
type closed_flag = [%import: Asttypes.closed_flag] [@@deriving gt ~options:{ fmt; html }]
type label = string [@@deriving gt ~options:{ fmt; html }]
type arg_label = [%import: Asttypes.arg_label] [@@deriving gt ~options:{ fmt; html }]
type 'a loc = [%import: 'a Asttypes.loc] [@@deriving gt ~options:{ fmt; html }]
type variance = [%import: Asttypes.variance] [@@deriving gt ~options:{ fmt; html }]
end
open Asttypes
type constant = [%import: Parsetree.constant] [@@deriving gt ~options:{ fmt; html }]
type attribute = [ % import : Parsetree.attribute ]
and extension = [ % import : Parsetree.extension ]
and attributes = [ % import : Parsetree.attributes ]
and payload = [ % import : Parsetree.payload ]
and core_type = [ % import : Parsetree.core_type ]
and core_type_desc = [ % import : Parsetree.core_type_desc ]
and package_type = [ % import : Parsetree.package_type ]
and = [ % import : Parsetree.row_field ]
and object_field = [ % import : ]
and structure = [ % import : Parsetree.structure ]
and structure_item = [ % import : ]
and structure_item_desc = [ % import : Parsetree.structure_item_desc ]
and value_binding = [ % import : Parsetree.value_binding ]
and value_description = [ % import : Parsetree.value_description ]
and type_declaration = [ % import : Parsetree.type_declaration ]
and type_extension = [ % import : Parsetree.type_extension ]
and module_binding = [ % import : Parsetree.module_binding ]
and module_type_declaration = [ % import : Parsetree.module_type_declaration ]
and open_description = [ % import : Parsetree.open_description ]
and class_type_declaration = [ % import : Parsetree.class_type_declaration ]
and class_type = [ % import : Parsetree.class_type ]
and class_type_desc = [ % import : Parsetree.class_type_desc ]
and class_signature = [ % import : ]
and class_type_field = [ % import : Parsetree.class_type_field ]
and class_type_field_desc = [ % import : Parsetree.class_type_field_desc ]
and include_declaration = [ % import : Parsetree.include_declaration ]
and ' a include_infos = [ % import : ' a Parsetree.include_infos ]
and module_expr = [ % import : Parsetree.module_expr ]
and module_expr_desc = [ % import : Parsetree.module_expr_desc ]
and module_type = [ % import : Parsetree.module_type ]
and module_type_desc = [ % import : Parsetree.module_type_desc ]
and class_declaration = [ % import : Parsetree.class_declaration ]
and ' a class_infos = [ % import : ' a Parsetree.class_infos ]
and class_expr = [ % import : Parsetree.class_expr ]
and class_expr_desc = [ % import : Parsetree.class_expr_desc ]
and class_structure = [ % import : Parsetree.class_structure ]
and class_field = [ % import : Parsetree.class_field ]
and class_field_desc = [ % import : Parsetree.class_field_desc ]
and class_field_kind = [ % import : Parsetree.class_field_kind ]
and type_kind = [ % import : Parsetree.type_kind ]
and constructor_declaration = [ % import : Parsetree.constructor_declaration ]
and constructor_arguments = [ % import : Parsetree.constructor_arguments ]
and label_declaration = [ % import : Parsetree.label_declaration ]
and with_constraint = [ % import : Parsetree.with_constraint ]
and signature = [ % import : Parsetree.signature ]
and signature_item = [ % import : ]
and signature_item_desc = [ % import : Parsetree.signature_item_desc ]
and module_declaration = [ % import : Parsetree.module_declaration ]
and include_description = [ % import : Parsetree.include_description ]
and class_description = [ % import : Parsetree.class_description ]
and pattern = [ % import : Parsetree.pattern ]
and pattern_desc = [ % import : Parsetree.pattern_desc ]
and expression = [ % import : Parsetree.expression ]
and expression_desc = [ % import : Parsetree.expression_desc ]
and extension_constructor = [ % import : Parsetree.extension_constructor ]
and extension_constructor_kind = [ % import : Parsetree.extension_constructor_kind ]
and case = [ % import : Parsetree.case ]
[ @@deriving gt ~options : { fmt ; } ]
type attribute = [%import: Parsetree.attribute]
and extension = [%import: Parsetree.extension]
and attributes = [%import: Parsetree.attributes]
and payload = [%import: Parsetree.payload]
and core_type = [%import: Parsetree.core_type]
and core_type_desc = [%import: Parsetree.core_type_desc]
and package_type = [%import: Parsetree.package_type]
and row_field = [%import: Parsetree.row_field]
and object_field = [%import: Parsetree.object_field]
and structure = [%import: Parsetree.structure]
and structure_item = [%import: Parsetree.structure_item]
and structure_item_desc = [%import: Parsetree.structure_item_desc]
and value_binding = [%import: Parsetree.value_binding]
and value_description = [%import: Parsetree.value_description]
and type_declaration = [%import: Parsetree.type_declaration]
and type_extension = [%import: Parsetree.type_extension]
and module_binding = [%import: Parsetree.module_binding]
and module_type_declaration = [%import: Parsetree.module_type_declaration]
and open_description = [%import: Parsetree.open_description]
and class_type_declaration = [%import: Parsetree.class_type_declaration]
and class_type = [%import: Parsetree.class_type]
and class_type_desc = [%import: Parsetree.class_type_desc]
and class_signature = [%import: Parsetree.class_signature]
and class_type_field = [%import: Parsetree.class_type_field]
and class_type_field_desc = [%import: Parsetree.class_type_field_desc]
and include_declaration = [%import: Parsetree.include_declaration]
and 'a include_infos = [%import: 'a Parsetree.include_infos]
and module_expr = [%import: Parsetree.module_expr]
and module_expr_desc = [%import: Parsetree.module_expr_desc]
and module_type = [%import: Parsetree.module_type]
and module_type_desc = [%import: Parsetree.module_type_desc]
and class_declaration = [%import: Parsetree.class_declaration]
and 'a class_infos = [%import: 'a Parsetree.class_infos]
and class_expr = [%import: Parsetree.class_expr]
and class_expr_desc = [%import: Parsetree.class_expr_desc]
and class_structure = [%import: Parsetree.class_structure]
and class_field = [%import: Parsetree.class_field]
and class_field_desc = [%import: Parsetree.class_field_desc]
and class_field_kind = [%import: Parsetree.class_field_kind]
and type_kind = [%import: Parsetree.type_kind]
and constructor_declaration = [%import: Parsetree.constructor_declaration]
and constructor_arguments = [%import: Parsetree.constructor_arguments]
and label_declaration = [%import: Parsetree.label_declaration]
and with_constraint = [%import: Parsetree.with_constraint]
and signature = [%import: Parsetree.signature]
and signature_item = [%import: Parsetree.signature_item]
and signature_item_desc = [%import: Parsetree.signature_item_desc]
and module_declaration = [%import: Parsetree.module_declaration]
and include_description = [%import: Parsetree.include_description]
and class_description = [%import: Parsetree.class_description]
and pattern = [%import: Parsetree.pattern]
and pattern_desc = [%import: Parsetree.pattern_desc]
and expression = [%import: Parsetree.expression]
and expression_desc = [%import: Parsetree.expression_desc]
and extension_constructor = [%import: Parsetree.extension_constructor]
and extension_constructor_kind = [%import: Parsetree.extension_constructor_kind]
and case = [%import: Parsetree.case]
[@@deriving gt ~options:{ fmt; }]
*)
type attribute = (string Asttypes.loc * payload)
and extension = (string Asttypes.loc * payload)
and attributes = attribute list
and payload =
| PStr of int
| PSig of signature
| PTyp of core_type
| PPat of pattern * expression option
and core_type =
{
ptyp_desc: core_type_desc ;
ptyp_loc: Location.t ;
ptyp_attributes: attributes }
and core_type_desc =
| Ptyp_any
| Ptyp_var of string
| Ptyp_arrow of Asttypes.arg_label * core_type * core_type
| Ptyp_tuple of core_type list
| Ptyp_constr of Longident.t Asttypes.loc * core_type list
| Ptyp_object of object_field list * Asttypes.closed_flag
| Ptyp_class of Longident.t Asttypes.loc * core_type list
| Ptyp_alias of core_type * string
| Ptyp_variant of row_field list * Asttypes.closed_flag * Asttypes.label
list option
| Ptyp_poly of string Asttypes.loc list * core_type
| Ptyp_package of package_type
| Ptyp_extension of extension
and package_type =
(Longident.t Asttypes.loc * (Longident.t Asttypes.loc * core_type) list)
and row_field =
| Rtag of Asttypes.label Asttypes.loc * attributes * bool * core_type list
| Rinherit of core_type
and object_field =
| Otag of Asttypes.label Asttypes.loc * attributes * core_type
| Oinherit of core_type
and structure = structure_item list
and structure_item =
{
pstr_desc: structure_item_desc ;
pstr_loc: Location.t }
and structure_item_desc =
| Pstr_eval of expression * attributes
| Pstr_value of Asttypes.rec_flag * value_binding list
| Pstr_primitive of value_description
| Pstr_type of Asttypes.rec_flag * type_declaration list
| Pstr_typext of type_extension
| Pstr_exception of extension_constructor
| Pstr_module of module_binding
| Pstr_recmodule of module_binding list
| Pstr_modtype of module_type_declaration
| Pstr_open of open_description
| Pstr_class of class_declaration list
| Pstr_class_type of class_type_declaration list
| Pstr_include of include_declaration
| Pstr_attribute of attribute
| Pstr_extension of extension * attributes
and value_binding =
{
pvb_pat: pattern ;
pvb_expr: expression ;
pvb_attributes: attributes ;
pvb_loc: Location.t }
and value_description =
{
pval_name: string Asttypes.loc ;
pval_type: core_type ;
pval_prim: string list ;
pval_attributes: attributes ;
pval_loc: Location.t }
and type_declaration =
{
ptype_name: string Asttypes.loc ;
ptype_params: (core_type * Asttypes.variance) list ;
ptype_cstrs: (core_type * core_type * Location.t) list ;
ptype_kind: type_kind ;
ptype_private: Asttypes.private_flag ;
ptype_manifest: core_type option ;
ptype_attributes: attributes ;
ptype_loc: Location.t }
and type_extension =
{
ptyext_path: Longident.t Asttypes.loc ;
ptyext_params: (core_type * Asttypes.variance) list ;
ptyext_constructors: extension_constructor list ;
ptyext_private: Asttypes.private_flag ;
ptyext_attributes: attributes }
and module_binding =
{
pmb_name: string Asttypes.loc ;
pmb_expr: module_expr ;
pmb_attributes: attributes ;
pmb_loc: Location.t }
and module_type_declaration =
{
pmtd_name: string Asttypes.loc ;
pmtd_type: module_type option ;
pmtd_attributes: attributes ;
pmtd_loc: Location.t }
and open_description =
{
popen_lid: Longident.t Asttypes.loc ;
popen_override: Asttypes.override_flag ;
popen_loc: Location.t ;
popen_attributes: attributes }
and class_type_declaration = class_type class_infos
and class_type =
{
pcty_desc: class_type_desc ;
pcty_loc: Location.t ;
pcty_attributes: attributes }
and class_type_desc =
| Pcty_constr of Longident.t Asttypes.loc * core_type list
| Pcty_signature of class_signature
| Pcty_arrow of Asttypes.arg_label * core_type * class_type
| Pcty_extension of extension
| Pcty_open of Asttypes.override_flag * Longident.t Asttypes.loc *
class_type
and class_signature =
{
pcsig_self: core_type ;
pcsig_fields: class_type_field list }
and class_type_field =
{
pctf_desc: class_type_field_desc ;
pctf_loc: Location.t ;
pctf_attributes: attributes }
and class_type_field_desc =
| Pctf_inherit of class_type
| Pctf_val of (Asttypes.label Asttypes.loc *
Asttypes.mutable_flag *
Asttypes.virtual_flag *
core_type
)
and include_declaration = module_expr include_infos
and 'a include_infos =
{
pincl_mod: 'a ;
pincl_loc: Location.t ;
pincl_attributes: attributes }
and module_expr =
{
pmod_desc: module_expr_desc ;
pmod_loc: Location.t ;
pmod_attributes: attributes }
and module_expr_desc =
| Pmod_ident of Longident.t Asttypes.loc
| Pmod_structure of structure
| Pmod_functor of string Asttypes.loc * module_type option * module_expr
| Pmod_apply of module_expr * module_expr
| Pmod_constraint of module_expr * module_type
| Pmod_unpack of expression
| Pmod_extension of extension
and module_type =
{
pmty_desc: module_type_desc ;
pmty_loc: Location.t ;
pmty_attributes: attributes }
and module_type_desc =
| Pmty_ident of Longident.t Asttypes.loc
| Pmty_signature of signature
| Pmty_functor of string Asttypes.loc * module_type option * module_type
| Pmty_with of module_type * with_constraint list
| Pmty_typeof of module_expr
| Pmty_extension of extension
| Pmty_alias of Longident.t Asttypes.loc
and class_declaration = class_expr class_infos
and 'a class_infos =
{
pci_virt: Asttypes.virtual_flag ;
pci_params: (core_type * Asttypes.variance) list ;
pci_name: string Asttypes.loc ;
pci_expr: 'a ;
pci_loc: Location.t ;
pci_attributes: attributes }
and class_expr =
{
pcl_desc: class_expr_desc ;
pcl_loc: Location.t ;
pcl_attributes: attributes }
and class_expr_desc =
| Pcl_constr of Longident.t Asttypes.loc * core_type list
| Pcl_structure of class_structure
| Pcl_fun of Asttypes.arg_label * expression option * pattern * class_expr
| Pcl_apply of class_expr * (Asttypes.arg_label * expression) list
| Pcl_let of Asttypes.rec_flag * value_binding list * class_expr
| Pcl_constraint of class_expr * class_type
| Pcl_extension of extension
| Pcl_open of Asttypes.override_flag * Longident.t Asttypes.loc *
class_expr
and class_structure =
{
pcstr_self: pattern ;
pcstr_fields: class_field list }
and class_field =
{
pcf_desc: class_field_desc ;
pcf_loc: Location.t ;
pcf_attributes: attributes }
and class_field_desc =
| Pcf_inherit of Asttypes.override_flag * class_expr * string Asttypes.loc
option
| Pcf_val of (Asttypes.label Asttypes.loc * Asttypes.mutable_flag *
class_field_kind)
| Pcf_method of (Asttypes.label Asttypes.loc * Asttypes.private_flag *
class_field_kind)
| Pcf_constraint of (core_type * core_type)
| Pcf_initializer of expression
| Pcf_attribute of attribute
| Pcf_extension of extension
and class_field_kind =
| Cfk_virtual of core_type
| Cfk_concrete of Asttypes.override_flag * expression
and type_kind =
| Ptype_abstract
| Ptype_variant of constructor_declaration list
| Ptype_record of label_declaration list
| Ptype_open
and constructor_declaration =
{
pcd_name: string Asttypes.loc ;
pcd_args: constructor_arguments ;
pcd_res: core_type option ;
pcd_loc: Location.t ;
pcd_attributes: attributes }
and constructor_arguments =
| Pcstr_tuple of core_type list
| Pcstr_record of label_declaration list
and label_declaration =
{
pld_name: string Asttypes.loc ;
pld_mutable: Asttypes.mutable_flag ;
pld_type: core_type ;
pld_loc: Location.t ;
pld_attributes: attributes }
and with_constraint =
| Pwith_type of Longident.t Asttypes.loc * type_declaration
| Pwith_module of Longident.t Asttypes.loc * Longident.t Asttypes.loc
| Pwith_typesubst of Longident.t Asttypes.loc * type_declaration
| Pwith_modsubst of Longident.t Asttypes.loc * Longident.t Asttypes.loc
and signature = signature_item list
and signature_item =
{
psig_desc: signature_item_desc ;
psig_loc: Location.t }
and signature_item_desc =
| Psig_value of value_description
| Psig_type of Asttypes.rec_flag * type_declaration list
| Psig_typext of type_extension
| Psig_exception of extension_constructor
| Psig_module of module_declaration
| Psig_recmodule of module_declaration list
| Psig_modtype of module_type_declaration
| Psig_open of open_description
| Psig_include of include_description
| Psig_class of class_description list
| Psig_class_type of class_type_declaration list
| Psig_attribute of attribute
| Psig_extension of extension * attributes
and module_declaration =
{
pmd_name: string Asttypes.loc ;
pmd_type: module_type ;
pmd_attributes: attributes ;
pmd_loc: Location.t }
and include_description = module_type include_infos
and class_description = class_type class_infos
and pattern =
{
ppat_desc: pattern_desc ;
ppat_loc: Location.t ;
ppat_attributes: attributes }
and pattern_desc =
| Ppat_any
| Ppat_var of string Asttypes.loc
| Ppat_alias of pattern * string Asttypes.loc
| Ppat_constant of constant
| Ppat_interval of constant * constant
| Ppat_tuple of pattern list
| Ppat_construct of Longident.t Asttypes.loc * pattern option
| Ppat_variant of Asttypes.label * pattern option
| Ppat_record of (Longident.t Asttypes.loc * pattern) list *
Asttypes.closed_flag
| Ppat_array of pattern list
| Ppat_or of pattern * pattern
| Ppat_constraint of pattern * core_type
| Ppat_type of Longident.t Asttypes.loc
| Ppat_lazy of pattern
| Ppat_unpack of string Asttypes.loc
| Ppat_exception of pattern
| Ppat_extension of extension
| Ppat_open of Longident.t Asttypes.loc * pattern
and expression =
{
pexp_desc: expression_desc ;
pexp_loc: Location.t ;
pexp_attributes: attributes }
and expression_desc =
| Pexp_ident of Longident.t Asttypes.loc
| Pexp_constant of constant
| Pexp_let of Asttypes.rec_flag * value_binding list * expression
| Pexp_function of case list
| Pexp_fun of Asttypes.arg_label * expression option * pattern * expression
| Pexp_apply of expression * (Asttypes.arg_label * expression) list
| Pexp_match of expression * case list
| Pexp_try of expression * case list
| Pexp_tuple of expression list
| Pexp_construct of Longident.t Asttypes.loc * expression option
| Pexp_variant of Asttypes.label * expression option
| Pexp_record of (Longident.t Asttypes.loc * expression) list * expression
option
| Pexp_field of expression * Longident.t Asttypes.loc
| Pexp_setfield of expression * Longident.t Asttypes.loc * expression
| Pexp_array of expression list
| Pexp_ifthenelse of expression * expression * expression option
| Pexp_sequence of expression * expression
| Pexp_while of expression * expression
| Pexp_for of pattern * expression * expression * Asttypes.direction_flag *
expression
| Pexp_constraint of expression * core_type
| Pexp_coerce of expression * core_type option * core_type
| Pexp_send of expression * Asttypes.label Asttypes.loc
| Pexp_new of Longident.t Asttypes.loc
| Pexp_setinstvar of Asttypes.label Asttypes.loc * expression
| Pexp_override of (Asttypes.label Asttypes.loc * expression) list
| Pexp_letmodule of string Asttypes.loc * module_expr * expression
| Pexp_letexception of extension_constructor * expression
| Pexp_assert of expression
| Pexp_lazy of expression
| Pexp_poly of expression * core_type option
| Pexp_object of class_structure
| Pexp_newtype of string Asttypes.loc * expression
| Pexp_pack of module_expr
| Pexp_open of Asttypes.override_flag * Longident.t Asttypes.loc *
expression
| Pexp_extension of extension
| Pexp_unreachable
and extension_constructor =
{
pext_name: string Asttypes.loc ;
pext_kind: extension_constructor_kind ;
pext_loc: Location.t ;
pext_attributes: attributes }
and extension_constructor_kind =
| Pext_decl of constructor_arguments * core_type option
| Pext_rebind of Longident.t Asttypes.loc
and case =
{
pc_lhs: pattern ;
pc_guard: expression option ;
pc_rhs: expression }
[@@deriving gt ~options:{ fmt; }]
|
03a21911309d5285952fc3f5329019a08a14518fad4ea834d177cd4c0f061624 | profmaad/bitcaml | bitcoin_crypto.ml | open! Core.Std
open Cryptokit;;
let ripemd160 data =
hash_string (Hash.ripemd160 ()) data
;;
let sha1 data =
hash_string (Hash.sha1 ()) data
;;
let sha256 data =
hash_string (Hash.sha256 ()) data
;;
let hash160 data =
ripemd160 (sha256 data)
;;
let hash256 data =
sha256 (sha256 data)
;;
let double_sha256 = hash256;;
(* let double_sha256 data = *)
(* Sha256.to_bin (Sha256.string (Sha256.to_bin (Sha256.string data))) *)
(* ;; *)
let message_checksum payload =
let digest = hash256 payload in
String.sub digest 0 4
;;
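(* Illustrative note (not part of the original file): message_checksum returns the
   first 4 bytes of the double-SHA256 digest, which matches how Bitcoin P2P message
   checksums are computed; e.g. message_checksum payload yields a 4-byte string. *)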
let rec merkle_tree_hash hash_f hashes =
let rec create_row acc = function
| [] -> acc
| hash :: [] ->
(hash_f (hash ^ hash)) :: acc
| hash1 :: hash2 :: hashes ->
create_row (hash_f (hash1 ^ hash2) :: acc) hashes
in
if (List.length hashes) = 1 then List.hd_exn hashes
else
let row = create_row [] hashes in
let row = List.rev row in
merkle_tree_hash hash_f row
;;
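(* Illustrative sketch (not part of the original file): a Bitcoin-style merkle
   root could be computed as
     merkle_tree_hash hash256 [txid1; txid2; txid3]
   where txid1..txid3 are hypothetical transaction hashes; an odd trailing hash
   is paired with itself, as the "hash :: []" case above shows. *)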
| null | https://raw.githubusercontent.com/profmaad/bitcaml/18cfbca46c989f43dfb1bcfd50ee2ff500f9ab8d/src/bitcoin_crypto.ml | ocaml | let double_sha256 data =
Sha256.to_bin (Sha256.string (Sha256.to_bin (Sha256.string data)))
;; | open! Core.Std
open Cryptokit;;
let ripemd160 data =
hash_string (Hash.ripemd160 ()) data
;;
let sha1 data =
hash_string (Hash.sha1 ()) data
;;
let sha256 data =
hash_string (Hash.sha256 ()) data
;;
let hash160 data =
ripemd160 (sha256 data)
;;
let hash256 data =
sha256 (sha256 data)
;;
let double_sha256 = hash256;;
let message_checksum payload =
let digest = hash256 payload in
String.sub digest 0 4
;;
let rec merkle_tree_hash hash_f hashes =
let rec create_row acc = function
| [] -> acc
| hash :: [] ->
(hash_f (hash ^ hash)) :: acc
| hash1 :: hash2 :: hashes ->
create_row (hash_f (hash1 ^ hash2) :: acc) hashes
in
if (List.length hashes) = 1 then List.hd_exn hashes
else
let row = create_row [] hashes in
let row = List.rev row in
merkle_tree_hash hash_f row
;;
|
47743360dd824e097d892b433c6e587893601b0ea58531eb01bb3f9af7a626ef | elisehuard/game-in-haskell | Backend.hs | {-# LANGUAGE PackageImports #-}
module Testing.Backend (
withWindow
, readInput
, replayInput
, exitKeyPressed
, swapBuffers
) where
import "GLFW-b" Graphics.UI.GLFW as GLFW
import Control.Monad (when)
import Control.Applicative ((<$>), (<*>))
import Control.Concurrent (MVar, tryTakeMVar)
import Data.Maybe (isJust)
import Testing.GameTypes
import Data.Time.Clock.POSIX
withWindow :: Int
-> Int
-> ((Int, Int) -> IO ())
-> String
-> (Window -> IO ())
-> IO ()
withWindow width height windowSizeSink title f = do
GLFW.setErrorCallback $ Just simpleErrorCallback
r <- GLFW.init
when r $ do
m <- GLFW.createWindow width height title Nothing Nothing
case m of
(Just win) -> do
GLFW.makeContextCurrent m
setWindowSizeCallback win $ Just $ resize windowSizeSink
f win
GLFW.setErrorCallback $ Just simpleErrorCallback
GLFW.destroyWindow win
Nothing -> return ()
GLFW.terminate
where
simpleErrorCallback e s =
putStrLn $ unwords [show e, show s]
resize :: ((Int, Int) -> IO()) -> Window -> Int -> Int -> IO()
resize windowSizeSink _ w h = windowSizeSink (w, h)
keyIsPressed :: Window -> Key -> IO Bool
keyIsPressed win key = isPress `fmap` GLFW.getKey win key
isPress :: KeyState -> Bool
isPress KeyState'Pressed = True
isPress KeyState'Repeating = True
isPress _ = False
readInput :: Window
-> ((Bool, Bool, Bool, Bool) -> IO ())
-> ((Bool, Bool, Bool, Bool) -> IO ())
-> ((Int, Bool) -> IO ())
-> ((Int, Bool, Bool) -> IO ())
-> (Maybe Command -> IO ())
-> MVar Command
-> IO ()
readInput window directionKeySink shootKeySink snapshotSink recordSink commandSink commandVar = do
pollEvents
directionKeySink =<< (,,,) <$> keyIsPressed window Key'Left
<*> keyIsPressed window Key'Right
<*> keyIsPressed window Key'Up
<*> keyIsPressed window Key'Down
shootKeySink =<< (,,,) <$> keyIsPressed window Key'A
<*> keyIsPressed window Key'D
<*> keyIsPressed window Key'W
<*> keyIsPressed window Key'S
startRecording <- keyIsPressed window Key'R
endRecording <- keyIsPressed window Key'E
timestamp <- round `fmap` getPOSIXTime
snapshotting <- keyIsPressed window Key'T
snapshotSink (timestamp, snapshotting)
recordSink (timestamp, startRecording, endRecording)
mbCommand <- tryTakeMVar commandVar
when (isJust mbCommand) $ print mbCommand
commandSink mbCommand
replayInput :: Window
-> ExternalInput
-> ((Bool, Bool, Bool, Bool) -> IO ())
-> ((Bool, Bool, Bool, Bool) -> IO ())
-> ((Int, Bool) -> IO ())
-> ((Int, Bool, Bool) -> IO ())
-> (Maybe Command -> IO ())
-> IO ()
replayInput win
(ExternalInput directionKey shootKey)
directionKeySink
shootKeySink
snapshotSink
recordSink
commandSink = do
pollEvents
directionKeySink directionKey
shootKeySink shootKey
snapshotSink (0, False)
recordSink (0, False, False)
commandSink Nothing
exitKeyPressed :: Window -> IO Bool
exitKeyPressed window = keyIsPressed window Key'Escape
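-- Illustrative sketch (not part of the original module): a caller would
-- typically wire these functions together roughly as
--   withWindow 640 480 sizeSink "title" $ \win ->
--     let loop = do
--           readInput win dirSink shootSink snapSink recSink cmdSink cmdVar
--           -- ... step the game and render, then swapBuffers win ...
--           quit <- exitKeyPressed win
--           Control.Monad.unless quit loop
--     in loop
-- where the sink arguments are hypothetical FRP event sinks.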
| null | https://raw.githubusercontent.com/elisehuard/game-in-haskell/b755c42d63ff5dc9246b46590fb23ebcc1d455b1/src/Testing/Backend.hs | haskell | # LANGUAGE PackageImports # | module Testing.Backend (
withWindow
, readInput
, replayInput
, exitKeyPressed
, swapBuffers
) where
import "GLFW-b" Graphics.UI.GLFW as GLFW
import Control.Monad (when)
import Control.Applicative ((<$>), (<*>))
import Control.Concurrent (MVar, tryTakeMVar)
import Data.Maybe (isJust)
import Testing.GameTypes
import Data.Time.Clock.POSIX
withWindow :: Int
-> Int
-> ((Int, Int) -> IO ())
-> String
-> (Window -> IO ())
-> IO ()
withWindow width height windowSizeSink title f = do
GLFW.setErrorCallback $ Just simpleErrorCallback
r <- GLFW.init
when r $ do
m <- GLFW.createWindow width height title Nothing Nothing
case m of
(Just win) -> do
GLFW.makeContextCurrent m
setWindowSizeCallback win $ Just $ resize windowSizeSink
f win
GLFW.setErrorCallback $ Just simpleErrorCallback
GLFW.destroyWindow win
Nothing -> return ()
GLFW.terminate
where
simpleErrorCallback e s =
putStrLn $ unwords [show e, show s]
resize :: ((Int, Int) -> IO()) -> Window -> Int -> Int -> IO()
resize windowSizeSink _ w h = windowSizeSink (w, h)
keyIsPressed :: Window -> Key -> IO Bool
keyIsPressed win key = isPress `fmap` GLFW.getKey win key
isPress :: KeyState -> Bool
isPress KeyState'Pressed = True
isPress KeyState'Repeating = True
isPress _ = False
readInput :: Window
-> ((Bool, Bool, Bool, Bool) -> IO ())
-> ((Bool, Bool, Bool, Bool) -> IO ())
-> ((Int, Bool) -> IO ())
-> ((Int, Bool, Bool) -> IO ())
-> (Maybe Command -> IO ())
-> MVar Command
-> IO ()
readInput window directionKeySink shootKeySink snapshotSink recordSink commandSink commandVar = do
pollEvents
directionKeySink =<< (,,,) <$> keyIsPressed window Key'Left
<*> keyIsPressed window Key'Right
<*> keyIsPressed window Key'Up
<*> keyIsPressed window Key'Down
shootKeySink =<< (,,,) <$> keyIsPressed window Key'A
<*> keyIsPressed window Key'D
<*> keyIsPressed window Key'W
<*> keyIsPressed window Key'S
startRecording <- keyIsPressed window Key'R
endRecording <- keyIsPressed window Key'E
timestamp <- round `fmap` getPOSIXTime
snapshotting <- keyIsPressed window Key'T
snapshotSink (timestamp, snapshotting)
recordSink (timestamp, startRecording, endRecording)
mbCommand <- tryTakeMVar commandVar
when (isJust mbCommand) $ print mbCommand
commandSink mbCommand
replayInput :: Window
-> ExternalInput
-> ((Bool, Bool, Bool, Bool) -> IO ())
-> ((Bool, Bool, Bool, Bool) -> IO ())
-> ((Int, Bool) -> IO ())
-> ((Int, Bool, Bool) -> IO ())
-> (Maybe Command -> IO ())
-> IO ()
replayInput win
(ExternalInput directionKey shootKey)
directionKeySink
shootKeySink
snapshotSink
recordSink
commandSink = do
pollEvents
directionKeySink directionKey
shootKeySink shootKey
snapshotSink (0, False)
recordSink (0, False, False)
commandSink Nothing
exitKeyPressed :: Window -> IO Bool
exitKeyPressed window = keyIsPressed window Key'Escape
|
0f1acdd76b667c64fbaeae930c41dcd5a69055154908593217b0921dca792262 | seancribbs/neotoma | test_memoization.erl | -module(test_memoization).
-author("Sean Cribbs <>").
-include_lib("eunit/include/eunit.hrl").
-define(I, fun(V,_) -> V end).
setup_memo_test() ->
neotoma_peg:setup_memo(),
?assertNot(undefined == ets:info(get({parse_memo_table, neotoma_peg}))),
neotoma_peg:release_memo().
release_memo_test() ->
neotoma_peg:setup_memo(),
neotoma_peg:release_memo(),
?assertEqual(undefined, ets:info(get({parse_memo_table, neotoma_peg}))).
step_memo_test() ->
neotoma_peg:setup_memo(),
Result = neotoma_peg:p(<<"abcdefghi">>, {{line,1},{column,1}}, anything, neotoma_peg:p_anything(), ?I),
?assertEqual({<<"a">>, <<"bcdefghi">>, {{line,1},{column,2}}}, Result),
Result2 = neotoma_peg:p(<<"abcdefghi">>, {{line,1},{column,1}}, anything, fun(_) ->
throw(bork) end, ?I),
?assertEqual(Result, Result2),
neotoma_peg:release_memo().
concurrent_memo_test() ->
Me = self(),
Him = spawn(fun() -> Me ! neotoma_peg:setup_memo(), receive _ -> ok after 10000 -> ok end end),
MyTid = neotoma_peg:setup_memo(),
receive
Tid -> ?assertNot(Tid == MyTid),
Him ! ok
after 10000 -> ok
end,
neotoma_peg:release_memo().
| null | https://raw.githubusercontent.com/seancribbs/neotoma/9e57d8ebd4ebb02c3e2428b08f3a01e2ff834ce2/test/test_memoization.erl | erlang | -module(test_memoization).
-author("Sean Cribbs <>").
-include_lib("eunit/include/eunit.hrl").
-define(I, fun(V,_) -> V end).
setup_memo_test() ->
neotoma_peg:setup_memo(),
?assertNot(undefined == ets:info(get({parse_memo_table, neotoma_peg}))),
neotoma_peg:release_memo().
release_memo_test() ->
neotoma_peg:setup_memo(),
neotoma_peg:release_memo(),
?assertEqual(undefined, ets:info(get({parse_memo_table, neotoma_peg}))).
step_memo_test() ->
neotoma_peg:setup_memo(),
Result = neotoma_peg:p(<<"abcdefghi">>, {{line,1},{column,1}}, anything, neotoma_peg:p_anything(), ?I),
?assertEqual({<<"a">>, <<"bcdefghi">>, {{line,1},{column,2}}}, Result),
Result2 = neotoma_peg:p(<<"abcdefghi">>, {{line,1},{column,1}}, anything, fun(_) ->
throw(bork) end, ?I),
?assertEqual(Result, Result2),
neotoma_peg:release_memo().
concurrent_memo_test() ->
Me = self(),
Him = spawn(fun() -> Me ! neotoma_peg:setup_memo(), receive _ -> ok after 10000 -> ok end end),
MyTid = neotoma_peg:setup_memo(),
receive
Tid -> ?assertNot(Tid == MyTid),
Him ! ok
after 10000 -> ok
end,
neotoma_peg:release_memo().
|
|
99dfa3225a2b2a3ea7db7ebac2c88cda8ca0b3591e44f59c27d57556f3fcc5ff | originrose/cortex | softmax.clj | (ns cortex.loss.softmax
(:require [clojure.core.matrix :as m]
[cortex.util :refer [merge-args max-index]]
[cortex.compute.math :as math]
[cortex.compute.nn.backend :as backend]
[cortex.loss.util :as util]
[cortex.loss.core :as loss]
[cortex.graph :as graph]
[cortex.tensor :as tensor]))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; Compute implementation
(defn- calculate-cross-entropy-gradient
[backend v target gradient]
(tensor/with-stream
(backend/get-stream)
(let [target (math/->batch-ct target)
gradient (math/->batch-ct gradient)
v (math/->batch-ct v)]
(tensor/binary-op! gradient 1.0 v 1.0 target :-))))
(defrecord SoftmaxLoss [loss-term backend]
util/PComputeLoss
(compute-loss-gradient [this buffer-map]
(calculate-cross-entropy-gradient backend
(get-in buffer-map [:output :buffer])
(get-in buffer-map [:labels :buffer])
(get-in buffer-map [:output :gradient]))))
(defmethod util/create-compute-loss-term :softmax-loss
[backend network loss-term batch-size]
(->SoftmaxLoss loss-term backend))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; Graph implementation
(defmethod graph/get-node-metadata :softmax-loss
[loss-term]
{:arguments {:output {:gradients? true}
:labels {}}
:passes [:loss]})
(defmethod graph/generate-stream-definitions :softmax-loss
[graph loss-term]
(util/generate-loss-term-stream-definitions graph loss-term))
(defn log-likelihood-softmax-loss
^double [softmax-output answer]
(let [answer-num (m/esum (m/mul softmax-output answer))]
(- (Math/log answer-num))))
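;; Illustrative example (not in the original source): for a softmax output of
;; [0.7 0.2 0.1] and a one-hot label [1.0 0.0 0.0] the loss is (- (Math/log 0.7)),
;; roughly 0.357.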
(defmethod loss/loss :softmax-loss
[loss-term buffer-map]
(let [output-channels (long (get loss-term :output-channels 1))
v (get buffer-map :output)
target (get buffer-map :labels)]
(if (= output-channels 1)
(log-likelihood-softmax-loss v target)
(let [n-pixels (quot (long (m/ecount v)) output-channels)]
(loop [pix 0
sum 0.0]
(if (< pix n-pixels)
(recur (inc pix)
(double (+ sum
(log-likelihood-softmax-loss
(m/subvector v (* pix output-channels) output-channels)
(m/subvector target (* pix output-channels) output-channels)))))
(double (/ sum n-pixels))))))))
(defmethod loss/generate-loss-term :softmax-loss
[item-key]
(util/generic-loss-term item-key))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; util
(defn- softmax-result-to-unit-vector
[result]
(let [zeros (apply vector (repeat (first (m/shape result)) 0))]
(assoc zeros (max-index (into [] (seq result))) 1.0)))
(defn- softmax-results-to-unit-vectors
[results]
(let [zeros (apply vector (repeat (first (m/shape (first results))) 0))]
(mapv #(assoc zeros (max-index (into [] (seq %))) 1.0)
results)))
(defn evaluate-softmax
"Provide a percentage correct for softmax. This is much easier to interpret than
the actual log-loss of the softmax unit."
[guesses answers]
(if (or (not (pos? (count guesses)))
(not (pos? (count answers)))
(not= (count guesses) (count answers)))
(throw (Exception. (format "evaluate-softmax: guesses [%d] and answers [%d] count must both be positive and equal."
(count guesses)
(count answers)))))
(let [results-answer-seq (mapv vector
(softmax-results-to-unit-vectors guesses)
answers)
correct (count (filter #(m/equals (first %) (second %)) results-answer-seq))]
(double (/ correct (count results-answer-seq)))))
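;; Illustrative example (not in the original source):
;; (evaluate-softmax [[0.9 0.1] [0.2 0.8]] [[1.0 0.0] [1.0 0.0]]) ;; => 0.5
;; since only the first guess picks the same index as its answer.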
| null | https://raw.githubusercontent.com/originrose/cortex/94b1430538e6187f3dfd1697c36ff2c62b475901/src/cortex/loss/softmax.clj | clojure |
Compute implementation
Graph implementation
util | (ns cortex.loss.softmax
(:require [clojure.core.matrix :as m]
[cortex.util :refer [merge-args max-index]]
[cortex.compute.math :as math]
[cortex.compute.nn.backend :as backend]
[cortex.loss.util :as util]
[cortex.loss.core :as loss]
[cortex.graph :as graph]
[cortex.tensor :as tensor]))
(defn- calculate-cross-entropy-gradient
[backend v target gradient]
(tensor/with-stream
(backend/get-stream)
(let [target (math/->batch-ct target)
gradient (math/->batch-ct gradient)
v (math/->batch-ct v)]
(tensor/binary-op! gradient 1.0 v 1.0 target :-))))
(defrecord SoftmaxLoss [loss-term backend]
util/PComputeLoss
(compute-loss-gradient [this buffer-map]
(calculate-cross-entropy-gradient backend
(get-in buffer-map [:output :buffer])
(get-in buffer-map [:labels :buffer])
(get-in buffer-map [:output :gradient]))))
(defmethod util/create-compute-loss-term :softmax-loss
[backend network loss-term batch-size]
(->SoftmaxLoss loss-term backend))
(defmethod graph/get-node-metadata :softmax-loss
[loss-term]
{:arguments {:output {:gradients? true}
:labels {}}
:passes [:loss]})
(defmethod graph/generate-stream-definitions :softmax-loss
[graph loss-term]
(util/generate-loss-term-stream-definitions graph loss-term))
(defn log-likelihood-softmax-loss
^double [softmax-output answer]
(let [answer-num (m/esum (m/mul softmax-output answer))]
(- (Math/log answer-num))))
(defmethod loss/loss :softmax-loss
[loss-term buffer-map]
(let [output-channels (long (get loss-term :output-channels 1))
v (get buffer-map :output)
target (get buffer-map :labels)]
(if (= output-channels 1)
(log-likelihood-softmax-loss v target)
(let [n-pixels (quot (long (m/ecount v)) output-channels)]
(loop [pix 0
sum 0.0]
(if (< pix n-pixels)
(recur (inc pix)
(double (+ sum
(log-likelihood-softmax-loss
(m/subvector v (* pix output-channels) output-channels)
(m/subvector target (* pix output-channels) output-channels)))))
(double (/ sum n-pixels))))))))
(defmethod loss/generate-loss-term :softmax-loss
[item-key]
(util/generic-loss-term item-key))
(defn- softmax-result-to-unit-vector
[result]
(let [zeros (apply vector (repeat (first (m/shape result)) 0))]
(assoc zeros (max-index (into [] (seq result))) 1.0)))
(defn- softmax-results-to-unit-vectors
[results]
(let [zeros (apply vector (repeat (first (m/shape (first results))) 0))]
(mapv #(assoc zeros (max-index (into [] (seq %))) 1.0)
results)))
(defn evaluate-softmax
"Provide a percentage correct for softmax. This is much easier to interpret than
the actual log-loss of the softmax unit."
[guesses answers]
(if (or (not (pos? (count guesses)))
(not (pos? (count answers)))
(not= (count guesses) (count answers)))
(throw (Exception. (format "evaluate-softmax: guesses [%d] and answers [%d] count must both be positive and equal."
(count guesses)
(count answers)))))
(let [results-answer-seq (mapv vector
(softmax-results-to-unit-vectors guesses)
answers)
correct (count (filter #(m/equals (first %) (second %)) results-answer-seq))]
(double (/ correct (count results-answer-seq)))))
|
ea4c562931c51e7d821b1258a3cd8c73ce1c0357e06cc03e0efc97ed92687a16 | tek/chiasma | DecodeError.hs | module Chiasma.Data.DecodeError where
import Text.ParserCombinators.Parsec (ParseError)
data DecodeFailure =
ParseFailure Text ParseError
|
IntParsingFailure Text
|
BoolParsingFailure Text
|
TooFewFields
|
TooManyFields [Text]
|
TooManyRecords [Text]
|
TargetMissing
deriving stock (Eq, Show)
data DecodeError =
DecodeError {
output :: [Text],
failure :: DecodeFailure
}
deriving stock (Eq, Show)
| null | https://raw.githubusercontent.com/tek/chiasma/45ce231f629db9ed67c78ce872ac719a90a217bd/packages/chiasma/lib/Chiasma/Data/DecodeError.hs | haskell | module Chiasma.Data.DecodeError where
import Text.ParserCombinators.Parsec (ParseError)
data DecodeFailure =
ParseFailure Text ParseError
|
IntParsingFailure Text
|
BoolParsingFailure Text
|
TooFewFields
|
TooManyFields [Text]
|
TooManyRecords [Text]
|
TargetMissing
deriving stock (Eq, Show)
data DecodeError =
DecodeError {
output :: [Text],
failure :: DecodeFailure
}
deriving stock (Eq, Show)
|
|
f3af5da0af51c1ccd492460f6978ff3bda104f61a7e1ae44ff779a0d197d6c74 | HealthSamurai/dojo.clj | debounce.cljs | (ns zframes.debounce
(:require [re-frame.core :refer [reg-fx dispatch console] :as rf]))
(defn now [] (.getTime (js/Date.)))
(def registered-keys (atom nil))
(defn dispatch-if-not-superceded [{:keys [key delay event time-received]}]
(when (= time-received (get @registered-keys key))
;; no new events on this key!
(dispatch event)))
(defn dispatch-later [{:keys [delay] :as debounce}]
(js/setTimeout
(fn [] (dispatch-if-not-superceded debounce))
(or delay 300)))
(defn dispatch-debounce [debounce]
(let [ts (now)]
(swap! registered-keys assoc (:key debounce) ts)
(dispatch-later (assoc debounce :time-received ts))))
(reg-fx :dispatch-debounce dispatch-debounce)
(rf/reg-event-fx
:dispatch-debounce
(fn [fx [_ deb]]
{:dispatch-debounce deb}))
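;; Illustrative usage (not part of the original file): an event handler can return
;; {:dispatch-debounce {:key ::search :event [::run-search query] :delay 300}}
;; and only the most recent event registered under ::search within the delay
;; window is actually dispatched.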
| null | https://raw.githubusercontent.com/HealthSamurai/dojo.clj/94922640f534897ab2b181c608b54bfbb8351d7b/ui/src/zframes/debounce.cljs | clojure | no new events on this key! | (ns zframes.debounce
(:require [re-frame.core :refer [reg-fx dispatch console] :as rf]))
(defn now [] (.getTime (js/Date.)))
(def registered-keys (atom nil))
(defn dispatch-if-not-superceded [{:keys [key delay event time-received]}]
(when (= time-received (get @registered-keys key))
(dispatch event)))
(defn dispatch-later [{:keys [delay] :as debounce}]
(js/setTimeout
(fn [] (dispatch-if-not-superceded debounce))
(or delay 300)))
(defn dispatch-debounce [debounce]
(let [ts (now)]
(swap! registered-keys assoc (:key debounce) ts)
(dispatch-later (assoc debounce :time-received ts))))
(reg-fx :dispatch-debounce dispatch-debounce)
(rf/reg-event-fx
:dispatch-debounce
(fn [fx [_ deb]]
{:dispatch-debounce deb}))
|
1dcbfa554a5d8966ecee30bc933cce2b4c5b599d317c7c3e80fddcfd232778e2 | puppetlabs/trapperkeeper-metrics | metrics_core.clj | (ns puppetlabs.trapperkeeper.services.metrics.metrics-core
(:import (com.codahale.metrics JmxReporter MetricRegistry)
(com.fasterxml.jackson.core JsonParseException)
(com.puppetlabs.trapperkeeper.metrics GraphiteReporter AllowedNamesMetricFilter)
(java.util.concurrent TimeUnit)
(java.net InetSocketAddress)
(com.codahale.metrics.graphite Graphite GraphiteSender))
(:require [clojure.tools.logging :as log]
[clojure.java.io :as io]
[cheshire.core :as json]
[schema.core :as schema]
[ring.middleware.defaults :as ring-defaults]
[puppetlabs.comidi :as comidi]
[puppetlabs.ring-middleware.utils :as ringutils]
[puppetlabs.trapperkeeper.services.metrics.metrics-utils
:as metrics-utils]
[puppetlabs.trapperkeeper.services.metrics.jolokia
:as jolokia]
[puppetlabs.kitchensink.core :as ks]
[puppetlabs.i18n.core :as i18n :refer [trs tru]]))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;;; Schemas
(def JmxReporterConfig
{:enabled schema/Bool})
(def JolokiaApiConfig
{(schema/optional-key :enabled) schema/Bool
(schema/optional-key :servlet-init-params) jolokia/JolokiaConfig})
(def MbeansApiConfig
{(schema/optional-key :enabled) schema/Bool})
(def WebserviceConfig
{(schema/optional-key :mbeans) MbeansApiConfig
(schema/optional-key :jolokia) JolokiaApiConfig})
(def BaseGraphiteReporterConfig
{:host schema/Str
:port schema/Int
:update-interval-seconds schema/Int})
(def GraphiteReporterConfig
(assoc BaseGraphiteReporterConfig :enabled schema/Bool))
;; schema for what is read from config file for a registry
(def GraphiteRegistryReporterConfig
(assoc (ks/mapkeys schema/optional-key BaseGraphiteReporterConfig)
:enabled schema/Bool))
(def RegistryReportersConfig
{(schema/optional-key :jmx) JmxReporterConfig
(schema/optional-key :graphite) GraphiteRegistryReporterConfig})
(def RegistryConfig
{(schema/optional-key :metrics-allowed) [schema/Str]
(schema/optional-key :metric-prefix) schema/Str
(schema/optional-key :reporters) RegistryReportersConfig})
(def RegistriesConfig
{schema/Any RegistryConfig})
(def ReportersConfig
{(schema/optional-key :graphite) BaseGraphiteReporterConfig})
(def MetricsConfig
{:server-id schema/Str
(schema/optional-key :registries) RegistriesConfig
(schema/optional-key :reporters) ReportersConfig
(schema/optional-key :metrics-webservice) WebserviceConfig})
(def RegistryContext
{:registry (schema/maybe MetricRegistry)
:jmx-reporter (schema/maybe JmxReporter)
(schema/optional-key :graphite-reporter) GraphiteReporter})
(def DefaultRegistrySettings
{:default-metrics-allowed [schema/Str]})
(def MetricsServiceContext
{:registries (schema/atom {schema/Keyword RegistryContext})
:can-update-registry-settings? schema/Bool
:registry-settings (schema/atom {schema/Keyword DefaultRegistrySettings})
:metrics-config MetricsConfig})
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;;; Private
(schema/defn jmx-reporter :- JmxReporter
[registry :- MetricRegistry
domain :- (schema/maybe schema/Keyword)]
(let [b (JmxReporter/forRegistry registry)]
(when domain
(.inDomain b (name domain)))
(.build b)))
(schema/defn initialize-registry-context :- RegistryContext
"Create initial registry context. This will include a MetricsRegistry and a
JMX reporter, but not a Graphite reporter."
[config :- (schema/maybe RegistryConfig)
domain :- schema/Keyword]
(let [jmx-config (get-in config [:reporters :jmx])
registry (MetricRegistry.)]
{:registry registry
:jmx-reporter (when (:enabled jmx-config)
(doto ^JmxReporter (jmx-reporter registry domain)
(.start)))}))
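;; Illustrative config (not in the original source): a registry entry such as
;; {:registries {:puppetserver {:reporters {:jmx {:enabled true}}}}}
;; would cause the JMX reporter above to be built and started for the
;; :puppetserver domain.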
(schema/defn construct-metric-names :- #{schema/Str}
"Prefixes the metric prefix to each metric name. Returns a set of metric names (duplicates are
removed)."
[prefix :- schema/Str
metric-names :- [schema/Str]]
(set (map #(format "%s.%s" prefix %) metric-names)))
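;; Illustrative example (not in the original source):
;; (construct-metric-names "puppetlabs.localhost" ["num-cpus" "uptime" "uptime"])
;; ;; => #{"puppetlabs.localhost.num-cpus" "puppetlabs.localhost.uptime"}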
(schema/defn build-metric-filter :- AllowedNamesMetricFilter
[metrics-allowed :- #{schema/Str}]
(AllowedNamesMetricFilter. metrics-allowed))
(schema/defn get-metric-prefix :- schema/Str
"Determines what the metric prefix should be.
If a metric-prefix is set in the config, we use that. Else default to the server-id"
[metrics-config :- MetricsConfig
domain :- schema/Keyword]
(if-let [metric-prefix (get-in metrics-config [:registries domain :metric-prefix])]
metric-prefix
(format "puppetlabs.%s" (:server-id metrics-config))))
(schema/defn build-graphite-reporter :- GraphiteReporter
"Constructs a GraphiteReporter instance for the given registry, with the given allowed metrics,
and using the given graphite-sender"
[registry :- MetricRegistry
metrics-allowed :- #{schema/Str}
graphite-sender :- GraphiteSender]
(->
(GraphiteReporter/forRegistry registry)
(.convertRatesTo (TimeUnit/MILLISECONDS))
(.convertDurationsTo (TimeUnit/MILLISECONDS))
(.filter (build-metric-filter metrics-allowed))
(.build graphite-sender)))
(schema/defn build-graphite-sender :- GraphiteSender
[graphite-config :- GraphiteReporterConfig
;; The domain is only needed as an argument for testing, which is unfortunate. In the future, it
;; would be nice to add the ability to register a function that could receive a callback when a
;; reporter is added, which could solve the problem of needing this extra argument solely for
;; testing (see PE-17010).
domain :- schema/Keyword]
(Graphite. (InetSocketAddress. (:host graphite-config)
(:port graphite-config))))
(schema/defn add-graphite-reporter :- RegistryContext
"Adds a graphite reporter to the given registry context if graphite
is enabled in the configuration. Starts up a thread which reports the metrics
to graphite on the interval specified in :update-interval-seconds"
[registry-context :- RegistryContext
graphite-config :- (schema/maybe GraphiteReporterConfig)
metrics-allowed :- #{schema/Str}
domain :- schema/Keyword]
(if (:enabled graphite-config)
(let [graphite-sender (build-graphite-sender graphite-config domain)
graphite-reporter (build-graphite-reporter (:registry registry-context)
metrics-allowed
graphite-sender)]
(.start graphite-reporter (:update-interval-seconds graphite-config) (TimeUnit/SECONDS))
(assoc registry-context :graphite-reporter graphite-reporter))
registry-context))
(schema/defn get-graphite-config :- (schema/maybe GraphiteReporterConfig)
"Merge together the graphite config for the registry with the global graphite config."
[config :- MetricsConfig
domain :- schema/Keyword]
(let [reporter-config (get-in config [:reporters :graphite])
registry-config (get-in config [:registries domain :reporters :graphite])
merged-config (merge reporter-config registry-config)]
;; the default value for enabled is false
(if (nil? merged-config)
merged-config
(update-in merged-config [:enabled] #(if (nil? %) false %)))))
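;; Illustrative example (not in the original source): a global
;; {:reporters {:graphite {:host "graphite.example.org" :port 2003
;;                         :update-interval-seconds 60}}}
;; merged with a registry-level {:reporters {:graphite {:enabled true}}} yields a
;; complete GraphiteReporterConfig with :enabled true for that registry.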
(schema/defn get-metrics-allowed :- #{schema/Str}
"Get the metrics allowed for the registry. Looks at the metrics-allowed registered for the
registry in the registry settings atom using the `update-registry-settings` function as well
as the metrics-allowed listed in the config file under the `:metrics-allowed` key. Merges these
lists together and then adds the metrics prefix to them, returning a set of prefixed allowed
metrics."
[metrics-config :- MetricsConfig
registry-settings :- {schema/Any DefaultRegistrySettings}
domain :- schema/Keyword]
(let [metric-prefix (get-metric-prefix metrics-config domain)
default-metrics-allowed (get-in registry-settings [domain :default-metrics-allowed])
configured-metrics-allowed (get-in metrics-config [:registries domain :metrics-allowed])
metrics-allowed (concat default-metrics-allowed configured-metrics-allowed)]
(construct-metric-names metric-prefix metrics-allowed)))
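;; Illustrative example (not in the original source): with default-metrics-allowed
;; ["memory.heap"] registered for :puppetserver, configured :metrics-allowed
;; ["compiler.compile-time"], and prefix "puppetlabs.localhost", the result is
;; #{"puppetlabs.localhost.memory.heap" "puppetlabs.localhost.compiler.compile-time"}.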
(schema/defn maybe-add-default-to-config :- MetricsConfig
"Add a `:default` key with an empty map as the value to the registries config if it is not
present."
[metrics-config :- MetricsConfig]
(update-in metrics-config [:registries :default] #(if (nil? %) {} %)))
(schema/defn initialize-registries-from-config :- {schema/Any RegistryContext}
"Read through the config and create a MetricsRegistry (+ JMX reporter if configured) for every
registry mentioned in it. Also create the default registry if not mentioned in the config. Should
be called from `init` of the metrics-service."
[metrics-config :- MetricsConfig]
(let [registries-config (:registries metrics-config)]
(into {} (map
(fn [x] {x (initialize-registry-context (get registries-config x)
x)})
(keys registries-config)))))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;;; Public
(schema/defn ^:always-validate add-graphite-reporters :- MetricsServiceContext
"Add Graphite reporters to all registries with Graphite enabled in the config, using the
configured settings for each registry. Returns an updated service context. Should be called from
`start` of the metrics-service."
[service-context :- MetricsServiceContext]
(let [config (:metrics-config service-context)
registry-settings @(:registry-settings service-context)]
(doseq [registry @(:registries service-context)]
(let [domain (key registry)
graphite-config (get-graphite-config config domain)
metrics-allowed (get-metrics-allowed config registry-settings domain)
registry-with-graphite-reporter (add-graphite-reporter
(val registry)
graphite-config
metrics-allowed
domain)]
(swap! (:registries service-context) assoc domain registry-with-graphite-reporter))))
service-context)
;; Note here that the return schema includes registries that could have Graphite reporters. If the
;; registry was in the config, then a Graphite reporter could have been configured for it. Any
;; registries not in the config will not have Graphite reporters.
(schema/defn ^:always-validate get-or-initialize-registry-context :- RegistryContext
"If a registry exists within the service context for a given domain
already, return it.
Otherwise initialize a new registry for that domain and return it.
Modifies the registries atom in the service context to add the new registry"
[{:keys [registries metrics-config]} :- MetricsServiceContext
domain :- schema/Keyword]
(if-let [metric-registry-context (get @registries domain)]
metric-registry-context
(let [registry-config (get-in metrics-config [:registries domain])
new-registry-context (initialize-registry-context registry-config domain)]
(swap! registries assoc domain new-registry-context)
new-registry-context)))
(schema/defn ^:always-validate create-initial-service-context :- MetricsServiceContext
"Create the initial service context for the metrics-service. Initialize all registries in the
config, add them to the `registries` atom, and include that in the service context map, along with
an empty atom for `registry-settings` and the metrics config."
[metrics-config :- MetricsConfig]
(let [config-with-default (maybe-add-default-to-config metrics-config)
registries (initialize-registries-from-config config-with-default)]
{:registries (atom registries)
:can-update-registry-settings? true
:registry-settings (atom {})
:metrics-config config-with-default}))
(schema/defn lock-registry-settings :- MetricsServiceContext
"Switch the `can-update-registry-settings?` boolean to false to show that it is after the `init`
phase and registry settings can no longer be set."
[context :- MetricsServiceContext]
(assoc context :can-update-registry-settings? false))
(schema/defn ^:always-validate update-registry-settings :- {schema/Any DefaultRegistrySettings}
"Update the `registry-settings` atom for the given domain. If called again for the same domain,
the new settings will be merged in, and lists such as :default-metrics-allowed, will be concat'd
together."
[context :- MetricsServiceContext
domain :- schema/Keyword
settings :- DefaultRegistrySettings]
(when (= false (:can-update-registry-settings? context))
(throw (RuntimeException.
"Registry settings must be initialized in the `init` phase of the lifecycle.")))
(let [registry-settings (:registry-settings context)
deep-merge-fn (fn [first second]
; first will be nil if no settings exist for this domain,
; and deep-merge-with doesn't like that
(ks/deep-merge-with concat (or first {}) second))]
; Swap out the atom by updating the value under the specified domain.
; Update using deep-merge-fn to do a deep merge between the existing settings
      ; and the new settings, concating values together if two keys match
(swap! registry-settings update domain deep-merge-fn settings)))
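;; Illustrative sketch (not part of the original source; the domain and metric names are
;; invented): calling `update-registry-settings` twice for the same domain during `init`
;; deep-merges the maps and concatenates list values, e.g.
;;   (update-registry-settings ctx :status {:default-metrics-allowed ["num-cpus"]})
;;   (update-registry-settings ctx :status {:default-metrics-allowed ["uptime"]})
;; leaves the :registry-settings atom holding
;;   {:status {:default-metrics-allowed ("num-cpus" "uptime")}}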
(schema/defn ^:always-validate stop
[context :- RegistryContext]
(if-let [jmx-reporter (:jmx-reporter context)]
(.close jmx-reporter))
(if-let [graphite-reporter (:graphite-reporter context)]
(.close graphite-reporter)))
(schema/defn ^:always-validate stop-all
[service-context :- MetricsServiceContext]
(let [registries (:registries service-context)]
(doseq [[_ metrics-registry] @registries]
(stop metrics-registry))
service-context))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;;; Comidi
(defn build-handler [path]
(comidi/routes->handler
(comidi/wrap-routes
(comidi/context path
(comidi/context "/v1"
(comidi/context "/mbeans"
(comidi/GET "" []
(fn [req]
(ringutils/json-response 200
(metrics-utils/mbean-names))))
(comidi/POST "" []
(fn [req]
(try
(let [metrics (with-open [reader (-> req :body io/reader)]
(doall (json/parse-stream reader true)))]
(cond
(seq? metrics)
(ringutils/json-response
200 (map metrics-utils/get-mbean metrics))
(string? metrics)
(ringutils/json-response
200 (metrics-utils/get-mbean metrics))
(map? metrics)
(ringutils/json-response
200 (ks/mapvals metrics-utils/get-mbean metrics))
:else
(ringutils/json-response
400 (tru "metrics request must be a JSON array, string, or object"))))
(catch JsonParseException e
(ringutils/json-response 400 {:error (str e)})))))
(comidi/GET ["/" [#".*" :names]] []
(fn [{:keys [route-params] :as req}]
(let [name (java.net.URLDecoder/decode (:names route-params))]
(if-let [mbean (metrics-utils/get-mbean name)]
(ringutils/json-response 200 mbean)
(ringutils/json-response 404
(tru "No mbean ''{0}'' found" name)))))))))
(comp i18n/locale-negotiator #(ring-defaults/wrap-defaults % ring-defaults/api-defaults)))))
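;; Illustrative sketch (not part of the original source): mounting `(build-handler "/metrics")`
;; exposes, e.g.
;;   GET  /metrics/v1/mbeans         - list all registered mbean names
;;   POST /metrics/v1/mbeans         - look up mbeans named by a JSON string, array, or object body
;;   GET  /metrics/v1/mbeans/<name>  - fetch a single mbean by (URL-encoded) name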
| null | https://raw.githubusercontent.com/puppetlabs/trapperkeeper-metrics/47f1534ebb75339eeb42e0de33fab0ec2f6cacdc/src/clj/puppetlabs/trapperkeeper/services/metrics/metrics_core.clj | clojure |
schema for what is read from config file for a registry
Private
The domain is only needed as an argument for testing, which is unfortunate. In the future, it
would be nice to add the ability to register a function that could receive a callback when a
reporter is added, which could solve the problem of needing this extra argument solely for
testing (see PE-17010).
the default value for enabled is false
Public
Note here that the return schema includes registries that could have Graphite reporters. If the
registries not in the config will not have Graphite reporters.
first will be nil if no settings exist for this domain,
and deep-merge-with doesn't like that
Swap out the atom by updating the value under the specified domain.
Update using deep-merge-fn to do a deep merge between the existing settings
| (ns puppetlabs.trapperkeeper.services.metrics.metrics-core
(:import (com.codahale.metrics JmxReporter MetricRegistry)
(com.fasterxml.jackson.core JsonParseException)
(com.puppetlabs.trapperkeeper.metrics GraphiteReporter AllowedNamesMetricFilter)
(java.util.concurrent TimeUnit)
(java.net InetSocketAddress)
(com.codahale.metrics.graphite Graphite GraphiteSender))
(:require [clojure.tools.logging :as log]
[clojure.java.io :as io]
[cheshire.core :as json]
[schema.core :as schema]
[ring.middleware.defaults :as ring-defaults]
[puppetlabs.comidi :as comidi]
[puppetlabs.ring-middleware.utils :as ringutils]
[puppetlabs.trapperkeeper.services.metrics.metrics-utils
:as metrics-utils]
[puppetlabs.trapperkeeper.services.metrics.jolokia
:as jolokia]
[puppetlabs.kitchensink.core :as ks]
[puppetlabs.i18n.core :as i18n :refer [trs tru]]))
(def JmxReporterConfig
{:enabled schema/Bool})
(def JolokiaApiConfig
{(schema/optional-key :enabled) schema/Bool
(schema/optional-key :servlet-init-params) jolokia/JolokiaConfig})
(def MbeansApiConfig
{(schema/optional-key :enabled) schema/Bool})
(def WebserviceConfig
{(schema/optional-key :mbeans) MbeansApiConfig
(schema/optional-key :jolokia) JolokiaApiConfig})
(def BaseGraphiteReporterConfig
{:host schema/Str
:port schema/Int
:update-interval-seconds schema/Int})
(def GraphiteReporterConfig
(assoc BaseGraphiteReporterConfig :enabled schema/Bool))
(def GraphiteRegistryReporterConfig
(assoc (ks/mapkeys schema/optional-key BaseGraphiteReporterConfig)
:enabled schema/Bool))
(def RegistryReportersConfig
{(schema/optional-key :jmx) JmxReporterConfig
(schema/optional-key :graphite) GraphiteRegistryReporterConfig})
(def RegistryConfig
{(schema/optional-key :metrics-allowed) [schema/Str]
(schema/optional-key :metric-prefix) schema/Str
(schema/optional-key :reporters) RegistryReportersConfig})
(def RegistriesConfig
{schema/Any RegistryConfig})
(def ReportersConfig
{(schema/optional-key :graphite) BaseGraphiteReporterConfig})
(def MetricsConfig
{:server-id schema/Str
(schema/optional-key :registries) RegistriesConfig
(schema/optional-key :reporters) ReportersConfig
(schema/optional-key :metrics-webservice) WebserviceConfig})
(def RegistryContext
{:registry (schema/maybe MetricRegistry)
:jmx-reporter (schema/maybe JmxReporter)
(schema/optional-key :graphite-reporter) GraphiteReporter})
(def DefaultRegistrySettings
{:default-metrics-allowed [schema/Str]})
(def MetricsServiceContext
{:registries (schema/atom {schema/Keyword RegistryContext})
:can-update-registry-settings? schema/Bool
:registry-settings (schema/atom {schema/Keyword DefaultRegistrySettings})
:metrics-config MetricsConfig})
(schema/defn jmx-reporter :- JmxReporter
[registry :- MetricRegistry
domain :- (schema/maybe schema/Keyword)]
(let [b (JmxReporter/forRegistry registry)]
(when domain
(.inDomain b (name domain)))
(.build b)))
(schema/defn initialize-registry-context :- RegistryContext
"Create initial registry context. This will include a MetricsRegistry and a
JMX reporter, but not a Graphite reporter."
[config :- (schema/maybe RegistryConfig)
domain :- schema/Keyword]
(let [jmx-config (get-in config [:reporters :jmx])
registry (MetricRegistry.)]
{:registry registry
:jmx-reporter (when (:enabled jmx-config)
(doto ^JmxReporter (jmx-reporter registry domain)
(.start)))}))
(schema/defn construct-metric-names :- #{schema/Str}
"Prefixes the metric prefix to each metric name. Returns a set of metric names (duplicates are
removed)."
[prefix :- schema/Str
metric-names :- [schema/Str]]
(set (map #(format "%s.%s" prefix %) metric-names)))
(schema/defn build-metric-filter :- AllowedNamesMetricFilter
[metrics-allowed :- #{schema/Str}]
(AllowedNamesMetricFilter. metrics-allowed))
(schema/defn get-metric-prefix :- schema/Str
"Determines what the metric prefix should be.
If a metric-prefix is set in the config, we use that. Else default to the server-id"
[metrics-config :- MetricsConfig
domain :- schema/Keyword]
(if-let [metric-prefix (get-in metrics-config [:registries domain :metric-prefix])]
metric-prefix
(format "puppetlabs.%s" (:server-id metrics-config))))
(schema/defn build-graphite-reporter :- GraphiteReporter
"Constructs a GraphiteReporter instance for the given registry, with the given allowed metrics,
and using the given graphite-sender"
[registry :- MetricRegistry
metrics-allowed :- #{schema/Str}
graphite-sender :- GraphiteSender]
(->
(GraphiteReporter/forRegistry registry)
(.convertRatesTo (TimeUnit/MILLISECONDS))
(.convertDurationsTo (TimeUnit/MILLISECONDS))
(.filter (build-metric-filter metrics-allowed))
(.build graphite-sender)))
(schema/defn build-graphite-sender :- GraphiteSender
[graphite-config :- GraphiteReporterConfig
domain :- schema/Keyword]
(Graphite. (InetSocketAddress. (:host graphite-config)
(:port graphite-config))))
(schema/defn add-graphite-reporter :- RegistryContext
"Adds a graphite reporter to the given registry context if graphite
is enabled in the configuration. Starts up a thread which reports the metrics
to graphite on the interval specified in :update-interval-seconds"
[registry-context :- RegistryContext
graphite-config :- (schema/maybe GraphiteReporterConfig)
metrics-allowed :- #{schema/Str}
domain :- schema/Keyword]
(if (:enabled graphite-config)
(let [graphite-sender (build-graphite-sender graphite-config domain)
graphite-reporter (build-graphite-reporter (:registry registry-context)
metrics-allowed
graphite-sender)]
(.start graphite-reporter (:update-interval-seconds graphite-config) (TimeUnit/SECONDS))
(assoc registry-context :graphite-reporter graphite-reporter))
registry-context))
(schema/defn get-graphite-config :- (schema/maybe GraphiteReporterConfig)
"Merge together the graphite config for the registry with the global graphite config."
[config :- MetricsConfig
domain :- schema/Keyword]
(let [reporter-config (get-in config [:reporters :graphite])
registry-config (get-in config [:registries domain :reporters :graphite])
merged-config (merge reporter-config registry-config)]
(if (nil? merged-config)
merged-config
(update-in merged-config [:enabled] #(if (nil? %) false %)))))
(schema/defn get-metrics-allowed :- #{schema/Str}
"Get the metrics allowed for the registry. Looks at the metrics-allowed registered for the
registry in the registry settings atom using the `update-registry-settings` function as well
as the metrics-allowed listed in the config file under the `:metrics-allowed` key. Merges these
lists together and then adds the metrics prefix to them, returning a set of prefixed allowed
metrics."
[metrics-config :- MetricsConfig
registry-settings :- {schema/Any DefaultRegistrySettings}
domain :- schema/Keyword]
(let [metric-prefix (get-metric-prefix metrics-config domain)
default-metrics-allowed (get-in registry-settings [domain :default-metrics-allowed])
configured-metrics-allowed (get-in metrics-config [:registries domain :metrics-allowed])
metrics-allowed (concat default-metrics-allowed configured-metrics-allowed)]
(construct-metric-names metric-prefix metrics-allowed)))
(schema/defn maybe-add-default-to-config :- MetricsConfig
"Add a `:default` key with an empty map as the value to the registries config if it is not
present."
[metrics-config :- MetricsConfig]
(update-in metrics-config [:registries :default] #(if (nil? %) {} %)))
(schema/defn initialize-registries-from-config :- {schema/Any RegistryContext}
"Read through the config and create a MetricsRegistry (+ JMX reporter if configured) for every
registry mentioned in it. Also create the default registry if not mentioned in the config. Should
be called from `init` of the metrics-service."
[metrics-config :- MetricsConfig]
(let [registries-config (:registries metrics-config)]
(into {} (map
(fn [x] {x (initialize-registry-context (get registries-config x)
x)})
(keys registries-config)))))
(schema/defn ^:always-validate add-graphite-reporters :- MetricsServiceContext
"Add Graphite reporters to all registries with Graphite enabled in the config, using the
configured settings for each registry. Returns an updated service context. Should be called from
`start` of the metrics-service."
[service-context :- MetricsServiceContext]
(let [config (:metrics-config service-context)
registry-settings @(:registry-settings service-context)]
(doseq [registry @(:registries service-context)]
(let [domain (key registry)
graphite-config (get-graphite-config config domain)
metrics-allowed (get-metrics-allowed config registry-settings domain)
registry-with-graphite-reporter (add-graphite-reporter
(val registry)
graphite-config
metrics-allowed
domain)]
(swap! (:registries service-context) assoc domain registry-with-graphite-reporter))))
service-context)
(schema/defn ^:always-validate get-or-initialize-registry-context :- RegistryContext
"If a registry exists within the service context for a given domain
already, return it.
Otherwise initialize a new registry for that domain and return it.
Modifies the registries atom in the service context to add the new registry"
[{:keys [registries metrics-config]} :- MetricsServiceContext
domain :- schema/Keyword]
(if-let [metric-registry-context (get @registries domain)]
metric-registry-context
(let [registry-config (get-in metrics-config [:registries domain])
new-registry-context (initialize-registry-context registry-config domain)]
(swap! registries assoc domain new-registry-context)
new-registry-context)))
(schema/defn ^:always-validate create-initial-service-context :- MetricsServiceContext
"Create the initial service context for the metrics-service. Initialize all registries in the
config, add them to the `registries` atom, and include that in the service context map, along with
an empty atom for `registry-settings` and the metrics config."
[metrics-config :- MetricsConfig]
(let [config-with-default (maybe-add-default-to-config metrics-config)
registries (initialize-registries-from-config config-with-default)]
{:registries (atom registries)
:can-update-registry-settings? true
:registry-settings (atom {})
:metrics-config config-with-default}))
(schema/defn lock-registry-settings :- MetricsServiceContext
"Switch the `can-update-registry-settings?` boolean to false to show that it is after the `init`
phase and registry settings can no longer be set."
[context :- MetricsServiceContext]
(assoc context :can-update-registry-settings? false))
(schema/defn ^:always-validate update-registry-settings :- {schema/Any DefaultRegistrySettings}
"Update the `registry-settings` atom for the given domain. If called again for the same domain,
the new settings will be merged in, and lists such as :default-metrics-allowed, will be concat'd
together."
[context :- MetricsServiceContext
domain :- schema/Keyword
settings :- DefaultRegistrySettings]
(when (= false (:can-update-registry-settings? context))
(throw (RuntimeException.
"Registry settings must be initialized in the `init` phase of the lifecycle.")))
(let [registry-settings (:registry-settings context)
deep-merge-fn (fn [first second]
(ks/deep-merge-with concat (or first {}) second))]
(swap! registry-settings update domain deep-merge-fn settings)))
(schema/defn ^:always-validate stop
[context :- RegistryContext]
(if-let [jmx-reporter (:jmx-reporter context)]
(.close jmx-reporter))
(if-let [graphite-reporter (:graphite-reporter context)]
(.close graphite-reporter)))
(schema/defn ^:always-validate stop-all
[service-context :- MetricsServiceContext]
(let [registries (:registries service-context)]
(doseq [[_ metrics-registry] @registries]
(stop metrics-registry))
service-context))
(defn build-handler [path]
(comidi/routes->handler
(comidi/wrap-routes
(comidi/context path
(comidi/context "/v1"
(comidi/context "/mbeans"
(comidi/GET "" []
(fn [req]
(ringutils/json-response 200
(metrics-utils/mbean-names))))
(comidi/POST "" []
(fn [req]
(try
(let [metrics (with-open [reader (-> req :body io/reader)]
(doall (json/parse-stream reader true)))]
(cond
(seq? metrics)
(ringutils/json-response
200 (map metrics-utils/get-mbean metrics))
(string? metrics)
(ringutils/json-response
200 (metrics-utils/get-mbean metrics))
(map? metrics)
(ringutils/json-response
200 (ks/mapvals metrics-utils/get-mbean metrics))
:else
(ringutils/json-response
400 (tru "metrics request must be a JSON array, string, or object"))))
(catch JsonParseException e
(ringutils/json-response 400 {:error (str e)})))))
(comidi/GET ["/" [#".*" :names]] []
(fn [{:keys [route-params] :as req}]
(let [name (java.net.URLDecoder/decode (:names route-params))]
(if-let [mbean (metrics-utils/get-mbean name)]
(ringutils/json-response 200 mbean)
(ringutils/json-response 404
(tru "No mbean ''{0}'' found" name)))))))))
(comp i18n/locale-negotiator #(ring-defaults/wrap-defaults % ring-defaults/api-defaults)))))
|
9cc4e7e9d05b2dcdb478179060166c2b0f9c4ce5db2935494562e21777bd8498 | ocaml/ocamlbuild | loc.ml | (* it's not worth adding a dependency on parsing/location.ml(i) or
   compilerlibs just to support location printing, so we re-implement
   that here *)
open Lexing
(* We use a loosely structural type so that this bit of code can be
   easily reused by projects that would wish it, without introducing
any type-compatibility burden. *)
type source = string (* "file", "environment variable", "command-line option" ... *)
type location = source * position * position
let file loc = loc.pos_fname
let line loc = loc.pos_lnum
let char loc = loc.pos_cnum - loc.pos_bol
let print_loc ppf (source, start, end_) =
let open Format in
let print one_or_two ppf (start_num, end_num) =
if one_or_two then fprintf ppf " %d" start_num
else fprintf ppf "s %d-%d" start_num end_num in
fprintf ppf "%s %S, line%a, character%a:@."
(String.capitalize_ascii source)
(file start)
(print (line start = line end_))
(line start, line end_)
(print (line start = line end_ && char start = char end_))
(char start, char end_)
let of_lexbuf source lexbuf =
(source, lexbuf.lex_start_p, lexbuf.lex_curr_p)
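(* Illustrative example (not part of the original file): printing a location built by
   [of_lexbuf "file" lexbuf] with [print_loc] yields output such as
     File "foo.ml", line 3, characters 4-7:
   where the source string is capitalized and the file name comes from [pos_fname]. *)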
let print_loc_option ppf = function
| None -> ()
| Some loc -> print_loc ppf loc
| null | https://raw.githubusercontent.com/ocaml/ocamlbuild/792b7c8abdbc712c98ed7e69469ed354b87e125b/src/loc.ml | ocaml | We use a loosely structural type so that this bit of code can be
easily reused by projects that would wish it, without introducing
any type-compatibility burden.
"file", "environment variable", "command-line option" ... | it 's not worth adding a dependency on parsing / location.ml(i ) or
compilerlibs just to support location printing , so we re - implement
that here
compilerlibs just to support location printing, so we re-implement
that here *)
open Lexing
type location = source * position * position
let file loc = loc.pos_fname
let line loc = loc.pos_lnum
let char loc = loc.pos_cnum - loc.pos_bol
let print_loc ppf (source, start, end_) =
let open Format in
let print one_or_two ppf (start_num, end_num) =
if one_or_two then fprintf ppf " %d" start_num
else fprintf ppf "s %d-%d" start_num end_num in
fprintf ppf "%s %S, line%a, character%a:@."
(String.capitalize_ascii source)
(file start)
(print (line start = line end_))
(line start, line end_)
(print (line start = line end_ && char start = char end_))
(char start, char end_)
let of_lexbuf source lexbuf =
(source, lexbuf.lex_start_p, lexbuf.lex_curr_p)
let print_loc_option ppf = function
| None -> ()
| Some loc -> print_loc ppf loc
|
50f5c5b466ee9decc04ef39aeced70a22170da5f4c1425f1072c53fd275eec18 | mfoemmel/erlang-otp | http_cookie.erl | %%
%% %CopyrightBegin%
%%
%% Copyright Ericsson AB 2004-2009. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
%% compliance with the License. You should have received a copy of the
%% Erlang Public License along with this software. If not, it can be
%% retrieved online at /.
%%
%% Software distributed under the License is distributed on an "AS IS"
%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
%% the License for the specific language governing rights and limitations
%% under the License.
%%
%% %CopyrightEnd%
%%
%% Description: Cookie handling according to RFC 2109
-module(http_cookie).
-include("httpc_internal.hrl").
-export([header/4, cookies/3, open_cookie_db/1, close_cookie_db/1, insert/2]).
%%%=========================================================================
%%% API
%%%=========================================================================
header(Scheme, {Host, _}, Path, CookieDb) ->
case lookup_cookies(Host, Path, CookieDb) of
[] ->
{"cookie", ""};
Cookies ->
{"cookie", cookies_to_string(Scheme, Cookies)}
end.
cookies(Headers, RequestPath, RequestHost) ->
Cookies = parse_set_cookies(Headers, {RequestPath, RequestHost}),
accept_cookies(Cookies, RequestPath, RequestHost).
open_cookie_db({{_, only_session_cookies}, SessionDbName}) ->
EtsDb = ets:new(SessionDbName, [protected, bag,
{keypos, #http_cookie.domain}]),
{undefined, EtsDb};
open_cookie_db({{DbName, Dbdir}, SessionDbName}) ->
File = filename:join(Dbdir, atom_to_list(DbName)),
{ok, DetsDb} = dets:open_file(DbName, [{keypos, #http_cookie.domain},
{type, bag},
{file, File},
{ram_file, true}]),
EtsDb = ets:new(SessionDbName, [protected, bag,
{keypos, #http_cookie.domain}]),
{DetsDb, EtsDb}.
close_cookie_db({undefined, EtsDb}) ->
ets:delete(EtsDb);
close_cookie_db({DetsDb, EtsDb}) ->
dets:close(DetsDb),
ets:delete(EtsDb).
%% If no persistent cookie database is defined we
%% treat all cookies as if they were session cookies.
insert(Cookie = #http_cookie{max_age = Int},
Dbs = {undefined, _}) when is_integer(Int) ->
insert(Cookie#http_cookie{max_age = session}, Dbs);
insert(Cookie = #http_cookie{domain = Key, name = Name,
path = Path, max_age = session},
Db = {_, CookieDb}) ->
case ets:match_object(CookieDb, #http_cookie{domain = Key,
name = Name,
path = Path,
_ = '_'}) of
[] ->
ets:insert(CookieDb, Cookie);
[NewCookie] ->
delete(NewCookie, Db),
ets:insert(CookieDb, Cookie)
end,
ok;
insert(#http_cookie{domain = Key, name = Name,
path = Path, max_age = 0},
Db = {CookieDb, _}) ->
case dets:match_object(CookieDb, #http_cookie{domain = Key,
name = Name,
path = Path,
_ = '_'}) of
[] ->
ok;
[NewCookie] ->
delete(NewCookie, Db)
end,
ok;
insert(Cookie = #http_cookie{domain = Key, name = Name, path = Path},
Db = {CookieDb, _}) ->
case dets:match_object(CookieDb, #http_cookie{domain = Key,
name = Name,
path = Path,
_ = '_'}) of
[] ->
dets:insert(CookieDb, Cookie);
[NewCookie] ->
delete(NewCookie, Db),
dets:insert(CookieDb, Cookie)
end,
ok.
%%%========================================================================
%%% Internal functions
%%%========================================================================
lookup_cookies(Key, {undefined, Ets}) ->
ets:match_object(Ets, #http_cookie{domain = Key,
_ = '_'});
lookup_cookies(Key, {Dets,Ets}) ->
SessionCookies = ets:match_object(Ets, #http_cookie{domain = Key,
_ = '_'}),
Cookies = dets:match_object(Dets, #http_cookie{domain = Key,
_ = '_'}),
Cookies ++ SessionCookies.
delete(Cookie = #http_cookie{max_age = session}, {_, CookieDb}) ->
ets:delete_object(CookieDb, Cookie);
delete(Cookie, {CookieDb, _}) ->
dets:delete_object(CookieDb, Cookie).
lookup_cookies(Host, Path, Db) ->
Cookies =
case http_util:is_hostname(Host) of
true ->
HostCookies = lookup_cookies(Host, Db),
[_| DomainParts] = string:tokens(Host, "."),
lookup_domain_cookies(DomainParts, Db, HostCookies);
	false -> % IP-address
lookup_cookies(Host, Db)
end,
ValidCookies = valid_cookies(Cookies, [], Db),
lists:filter(fun(Cookie) ->
lists:prefix(Cookie#http_cookie.path, Path)
end, ValidCookies).
%% For instance if Host=localhost
lookup_domain_cookies([], _, AccCookies) ->
lists:flatten(AccCookies);
%% Top domains can not have cookies
lookup_domain_cookies([_], _, AccCookies) ->
lists:flatten(AccCookies);
lookup_domain_cookies([Next | DomainParts], CookieDb, AccCookies) ->
Domain = merge_domain_parts(DomainParts, [Next ++ "."]),
lookup_domain_cookies(DomainParts, CookieDb,
[lookup_cookies(Domain, CookieDb)
| AccCookies]).
merge_domain_parts([Part], Merged) ->
lists:flatten(["." | lists:reverse([Part | Merged])]);
merge_domain_parts([Part| Rest], Merged) ->
merge_domain_parts(Rest, [".", Part | Merged]).
cookies_to_string(Scheme, Cookies = [Cookie | _]) ->
Version = "$Version=" ++ Cookie#http_cookie.version ++ "; ",
cookies_to_string(Scheme, path_sort(Cookies), [Version]).
cookies_to_string(_, [], CookieStrs) ->
case length(CookieStrs) of
1 ->
"";
_ ->
lists:flatten(lists:reverse(CookieStrs))
end;
cookies_to_string(https, [Cookie = #http_cookie{secure = true}| Cookies],
CookieStrs) ->
Str = case Cookies of
[] ->
cookie_to_string(Cookie);
_ ->
cookie_to_string(Cookie) ++ "; "
end,
cookies_to_string(https, Cookies, [Str | CookieStrs]);
cookies_to_string(Scheme, [#http_cookie{secure = true}| Cookies],
CookieStrs) ->
cookies_to_string(Scheme, Cookies, CookieStrs);
cookies_to_string(Scheme, [Cookie | Cookies], CookieStrs) ->
Str = case Cookies of
[] ->
cookie_to_string(Cookie);
_ ->
cookie_to_string(Cookie) ++ "; "
end,
cookies_to_string(Scheme, Cookies, [Str | CookieStrs]).
cookie_to_string(Cookie = #http_cookie{name = Name, value = Value}) ->
Str = Name ++ "=" ++ Value,
add_domain(add_path(Str, Cookie), Cookie).
add_path(Str, #http_cookie{path_default = true}) ->
Str;
add_path(Str, #http_cookie{path = Path}) ->
Str ++ "; $Path=" ++ Path.
add_domain(Str, #http_cookie{domain_default = true}) ->
Str;
add_domain(Str, #http_cookie{domain = Domain}) ->
Str ++ "; $Domain=" ++ Domain.
parse_set_cookies(OtherHeaders, DefaultPathDomain) ->
SetCookieHeaders = lists:foldl(fun({"set-cookie", Value}, Acc) ->
[string:tokens(Value, ",")| Acc];
(_, Acc) ->
Acc
end, [], OtherHeaders),
lists:flatten(lists:map(fun(CookieHeader) ->
NewHeader =
fix_netscape_cookie(CookieHeader,
[]),
parse_set_cookie(NewHeader, [],
DefaultPathDomain) end,
SetCookieHeaders)).
parse_set_cookie([], AccCookies, _) ->
AccCookies;
parse_set_cookie([CookieHeader | CookieHeaders], AccCookies,
Defaults = {DefaultPath, DefaultDomain}) ->
[CookieStr | Attributes] = case string:tokens(CookieHeader, ";") of
[CStr] ->
[CStr, ""];
[CStr | Attr] ->
[CStr, Attr]
end,
Pos = string:chr(CookieStr, $=),
Name = string:substr(CookieStr, 1, Pos - 1),
Value = string:substr(CookieStr, Pos + 1),
Cookie = #http_cookie{name = string:strip(Name),
value = string:strip(Value)},
NewAttributes = parse_set_cookie_attributes(Attributes),
TmpCookie = cookie_attributes(NewAttributes, Cookie),
    %% Add runtime default values if necessary
NewCookie = domain_default(path_default(TmpCookie, DefaultPath),
DefaultDomain),
parse_set_cookie(CookieHeaders, [NewCookie | AccCookies], Defaults).
parse_set_cookie_attributes([]) ->
[];
parse_set_cookie_attributes([Attributes]) ->
lists:map(fun(Attr) ->
[AttrName, AttrValue] =
case string:tokens(Attr, "=") of
%% All attributes have the form
%% Name=Value except "secure"!
[Name] ->
[Name, ""];
[Name, Value] ->
[Name, Value];
%% Anything not expected will be
%% disregarded
_ ->
["Dummy",""]
end,
{http_util:to_lower(string:strip(AttrName)),
string:strip(AttrValue)}
end, Attributes).
cookie_attributes([], Cookie) ->
Cookie;
cookie_attributes([{"comment", Value}| Attributes], Cookie) ->
cookie_attributes(Attributes,
Cookie#http_cookie{comment = Value});
cookie_attributes([{"domain", Value}| Attributes], Cookie) ->
cookie_attributes(Attributes,
Cookie#http_cookie{domain = Value});
cookie_attributes([{"max-age", Value}| Attributes], Cookie) ->
ExpireTime = cookie_expires(list_to_integer(Value)),
cookie_attributes(Attributes,
Cookie#http_cookie{max_age = ExpireTime});
%% Backwards compatibility with netscape cookies
cookie_attributes([{"expires", Value}| Attributes], Cookie) ->
Time = http_util:convert_netscapecookie_date(Value),
ExpireTime = calendar:datetime_to_gregorian_seconds(Time),
cookie_attributes(Attributes,
Cookie#http_cookie{max_age = ExpireTime});
cookie_attributes([{"path", Value}| Attributes], Cookie) ->
cookie_attributes(Attributes,
Cookie#http_cookie{path = Value});
cookie_attributes([{"secure", _}| Attributes], Cookie) ->
cookie_attributes(Attributes,
Cookie#http_cookie{secure = true});
cookie_attributes([{"version", Value}| Attributes], Cookie) ->
cookie_attributes(Attributes,
Cookie#http_cookie{version = Value});
%% Disregard unknown attributes.
cookie_attributes([_| Attributes], Cookie) ->
cookie_attributes(Attributes, Cookie).
domain_default(Cookie = #http_cookie{domain = undefined},
DefaultDomain) ->
Cookie#http_cookie{domain = DefaultDomain, domain_default = true};
domain_default(Cookie, _) ->
Cookie.
path_default(Cookie = #http_cookie{path = undefined},
DefaultPath) ->
Cookie#http_cookie{path = skip_right_most_slash(DefaultPath),
path_default = true};
path_default(Cookie, _) ->
Cookie.
%% Note: if the path is only / that / will be kept
skip_right_most_slash("/") ->
"/";
skip_right_most_slash(Str) ->
string:strip(Str, right, $/).
accept_cookies(Cookies, RequestPath, RequestHost) ->
lists:filter(fun(Cookie) ->
accept_cookie(Cookie, RequestPath, RequestHost)
end, Cookies).
accept_cookie(Cookie, RequestPath, RequestHost) ->
accept_path(Cookie, RequestPath) and accept_domain(Cookie, RequestHost).
accept_path(#http_cookie{path = Path}, RequestPath) ->
lists:prefix(Path, RequestPath).
accept_domain(#http_cookie{domain = RequestHost}, RequestHost) ->
true;
accept_domain(#http_cookie{domain = Domain}, RequestHost) ->
HostCheck = case http_util:is_hostname(RequestHost) of
true ->
(lists:suffix(Domain, RequestHost) andalso
(not
lists:member($.,
string:substr(RequestHost, 1,
(length(RequestHost) -
length(Domain))))));
false ->
false
end,
HostCheck andalso (hd(Domain) == $.)
andalso (length(string:tokens(Domain, ".")) > 1).
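%% Illustrative example (not from the original source): with Domain = ".example.com",
%% RequestHost = "www.example.com" is accepted (the suffix matches and the remaining prefix
%% "www" contains no extra dot), while "evil.host.example.com" is rejected because its
%% prefix "evil.host" does contain a dot.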
cookie_expires(0) ->
0;
cookie_expires(DeltaSec) ->
NowSec = calendar:datetime_to_gregorian_seconds({date(), time()}),
NowSec + DeltaSec.
is_cookie_expired(#http_cookie{max_age = session}) ->
false;
is_cookie_expired(#http_cookie{max_age = ExpireTime}) ->
NowSec = calendar:datetime_to_gregorian_seconds({date(), time()}),
ExpireTime - NowSec =< 0.
valid_cookies([], Valid, _) ->
Valid;
valid_cookies([Cookie | Cookies], Valid, Db) ->
case is_cookie_expired(Cookie) of
true ->
delete(Cookie, Db),
valid_cookies(Cookies, Valid, Db);
false ->
valid_cookies(Cookies, [Cookie | Valid], Db)
end.
path_sort(Cookies)->
lists:reverse(lists:keysort(#http_cookie.path, Cookies)).
%% Informally, the Set-Cookie response header comprises the token
%% Set-Cookie:, followed by a comma-separated list of one or more
%% cookies. A Netscape cookie's expires attribute may also contain a
%% comma; in this case the header list will have been incorrectly split
%% in parse_set_cookies/2. This function fixes that problem.
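%% Illustrative example (not from the original source): a header value such as
%%   "id=1; expires=Sun, 09-May-2021 10:00:00 GMT"
%% is split on "," into ["id=1; expires=Sun", " 09-May-2021 10:00:00 GMT"], so the two
%% fragments must be glued back together before parsing.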
fix_netscape_cookie([Cookie1, Cookie2 | Rest], Acc) ->
case inets_regexp:match(Cookie1, "expires=") of
{_, _, _} ->
fix_netscape_cookie(Rest, [Cookie1 ++ Cookie2 | Acc]);
nomatch ->
fix_netscape_cookie([Cookie2 |Rest], [Cookie1| Acc])
end;
fix_netscape_cookie([Cookie | Rest], Acc) ->
fix_netscape_cookie(Rest, [Cookie | Acc]);
fix_netscape_cookie([], Acc) ->
Acc.
| null | https://raw.githubusercontent.com/mfoemmel/erlang-otp/9c6fdd21e4e6573ca6f567053ff3ac454d742bc2/lib/inets/src/http_client/http_cookie.erl | erlang |
%CopyrightBegin%
compliance with the License. You should have received a copy of the
Erlang Public License along with this software. If not, it can be
retrieved online at /.
basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
the License for the specific language governing rights and limitations
under the License.
%CopyrightEnd%
=========================================================================
API
=========================================================================
If no persistent cookie database is defined we
treat all cookies as if they were session cookies.
========================================================================
========================================================================
IP-address
For instance if Host=localhost
Top domains can not have cookies
Add runtime default values if necessary
All attributes have the form
Name=Value except "secure"!
Anything not expected will be
disregarded
Backwards compatibility with netscape cookies
Disregard unknown attributes.
Note: if the path is only / that / will be kept
cookies. A Netscape cookie's expires attribute may also contain a
comma; in this case the header list will have been incorrectly split
in parse_set_cookies/2. This function fixes that problem.
-module(http_cookie).
-include("httpc_internal.hrl").
-export([header/4, cookies/3, open_cookie_db/1, close_cookie_db/1, insert/2]).
header(Scheme, {Host, _}, Path, CookieDb) ->
case lookup_cookies(Host, Path, CookieDb) of
[] ->
{"cookie", ""};
Cookies ->
{"cookie", cookies_to_string(Scheme, Cookies)}
end.
cookies(Headers, RequestPath, RequestHost) ->
Cookies = parse_set_cookies(Headers, {RequestPath, RequestHost}),
accept_cookies(Cookies, RequestPath, RequestHost).
open_cookie_db({{_, only_session_cookies}, SessionDbName}) ->
EtsDb = ets:new(SessionDbName, [protected, bag,
{keypos, #http_cookie.domain}]),
{undefined, EtsDb};
open_cookie_db({{DbName, Dbdir}, SessionDbName}) ->
File = filename:join(Dbdir, atom_to_list(DbName)),
{ok, DetsDb} = dets:open_file(DbName, [{keypos, #http_cookie.domain},
{type, bag},
{file, File},
{ram_file, true}]),
EtsDb = ets:new(SessionDbName, [protected, bag,
{keypos, #http_cookie.domain}]),
{DetsDb, EtsDb}.
close_cookie_db({undefined, EtsDb}) ->
ets:delete(EtsDb);
close_cookie_db({DetsDb, EtsDb}) ->
dets:close(DetsDb),
ets:delete(EtsDb).
insert(Cookie = #http_cookie{max_age = Int},
Dbs = {undefined, _}) when is_integer(Int) ->
insert(Cookie#http_cookie{max_age = session}, Dbs);
insert(Cookie = #http_cookie{domain = Key, name = Name,
path = Path, max_age = session},
Db = {_, CookieDb}) ->
case ets:match_object(CookieDb, #http_cookie{domain = Key,
name = Name,
path = Path,
_ = '_'}) of
[] ->
ets:insert(CookieDb, Cookie);
[NewCookie] ->
delete(NewCookie, Db),
ets:insert(CookieDb, Cookie)
end,
ok;
insert(#http_cookie{domain = Key, name = Name,
path = Path, max_age = 0},
Db = {CookieDb, _}) ->
case dets:match_object(CookieDb, #http_cookie{domain = Key,
name = Name,
path = Path,
_ = '_'}) of
[] ->
ok;
[NewCookie] ->
delete(NewCookie, Db)
end,
ok;
insert(Cookie = #http_cookie{domain = Key, name = Name, path = Path},
Db = {CookieDb, _}) ->
case dets:match_object(CookieDb, #http_cookie{domain = Key,
name = Name,
path = Path,
_ = '_'}) of
[] ->
dets:insert(CookieDb, Cookie);
[NewCookie] ->
delete(NewCookie, Db),
dets:insert(CookieDb, Cookie)
end,
ok.
lookup_cookies(Key, {undefined, Ets}) ->
ets:match_object(Ets, #http_cookie{domain = Key,
_ = '_'});
lookup_cookies(Key, {Dets,Ets}) ->
SessionCookies = ets:match_object(Ets, #http_cookie{domain = Key,
_ = '_'}),
Cookies = dets:match_object(Dets, #http_cookie{domain = Key,
_ = '_'}),
Cookies ++ SessionCookies.
delete(Cookie = #http_cookie{max_age = session}, {_, CookieDb}) ->
ets:delete_object(CookieDb, Cookie);
delete(Cookie, {CookieDb, _}) ->
dets:delete_object(CookieDb, Cookie).
lookup_cookies(Host, Path, Db) ->
Cookies =
case http_util:is_hostname(Host) of
true ->
HostCookies = lookup_cookies(Host, Db),
[_| DomainParts] = string:tokens(Host, "."),
lookup_domain_cookies(DomainParts, Db, HostCookies);
	false ->
	    lookup_cookies(Host, Db)
end,
ValidCookies = valid_cookies(Cookies, [], Db),
lists:filter(fun(Cookie) ->
lists:prefix(Cookie#http_cookie.path, Path)
end, ValidCookies).
lookup_domain_cookies([], _, AccCookies) ->
lists:flatten(AccCookies);
lookup_domain_cookies([_], _, AccCookies) ->
lists:flatten(AccCookies);
lookup_domain_cookies([Next | DomainParts], CookieDb, AccCookies) ->
Domain = merge_domain_parts(DomainParts, [Next ++ "."]),
lookup_domain_cookies(DomainParts, CookieDb,
[lookup_cookies(Domain, CookieDb)
| AccCookies]).
merge_domain_parts([Part], Merged) ->
lists:flatten(["." | lists:reverse([Part | Merged])]);
merge_domain_parts([Part| Rest], Merged) ->
merge_domain_parts(Rest, [".", Part | Merged]).
cookies_to_string(Scheme, Cookies = [Cookie | _]) ->
Version = "$Version=" ++ Cookie#http_cookie.version ++ "; ",
cookies_to_string(Scheme, path_sort(Cookies), [Version]).
cookies_to_string(_, [], CookieStrs) ->
case length(CookieStrs) of
1 ->
"";
_ ->
lists:flatten(lists:reverse(CookieStrs))
end;
cookies_to_string(https, [Cookie = #http_cookie{secure = true}| Cookies],
CookieStrs) ->
Str = case Cookies of
[] ->
cookie_to_string(Cookie);
_ ->
cookie_to_string(Cookie) ++ "; "
end,
cookies_to_string(https, Cookies, [Str | CookieStrs]);
cookies_to_string(Scheme, [#http_cookie{secure = true}| Cookies],
CookieStrs) ->
cookies_to_string(Scheme, Cookies, CookieStrs);
cookies_to_string(Scheme, [Cookie | Cookies], CookieStrs) ->
Str = case Cookies of
[] ->
cookie_to_string(Cookie);
_ ->
cookie_to_string(Cookie) ++ "; "
end,
cookies_to_string(Scheme, Cookies, [Str | CookieStrs]).
cookie_to_string(Cookie = #http_cookie{name = Name, value = Value}) ->
Str = Name ++ "=" ++ Value,
add_domain(add_path(Str, Cookie), Cookie).
add_path(Str, #http_cookie{path_default = true}) ->
Str;
add_path(Str, #http_cookie{path = Path}) ->
Str ++ "; $Path=" ++ Path.
add_domain(Str, #http_cookie{domain_default = true}) ->
Str;
add_domain(Str, #http_cookie{domain = Domain}) ->
Str ++ "; $Domain=" ++ Domain.
parse_set_cookies(OtherHeaders, DefaultPathDomain) ->
SetCookieHeaders = lists:foldl(fun({"set-cookie", Value}, Acc) ->
[string:tokens(Value, ",")| Acc];
(_, Acc) ->
Acc
end, [], OtherHeaders),
lists:flatten(lists:map(fun(CookieHeader) ->
NewHeader =
fix_netscape_cookie(CookieHeader,
[]),
parse_set_cookie(NewHeader, [],
DefaultPathDomain) end,
SetCookieHeaders)).
parse_set_cookie([], AccCookies, _) ->
AccCookies;
parse_set_cookie([CookieHeader | CookieHeaders], AccCookies,
Defaults = {DefaultPath, DefaultDomain}) ->
[CookieStr | Attributes] = case string:tokens(CookieHeader, ";") of
[CStr] ->
[CStr, ""];
[CStr | Attr] ->
[CStr, Attr]
end,
Pos = string:chr(CookieStr, $=),
Name = string:substr(CookieStr, 1, Pos - 1),
Value = string:substr(CookieStr, Pos + 1),
Cookie = #http_cookie{name = string:strip(Name),
value = string:strip(Value)},
NewAttributes = parse_set_cookie_attributes(Attributes),
TmpCookie = cookie_attributes(NewAttributes, Cookie),
NewCookie = domain_default(path_default(TmpCookie, DefaultPath),
DefaultDomain),
parse_set_cookie(CookieHeaders, [NewCookie | AccCookies], Defaults).
parse_set_cookie_attributes([]) ->
[];
parse_set_cookie_attributes([Attributes]) ->
lists:map(fun(Attr) ->
[AttrName, AttrValue] =
case string:tokens(Attr, "=") of
[Name] ->
[Name, ""];
[Name, Value] ->
[Name, Value];
_ ->
["Dummy",""]
end,
{http_util:to_lower(string:strip(AttrName)),
string:strip(AttrValue)}
end, Attributes).
cookie_attributes([], Cookie) ->
Cookie;
cookie_attributes([{"comment", Value}| Attributes], Cookie) ->
cookie_attributes(Attributes,
Cookie#http_cookie{comment = Value});
cookie_attributes([{"domain", Value}| Attributes], Cookie) ->
cookie_attributes(Attributes,
Cookie#http_cookie{domain = Value});
cookie_attributes([{"max-age", Value}| Attributes], Cookie) ->
ExpireTime = cookie_expires(list_to_integer(Value)),
cookie_attributes(Attributes,
Cookie#http_cookie{max_age = ExpireTime});
cookie_attributes([{"expires", Value}| Attributes], Cookie) ->
Time = http_util:convert_netscapecookie_date(Value),
ExpireTime = calendar:datetime_to_gregorian_seconds(Time),
cookie_attributes(Attributes,
Cookie#http_cookie{max_age = ExpireTime});
cookie_attributes([{"path", Value}| Attributes], Cookie) ->
cookie_attributes(Attributes,
Cookie#http_cookie{path = Value});
cookie_attributes([{"secure", _}| Attributes], Cookie) ->
cookie_attributes(Attributes,
Cookie#http_cookie{secure = true});
cookie_attributes([{"version", Value}| Attributes], Cookie) ->
cookie_attributes(Attributes,
Cookie#http_cookie{version = Value});
cookie_attributes([_| Attributes], Cookie) ->
cookie_attributes(Attributes, Cookie).
domain_default(Cookie = #http_cookie{domain = undefined},
DefaultDomain) ->
Cookie#http_cookie{domain = DefaultDomain, domain_default = true};
domain_default(Cookie, _) ->
Cookie.
path_default(Cookie = #http_cookie{path = undefined},
DefaultPath) ->
Cookie#http_cookie{path = skip_right_most_slash(DefaultPath),
path_default = true};
path_default(Cookie, _) ->
Cookie.
skip_right_most_slash("/") ->
"/";
skip_right_most_slash(Str) ->
string:strip(Str, right, $/).
accept_cookies(Cookies, RequestPath, RequestHost) ->
lists:filter(fun(Cookie) ->
accept_cookie(Cookie, RequestPath, RequestHost)
end, Cookies).
accept_cookie(Cookie, RequestPath, RequestHost) ->
accept_path(Cookie, RequestPath) and accept_domain(Cookie, RequestHost).
accept_path(#http_cookie{path = Path}, RequestPath) ->
lists:prefix(Path, RequestPath).
accept_domain(#http_cookie{domain = RequestHost}, RequestHost) ->
true;
accept_domain(#http_cookie{domain = Domain}, RequestHost) ->
HostCheck = case http_util:is_hostname(RequestHost) of
true ->
(lists:suffix(Domain, RequestHost) andalso
(not
lists:member($.,
string:substr(RequestHost, 1,
(length(RequestHost) -
length(Domain))))));
false ->
false
end,
HostCheck andalso (hd(Domain) == $.)
andalso (length(string:tokens(Domain, ".")) > 1).
cookie_expires(0) ->
0;
cookie_expires(DeltaSec) ->
NowSec = calendar:datetime_to_gregorian_seconds({date(), time()}),
NowSec + DeltaSec.
is_cookie_expired(#http_cookie{max_age = session}) ->
false;
is_cookie_expired(#http_cookie{max_age = ExpireTime}) ->
NowSec = calendar:datetime_to_gregorian_seconds({date(), time()}),
ExpireTime - NowSec =< 0.
valid_cookies([], Valid, _) ->
Valid;
valid_cookies([Cookie | Cookies], Valid, Db) ->
case is_cookie_expired(Cookie) of
true ->
delete(Cookie, Db),
valid_cookies(Cookies, Valid, Db);
false ->
valid_cookies(Cookies, [Cookie | Valid], Db)
end.
path_sort(Cookies)->
lists:reverse(lists:keysort(#http_cookie.path, Cookies)).
fix_netscape_cookie([Cookie1, Cookie2 | Rest], Acc) ->
case inets_regexp:match(Cookie1, "expires=") of
{_, _, _} ->
fix_netscape_cookie(Rest, [Cookie1 ++ Cookie2 | Acc]);
nomatch ->
fix_netscape_cookie([Cookie2 |Rest], [Cookie1| Acc])
end;
fix_netscape_cookie([Cookie | Rest], Acc) ->
fix_netscape_cookie(Rest, [Cookie | Acc]);
fix_netscape_cookie([], Acc) ->
Acc.
|
b8bc82a2dbf330900b4ecf2665ece1c1980cc74e0819f47a0f86183cd8eeabdb | ladderlife/autochrome | annotation.clj | (ns autochrome.annotation
(:require [autochrome.common :as clj-common]
[autochrome.scope :as scope]
[autochrome.tree :as tree]
[autochrome.xref :as xref])
(:import [java.util IdentityHashMap]))
(defn attach
[{:keys [type text delim wscontents] :as form} ann]
(let [a (.get ann form)
rec (cond
(or (= type :coll)
(= type :reader-conditional)
(= type :reader-conditional-splicing)
(clj-common/decoration? form))
(assoc form :wscontents (mapv #(attach % ann) wscontents))
(= type :quote)
(assoc form :val (list (attach (first (:val form)) ann)))
(= type :lambda)
(assoc form :text (attach text ann))
:else form)]
(cond-> rec
a (assoc :annotation a))))
(defn annotated?
[form ann]
(or
(.get ann form)
(when-let [children (tree/->children form)]
(loop [[c & cs] children]
(when c
(if (annotated? c ann)
true
(recur cs)))))))
(defn syntax-highlighting
[form]
(let [ann (IdentityHashMap.)]
(scope/execute-writer
(scope/walk-with-scope
form
(fn [c f]
(if-let [sym (scope/form->real-symbol c f)]
(.put ann f
(let [locally-bound (get (:scope c) sym)
qual (scope/qualify-symbol c f)]
(cond
(get (:scope c) sym)
:local
(and (nil? qual) (xref/javadoc-link (:text f)))
:java-class
(and (symbol? qual) (= "clojure.core" (namespace qual)))
:core
:else sym)))
(when (some-> f :text (.startsWith "."))
(.put ann f :java-class))))
scope/default-context))
ann))
| null | https://raw.githubusercontent.com/ladderlife/autochrome/d454e4450658b4799bfd939f2a84690921800806/src/autochrome/annotation.clj | clojure | (ns autochrome.annotation
(:require [autochrome.common :as clj-common]
[autochrome.scope :as scope]
[autochrome.tree :as tree]
[autochrome.xref :as xref])
(:import [java.util IdentityHashMap]))
(defn attach
[{:keys [type text delim wscontents] :as form} ann]
(let [a (.get ann form)
rec (cond
(or (= type :coll)
(= type :reader-conditional)
(= type :reader-conditional-splicing)
(clj-common/decoration? form))
(assoc form :wscontents (mapv #(attach % ann) wscontents))
(= type :quote)
(assoc form :val (list (attach (first (:val form)) ann)))
(= type :lambda)
(assoc form :text (attach text ann))
:else form)]
(cond-> rec
a (assoc :annotation a))))
(defn annotated?
[form ann]
(or
(.get ann form)
(when-let [children (tree/->children form)]
(loop [[c & cs] children]
(when c
(if (annotated? c ann)
true
(recur cs)))))))
(defn syntax-highlighting
[form]
(let [ann (IdentityHashMap.)]
(scope/execute-writer
(scope/walk-with-scope
form
(fn [c f]
(if-let [sym (scope/form->real-symbol c f)]
(.put ann f
(let [locally-bound (get (:scope c) sym)
qual (scope/qualify-symbol c f)]
(cond
(get (:scope c) sym)
:local
(and (nil? qual) (xref/javadoc-link (:text f)))
:java-class
(and (symbol? qual) (= "clojure.core" (namespace qual)))
:core
:else sym)))
(when (some-> f :text (.startsWith "."))
(.put ann f :java-class))))
scope/default-context))
ann))
|
|
ff88554ae5b3bf825b920a93d99e862b1a2f3b9c00e11f97b6ff025e7c06013e | LexiFi/menhir | keyword.ml | (******************************************************************************)
(* *)
(* *)
(*   , Paris                                                                  *)
(*   , PPS, Université Paris Diderot                                          *)
(*                                                                            *)
(*   . All rights reserved. This file is distributed under the                *)
(*   terms of the GNU General Public License version 2, as described in the   *)
(* file LICENSE. *)
(* *)
(******************************************************************************)
(* This module provides some type and function definitions
that help deal with the keywords that we recognize within
semantic actions. *)
(* ------------------------------------------------------------------------- *)
(* Types. *)
(* The user can request position information either at type
[int] (a simple offset) or at type [Lexing.position]. *)
type flavor =
| FlavorOffset
| FlavorPosition
| FlavorLocation
(* The user can request position information about the $start or $end
   of a symbol. Also, $symbolstart requests the computation of the
   start position of the first nonempty element in a production. *)
type where =
| WhereSymbolStart
| WhereStart
| WhereEnd
(* The user can request position information about a production's
   left-hand side or about one of the symbols in its right-hand
   side, which he can refer to by position or by name. *)
type subject =
| Before
| Left
| RightNamed of string
(* Keywords inside semantic actions. They allow access to semantic
values or to position information. *)
type keyword =
| Position of subject * where * flavor
| SyntaxError
(* ------------------------------------------------------------------------- *)
(* These auxiliary functions help map a [Position] keyword to the
name of the variable that the keyword is replaced with. *)
let where = function
| WhereSymbolStart ->
"symbolstart"
| WhereStart ->
"start"
| WhereEnd ->
"end"
let subject = function
| Before ->
"__0_"
| Left ->
""
| RightNamed id ->
Printf.sprintf "_%s_" id
let flavor = function
| FlavorPosition ->
"pos"
| FlavorOffset ->
"ofs"
| FlavorLocation ->
"loc"
let posvar s w f =
match w, f with
| _, (FlavorOffset | FlavorPosition) ->
Printf.sprintf "_%s%s%s" (where w) (flavor f) (subject s)
| WhereSymbolStart, FlavorLocation ->
"_sloc"
| WhereStart, FlavorLocation ->
Printf.sprintf "_loc%s" (subject s)
| _ ->
assert false
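(* Illustrative examples (not part of the original file):
     posvar (RightNamed "e") WhereStart FlavorPosition   evaluates to   "_startpos_e_"
     posvar Left             WhereEnd   FlavorOffset     evaluates to   "_endofs"
     posvar (RightNamed "e") WhereStart FlavorLocation   evaluates to   "_loc_e_"      *)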
(* ------------------------------------------------------------------------- *)
(* Sets of keywords. *)
module KeywordSet = struct
include Set.Make (struct
type t = keyword
let compare = compare
end)
let map f keywords =
fold (fun keyword accu ->
add (f keyword) accu
) keywords empty
end
| null | https://raw.githubusercontent.com/LexiFi/menhir/794e64e7997d4d3f91d36dd49aaecc942ea858b7/sdk/keyword.ml | ocaml | ****************************************************************************
file LICENSE.
****************************************************************************
This module provides some type and function definitions
that help deal with the keywords that we recognize within
semantic actions.
-------------------------------------------------------------------------
Types.
The user can request position information either at type
[int] (a simple offset) or at type [Lexing.position].
Keywords inside semantic actions. They allow access to semantic
values or to position information.
-------------------------------------------------------------------------
These auxiliary functions help map a [Position] keyword to the
name of the variable that the keyword is replaced with.
-------------------------------------------------------------------------
Sets of keywords. |
type flavor =
| FlavorOffset
| FlavorPosition
| FlavorLocation
type where =
| WhereSymbolStart
| WhereStart
| WhereEnd
type subject =
| Before
| Left
| RightNamed of string
type keyword =
| Position of subject * where * flavor
| SyntaxError
let where = function
| WhereSymbolStart ->
"symbolstart"
| WhereStart ->
"start"
| WhereEnd ->
"end"
let subject = function
| Before ->
"__0_"
| Left ->
""
| RightNamed id ->
Printf.sprintf "_%s_" id
let flavor = function
| FlavorPosition ->
"pos"
| FlavorOffset ->
"ofs"
| FlavorLocation ->
"loc"
let posvar s w f =
match w, f with
| _, (FlavorOffset | FlavorPosition) ->
Printf.sprintf "_%s%s%s" (where w) (flavor f) (subject s)
| WhereSymbolStart, FlavorLocation ->
"_sloc"
| WhereStart, FlavorLocation ->
Printf.sprintf "_loc%s" (subject s)
| _ ->
assert false
module KeywordSet = struct
include Set.Make (struct
type t = keyword
let compare = compare
end)
let map f keywords =
fold (fun keyword accu ->
add (f keyword) accu
) keywords empty
end
|
27585bcabce94ca63f43a3bdf70069840e7229d73161c9897771641675c2c694 | syntheorem/thexa | Core.hs | -- | The primitive interface to the lexer.
--
-- Compared to the main "Thexa" module, this module presents a less featureful, but more flexible,
-- interface for running a lexer. It does not manage any lexer state for you, and instead just
-- provides the 'nextMatch' function to find a single match at a time. If the features provided by
" Thexa " are not suitable for your use case , you can instead build on top of " Thexa . Core " .
module Thexa.Core
( module Thexa.Rule
  -- * Lexer construction
, Lexer
, makeLexer
-- * Running the lexer
, GetNextByte
, EvalCondition
, MatchResult(..)
, nextMatch
) where
import Data.Set (Set)
import Data.Set qualified as Set
import Data.Vector (Vector)
import Data.Vector qualified as V
import Instances.TH.Lift ()
import Language.Haskell.TH.Syntax.Compat qualified as TH
import Thexa.Internal.DFA (MatchKey)
import Thexa.Internal.DFA qualified as DFA
import Thexa.Internal.IntLike.Set qualified as ILSet
import Thexa.Internal.Regex.Compiler
import Thexa.Position (GetNextByte)
import Thexa.Rule
type DFA = DFA.DFA DFA.Dense32
-- | A precompiled lexer, parameterized on the types of its rules' modes, conditions, and actions.
data Lexer mode cond act = Lexer
{-# UNPACK #-} !(Vector DFA)
-- ^ The DFAs used to match input to the lexer, one for each mode.
{-# UNPACK #-} !(Vector (MatchInfo cond act))
-- ^ Array of info on how we should handle a match for each rule. The DFAs are constructed so that
  -- the 'MatchKey' for each regex is an index into this array.
-- Specify that the mode is nominal since it would otherwise be phantom
type role Lexer nominal representational representational
data MatchInfo cond act = MatchInfo
{ matchAction :: Maybe act
-- ^ Action to run on match, or 'Nothing' to skip matched input.
, matchFollowedBy :: Maybe DFA
  -- ^ Optional DFA that should match the input following this match.
, matchNotFollowedBy :: Maybe DFA
  -- ^ Optional DFA that should not match the input following this match.
, matchConditions :: [cond]
-- ^ List of conditions which must be satisfied in order for the rule to match. Conditions are
  -- resolved to a 'Bool' by a user-supplied 'EvalCondition' function.
}
deriving (Generic, NFData)
instance (NFData cond, NFData act) => NFData (Lexer mode cond act) where
rnf (Lexer dfas matchVec) = rnf dfas `seq` rnf matchVec
-- | Construct a lexer at compile-time from the list of rules it should match.
--
-- The order of the rules in the list is important; the resulting lexer will always prefer the
-- longest match, but in the case that multiple rules match the same length of input, the rule that
-- appears earliest in the list will be chosen.
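--
-- A typical use (an illustrative sketch, not part of the original module; the names are
-- invented) splices the compiled lexer in with Typed Template Haskell:
--
-- > myLexer :: Lexer MyMode MyCond MyAction
-- > myLexer = $$(makeLexer myRules)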
makeLexer :: forall mode cond act. LexerMode mode => [Rule mode cond act] -> SpliceQ (Lexer mode cond act)
makeLexer rules
| modesAreValid = [|| Lexer dfas (V.fromListN matchListLen $$matchList) ||]
| otherwise = TH.liftSplice $ fail "invalid Enum instance for LexerMode"
where
dfas = V.fromList $ map (DFA.fromNFA . compileRegexes) regexesByMode
-- Extract the modes, regex, and rule index from each rule
regexes :: [(Set mode, (Regex, Int))]
regexes = [ (if Set.null modes then defaultMode else modes, (ruleRegex rule, i))
| i <- [0..]
| rule <- rules
, let modes = ruleModes rule
]
-- Get the list of regexes for each mode
regexesByMode :: [[(Regex, Int)]]
regexesByMode = allModes & map \mode ->
[ regex | (modes, regex) <- regexes, Set.member mode modes ]
  -- Enforce that the enum instance is equivalent to a derived one
modesAreValid = map fromEnum allModes == [0..fromEnum (maxBound @mode)]
defaultMode = Set.singleton minBound
allModes = [minBound @mode .. maxBound @mode]
  -- Construct the list of MatchInfos
matchList = liftListWith liftMatchInfo rules
matchListLen = length rules
liftMatchInfo :: Rule mode cond act -> SpliceQ (MatchInfo cond act)
liftMatchInfo rule = [|| MatchInfo
{ matchAction = $$matchAct
, matchFollowedBy = fbDFA
, matchNotFollowedBy = nfDFA
, matchConditions = $$matchConds
}||]
where
fbDFA = mkDFA <$> ruleFollowedBy rule
nfDFA = mkDFA <$> ruleNotFollowedBy rule
mkDFA = DFA.fromNFA . compileRegex
matchConds = liftListWith id (ruleConditions rule)
matchAct = case ruleAction rule of
Nothing -> [|| Nothing ||]
Just act -> [|| Just $$act ||]
liftListWith :: (a -> SpliceQ b) -> [a] -> SpliceQ [b]
liftListWith f = foldr (\a bsQ -> [|| $$(f a) : $$bsQ ||]) [|| [] ||]
---------------------
--
---------------------
-- | Type of the function used to evaluate rule conditions.
--
-- The function is provided the input stream at the start of the match, the input stream after the
-- match, and the condition to evaluate. Returns whether the condition was satisfied.
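--
-- For instance (an illustrative sketch, not part of the original module; the condition type
-- and names are invented), a "must not be at end of input" condition over strict ByteStrings
-- could be evaluated as:
--
-- > evalCond :: EvalCondition ByteString MyCond
-- > evalCond _strBefore strAfter NotAtEOF = not (BS.null strAfter)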
type EvalCondition str cond = str -> str -> cond -> Bool
-- | Result of trying to get the 'nextMatch' of the input.
--
-- On a successful match, the result contains the remaining unconsumed input stream. Note that it
-- does not contain the string that was actually matched; to provide that, you need a way to derive
-- it from the initial input and the remaining input, e.g. by having @str@ contain an offset that is
-- incremented by the 'GetNextByte' function.
data MatchResult str act
-- | Successfully matched a rule that skips the input it matched.
= MatchSkip str
-- | Successfully matched a rule that should run the given action when matched.
| MatchAction str act
-- | Failed to match any rule.
| MatchError
-- | Reached the end of the input.
--
-- Note that this result is only returned if the input provided to 'nextMatch' is already empty.
-- Otherwise, if we reach the end of the input but haven't found a match, we return 'MatchError'.
| MatchEOF
-- | Attempts to find the next match from the start of the provided input stream.
--
-- This function is inlined when applied to all of its arguments, so it is recommended to create a
-- wrapper function which calls this with the specific values for your lexer. The main reason for
-- this is so the 'GetNextByte' function can be inlined into /this/ function and hopefully optimized
-- to avoid actually creating a boxed 'Word8' value for every byte.
nextMatch :: forall mode cond act str
. LexerMode mode
=> Lexer mode cond act -- ^ The lexer to match the input against.
-> GetNextByte str -- ^ Function to get the next byte of the input stream.
-> EvalCondition str cond -- ^ Function to evaluate match conditions.
-> mode -- ^ The currently active mode for the lexer.
-> str -- ^ The input stream to match.
-> MatchResult str act
nextMatch (Lexer dfas matchVec) getNextByte evalCond mode initStr =
case find (validMatch initStr) matches of
Just (str', MatchInfo{matchAction=mAct})
| Just act <- mAct -> MatchAction str' act
| Nothing <- mAct -> MatchSkip str'
_ | isEOF -> MatchEOF
_ | otherwise -> MatchError
where
isEOF = isNothing (getNextByte initStr)
matches = buildMatchStack DFA.startNode initStr []
-- Recursively build the "match stack" for the given input stream. The idea is we step the DFA
-- as far as we can along the input. At each step, we push any possible matches onto the match
-- stack. At this point they are only "possible" matches because they may have additional
-- conditions to check. But we don't want to check these conditions eagerly, because we always
-- prefer the longest match, so many possible matches will be ruled out by the end anyway. So
-- instead we build the whole stack of possible matches and then once we're done we can simply
-- find the top-most match that satisfies its additional conditions, if any.
buildMatchStack :: DFA.Node -> str -> [(str, MatchInfo cond act)] -> [(str, MatchInfo cond act)]
buildMatchStack node str matchStack
| Just (b, str') <- getNextByte str
, Just node' <- DFA.step dfa node b = buildMatchStack node' str' matchStack'
| otherwise = matchStack'
where
-- Push the matches for the current node onto the match stack. Note that the order is
-- important, because matches higher on the stack are prioritized. Since the match keys are
-- the index of the rule that we're matching, and we want to prioritize earlier rules, using
-- a foldr respects this ordering since we'll fold over the match keys in descending order.
matchStack' = ILSet.foldr pushMatch matchStack (DFA.matches dfa node)
pushMatch k ms = (str, matchKeyToInfo k) : ms
dfa = (V.!) dfas (fromEnum mode)
matchKeyToInfo :: MatchKey -> MatchInfo cond act
matchKeyToInfo k = (V.!) matchVec k
-- Returns whether all conditions of the given match are satisfied.
validMatch :: str -> (str, MatchInfo cond act) -> Bool
validMatch str (str', match) = and
[ case matchFollowedBy match of
Nothing -> True
Just dfa -> dfaCanMatch dfa DFA.startNode str'
, case matchNotFollowedBy match of
Nothing -> True
Just dfa -> not (dfaCanMatch dfa DFA.startNode str')
, all (evalCond str str') (matchConditions match)
]
-- Returns whether the given DFA matches the input stream when starting from the given node.
-- Used to check the followedBy and notFollowedBy conditions.
dfaCanMatch :: DFA -> DFA.Node -> str -> Bool
dfaCanMatch dfa node str
| DFA.isMatchNode dfa node = True
| Just (b, str') <- getNextByte str
, Just node' <- DFA.step dfa node b = dfaCanMatch dfa node' str'
| otherwise = False
{-# INLINE nextMatch #-}
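-- Hedged sketch of the wrapper recommended in the haddock above (not part of the original
-- module); the lexer value, byte source, and condition evaluator named here are assumptions.
--
-- > myNextMatch :: MyInput -> MatchResult MyInput MyAction
-- > myNextMatch = nextMatch myLexer myGetNextByte myEvalCond Default
-- > {-# INLINE myNextMatch #-}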
| null | https://raw.githubusercontent.com/syntheorem/thexa/e599efd97c0cb49dc3acb9a5da911e20608a210b/src/Thexa/Core.hs | haskell | | The primitive interface to the lexer.
interface for running a lexer. It does not manage any lexer state for you, and instead just
provides the 'nextMatch' function to find a single match at a time. If the features provided by
* Running the lexer
| A precompiled lexer, parameterized on the types of its rules' modes, conditions, and actions.
# UNPACK #
^ The DFAs used to match input to the lexer, one for each mode.
# UNPACK #
^ Array of info on how we should handle a match for each rule. The DFAs are constructed so that
Specify that the mode is nominal since it would otherwise be phantom
^ Action to run on match, or 'Nothing' to skip matched input.
^ List of conditions which must be satisfied in order for the rule to match. Conditions are
| Construct a lexer at compile-time from the list of rules it should match.
The order of the rules in the list is important; the resulting lexer will always prefer the
longest match, but in the case that multiple rules match the same length of input, the rule that
appears earliest in the list will be chosen.
Extract the modes, regex, and rule index from each rule
Get the list of regexes for each mode
-------------------
-------------------
| Type of the function used to evaluate rule conditions.
The function is provided the input stream at the start of the match, the input stream after the
match, and the condition to evaluate. Returns whether the condition was satisfied.
| Result of trying to get the 'nextMatch' of the input.
On a successful match, the result contains the remaining unconsumed input stream. Note that it
does not contain the string that was actually matched; to provide that, you need a way to derive
it from the initial input and the remaining input, e.g. by having @str@ contain an offset that is
incremented by the 'GetNextByte' function.
| Successfully matched a rule that skips the input it matched.
| Successfully matched a rule that should run the given action when matched.
| Failed to match any rule.
| Reached the end of the input.
Note that this result is only returned if the input provided to 'nextMatch' is already empty.
| Attempts to find the next match from the start of the provided input stream.
This function is inlined when applied to all of its arguments, so it is recommended to create a
wrapper function which calls this with the specific values for your lexer. The main reason for
this is so the 'GetNextByte' function can be inlined into /this/ function and hopefully optimized
to avoid actually creating a boxed 'Word8' value for every byte.
^ The lexer to match the input against.
^ Function to get the next byte of the input stream.
^ Function to evaluate match conditions.
^ The currently active mode for the lexer.
^ The input stream to match.
as far as we can along the input. At each step, we push any possible matches onto the match
stack. At this point they are only "possible" matches because they may have additional
conditions to check. But we don't want to check these conditions eagerly, because we always
prefer the longest match, so many possible matches will be ruled out by the end anyway. So
instead we build the whole stack of possible matches and then once we're done we can simply
find the top-most match that satisfies its additional conditions, if any.
Push the matches for the current node onto the match stack. Note that the order is
important, because matches higher on the stack are prioritized. Since the match keys are
the index of the rule that we're matching, and we want to prioritize earlier rules, using
a foldr respects this ordering since we'll fold over the match keys in descending order.
Returns whether all conditions of the given match are satisfied.
Used to check the followedBy and notFollowedBy conditions. | Compared to the main " Thexa " module , this module presents a less featureful , but more flexible ,
" Thexa " are not suitable for your use case , you can instead build on top of " Thexa . Core " .
module Thexa.Core
( module Thexa.Rule
* construction
, Lexer
, makeLexer
, GetNextByte
, EvalCondition
, MatchResult(..)
, nextMatch
) where
import Data.Set (Set)
import Data.Set qualified as Set
import Data.Vector (Vector)
import Data.Vector qualified as V
import Instances.TH.Lift ()
import Language.Haskell.TH.Syntax.Compat qualified as TH
import Thexa.Internal.DFA (MatchKey)
import Thexa.Internal.DFA qualified as DFA
import Thexa.Internal.IntLike.Set qualified as ILSet
import Thexa.Internal.Regex.Compiler
import Thexa.Position (GetNextByte)
import Thexa.Rule
type DFA = DFA.DFA DFA.Dense32
data Lexer mode cond act = Lexer
-- the 'MatchKey' for each regex is an index into this array.
type role Lexer nominal representational representational
data MatchInfo cond act = MatchInfo
{ matchAction :: Maybe act
, matchFollowedBy :: Maybe DFA
-- ^ Optional DFA that should match the input following this match.
, matchNotFollowedBy :: Maybe DFA
-- ^ Optional DFA that should not match the input following this match.
, matchConditions :: [cond]
-- resolved to a 'Bool' by a user-supplied 'EvalCondition' function.
}
deriving (Generic, NFData)
instance (NFData cond, NFData act) => NFData (Lexer mode cond act) where
rnf (Lexer dfas matchVec) = rnf dfas `seq` rnf matchVec
makeLexer :: forall mode cond act. LexerMode mode => [Rule mode cond act] -> SpliceQ (Lexer mode cond act)
makeLexer rules
| modesAreValid = [|| Lexer dfas (V.fromListN matchListLen $$matchList) ||]
| otherwise = TH.liftSplice $ fail "invalid Enum instance for LexerMode"
where
dfas = V.fromList $ map (DFA.fromNFA . compileRegexes) regexesByMode
regexes :: [(Set mode, (Regex, Int))]
regexes = [ (if Set.null modes then defaultMode else modes, (ruleRegex rule, i))
| i <- [0..]
| rule <- rules
, let modes = ruleModes rule
]
regexesByMode :: [[(Regex, Int)]]
regexesByMode = allModes & map \mode ->
[ regex | (modes, regex) <- regexes, Set.member mode modes ]
-- Enforce that the enum instance is equivalent to a derived one
modesAreValid = map fromEnum allModes == [0..fromEnum (maxBound @mode)]
defaultMode = Set.singleton minBound
allModes = [minBound @mode .. maxBound @mode]
-- Construct the list of MatchInfos
matchList = liftListWith liftMatchInfo rules
matchListLen = length rules
liftMatchInfo :: Rule mode cond act -> SpliceQ (MatchInfo cond act)
liftMatchInfo rule = [|| MatchInfo
{ matchAction = $$matchAct
, matchFollowedBy = fbDFA
, matchNotFollowedBy = nfDFA
, matchConditions = $$matchConds
}||]
where
fbDFA = mkDFA <$> ruleFollowedBy rule
nfDFA = mkDFA <$> ruleNotFollowedBy rule
mkDFA = DFA.fromNFA . compileRegex
matchConds = liftListWith id (ruleConditions rule)
matchAct = case ruleAction rule of
Nothing -> [|| Nothing ||]
Just act -> [|| Just $$act ||]
liftListWith :: (a -> SpliceQ b) -> [a] -> SpliceQ [b]
liftListWith f = foldr (\a bsQ -> [|| $$(f a) : $$bsQ ||]) [|| [] ||]
type EvalCondition str cond = str -> str -> cond -> Bool
data MatchResult str act
= MatchSkip str
| MatchAction str act
| MatchError
-- Otherwise, if we reach the end of the input but haven't found a match, we return 'MatchError'.
| MatchEOF
nextMatch :: forall mode cond act str
. LexerMode mode
-> MatchResult str act
nextMatch (Lexer dfas matchVec) getNextByte evalCond mode initStr =
case find (validMatch initStr) matches of
Just (str', MatchInfo{matchAction=mAct})
| Just act <- mAct -> MatchAction str' act
| Nothing <- mAct -> MatchSkip str'
_ | isEOF -> MatchEOF
_ | otherwise -> MatchError
where
isEOF = isNothing (getNextByte initStr)
matches = buildMatchStack DFA.startNode initStr []
-- Recursively build the "match stack" for the given input stream. The idea is we step the DFA
buildMatchStack :: DFA.Node -> str -> [(str, MatchInfo cond act)] -> [(str, MatchInfo cond act)]
buildMatchStack node str matchStack
| Just (b, str') <- getNextByte str
, Just node' <- DFA.step dfa node b = buildMatchStack node' str' matchStack'
| otherwise = matchStack'
where
matchStack' = ILSet.foldr pushMatch matchStack (DFA.matches dfa node)
pushMatch k ms = (str, matchKeyToInfo k) : ms
dfa = (V.!) dfas (fromEnum mode)
matchKeyToInfo :: MatchKey -> MatchInfo cond act
matchKeyToInfo k = (V.!) matchVec k
validMatch :: str -> (str, MatchInfo cond act) -> Bool
validMatch str (str', match) = and
[ case matchFollowedBy match of
Nothing -> True
Just dfa -> dfaCanMatch dfa DFA.startNode str'
, case matchNotFollowedBy match of
Nothing -> True
Just dfa -> not (dfaCanMatch dfa DFA.startNode str')
, all (evalCond str str') (matchConditions match)
]
-- Returns whether the given DFA matches the input stream when starting from the given node.
dfaCanMatch :: DFA -> DFA.Node -> str -> Bool
dfaCanMatch dfa node str
| DFA.isMatchNode dfa node = True
| Just (b, str') <- getNextByte str
, Just node' <- DFA.step dfa node b = dfaCanMatch dfa node' str'
| otherwise = False
{-# INLINE nextMatch #-}
|
01d32c817abf80d5e75b5eb911be87d472a6b41287351b0f0df687456f4c34ae | haskell-opengl/OpenGLRaw | CullVertex.hs | # LANGUAGE PatternSynonyms #
--------------------------------------------------------------------------------
-- |
-- Module      :  Graphics.GL.EXT.CullVertex
-- Copyright   :  (c) 2019
-- License     :  BSD3
--
-- Maintainer  :  < >
-- Stability : stable
-- Portability : portable
--
--------------------------------------------------------------------------------
module Graphics.GL.EXT.CullVertex (
-- * Extension Support
glGetEXTCullVertex,
gl_EXT_cull_vertex,
-- * Enums
pattern GL_CULL_VERTEX_EXT,
pattern GL_CULL_VERTEX_EYE_POSITION_EXT,
pattern GL_CULL_VERTEX_OBJECT_POSITION_EXT,
-- * Functions
glCullParameterdvEXT,
glCullParameterfvEXT
) where
import Graphics.GL.ExtensionPredicates
import Graphics.GL.Tokens
import Graphics.GL.Functions
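-- Hedged usage sketch (not part of the original module): gating the EXT_cull_vertex entry
-- points on the extension predicate; the helper name and pointer handling are assumptions.
--
-- > setEyeCulling ptr = do
-- >   ok <- glGetEXTCullVertex
-- >   when ok $ glCullParameterfvEXT GL_CULL_VERTEX_EYE_POSITION_EXT ptr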
| null | https://raw.githubusercontent.com/haskell-opengl/OpenGLRaw/57e50c9d28dfa62d6a87ae9b561af28f64ce32a0/src/Graphics/GL/EXT/CullVertex.hs | haskell | ------------------------------------------------------------------------------
|
License : BSD3
Stability : stable
Portability : portable
------------------------------------------------------------------------------
* Extension Support
* Enums
* Functions | # LANGUAGE PatternSynonyms #
Module : Graphics .
Copyright : ( c ) 2019
Maintainer : < >
module Graphics.GL.EXT.CullVertex (
glGetEXTCullVertex,
gl_EXT_cull_vertex,
pattern GL_CULL_VERTEX_EXT,
pattern GL_CULL_VERTEX_EYE_POSITION_EXT,
pattern GL_CULL_VERTEX_OBJECT_POSITION_EXT,
glCullParameterdvEXT,
glCullParameterfvEXT
) where
import Graphics.GL.ExtensionPredicates
import Graphics.GL.Tokens
import Graphics.GL.Functions
|
9c273691f1ec07793b5f325e2c7afb943059420665332ecd9e57003c3565b6f5 | rlepigre/subml | subset.ml | (****************************************************************************)
(**{3 Implementation of mutable sets via subset constraints }*)
(****************************************************************************)
type 'a elts = Finite of 'a list | CoFinite of 'a list
type 'a set = { mutable set : 'a elts
(** The current value of the set. *)
; mutable frozen : bool
(** if true, the set is frozen and will not change anymore *) }
let create l = { set = CoFinite l; frozen = false }
(** test if a set is contained in a list for the given equality.
if the set is not frozen, it will decrease *)
let test : ('a -> 'a -> bool) -> 'a set -> 'a list -> bool =
fun eq set l ->
if set.frozen then
match set.set with
| CoFinite _ -> assert false (* enforced by get *)
| Finite l' -> List.for_all (fun x -> List.exists (fun y -> eq x y) l) l'
else
(* when the set is not fixed we change set into (set inter (Finite l))*)
match set.set with
| CoFinite l' -> set.set <- Finite (List.filter (fun x ->
not (List.exists (fun y -> eq x y) l')) l); true
| Finite l' ->
set.set <- Finite (List.filter (fun x -> List.exists (fun y -> eq x y) l) l');
true
(** check if a set is empty *)
let is_empty : 'a set -> bool =
fun set -> match set.set with
| Finite [] -> true
| _ -> false
(** get the current value of a set and freeze it *)
let get : 'a set -> 'a list =
fun set ->
set.frozen <- true;
match set.set with
| CoFinite _ -> set.set <- Finite []; []
| Finite l -> l
(** get the current value of a set without freezing it.
    should only be used for printing. Return [] for CoFinite *)
let unsafe_get : 'a set -> 'a list =
fun set ->
match set.set with
| CoFinite _ -> []
| Finite l -> l
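(* Hedged usage sketch, not from the original file; the values are illustrative only.
     let s = create [] in              (* the co-finite set that excludes nothing *)
     assert (test (=) s [1;2;3]);      (* s shrinks to a subset of [1;2;3] *)
     let elems = get s in              (* freezes s and returns its elements *)
     ignore elems *)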
| null | https://raw.githubusercontent.com/rlepigre/subml/6d2b35276d8d299bef9d5e70653a88be7f89f7f1/src/subset.ml | ocaml | **************************************************************************
*{3 Implementattion of mutable sets via subset constraints }
**************************************************************************
* The current value of the set.
* if true, the set is frozen and will not change anymore
* test if a set is contained in a list for the given equality.
if the set is not frozen, it will decrease
enforced by get
when the set is not fixed we change set into (set inter (Finite l))
* check if a set is empty
* get the current value of a set and froze it |
type 'a elts = Finite of 'a list | CoFinite of 'a list
type 'a set = { mutable set : 'a elts
; mutable frozen : bool
let create l = { set = CoFinite l; frozen = false }
let test : ('a -> 'a -> bool) -> 'a set -> 'a list -> bool =
fun eq set l ->
if set.frozen then
match set.set with
| Finite l' -> List.for_all (fun x -> List.exists (fun y -> eq x y) l) l'
else
match set.set with
| CoFinite l' -> set.set <- Finite (List.filter (fun x ->
not (List.exists (fun y -> eq x y) l')) l); true
| Finite l' ->
set.set <- Finite (List.filter (fun x -> List.exists (fun y -> eq x y) l) l');
true
let is_empty : 'a set -> bool =
fun set -> match set.set with
| Finite [] -> true
| _ -> false
let get : 'a set -> 'a list =
fun set ->
set.frozen <- true;
match set.set with
| CoFinite _ -> set.set <- Finite []; []
| Finite l -> l
(** get the current value of a set without freezing it.
    should only be used for printing. Return [] for CoFinite *)
let unsafe_get : 'a set -> 'a list =
fun set ->
match set.set with
| CoFinite _ -> []
| Finite l -> l
|
a2121b79e4be184839ca6a158bb5e62652fe06c47e7b2ced51e3d40194d9d541 | YoshikuniJujo/funpaala | fruits1.hs | myFavoriteFruit = "banana"
| null | https://raw.githubusercontent.com/YoshikuniJujo/funpaala/5366130826da0e6b1180992dfff94c4a634cda99/samples/05_function/fruits1.hs | haskell | myFavoriteFruit = "banana"
|
|
84a1b1810e27ce61fa7e3455af44b034e3fb7f21d09779fb334f806d2314d559 | dvingo/cljs-emotion | target_styled.cljs | (ns dv.cljs-emotion.target-styled
(:require
[devcards.core :as dc :refer (defcard)]
[sablono.core :refer [html]]
["polished" :as p :refer [darken lighten]]
["react" :as react]
["react-dom" :as react-dom]
[dv.cljs-emotion :as em :refer [defstyled keyframes global-style]]))
(defcard
"These examples show some more emotion API examples.")
(defstyled prop-fn :div {:padding 20 :outline "1px solid"})
(defstyled prop-fn2 :div
{:padding 20
:outline "1px solid"
"& > a" {:color "hotpink"}})
(defcard
"## Using :as
You can change the DOM element at render time by passing `:as`.
```clojure
(defstyled prop-fn :div {:padding 20 :outline \"1px solid\"})
(prop-fn {:as \"button\"} \"HERE\")\n
```
"
(prop-fn {:as "button"} "HERE"))
(defcard prop-fn
"
An & will be replaced by the current styled-component's class name - so you can
use it how you wish - override styles by repeating it \"&&&\" for example, or in this case target
a child element:
```clojure
(defstyled prop-fn2 :div\n {:padding 20 \n :outline \"1px solid\"\n \"& > a\" {:color \"hotpink\"} } )\n
(prop-fn2 (html [:a {:href \"localhost:9001\"} \"hi here\"]))\n
```
"
(prop-fn2 (html [:a {:href "localhost:9001"} "hi here"]))
)
(defstyled hover-example prop-fn
{":hover,:focus" {:background "hotpink"}})
(defcard
"You can combine multiple selectors using the CSS comma operator.
```clojure
(defstyled hover-example prop-fn\n {\":hover,:focus\" {:background \"hotpink\"}})\n
```"
(hover-example "HELLLO")
)
(defstyled a-child :div
{:color "deepSKYBlue"})
(defstyled a-parent :div
{:color "red"
a-child {:color "darkorchid"}})
(defn nested-child-ex []
(html
[:div
(a-child "child should be deepSkyBlue")
(a-parent "parent should be red")
(a-parent
(a-child "nested child should be darkorchid"))]))
(dc/defcard-doc
"# Target another defstyled component
If you use a component created with `defstyled` in the key position of a styles map
the generated CSS uses a class selector in its place that is a hash of its fully qualified
symbol name.
This works inside media queries and functions (see the next example)."
(dc/mkdn-pprint-source a-child)
(dc/mkdn-pprint-source a-parent)
(dc/mkdn-pprint-source nested-child-ex))
(defn use-current-width []
(let [use-width (react/useState js/innerWidth)
width (aget use-width 0)
set-width (aget use-width 1)]
(react/useEffect
(fn []
(let [on-resize
(fn [timeout-id]
(js/clearTimeout timeout-id)
(js/setTimeout (fn [] (set-width js/innerWidth)) 150))]
(js/addEventListener "resize" on-resize)
(fn [] (js/removeEventListener "resize" on-resize)))))
width))
(defstyled a-parent2 :div
(fn [{:keys [color]}]
{:color "red"
a-child {:color (or color "darkorchid")}
"@media (min-width: 1024px)"
{a-child {:color "black"}}}))
(defn my-component []
(let [width (use-current-width)]
(html
[:div
(a-parent2 {:color "blue"} "parent should be red")
(a-child "child should be deepSkyBlue")
(a-parent2 {:color "steelblue"}
(a-child
(str "nested child should be "
(if (>= width 1024) "black" "darkorchid"))))])))
(dc/defcard-doc
"# Target another defstyled component in nested position
Here we change the child element conditionally based on a media query -
resize the page over and under 1024 pixels to see the effect."
(dc/mkdn-pprint-source a-parent2)
(dc/mkdn-pprint-source my-component))
(defcard
(dc/dom-node
(fn [data-atom node]
(react-dom/render (react/createElement my-component) node))))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; styled component in selector string
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
(defstyled a-parent3 :div
(fn [_]
{:color "red"
(str a-child " + " a-child)
{:color "#eee"
:background-color "hsl(0, 0%, 48%)"
:padding "1em"
:border-top "1px solid"}
a-child {:color "darkorchid"
:background-color "paleVIOLETRed"}
"@media (min-width: 1024px)"
{a-child {:color "black"}
(str a-child " + " a-child)
{:background-color (lighten 0.2 "hsl(0, 0%, 48%)")}}}))
(dc/defcard-doc
"# Target another defstyled component in a combinator selector"
(dc/mkdn-pprint-source a-parent3)
"Here we are using a styled component as part of a larger CSS Selector expression.
This works by implementing `toString` for styled components, returning their class selector."
"
```clojure
(a-parent3
\"HELLLO\"
(a-child \"first\")
(a-child \"second\")
(a-child \"third\")
(a-child \"fourth\")
(a-child \"fifth\"))
```")
(defcard
(a-parent3
"HELLLO"
(a-child "first")
(a-child "second")
(a-child "third")
(a-child "fourth")
(a-child "fifth")))
(defstyled inside-text :span
{:color "hsl(200, 40, 69)"})
(defn anon-target []
(em/jsx :div
{:css
{:color "skyblue"
inside-text {:color "grey"}} }
(html
[:div
[:p "outside text"]
(inside-text "inside text")])))
| null | https://raw.githubusercontent.com/dvingo/cljs-emotion/0e2d7339657bd2957f03fbf8f4bb4a97b4fefc81/src/dev/dv/cljs_emotion/target_styled.cljs | clojure |
styled component in selector string
| (ns dv.cljs-emotion.target-styled
(:require
[devcards.core :as dc :refer (defcard)]
[sablono.core :refer [html]]
["polished" :as p :refer [darken lighten]]
["react" :as react]
["react-dom" :as react-dom]
[dv.cljs-emotion :as em :refer [defstyled keyframes global-style]]))
(defcard
"These examples show some more emotion API examples.")
(defstyled prop-fn :div {:padding 20 :outline "1px solid"})
(defstyled prop-fn2 :div
{:padding 20
:outline "1px solid"
"& > a" {:color "hotpink"}})
(defcard
"## Using :as
You can change the DOM element at render time by passing `:as`.
```clojure
(defstyled prop-fn :div {:padding 20 :outline \"1px solid\"})
(prop-fn {:as \"button\"} \"HERE\")\n
```
"
(prop-fn {:as "button"} "HERE"))
(defcard prop-fn
"
An & will be replaced by the current styled-component's class name - so you can
use it how you wish - override styles by repeating it \"&&&\" for example, or in this case target
a child element:
```clojure
(defstyled prop-fn2 :div\n {:padding 20 \n :outline \"1px solid\"\n \"& > a\" {:color \"hotpink\"} } )\n
(prop-fn2 (html [:a {:href \"localhost:9001\"} \"hi here\"]))\n
```
"
(prop-fn2 (html [:a {:href "localhost:9001"} "hi here"]))
)
(defstyled hover-example prop-fn
{":hover,:focus" {:background "hotpink"}})
(defcard
"You can combine multiple selectors using the CSS comma operator.
```clojure
(defstyled hover-example prop-fn\n {\":hover,:focus\" {:background \"hotpink\"}})\n
```"
(hover-example "HELLLO")
)
(defstyled a-child :div
{:color "deepSKYBlue"})
(defstyled a-parent :div
{:color "red"
a-child {:color "darkorchid"}})
(defn nested-child-ex []
(html
[:div
(a-child "child should be deepSkyBlue")
(a-parent "parent should be red")
(a-parent
(a-child "nested child should be darkorchid"))]))
(dc/defcard-doc
"# Target another defstyled component
If you use a component created with `defstyled` in the key position of a styles map
the generated CSS uses a class selector in its place that is a hash of its fully qualified
symbol name.
This works inside media queries and functions (see the next example)."
(dc/mkdn-pprint-source a-child)
(dc/mkdn-pprint-source a-parent)
(dc/mkdn-pprint-source nested-child-ex))
(defn use-current-width []
(let [use-width (react/useState js/innerWidth)
width (aget use-width 0)
set-width (aget use-width 1)]
(react/useEffect
(fn []
(let [on-resize
(fn [timeout-id]
(js/clearTimeout timeout-id)
(js/setTimeout (fn [] (set-width js/innerWidth)) 150))]
(js/addEventListener "resize" on-resize)
(fn [] (js/removeEventListener "resize" on-resize)))))
width))
(defstyled a-parent2 :div
(fn [{:keys [color]}]
{:color "red"
a-child {:color (or color "darkorchid")}
"@media (min-width: 1024px)"
{a-child {:color "black"}}}))
(defn my-component []
(let [width (use-current-width)]
(html
[:div
(a-parent2 {:color "blue"} "parent should be red")
(a-child "child should be deepSkyBlue")
(a-parent2 {:color "steelblue"}
(a-child
(str "nested child should be "
(if (>= width 1024) "black" "darkorchid"))))])))
(dc/defcard-doc
"# Target another defstyled component in nested position
Here we change the child element conditionally based on a media query -
resize the page over and under 1024 pixels to see the effect."
(dc/mkdn-pprint-source a-parent2)
(dc/mkdn-pprint-source my-component))
(defcard
(dc/dom-node
(fn [data-atom node]
(react-dom/render (react/createElement my-component) node))))
(defstyled a-parent3 :div
(fn [_]
{:color "red"
(str a-child " + " a-child)
{:color "#eee"
:background-color "hsl(0, 0%, 48%)"
:padding "1em"
:border-top "1px solid"}
a-child {:color "darkorchid"
:background-color "paleVIOLETRed"}
"@media (min-width: 1024px)"
{a-child {:color "black"}
(str a-child " + " a-child)
{:background-color (lighten 0.2 "hsl(0, 0%, 48%)")}}}))
(dc/defcard-doc
"# Target another defstyled component in a combinator selector"
(dc/mkdn-pprint-source a-parent3)
"Here we are using a styled component as part of a larger CSS Selector expression.
This works by implementing `toString` for styled components, returning their class selector."
"
```clojure
(a-parent3
\"HELLLO\"
(a-child \"first\")
(a-child \"second\")
(a-child \"third\")
(a-child \"fourth\")
(a-child \"fifth\"))
```")
(defcard
(a-parent3
"HELLLO"
(a-child "first")
(a-child "second")
(a-child "third")
(a-child "fourth")
(a-child "fifth")))
(defstyled inside-text :span
{:color "hsl(200, 40, 69)"})
(defn anon-target []
(em/jsx :div
{:css
{:color "skyblue"
inside-text {:color "grey"}} }
(html
[:div
[:p "outside text"]
(inside-text "inside text")])))
|
e9dc57717b8ef527a1a2fb21f4d3b70dcd9efb1e797a2573c291511c4791e8c1 | electric-sql/vaxine | logging_notification_server.erl | %% @doc Nofitication service for logging events. Handler are supposed to be
%% light-weight and should not spend too much time in M:F/2 calls.
-module(logging_notification_server).
-behaviour(gen_event).
-export([ start_link/0,
add_handler/3,
delete_handler/1,
notify_cache_update/3,
lookup_last_global_id/2,
stop/0
]).
-export([ init/1,
handle_event/2,
handle_call/2,
handle_info/2,
terminate/2
]).
-record( state, { handler :: handler() } ).
-type state() :: #state{}.
-type handler() :: {module(), atom(), term()}.
start_link() ->
gen_event:start_link({local, ?MODULE}, []).
stop() ->
gen_event:stop(?MODULE).
%% @doc Add subscribers handler. Handler should be as light-weight as possible,
%% as it affects the flow of committed transactions.
-spec add_handler(module(), atom(), term()) -> ok.
add_handler(M, F, HandlerState) ->
gen_event:add_sup_handler(?MODULE, {?MODULE, self()}, {M, F, HandlerState}).
delete_handler(Args) ->
gen_event:delete_handler(?MODULE, {?MODULE, self()}, Args).
-spec notify_cache_update(antidote:partition_id(), antidote:dcid(), antidote:op_number()) ->
ok.
notify_cache_update(Partition, DcId, OpId) ->
gen_event:notify(?MODULE, {cache_update, [Partition, DcId, OpId]}).
-spec lookup_last_global_id(antidote:partition_id(), antidote:dcid()) ->
antidote:op_number() | undefined.
lookup_last_global_id(Partition, DcId) ->
materializer_vnode:lookup_last_applied_opid(Partition, DcId).
%%------------------------------------------------------------------------------
%% Internal functions
%%------------------------------------------------------------------------------
init({M, F, A}) ->
{ok, #state{ handler = {M, F, A} }}.
handle_call(Msg, State) ->
State1 = apply_handler(Msg, State),
{ok, _Reply = ok, State1}.
handle_event({cache_update, Msg}, State) ->
try
State1 = apply_handler(Msg, State),
{ok, State1}
catch T:E:S ->
logger:error("Handler crashed: ~p:~p Stack: ~p~n", [T, E, S]),
remove_handler
end.
handle_info(Msg, State) ->
logger:warning("Unexpected info message: ~p~n", [Msg]),
{ok, State}.
terminate(_Arg, _State) ->
ok.
-spec apply_handler(term(), state()) -> state().
apply_handler(Info, State = #state{handler = {M, F, HandlerState0}}) ->
case apply(M, F, HandlerState0 ++ Info) of
ok ->
State;
{ok, HandlerState1} ->
State#state{handler = {M, F, HandlerState1}}
end.
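%% Hedged usage sketch (not part of this module): registering a handler as an M:F/State
%% triple; the module and function names below are assumptions.
%%
%%   ok = logging_notification_server:add_handler(my_listener, on_cache_update, []),
%%   %% with an empty handler state the callback is my_listener:on_cache_update/3,
%%   %% invoked as on_cache_update(Partition, DcId, OpId).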
| null | https://raw.githubusercontent.com/electric-sql/vaxine/b43ba9add4972c4bd10d3bfe61df9096667ada40/apps/antidote/src/logging_notification_server.erl | erlang | @doc Nofitication service for logging events. Handler are supposed to be
light-weight and should not spend too much time in M:F/2 calls.
@doc Add subscribers handler. Handler should be as light-weight as possible,
as it affects the flow of committed transactions.
------------------------------------------------------------------------------
------------------------------------------------------------------------------ |
-module(logging_notification_server).
-behaviour(gen_event).
-export([ start_link/0,
add_handler/3,
delete_handler/1,
notify_cache_update/3,
lookup_last_global_id/2,
stop/0
]).
-export([ init/1,
handle_event/2,
handle_call/2,
handle_info/2,
terminate/2
]).
-record( state, { handler :: handler() } ).
-type state() :: #state{}.
-type handler() :: {module(), atom(), term()}.
start_link() ->
gen_event:start_link({local, ?MODULE}, []).
stop() ->
gen_event:stop(?MODULE).
-spec add_handler(module(), atom(), term()) -> ok.
add_handler(M, F, HandlerState) ->
gen_event:add_sup_handler(?MODULE, {?MODULE, self()}, {M, F, HandlerState}).
delete_handler(Args) ->
gen_event:delete_handler(?MODULE, {?MODULE, self()}, Args).
-spec notify_cache_update(antidote:partition_id(), antidote:dcid(), antidote:op_number()) ->
ok.
notify_cache_update(Partition, DcId, OpId) ->
gen_event:notify(?MODULE, {cache_update, [Partition, DcId, OpId]}).
-spec lookup_last_global_id(antidote:partition_id(), antidote:dcid()) ->
antidote:op_number() | undefined.
lookup_last_global_id(Partition, DcId) ->
materializer_vnode:lookup_last_applied_opid(Partition, DcId).
Internal functions
init({M, F, A}) ->
{ok, #state{ handler = {M, F, A} }}.
handle_call(Msg, State) ->
State1 = apply_handler(Msg, State),
{ok, _Reply = ok, State1}.
handle_event({cache_update, Msg}, State) ->
try
State1 = apply_handler(Msg, State),
{ok, State1}
catch T:E:S ->
logger:error("Handler crashed: ~p:~p Stack: ~p~n", [T, E, S]),
remove_handler
end.
handle_info(Msg, State) ->
logger:warning("Unexpected info message: ~p~n", [Msg]),
{ok, State}.
terminate(_Arg, _State) ->
ok.
-spec apply_handler(term(), state()) -> state().
apply_handler(Info, State = #state{handler = {M, F, HandlerState0}}) ->
case apply(M, F, HandlerState0 ++ Info) of
ok ->
State;
{ok, HandlerState1} ->
State#state{handler = {M, F, HandlerState1}}
end.
|
c458fd27b2aed0944ab63e8df2a3e4100c82f82a25fae670d58cc15cdf9a7685 | henrystanley/Quark | QuoteEval.hs | module Quark.QuoteEval where
import Quark.Type
import Quark.QVM
import Quark.Errors
import Data.Sequence (viewr)
import Data.Sequence (ViewR(..))
import qualified Data.Sequence as Seq
import qualified Data.Map.Strict as Map
--- Pattern Matching & Quote Calling --
-- this is the function responsible for the behavior of the `call` quark function
tryQuote :: QItem -> QVM -> IState
tryQuote (QQuote p b) vm = case patternMatch p (stack vm) of
Just bindings -> callQuote b bindings vm'
Nothing -> return . Just $ pushVM (QSym "nil") vm'
where vm' = dropVM (Seq.length p) vm
tryQuote _ vm = raiseError "Tried to call a value that wasn't a quote" vm
-- used to call a function quote, essentially the same as tryQuote but with push and pop
callFunc :: FuncName -> QItem -> QVM -> IState
callFunc fname (QQuote p b) vm = case patternMatch p (stack vm) of
Just bindings -> callQuote b bindings vm'
Nothing -> return . Just $ pushVM (QSym "nil") vm'
where vm' = pushCallVM fname $ pushProgVM (Seq.singleton $ QMagic PopCallStack) $ dropVM (Seq.length p) vm
callFunc _ _ vm = raiseError "Tried to call a function that wasn't a quote" vm
-- appends quote body to VM prog queue after subbing
callQuote :: QProg -> QLib -> QVM -> IState
callQuote prog vars = return . Just . pushProgVM (fmap (qSub vars) prog)
-- substitutes pattern terms
qSub :: QLib -> QItem -> QItem
qSub vars (QVar a) = case Map.lookup a vars of { Just x -> x; Nothing -> QVar a; }
qSub vars (QQuote p b) = QQuote (fmap (qSub vars) p) (fmap (qSub vars) b)
qSub _ x = x
-- checks to make sure the items a quote is being applied to match the quotes pattern
-- if these items do match, it returns the variable bindings for the quote body
patternMatch :: QProg -> QStack -> Maybe QLib
patternMatch pattern stack = qmatch Map.empty pattern stack
where qmatch vars pattern stack = case (viewr pattern, stack) of
(Seq.EmptyR, _) -> Just vars
(_, []) -> Nothing
((sq :> (QVar x)), (y : ys)) -> if Map.member x vars
then if (vars Map.! x) == y then qmatch vars sq ys else Nothing
else qmatch (Map.insert x y vars) sq ys
((sq :> x), (y : ys)) -> if x == y then qmatch vars sq ys else Nothing
| null | https://raw.githubusercontent.com/henrystanley/Quark/63df61e47bedc1092c89529eedaf8ad229193f0b/Quark/QuoteEval.hs | haskell | - Pattern Matching & Quote Calling --
this is the function responsible for the behavior of the `call` quark function
appends quote body to VM prog queue after subbing
substitutes pattern terms
checks to make sure the items a quote is being applied to match the quotes pattern
if these items do match, it returns the variable bindings for the quote body | module Quark.QuoteEval where
import Quark.Type
import Quark.QVM
import Quark.Errors
import Data.Sequence (viewr)
import Data.Sequence (ViewR(..))
import qualified Data.Sequence as Seq
import qualified Data.Map.Strict as Map
tryQuote :: QItem -> QVM -> IState
tryQuote (QQuote p b) vm = case patternMatch p (stack vm) of
Just bindings -> callQuote b bindings vm'
Nothing -> return . Just $ pushVM (QSym "nil") vm'
where vm' = dropVM (Seq.length p) vm
tryQuote _ vm = raiseError "Tried to call a value that wasn't a quote" vm
used to call a function quote , essentially the same as tryQuote but with push and pop
callFunc :: FuncName -> QItem -> QVM -> IState
callFunc fname (QQuote p b) vm = case patternMatch p (stack vm) of
Just bindings -> callQuote b bindings vm'
Nothing -> return . Just $ pushVM (QSym "nil") vm'
where vm' = pushCallVM fname $ pushProgVM (Seq.singleton $ QMagic PopCallStack) $ dropVM (Seq.length p) vm
callFunc _ _ vm = raiseError "Tried to call a function that wasn't a quote" vm
callQuote :: QProg -> QLib -> QVM -> IState
callQuote prog vars = return . Just . pushProgVM (fmap (qSub vars) prog)
qSub :: QLib -> QItem -> QItem
qSub vars (QVar a) = case Map.lookup a vars of { Just x -> x; Nothing -> QVar a; }
qSub vars (QQuote p b) = QQuote (fmap (qSub vars) p) (fmap (qSub vars) b)
qSub _ x = x
patternMatch :: QProg -> QStack -> Maybe QLib
patternMatch pattern stack = qmatch Map.empty pattern stack
where qmatch vars pattern stack = case (viewr pattern, stack) of
(Seq.EmptyR, _) -> Just vars
(_, []) -> Nothing
((sq :> (QVar x)), (y : ys)) -> if Map.member x vars
then if (vars Map.! x) == y then qmatch vars sq ys else Nothing
else qmatch (Map.insert x y vars) sq ys
((sq :> x), (y : ys)) -> if x == y then qmatch vars sq ys else Nothing
|
72c2b0963514a480b6bb179ddd0265078e4b599e554495772b806b9f3be9766f | day8/re-frame-10x | window.cljs | (ns day8.re-frame-10x.fx.window
(:require
[goog.object :as gobj]
[goog.string :as gstring]
[clojure.string :as string]
[day8.re-frame-10x.inlined-deps.re-frame.v1v1v2.re-frame.core :as rf]))
(defn m->str
[m]
(->> m
(reduce (fn [ret [k v]]
(let [k (if (keyword? k) (name k) k)
v (if (keyword? v) (name v) v)]
(conj ret (str k "=" v))))
[])
(string/join ",")))
(defn open-debugger-window
"Originally copied from re-frisk.devtool/open-debugger-window"
[{:keys [width height top left on-load on-success on-failure]}]
(let [document-title js/document.title
window-title (gstring/escapeString (str "re-frame-10x | " document-title))
window-html (str "<head><title>"
window-title
"</title></head><body><div id=\"--re-frame-10x--\"></div></body>")
window-features (m->str
{:width width
:height height
:left left
:top top
:resizable :yes
:scrollbars :yes
:status :no
:directories :no
:toolbar :no
:menubar :no})]
;; We would like to set the window's left and top positions to match the monitor that it was on previously, but Chrome doesn't give us
;; control over this, it will only position it within the same display that it was popped out on.
(if-let [w (js/window.open "about:blank" "re-frame-10x-popout" window-features)]
(let [d (.-document w)]
;; We had to comment out the following unmountComponentAtNode as it causes a React exception we assume
;; because React says it is not a root container that it knows about.
;; In theory by not freeing up the resources associated with this container (e.g. event handlers) we may be
;; creating memory leaks. However with observation of the heap in developer tools we cannot see any significant
;; unbounded growth in memory usage.
;(when-let [el (.getElementById d "--re-frame-10x--")]
; (r/unmount-component-at-node el)))
(.open d)
(.write d window-html)
(gobj/set w "onload" (partial on-load w d))
(.close d)
(rf/dispatch on-success))
(rf/dispatch on-failure))))
(rf/reg-fx
::open-debugger-window
open-debugger-window) | null | https://raw.githubusercontent.com/day8/re-frame-10x/2cead8d02ca6eda99af1205f8ea234cc3d38d843/src/day8/re_frame_10x/fx/window.cljs | clojure | control over this, it will only position it within the same display that it was popped out on.
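;; Hedged usage sketch (not part of the original file): an event handler that triggers the
;; popout via this effect; the event keys and geometry values are assumptions.
(comment
  (rf/reg-event-fx
    ::open-popout
    (fn [_ _]
      {::open-debugger-window {:width 800 :height 600 :left 0 :top 0
                               :on-load (fn [& _])
                               :on-success [::popout-opened]
                               :on-failure [::popout-blocked]}})))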
We had to comment out the following unmountComponentAtNode as it causes a React exception we assume
In theory by not freeing up the resources associated with this container (e.g. event handlers) we may be
creating memory leaks. However with observation of the heap in developer tools we cannot see any significant
unbounded growth in memory usage.
(when-let [el (.getElementById d "--re-frame-10x--")]
(r/unmount-component-at-node el))) | (ns day8.re-frame-10x.fx.window
(:require
[goog.object :as gobj]
[goog.string :as gstring]
[clojure.string :as string]
[day8.re-frame-10x.inlined-deps.re-frame.v1v1v2.re-frame.core :as rf]))
(defn m->str
[m]
(->> m
(reduce (fn [ret [k v]]
(let [k (if (keyword? k) (name k) k)
v (if (keyword? v) (name v) v)]
(conj ret (str k "=" v))))
[])
(string/join ",")))
(defn open-debugger-window
"Originally copied from re-frisk.devtool/open-debugger-window"
[{:keys [width height top left on-load on-success on-failure]}]
(let [document-title js/document.title
window-title (gstring/escapeString (str "re-frame-10x | " document-title))
window-html (str "<head><title>"
window-title
> " )
window-features (m->str
{:width width
:height height
:left left
:top top
:resizable :yes
:scrollbars :yes
:status :no
:directories :no
:toolbar :no
:menubar :no})]
;; We would like to set the window's left and top positions to match the monitor that it was on previously, but Chrome doesn't give us
(if-let [w (js/window.open "about:blank" "re-frame-10x-popout" window-features)]
(let [d (.-document w)]
;; because React says it is not a root container that it knows about.
(.open d)
(.write d window-html)
(gobj/set w "onload" (partial on-load w d))
(.close d)
(rf/dispatch on-success))
(rf/dispatch on-failure))))
(rf/reg-fx
::open-debugger-window
open-debugger-window) |
7f1f29c8e9089dac0e19601453bca2853255196f35e7a7f46b027d7d42d9e420 | kowainik/summoner | Summoner.hs | |
Module : Summoner
Copyright : (c) 2017-2022 Kowainik
SPDX-License-Identifier : MPL-2.0
Maintainer : Kowainik <>
Stability : Stable
Portability : Portable
Main module that reexports all library components of the @summoner@.
-}
module Summoner
( module Summoner
) where
import Summoner.CLI as Summoner
import Summoner.Config as Summoner
import Summoner.CustomPrelude as Summoner
import Summoner.Decision as Summoner
import Summoner.Default as Summoner
import Summoner.GhcVer as Summoner
import Summoner.License as Summoner
import Summoner.Mode as Summoner
import Summoner.Project as Summoner
import Summoner.Question as Summoner
import Summoner.Settings as Summoner
import Summoner.Source as Summoner
import Summoner.Template as Summoner
import Summoner.Text as Summoner
import Summoner.Tree as Summoner
| null | https://raw.githubusercontent.com/kowainik/summoner/0c03dd0d6ee71c79227974b697f171396d3d09a7/summoner-cli/src/Summoner.hs | haskell | |
Module : Summoner
Copyright : (c) 2017-2022 Kowainik
SPDX-License-Identifier : MPL-2.0
Maintainer : Kowainik <>
Stability : Stable
Portability : Portable
Main module that reexports all library components of the @summoner@.
-}
module Summoner
( module Summoner
) where
import Summoner.CLI as Summoner
import Summoner.Config as Summoner
import Summoner.CustomPrelude as Summoner
import Summoner.Decision as Summoner
import Summoner.Default as Summoner
import Summoner.GhcVer as Summoner
import Summoner.License as Summoner
import Summoner.Mode as Summoner
import Summoner.Project as Summoner
import Summoner.Question as Summoner
import Summoner.Settings as Summoner
import Summoner.Source as Summoner
import Summoner.Template as Summoner
import Summoner.Text as Summoner
import Summoner.Tree as Summoner
|
|
0b0ccf7291244577e30b31665b922881288dfa872cf59e7221be3f772ccd36aa | ajtulloch/freelearning | Free.hs | {-# LANGUAGE DeriveFoldable #-}
{-# LANGUAGE DeriveFunctor #-}
{-# LANGUAGE DeriveTraversable #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE RankNTypes #-}
{-# LANGUAGE ScopedTypeVariables #-}
module Main where
import Control.Monad
import Control.Monad.Free
import Control.Monad.IO.Class
import Control.Monad.Random
import qualified Data.Foldable as F
import Data.Graph.Free
import Data.GraphViz
import qualified Data.Traversable as T
import System.Process
import Layers
data NNOperator next = Node Layer next
| Split [next]
| End
deriving (Functor, F.Foldable, T.Traversable)
type NN = Free NNOperator
layer :: Layer -> NN ()
layer l = liftF $ Node l ()
end :: NN a
end = liftF End
alexNet :: NN ()
-- alexNetColumns = Free (Split $ replicate 2 $ Free (Node ReLU (Free End)))
alexNet = Free (Split $ replicate 2 (mapM_ layer features >> mapM_ layer classifier))
features :: [Layer]
features = [
Convolution CP{nOutput=10, kernel=[CD{width=3, stride=2}]},
Pointwise ReLU, MaxPool MP{steps=[2]},
-- Convolution, Pointwise ReLU, MaxPool,
-- Convolution, Pointwise ReLU,
-- Convolution, Pointwise ReLU,
Convolution CP{nOutput=10, kernel=[CD{width=3, stride=2}]},
Pointwise ReLU, MaxPool MP{steps=[2]}]
classifier :: [Layer]
classifier = [
Reshape,
-- DropOut, FullyConnected [[1]], ReLU,
-- DropOut, FullyConnected [[1]], ReLU,
FullyConnected FC{nHidden=5, weights=[[1] | _ <- [1..5::Integer]]},
Criterion LogSoftMax
]
slp :: [[Float]] -> NN ()
slp weights_ = do
layer (FullyConnected FC{nHidden=length weights_, weights=weights_})
layer (Pointwise ReLU)
mlp :: [[[Float]]] -> NN ()
mlp = mapM_ slp
activations :: MonadIO m => [Float] -> NN a -> m ()
activations input (Free (Node l k)) = do
let output = fprop l input
liftIO (print output)
activations output k
activations _ (Free End) = return ()
activations input (Free (Split xs)) = mapM_ (activations input) xs
activations _ (Pure _) = return ()
randomWeights
:: (MonadRandom m, Random a, Fractional a) => Int -> Int -> m [[a]]
randomWeights nInput nOutput_ = do
rs <- replicateM nOutput_ (getRandomRs (-1.0, 1.0))
return $ map (take nInput) rs
generateMlp :: NN ()
generateMlp = alexNet >> end
attributes :: NN a -> Attributes
attributes (Free (Node l _)) = [(toLabel . show . P) l, color Red]
attributes (Free End) = [toLabel "End", color Green]
attributes (Free (Split _)) = [toLabel "Split", color Blue]
attributes (Pure _) = []
main :: IO ()
main = do
let nn = generateMlp
activations [1, 1] nn
generate (freeFoldableGraph nn) attributes "nn"
_ <- system "open nn.png &"
return ()
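-- Hedged sketch (not in the original file): running the same interpreter over a tiny
-- hand-built network; the weights are arbitrary.
--
-- > demo :: IO ()
-- > demo = activations [1, 2] (slp [[0.5, -0.5], [1.0, 1.0]] >> end)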
| null | https://raw.githubusercontent.com/ajtulloch/freelearning/b89d481735b6bf43f3bc90cea1985b4c6ff4fd8e/Free.hs | haskell | # LANGUAGE DeriveFoldable #
# LANGUAGE DeriveFunctor #
# LANGUAGE DeriveTraversable #
# LANGUAGE FlexibleContexts #
# LANGUAGE RankNTypes #
Convolution, Pointwise ReLU, MaxPool,
Convolution, Pointwise ReLU,
Convolution, Pointwise ReLU, | # LANGUAGE ScopedTypeVariables #
module Main where
import Control.Monad
import Control.Monad.Free
import Control.Monad.IO.Class
import Control.Monad.Random
import qualified Data.Foldable as F
import Data.Graph.Free
import Data.GraphViz
import qualified Data.Traversable as T
import System.Process
import Layers
data NNOperator next = Node Layer next
| Split [next]
| End
deriving (Functor, F.Foldable, T.Traversable)
type NN = Free NNOperator
layer :: Layer -> NN ()
layer l = liftF $ Node l ()
end :: NN a
end = liftF End
alexNet :: NN ()
-- alexNetColumns = Free (Split $ replicate 2 $ Free (Node ReLU (Free End)))
alexNet = Free (Split $ replicate 2 (mapM_ layer features >> mapM_ layer classifier))
features :: [Layer]
features = [
Convolution CP{nOutput=10, kernel=[CD{width=3, stride=2}]},
Pointwise ReLU, MaxPool MP{steps=[2]},
Convolution CP{nOutput=10, kernel=[CD{width=3, stride=2}]},
Pointwise ReLU, MaxPool MP{steps=[2]}]
classifier :: [Layer]
classifier = [
Reshape,
-- DropOut, FullyConnected [[1]], ReLU,
-- DropOut, FullyConnected [[1]], ReLU,
FullyConnected FC{nHidden=5, weights=[[1] | _ <- [1..5::Integer]]},
Criterion LogSoftMax
]
slp :: [[Float]] -> NN ()
slp weights_ = do
layer (FullyConnected FC{nHidden=length weights_, weights=weights_})
layer (Pointwise ReLU)
mlp :: [[[Float]]] -> NN ()
mlp = mapM_ slp
activations :: MonadIO m => [Float] -> NN a -> m ()
activations input (Free (Node l k)) = do
let output = fprop l input
liftIO (print output)
activations output k
activations _ (Free End) = return ()
activations input (Free (Split xs)) = mapM_ (activations input) xs
activations _ (Pure _) = return ()
randomWeights
:: (MonadRandom m, Random a, Fractional a) => Int -> Int -> m [[a]]
randomWeights nInput nOutput_ = do
rs <- replicateM nOutput_ (getRandomRs (-1.0, 1.0))
return $ map (take nInput) rs
generateMlp :: NN ()
generateMlp = alexNet >> end
attributes :: NN a -> Attributes
attributes (Free (Node l _)) = [(toLabel . show . P) l, color Red]
attributes (Free End) = [toLabel "End", color Green]
attributes (Free (Split _)) = [toLabel "Split", color Blue]
attributes (Pure _) = []
main :: IO ()
main = do
let nn = generateMlp
activations [1, 1] nn
generate (freeFoldableGraph nn) attributes "nn"
_ <- system "open nn.png &"
return ()
|
94a21fae1518998243b93f1223e7caf408b688109bd26b0f27b98a5d037c344f | liquidz/misaki | core.clj | (ns misaki.test.core
(require
[misaki [core :refer :all]
[config :refer :all]
[tester :refer :all]]
[misaki.util.sequence :refer [find-first]]
[clojure.test :refer :all]
[clojure.java.io :as io]))
(set-base-dir! "test/files/core/")
(deftest skip-compile?-test
(testing "symbol or (symbol? (:status %)) is skip"
(are [x y] (= x y)
true (#'misaki.core/skip-compile? 'skip)
true (#'misaki.core/skip-compile? {:status 'skip})))
(testing "invalid"
(are [x y] (= x y)
false (#'misaki.core/skip-compile? 1)
false (#'misaki.core/skip-compile? "str")
false (#'misaki.core/skip-compile? {:status true})))
(testing "irregular"
; if additional options exist, return false
(are [x y] (= x y)
false (#'misaki.core/skip-compile? {:status 'skip :stop-compile? true})
false (#'misaki.core/skip-compile? {:status 'skip :all-compile? true}))))
;; call-compiler-fn
(deftest* call-compiler-fn-test
(testing "single compiler"
(bind-config [:compiler {'-extension #(list :clj)}]
(is (= [:clj] (#'misaki.core/call-compiler-fn :-extension)))))
(testing "multiple compiler"
(bind-config [:compiler [{'-extension #(list :txt)}
{'-extension #(list :clj)}]]
(are [x y] (= x y)
[:txt :clj] (#'misaki.core/call-compiler-fn :-extension)
[:js] (#'misaki.core/call-compiler-fn {'-extension #(list :js)} :-extension))))
(testing "_config.clj (default and cljs compiler)"
(is (= [:clj :cljs] (#'misaki.core/call-compiler-fn :-extension)))))
;; get-watch-file-extensions
(deftest* get-watch-file-extensions-test
(testing "single compiler"
(bind-config [:compiler {'-extension #(list :clj)}]
(is (= [:clj] (get-watch-file-extensions)))))
(testing "normalized extentions"
(bind-config [:compiler {'-extension #(list "clj" "*.txt")}]
(is (= [:clj :txt] (get-watch-file-extensions)))))
(testing "multiple compiler"
(bind-config [:compiler [{'-extension #(list :clj)}
{'-extension #(list :txt)}]]
(is (= [:clj :txt] (get-watch-file-extensions)))))
(testing "multiple compiler(duplicated extention)"
(bind-config [:compiler [{'-extension #(list :clj :txt)}
{'-extension #(list :txt)}]]
(is (= [:clj :txt] (get-watch-file-extensions)))))
(testing "_config.clj (default and cljs compiler)"
(is (= [:clj :cljs] (get-watch-file-extensions)))))
;; get-template-files
(deftest* get-template-files-test
(testing "default template directory"
(let [tmpls (get-template-files)]
(is (find-first #(= "index.html.clj" (.getName %)) tmpls))
(is (= 6 (count tmpls)))))
(testing "find from specified directory"
(let [tmpls (get-template-files :dir (:post-dir *config*))]
(is (= 3 (count tmpls)))
(is (find-first #(= "2000.01.01-foo.html.clj" (.getName %)) tmpls))
(is (find-first #(= "2011.01.01-foo.html.clj" (.getName %)) tmpls))
(is (find-first #(= "2022.02.02-bar.html.clj" (.getName %)) tmpls))))
(testing "not matched directory"
(is (empty? (get-template-files :dir "not_existing_directory"))))
(testing "all extensions"
(bind-config [:compiler {'-extension #(list :*)}]
(let [tmpls (get-template-files)]
(is (= 7 (count tmpls)))
(is (find-first #(= "favicon.ico" (.getName %)) tmpls)))))
(testing "multiple compiler"
(bind-config [:compiler [{'-extension #(list :ico)}
{'-extension #(list :cljs)}]]
(let [tmpls (get-template-files)]
(is (= 2 (count tmpls)))
(is (find-first #(= "favicon.ico" (.getName %)) tmpls))
(is (find-first #(= "hello.cljs" (.getName %)) tmpls))))))
;; get-post-files
(deftest* get-post-files-test
(testing "without sort"
(bind-config [:posts-per-page nil]
(let [files (get-post-files)]
(is (= 3 (count files)))
(is (find-first #(= "2000.01.01-foo.html.clj" (.getName %)) files))
(is (find-first #(= "2011.01.01-foo.html.clj" (.getName %)) files))
(is (find-first #(= "2022.02.02-bar.html.clj" (.getName %)) files)))))
(testing "with sort"
(bind-config [:post-sort-type :date-desc
:posts-per-page nil]
(let [[a b c :as files] (get-post-files :sort? true)]
(are [x y] (= x y)
3 (count files)
"2022.02.02-bar.html.clj" (.getName a)
"2011.01.01-foo.html.clj" (.getName b)
"2000.01.01-foo.html.clj" (.getName c)))))
(testing "with posts-per-page"
(bind-config [:post-sort-type :date
:posts-per-page 1]
(binding [*page-index* 0]
(let [files (get-post-files :sort? true)]
(is (= 1 (count files)))
(is (= "2000.01.01-foo.html.clj" (.getName (first files))))))
(binding [*page-index* 1]
(let [files (get-post-files :sort? true)]
(is (= 1 (count files)))
(is (= "2011.01.01-foo.html.clj" (.getName (first files))))))
(binding [*page-index* 2]
(let [files (get-post-files :sort? true)]
(is (= 1 (count files)))
(is (= "2022.02.02-bar.html.clj" (.getName (first files))))))
(binding [*page-index* 3]
(is (zero? (count (get-post-files :sort? true)))))))
(testing "with all? option"
(is (= 3 (count (get-post-files))))
(bind-config [:posts-per-page 1]
(is (= 1 (count (get-post-files))))
(is (= 3 (count (get-post-files :all? true)))))))
;; update-config
(deftest* update-config-test
(testing "default single compiler"
(bind-config [:compiler {'-config #(merge {:foo "bar"} %)}]
(let [c (update-config)]
(are [x y] (= x y)
(base-path "public/") (:public-dir c)
"bar" (:foo c)))))
(testing "specify compiler"
(let [c (update-config {'-config #(assoc % :foo "bar")})]
(are [x y] (= x y)
(base-path "public/") (:public-dir c)
"bar" (:foo c))))
(testing "multiple compilers"
(bind-config [:compiler [{'-config #(assoc % :foo "bar")}
{'-config #(assoc % :bar "baz")}]]
(let [c (update-config)]
(are [x y] (= x y)
true (sequential? c)
2 (count c)
(base-path "public/") (:public-dir (first c))
"bar" (:foo (first c))
(base-path "public/") (:public-dir (second c))
"baz" (:bar (second c)))))))
;; process-compile-result
(deftest* process-compile-result-test
(let [filename "bar.txt"]
(testing "string result"
(is (process-compile-result "foo" filename))
(let [f (io/file (public-path filename))]
(is (.exists f))
(is (= "foo" (slurp f)))
(.delete f)))
(testing "boolean result"
(is (process-compile-result true filename))
(is (not (process-compile-result false filename)))
(let [f (io/file (str (:public-dir *config*) filename))]
(is (not (.exists f)))))
(testing "detailed result"
(are [x y] (= x y)
false (process-compile-result {} filename)
false (process-compile-result {:status false} filename)
true (process-compile-result {:status true} filename))
(is (process-compile-result
{:status true :filename "a.txt" :body "foo"} ""))
(let [f (io/file (public-path "a.txt"))]
(is (.exists f))
(is (= "foo" (slurp f)))
(.delete f))
(is (not (process-compile-result
{:status false :body "bar"} "b.txt")))
(let [f (io/file (public-path "b.txt"))]
(is (.exists f))
(is (= "bar" (slurp f)))
(.delete f)))))
;;; handleable-compiler?
(deftest handleable-compiler?-test
(let [default (load-compiler-publics "default")
copy (load-compiler-publics "copy")]
(are [x y] (= x y)
true (handleable-compiler? default (io/file "foo.clj"))
false (handleable-compiler? default (io/file "foo"))
true (handleable-compiler? copy (io/file "foo.clj"))
true (handleable-compiler? copy (io/file "foo")))))
;;; compile*
(deftest* compile*-test
(testing "single compiler"
(let [[p c] (compile* (template-file "index.html.clj"))]
(is (not (false? p)))
(is (not (false? c))))
(let [file (public-file "index.html")]
(is (.exists file))
(.delete file)))
(bind-config [:compiler [(load-compiler-publics "default")
(load-compiler-publics "copy")]]
(testing "multiple compilers: first compiler is used"
(let [[p c] (compile* (template-file "index.html.clj"))]
(is (not (false? p)))
(is (not (false? c))))
(let [file (public-file "index.html")]
(is (.exists file))
(.delete file)))
(testing "multiple compilers: second compiler is used"
(let [[p c] (compile* (template-file "favicon.ico"))]
(is (not (false? p)))
(is (not (false? c))))
(let [file (public-file "favicon.ico")]
(is (.exists file))
(.delete file))))
(testing "all skip error test"
(let [[p c] (compile* (template-file "favicon.ico"))]
(is (true? p))
(is (= 'skip c)))))
;;; call-index-compile
(deftest* call-index-compile-test
(testing "call with default config"
(bind-config [:posts-per-page 1]
(test-index-compile (template-file "index.html.clj")))
(let [p1 (public-file "index.html")
p2 (public-file "page2/index.html")
p3 (public-file "page3/index.html")]
(is (.exists p1))
(is (.exists p2))
(is (.exists p3))
(.delete p1)
(.delete p2)
(.delete p3)
(.delete (public-file "page2"))
(.delete (public-file "page3"))))
(testing "call with optional-config"
(bind-config [:posts-per-page 1
:index-template-regexp #"^pagetest"]
(test-index-compile
{:index-template-regexp (:index-template-regexp *config*)}
(template-file "pagetest.html.clj")))
(let [p1 (public-file "pagetest.html")
p2 (public-file "page2/pagetest.html")
p3 (public-file "page3/pagetest.html")]
(is (.exists p1))
(is (.exists p2))
(is (.exists p3))
(.delete p1)
(.delete p2)
(.delete p3)
(.delete (public-file "page2"))
(.delete (public-file "page3")))))
| null | https://raw.githubusercontent.com/liquidz/misaki/b8104e632058e3b3da4487513d10e666e5914ec9/test/misaki/test/core.clj | clojure | if additional options exists, return false
call-compiler-fn
get-watch-file-extensions
get-template-files
get-post-files
update-config
process-compile-result
handleable-compiler?
compile*
call-index-compile | (ns misaki.test.core
  (:require
[misaki [core :refer :all]
[config :refer :all]
[tester :refer :all]]
[misaki.util.sequence :refer [find-first]]
[clojure.test :refer :all]
[clojure.java.io :as io]))
(set-base-dir! "test/files/core/")
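;; skip-compile? treats a bare symbol or a {:status 'skip} map as "skip this file",
;; but not when :stop-compile? or :all-compile? is also set.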
(deftest skip-compile?-test
(testing "symbol or (symbol? (:status %)) is skip"
(are [x y] (= x y)
true (#'misaki.core/skip-compile? 'skip)
true (#'misaki.core/skip-compile? {:status 'skip})))
(testing "invalid"
(are [x y] (= x y)
false (#'misaki.core/skip-compile? 1)
false (#'misaki.core/skip-compile? "str")
false (#'misaki.core/skip-compile? {:status true})))
(testing "iregular"
(are [x y] (= x y)
false (#'misaki.core/skip-compile? {:status 'skip :stop-compile? true})
false (#'misaki.core/skip-compile? {:status 'skip :all-compile? true}))))
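;; call-compiler-fn concatenates results across the configured compilers; an explicitly
;; passed compiler map is used instead of the configuration.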
(deftest* call-compiler-fn-test
(testing "single compiler"
(bind-config [:compiler {'-extension #(list :clj)}]
(is (= [:clj] (#'misaki.core/call-compiler-fn :-extension)))))
(testing "multiple compiler"
(bind-config [:compiler [{'-extension #(list :txt)}
{'-extension #(list :clj)}]]
(are [x y] (= x y)
[:txt :clj] (#'misaki.core/call-compiler-fn :-extension)
[:js] (#'misaki.core/call-compiler-fn {'-extension #(list :js)} :-extension))))
(testing "_config.clj (default and cljs compiler)"
(is (= [:clj :cljs] (#'misaki.core/call-compiler-fn :-extension)))))
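;; Watch-file extensions are normalized ("clj", "*.txt" -> :clj, :txt) and de-duplicated
;; across compilers.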
(deftest* get-watch-file-extensions-test
(testing "single compiler"
(bind-config [:compiler {'-extension #(list :clj)}]
(is (= [:clj] (get-watch-file-extensions)))))
(testing "normalized extentions"
(bind-config [:compiler {'-extension #(list "clj" "*.txt")}]
(is (= [:clj :txt] (get-watch-file-extensions)))))
(testing "multiple compiler"
(bind-config [:compiler [{'-extension #(list :clj)}
{'-extension #(list :txt)}]]
(is (= [:clj :txt] (get-watch-file-extensions)))))
(testing "multiple compiler(duplicated extention)"
(bind-config [:compiler [{'-extension #(list :clj :txt)}
{'-extension #(list :txt)}]]
(is (= [:clj :txt] (get-watch-file-extensions)))))
(testing "_config.clj (default and cljs compiler)"
(is (= [:clj :cljs] (get-watch-file-extensions)))))
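;; Template lookup honours each compiler's extension list; the :* extension matches every file.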
(deftest* get-template-files-test
(testing "default template directory"
(let [tmpls (get-template-files)]
(is (find-first #(= "index.html.clj" (.getName %)) tmpls))
(is (= 6 (count tmpls)))))
(testing "find from specified directory"
(let [tmpls (get-template-files :dir (:post-dir *config*))]
(is (= 3 (count tmpls)))
(is (find-first #(= "2000.01.01-foo.html.clj" (.getName %)) tmpls))
(is (find-first #(= "2011.01.01-foo.html.clj" (.getName %)) tmpls))
(is (find-first #(= "2022.02.02-bar.html.clj" (.getName %)) tmpls))))
(testing "not matched directory"
(is (empty? (get-template-files :dir "not_existing_directory"))))
(testing "all extensions"
(bind-config [:compiler {'-extension #(list :*)}]
(let [tmpls (get-template-files)]
(is (= 7 (count tmpls)))
(is (find-first #(= "favicon.ico" (.getName %)) tmpls)))))
(testing "multiple compiler"
(bind-config [:compiler [{'-extension #(list :ico)}
{'-extension #(list :cljs)}]]
(let [tmpls (get-template-files)]
(is (= 2 (count tmpls)))
(is (find-first #(= "favicon.ico" (.getName %)) tmpls))
(is (find-first #(= "hello.cljs" (.getName %)) tmpls))))))
(deftest* get-post-files-test
(testing "without sort"
(bind-config [:posts-per-page nil]
(let [files (get-post-files)]
(is (= 3 (count files)))
(is (find-first #(= "2000.01.01-foo.html.clj" (.getName %)) files))
(is (find-first #(= "2011.01.01-foo.html.clj" (.getName %)) files))
(is (find-first #(= "2022.02.02-bar.html.clj" (.getName %)) files)))))
(testing "with sort"
(bind-config [:post-sort-type :date-desc
:posts-per-page nil]
(let [[a b c :as files] (get-post-files :sort? true)]
(are [x y] (= x y)
3 (count files)
"2022.02.02-bar.html.clj" (.getName a)
"2011.01.01-foo.html.clj" (.getName b)
"2000.01.01-foo.html.clj" (.getName c)))))
(testing "with posts-per-page"
(bind-config [:post-sort-type :date
:posts-per-page 1]
(binding [*page-index* 0]
(let [files (get-post-files :sort? true)]
(is (= 1 (count files)))
(is (= "2000.01.01-foo.html.clj" (.getName (first files))))))
(binding [*page-index* 1]
(let [files (get-post-files :sort? true)]
(is (= 1 (count files)))
(is (= "2011.01.01-foo.html.clj" (.getName (first files))))))
(binding [*page-index* 2]
(let [files (get-post-files :sort? true)]
(is (= 1 (count files)))
(is (= "2022.02.02-bar.html.clj" (.getName (first files))))))
(binding [*page-index* 3]
(is (zero? (count (get-post-files :sort? true)))))))
(testing "with all? option"
(is (= 3 (count (get-post-files))))
(bind-config [:posts-per-page 1]
(is (= 1 (count (get-post-files))))
(is (= 3 (count (get-post-files :all? true)))))))
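;; update-config merges each compiler's -config hook over the base config; with several
;; compilers the result is a sequence of configs.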
(deftest* update-config-test
(testing "default single compiler"
(bind-config [:compiler {'-config #(merge {:foo "bar"} %)}]
(let [c (update-config)]
(are [x y] (= x y)
(base-path "public/") (:public-dir c)
"bar" (:foo c)))))
(testing "specify compiler"
(let [c (update-config {'-config #(assoc % :foo "bar")})]
(are [x y] (= x y)
(base-path "public/") (:public-dir c)
"bar" (:foo c))))
(testing "multiple compilers"
(bind-config [:compiler [{'-config #(assoc % :foo "bar")}
{'-config #(assoc % :bar "baz")}]]
(let [c (update-config)]
(are [x y] (= x y)
true (sequential? c)
2 (count c)
(base-path "public/") (:public-dir (first c))
"bar" (:foo (first c))
(base-path "public/") (:public-dir (second c))
"baz" (:bar (second c)))))))
(deftest* process-compile-result-test
(let [filename "bar.txt"]
(testing "string result"
(is (process-compile-result "foo" filename))
(let [f (io/file (public-path filename))]
(is (.exists f))
(is (= "foo" (slurp f)))
(.delete f)))
(testing "boolean result"
(is (process-compile-result true filename))
(is (not (process-compile-result false filename)))
(let [f (io/file (str (:public-dir *config*) filename))]
(is (not (.exists f)))))
(testing "detailed result"
(are [x y] (= x y)
false (process-compile-result {} filename)
false (process-compile-result {:status false} filename)
true (process-compile-result {:status true} filename))
(is (process-compile-result
{:status true :filename "a.txt" :body "foo"} ""))
(let [f (io/file (public-path "a.txt"))]
(is (.exists f))
(is (= "foo" (slurp f)))
(.delete f))
(is (not (process-compile-result
{:status false :body "bar"} "b.txt")))
(let [f (io/file (public-path "b.txt"))]
(is (.exists f))
(is (= "bar" (slurp f)))
(.delete f)))))
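;; handleable-compiler? asks whether a compiler accepts a given file; the copy compiler
;; accepts everything, while the default compiler only accepts .clj templates.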
(deftest handleable-compiler?-test
(let [default (load-compiler-publics "default")
copy (load-compiler-publics "copy")]
(are [x y] (= x y)
true (handleable-compiler? default (io/file "foo.clj"))
false (handleable-compiler? default (io/file "foo"))
true (handleable-compiler? copy (io/file "foo.clj"))
true (handleable-compiler? copy (io/file "foo")))))
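;; compile* tries the configured compilers in order until one handles the file,
;; and reports 'skip when every compiler skips it.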
(deftest* compile*-test
(testing "single compiler"
(let [[p c] (compile* (template-file "index.html.clj"))]
(is (not (false? p)))
(is (not (false? c))))
(let [file (public-file "index.html")]
(is (.exists file))
(.delete file)))
(bind-config [:compiler [(load-compiler-publics "default")
(load-compiler-publics "copy")]]
(testing "multiple compilers: first compiler is used"
(let [[p c] (compile* (template-file "index.html.clj"))]
(is (not (false? p)))
(is (not (false? c))))
(let [file (public-file "index.html")]
(is (.exists file))
(.delete file)))
(testing "multiple compilers: second compiler is used"
(let [[p c] (compile* (template-file "favicon.ico"))]
(is (not (false? p)))
(is (not (false? c))))
(let [file (public-file "favicon.ico")]
(is (.exists file))
(.delete file))))
(testing "all skip error test"
(let [[p c] (compile* (template-file "favicon.ico"))]
(is (true? p))
(is (= 'skip c)))))
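;; call-index-compile paginates index templates into page2/, page3/, ... under the public dir.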
(deftest* call-index-compile-test
(testing "call with default config"
(bind-config [:posts-per-page 1]
(test-index-compile (template-file "index.html.clj")))
(let [p1 (public-file "index.html")
p2 (public-file "page2/index.html")
p3 (public-file "page3/index.html")]
(is (.exists p1))
(is (.exists p2))
(is (.exists p3))
(.delete p1)
(.delete p2)
(.delete p3)
(.delete (public-file "page2"))
(.delete (public-file "page3"))))
(testing "call with optional-config"
(bind-config [:posts-per-page 1
:index-template-regexp #"^pagetest"]
(test-index-compile
{:index-template-regexp (:index-template-regexp *config*)}
(template-file "pagetest.html.clj")))
(let [p1 (public-file "pagetest.html")
p2 (public-file "page2/pagetest.html")
p3 (public-file "page3/pagetest.html")]
(is (.exists p1))
(is (.exists p2))
(is (.exists p3))
(.delete p1)
(.delete p2)
(.delete p3)
(.delete (public-file "page2"))
(.delete (public-file "page3")))))