_id (stringlengths 64-64) | repository (stringlengths 6-84) | name (stringlengths 4-110) | content (stringlengths 0-248k) | license (null) | download_url (stringlengths 89-454) | language (stringclasses, 7 values) | comments (stringlengths 0-74.6k) | code (stringlengths 0-248k)
---|---|---|---|---|---|---|---|---|
ee18ea2bae9bfa64ea90fc6034b0be3db46e19364bfa7df62266f867ce34152b | oshyshko/adventofcode | D06.hs | {-# LANGUAGE DefaultSignatures, DeriveAnyClass,
DerivingStrategies, StandaloneDeriving #-}
module Y15.D06 where
import qualified Data.Array.Base as AB
import Data.Array.IO (IOUArray)
import Data.Array.MArray (MArray)
import qualified Data.HashMap.Strict as MH
import qualified Data.IntMap.Strict as MI
import qualified Data.Map.Strict as MS
import qualified Data.Vector.Generic.Mutable as VM
import qualified Data.Vector.Mutable as VBM
import qualified Data.Vector.Storable.Mutable as VSM
import qualified Data.Vector.Unboxed.Mutable as VUM
import Imports
import Parser
{-
MA - Data.Array.IO.IOUArray
MB - Data.Vector.Mutable (boxed)
MS - Data.Vector.Storable.Mutable
MU - Data.Vector.Unboxed.Mutable
MZ - Data.Vector.Unboxed.Mutable + Data.Bit
PH - Data.HashMap.Strict
PI - Data.IntMap.Strict
PS - Data.Map.Strict
-}
type Side = Int
type Brightness = Word16
data Op = On | Off | Toggle deriving Show
data XY = XY Side Side deriving Show
data Command = Command
{ op :: Op
, xy0 :: XY
, xy1 :: XY
} deriving Show
-- TODO find a way to migrate L1/L2 to a newtype + have MArray instances
type L1 = Bool
type L1B = Bit
type L2 = Word16
-- TODO determine sides from input?
side = 1000
-- turn off 199,133 through 461,193
-- toggle 322,558 through 977,958
-- turn on 226,196 through 599,390
commands :: Parser [Command]
commands =
command `endBy` eol
where
command :: Parser Command
command = Command
<$> op <* char ' '
<*> xy
<* string " through " -- TODO many1 space ...
<*> xy
xy :: Parser XY
xy = XY <$> natural <* char ',' <*> natural
op :: Parser Op
op = try (string "turn on") $> On
<|> try (string "turn off") $> Off
<|> string "toggle" $> Toggle
class Light a where
initial :: a
brightness :: a -> Brightness
operate :: Op -> a -> a
instance Light L1 where
initial = False
brightness = bool 0 1
operate = \case
On -> const True
Off -> const False
Toggle -> not
instance Light L1B where
initial = Bit False
brightness = bool 0 1 . unBit
operate = \case
On -> const (Bit True)
Off -> const (Bit False)
Toggle -> Bit . not . unBit
instance Light L2 where
initial = 0
brightness = id
operate = \case
On -> (+1)
Off -> \v -> bool (v-1) 0 (v==0)
Toggle -> (+2)
-- note: otherwise it allocates 1.7 GB for arr/vec
command2indexes :: Command -> [Side]
command2indexes (Command _ (XY x0 y0) (XY x1 y1)) =
[ x * side + y
| x <- [x0..x1]
, y <- [y0..y1]
]
class StorageMonadic s v m where
emptySM :: Side -> v -> m s
alterSM :: s -> (v -> v) -> Side -> m ()
foldlSM :: (a -> v -> a) -> a -> s -> m a
-- NOTE see 3 instances below
default emptySM :: (vec x v ~ s, PrimMonad m, x ~ PrimState m, VM.MVector vec v) => Side -> v -> m s
emptySM = VM.replicate
default alterSM :: (vec x v ~ s, PrimMonad m, x ~ PrimState m, VM.MVector vec v) => s -> (v -> v) -> Side -> m ()
alterSM = VM.modify
default foldlSM :: (vec x v ~ s, PrimMonad m, x ~ PrimState m, VM.MVector vec v) => (a -> v -> a) -> a -> s -> m a
foldlSM = VM.foldl'
class StoragePure s v where
emptySP :: s
alterSP :: (v -> v) -> Side -> s -> s
foldlSP :: (a -> v -> a) -> a -> s -> a
{-# INLINE solvePure #-}
{-# ANN solvePure ("HLint: ignore Avoid lambda" :: String) #-}
solvePure :: forall s v. (Light v, StoragePure s v) => String -> Int
solvePure =
foldlSP @s @v (\a v -> a + fromIntegral (brightness v)) 0 -- fold
. foldl' applyCommand (emptySP @s @v) -- create, iterate + alter
. parseOrDie commands
where
applyCommand :: s -> Command -> s
applyCommand s c@Command{op} =
foldl' (\a v -> alterSP @s @v (operate op) v a) s (command2indexes c)
{-# INLINE solveMonadic #-}
{-# ANN solveMonadic ("HLint: ignore Avoid lambda" :: String) #-}
solveMonadic :: forall s v m. (Light v, Monad m, StorageMonadic s v m) => String -> m Int
solveMonadic input = do
s <- emptySM @s @v (side * side) initial -- create
forM_ (parseOrDie commands input) (applyCommand s) -- iterate + alter
foldlSM @s @v (\a v -> a + fromIntegral (brightness v)) 0 s -- fold
where
applyCommand :: s -> Command -> m ()
applyCommand s c@Command{op} =
forM_ (command2indexes c)
(\k -> alterSM @s @v s (operate op) k)
-- solutions
solve1PH, solve2PH
, solve1PS, solve2PS
, solve1PI, solve2PI
:: String -> Int
solve1, solve2
, solve1MA, solve2MA
, solve1MB, solve2MB
, solve1MS, solve2MS
, solve1MU, solve2MU
, zolve1MZ
:: String -> IO Int
solve1 = solve1MU -- best performance
solve2 = solve2MU
solve1PH = solvePure @(MH.HashMap Side L1) @L1
solve2PH = solvePure @(MH.HashMap Side L2) @L2
solve1PS = solvePure @(MS.Map Side L1) @L1
solve2PS = solvePure @(MS.Map Side L2) @L2
solve1PI = solvePure @(MI.IntMap L1) @L1
solve2PI = solvePure @(MI.IntMap L2) @L2
solve1MA = solveMonadic @(IOUArray Side L1) @L1
solve2MA = solveMonadic @(IOUArray Side L2) @L2
solve1MB = solveMonadic @(VBM.MVector (PrimState IO) L1) @L1 @IO
solve2MB = solveMonadic @(VBM.MVector (PrimState IO) L2) @L2 @IO
solve1MS = solveMonadic @(VSM.MVector (PrimState IO) L1) @L1 @IO
solve2MS = solveMonadic @(VSM.MVector (PrimState IO) L2) @L2 @IO
solve1MU = solveMonadic @(VUM.MVector (PrimState IO) L1) @L1 @IO
solve2MU = solveMonadic @(VUM.MVector (PrimState IO) L2) @L2 @IO
-- TODO make it possible in MainExe to run one solver (day 1 or 2)
zolve1MZ = solveMonadic @(VUM.MVector (PrimState IO) L1B) @L1B @IO
-- monadic instances
instance (MArray IOUArray v m, k ~ Side) => StorageMonadic (IOUArray k v) v m where
emptySM k = AB.newArray (0, k - 1)
alterSM s f k = AB.unsafeRead s k >>= AB.unsafeWrite s k . f
foldlSM f a s = AB.getNumElements s >>= \n -> go a (n-1)
where
go !aa 0 = return aa
go !aa i = do
x <- AB.unsafeRead s i
go (f aa x) (i-1)
-- TODO figure out how this works. See 3 defaults in StorageMonadic + language extensions at the top
deriving anyclass instance (PrimMonad m, s ~ PrimState m) => StorageMonadic (VBM.MVector s v) v m
deriving anyclass instance (PrimMonad m, s ~ PrimState m, VSM.Storable v) => StorageMonadic (VSM.MVector s v) v m
deriving anyclass instance (PrimMonad m, s ~ PrimState m, VUM.Unbox v) => StorageMonadic (VUM.MVector s v) v m
-- pure instances
instance (Light v) => StoragePure (MS.Map Side v) v where
emptySP = MS.empty; alterSP = mkAlter MS.alter; foldlSP = MS.foldl'
-- TODO find out why HashMap is slower than Map
-- also see -perf/dictionaries
instance (Light v) => StoragePure (MH.HashMap Side v) v where
emptySP = MH.empty; alterSP = mkAlter MH.alter; foldlSP = MH.foldl'
instance (Light v) => StoragePure (MI.IntMap v) v where
emptySP = MI.empty; alterSP = mkAlter MI.alter; foldlSP = MI.foldl'
{-# INLINE mkAlter #-}
mkAlter :: forall k v m. (Light v) =>
((Maybe v -> Maybe v) -> k -> m v -> m v)
-> (v -> v) -> k -> m v -> m v
mkAlter a f = a \mv ->
let x = f $ fromMaybe initial mv
in x `seq` Just x
| null | https://raw.githubusercontent.com/oshyshko/adventofcode/fc0ce87c1dfffc30647763fa5b84ff9fcf58b8b3/src/Y15/D06.hs | haskell | TODO many1 space ...
fold
create, iterate + alter
create
iterate + alter
fold
solutions
best performance
monadic instances
pure instances
also see -perf/dictionaries | # LANGUAGE DefaultSignatures , ,
DerivingStrategies , StandaloneDeriving #
DerivingStrategies, StandaloneDeriving #-}
module Y15.D06 where
import qualified Data.Array.Base as AB
import Data.Array.IO (IOUArray)
import Data.Array.MArray (MArray)
import qualified Data.HashMap.Strict as MH
import qualified Data.IntMap.Strict as MI
import qualified Data.Map.Strict as MS
import qualified Data.Vector.Generic.Mutable as VM
import qualified Data.Vector.Mutable as VBM
import qualified Data.Vector.Storable.Mutable as VSM
import qualified Data.Vector.Unboxed.Mutable as VUM
import Imports
import Parser
MA - Data . Array . IO.IOUArray
MB - Data . Vector . Mutable ( boxed )
MS - Data . Vector . Storable . Mutable
MU - Data . Vector . . Mutable
MZ - Data . Vector . . Mutable + Data . Bit
PH - Data . . Strict
PI - Data . IntMap . Strict
PS - Data . Map . Strict
type Side = Int
type Brightness = Word16
data Op = On | Off | Toggle deriving Show
data XY = XY Side Side deriving Show
data Command = Command
{ op :: Op
, xy0 :: XY
, xy1 :: XY
} deriving Show
TODO find a way to migrate L1 / L2 to a newtype + have MArray instances
type L1 = Bool
type L1B = Bit
type L2 = Word16
TODO determine sides from input ?
side = 1000
turn off 199,133 through 461,193
toggle 322,558 through 977,958
turn on 226,196 through 599,390
commands :: Parser [Command]
commands =
command `endBy` eol
where
command :: Parser Command
command = Command
<$> op <* char ' '
<*> xy
<*> xy
xy :: Parser XY
xy = XY <$> natural <* char ',' <*> natural
op :: Parser Op
op = try (string "turn on") $> On
<|> try (string "turn off") $> Off
<|> string "toggle" $> Toggle
class Light a where
initial :: a
brightness :: a -> Brightness
operate :: Op -> a -> a
instance Light L1 where
initial = False
brightness = bool 0 1
operate = \case
On -> const True
Off -> const False
Toggle -> not
instance Light L1B where
initial = Bit False
brightness = bool 0 1 . unBit
operate = \case
On -> const (Bit True)
Off -> const (Bit False)
Toggle -> Bit . not . unBit
instance Light L2 where
initial = 0
brightness = id
operate = \case
On -> (+1)
Off -> \v -> bool (v-1) 0 (v==0)
Toggle -> (+2)
note : otherwise it allocates 1.7 GB for arr / vec
command2indexes :: Command -> [Side]
command2indexes (Command _ (XY x0 y0) (XY x1 y1)) =
[ x * side + y
| x <- [x0..x1]
, y <- [y0..y1]
]
class StorageMonadic s v m where
emptySM :: Side -> v -> m s
alterSM :: s -> (v -> v) -> Side -> m ()
foldlSM :: (a -> v -> a) -> a -> s -> m a
NOTE see 3 instances below
default emptySM :: (vec x v ~ s, PrimMonad m, x ~ PrimState m, VM.MVector vec v) => Side -> v -> m s
emptySM = VM.replicate
default alterSM :: (vec x v ~ s, PrimMonad m, x ~ PrimState m, VM.MVector vec v) => s -> (v -> v) -> Side -> m ()
alterSM = VM.modify
default foldlSM :: (vec x v ~ s, PrimMonad m, x ~ PrimState m, VM.MVector vec v) => (a -> v -> a) -> a -> s -> m a
foldlSM = VM.foldl'
class StoragePure s v where
emptySP :: s
alterSP :: (v -> v) -> Side -> s -> s
foldlSP :: (a -> v -> a) -> a -> s -> a
# INLINE solvePure #
# ANN solvePure ( " HLint : ignore Avoid lambda " : : String ) #
solvePure :: forall s v. (Light v, StoragePure s v) => String -> Int
solvePure =
. parseOrDie commands
where
applyCommand :: s -> Command -> s
applyCommand s c@Command{op} =
foldl' (\a v -> alterSP @s @v (operate op) v a) s (command2indexes c)
# INLINE solveMonadic #
# ANN solveMonadic ( " HLint : ignore Avoid lambda " : : String ) #
solveMonadic :: forall s v m. (Light v, Monad m, StorageMonadic s v m) => String -> m Int
solveMonadic input = do
where
applyCommand :: s -> Command -> m ()
applyCommand s c@Command{op}=
forM_ (command2indexes c)
(\k -> alterSM @s @v s (operate op) k)
solve1PH, solve2PH
, solve1PS, solve2PS
, solve1PI, solve2PI
:: String -> Int
solve1, solve2
, solve1MA, solve2MA
, solve1MB, solve2MB
, solve1MS, solve2MS
, solve1MU, solve2MU
, zolve1MZ
:: String -> IO Int
solve2 = solve2MU
solve1PH = solvePure @(MH.HashMap Side L1) @L1
solve2PH = solvePure @(MH.HashMap Side L2) @L2
solve1PS = solvePure @(MS.Map Side L1) @L1
solve2PS = solvePure @(MS.Map Side L2) @L2
solve1PI = solvePure @(MI.IntMap L1) @L1
solve2PI = solvePure @(MI.IntMap L2) @L2
solve1MA = solveMonadic @(IOUArray Side L1) @L1
solve2MA = solveMonadic @(IOUArray Side L2) @L2
solve1MB = solveMonadic @(VBM.MVector (PrimState IO) L1) @L1 @IO
solve2MB = solveMonadic @(VBM.MVector (PrimState IO) L2) @L2 @IO
solve1MS = solveMonadic @(VSM.MVector (PrimState IO) L1) @L1 @IO
solve2MS = solveMonadic @(VSM.MVector (PrimState IO) L2) @L2 @IO
solve1MU = solveMonadic @(VUM.MVector (PrimState IO) L1) @L1 @IO
solve2MU = solveMonadic @(VUM.MVector (PrimState IO) L2) @L2 @IO
TODO make it possible in MainExe to run one solver ( day 1 or 2 )
zolve1MZ = solveMonadic @(VUM.MVector (PrimState IO) L1B) @L1B @IO
instance (MArray IOUArray v m, k ~ Side) => StorageMonadic (IOUArray k v) v m where
emptySM k = AB.newArray (0, k - 1)
alterSM s f k = AB.unsafeRead s k >>= AB.unsafeWrite s k . f
foldlSM f a s = AB.getNumElements s >>= \n -> go a (n-1)
where
go !aa 0 = return aa
go !aa i = do
x <- AB.unsafeRead s i
go (f aa x) (i-1)
TODO figure hout how this works . See 3 defaults in StorageMonadic + language extensions at the top
deriving anyclass instance (PrimMonad m, s ~ PrimState m) => StorageMonadic (VBM.MVector s v) v m
deriving anyclass instance (PrimMonad m, s ~ PrimState m, VSM.Storable v) => StorageMonadic (VSM.MVector s v) v m
deriving anyclass instance (PrimMonad m, s ~ PrimState m, VUM.Unbox v) => StorageMonadic (VUM.MVector s v) v m
instance (Light v) => StoragePure (MS.Map Side v) v where
emptySP = MS.empty; alterSP = mkAlter MS.alter; foldlSP = MS.foldl'
TODO find out why is slower than Map
instance (Light v) => StoragePure (MH.HashMap Side v) v where
emptySP = MH.empty; alterSP = mkAlter MH.alter; foldlSP = MH.foldl'
instance (Light v) => StoragePure (MI.IntMap v) v where
emptySP = MI.empty; alterSP = mkAlter MI.alter; foldlSP = MI.foldl'
# INLINE mkAlter #
mkAlter :: forall k v m. (Light v) =>
((Maybe v -> Maybe v) -> k -> m v -> m v)
-> (v -> v) -> k -> m v -> m v
mkAlter a f = a \mv ->
let x = f $ fromMaybe initial mv
in x `seq` Just x
|
5878b9cd6d08bf816f5008c7fe25306bae67093dfa25326afb65153b06ae178c | ulisses/Static-Code-Analyzer | Main.hs | {-# OPTIONS -XNoMonomorphismRestriction #-}
-- example folder: /Users/ulissesaraujocosta/ulisses/univ/msc/el/pi/Static-Code-Analyzer/sample_app/data/concursos/contest-1/en-1/user-1/tent-20110303163050/
module Main where
import Graphics.Rendering.Chart
import Graphics.Rendering.Chart.Gtk
import Graphics.Rendering.Chart.Plot
import Data.Colour
import Data.Colour.Names
import Data.Accessor
import System.Random
import System.Environment(getArgs)
import ExtractValues
main = getArgs >>= ren
ren _ = renderableToPNGFile (toRenderable l) 800 600 "out.png"
l = layout1_title ^="Price History"
$ layout1_background ^= solidFillStyle (opaque white)
$ layout1_left_axis ^: laxis_override ^= axisTicksHide
$ layout1_plots ^= [ Left (toPlot f) ]
$ setLayout1Foreground (opaque black)
$ defaultLayout1
f = area_spots_4d_title ^= "random value"
$ area_spots_4d_max_radius ^= 20
$ area_spots_4d_values ^= values
$ defaultAreaSpots4D
values = [ (d, v, z, t) | ((d,v,z),t) <- zip vals colours ]
where colours :: [Int]
colours = [1..]
| null | https://raw.githubusercontent.com/ulisses/Static-Code-Analyzer/4c3f6423d43e1bccb9d1cf04e74ae60d9170186f/Haskell/graphForContest/Main.hs | haskell | # OPTIONS -XNoMonomorphismRestriction #
example folder: /Users/ulissesaraujocosta/ulisses/univ/msc/el/pi/Static-Code-Analyzer/sample_app/data/concursos/contest-1/en-1/user-1/tent-20110303163050/ |
module Main where
import Graphics.Rendering.Chart
import Graphics.Rendering.Chart.Gtk
import Graphics.Rendering.Chart.Plot
import Data.Colour
import Data.Colour.Names
import Data.Accessor
import System.Random
import System.Environment(getArgs)
import ExtractValues
main = getArgs >>= ren
ren _ = renderableToPNGFile (toRenderable l) 800 600 "out.png"
l = layout1_title ^="Price History"
$ layout1_background ^= solidFillStyle (opaque white)
$ layout1_left_axis ^: laxis_override ^= axisTicksHide
$ layout1_plots ^= [ Left (toPlot f) ]
$ setLayout1Foreground (opaque black)
$ defaultLayout1
f = area_spots_4d_title ^= "random value"
$ area_spots_4d_max_radius ^= 20
$ area_spots_4d_values ^= values
$ defaultAreaSpots4D
values = [ (d, v, z, t) | ((d,v,z),t) <- zip vals colours ]
where colours :: [Int]
colours = [1..]
|
742eac9f3faafdbe80f83fe3f5b39baa6bc7eeca8cff9d1ed39ea5cc2e77d912 | onedata/op-worker | atm_list_store_content_browse_options.erl | %%%-------------------------------------------------------------------
%%% @author Bartosz Walkowicz
%%% @copyright (C) 2022 ACK CYFRONET AGH
%%% This software is released under the MIT license
%%% cited in 'LICENSE.txt'.
%%% @end
%%%-------------------------------------------------------------------
%%% @doc
%%% Record expressing store content browse options specialization for
%%% list store used in automation machinery.
%%% @end
%%%-------------------------------------------------------------------
-module(atm_list_store_content_browse_options).
-author("Bartosz Walkowicz").
-behaviour(atm_store_content_browse_options).
-include("modules/automation/atm_execution.hrl").
%% API
-export([sanitize/1]).
-type record() :: #atm_list_store_content_browse_options{}.
-export_type([record/0]).
%%%===================================================================
%%% API
%%%===================================================================
-spec sanitize(json_utils:json_map()) -> record() | no_return().
sanitize(#{<<"type">> := <<"listStoreContentBrowseOptions">>} = Data) ->
#atm_list_store_content_browse_options{
listing_opts = atm_store_container_infinite_log_backend:sanitize_listing_opts(
Data, timestamp_agnostic
)
}.
| null | https://raw.githubusercontent.com/onedata/op-worker/2db1516da782d8acc6bdd2418a3791819ff19581/src/modules/automation/store/container/list/atm_list_store_content_browse_options.erl | erlang | -------------------------------------------------------------------
@end
-------------------------------------------------------------------
@doc
Record expressing store content browse options specialization for
list store used in automation machinery.
@end
-------------------------------------------------------------------
API
===================================================================
API
=================================================================== | @author
( C ) 2022 ACK CYFRONET AGH
This software is released under the MIT license
cited in ' LICENSE.txt ' .
-module(atm_list_store_content_browse_options).
-author("Bartosz Walkowicz").
-behaviour(atm_store_content_browse_options).
-include("modules/automation/atm_execution.hrl").
-export([sanitize/1]).
-type record() :: #atm_list_store_content_browse_options{}.
-export_type([record/0]).
-spec sanitize(json_utils:json_map()) -> record() | no_return().
sanitize(#{<<"type">> := <<"listStoreContentBrowseOptions">>} = Data) ->
#atm_list_store_content_browse_options{
listing_opts = atm_store_container_infinite_log_backend:sanitize_listing_opts(
Data, timestamp_agnostic
)
}.
|
22ebbbb120394a6924faf4e686ed6b95ebe69b4b88347f28e5a2c7d40c1e2faf | souenzzo/souenzzo.github.io | hiete.clj | (ns br.com.souenzzo.hiete
(:require [io.pedestal.http.route :as route]
[br.com.souenzzo.dvm :as dvm]
[io.pedestal.http.csrf :as csrf]
[ring.util.mime-type :as mime]
[clojure.string :as string])
(:import (java.nio.charset StandardCharsets)))
(def ^String utf-8 (str (StandardCharsets/UTF_8)))
(set! *warn-on-reflection* true)
(def ^:dynamic *route* nil)
(defn href
[route-name & opts]
(apply route/url-for route-name opts))
(defn mutation
[{::csrf/keys [anti-forgery-token]} sym]
(prn anti-forgery-token)
{:method "POST"
:action (href :conduit.api/mutation
:params {:sym sym
(keyword csrf/anti-forgery-token-str) anti-forgery-token})})
(defn read-token
[{:keys [query-params]}]
(prn query-params)
(get query-params (keyword csrf/anti-forgery-token-str)))
(def render-hiccup
{:name ::render-hiccup
:enter (fn [{:keys [route]
:as ctx}]
(assoc-in ctx [:bindings #'*route*] route))
:leave (fn [{:keys [response request]
:as ctx}]
(if-let [body (:html response)]
(-> ctx
(assoc-in [:response :body] (->> (dvm/render-to-string request body)
(conj ["<!DOCTYPE html>"])
(string/join "\n")))
(assoc-in [:response :headers "Content-Type"] (mime/default-mime-types "html")))
ctx))})
| null | https://raw.githubusercontent.com/souenzzo/souenzzo.github.io/30a811c4e5633ad07bba1d58d19eb091dac222e9/conduit/src/br/com/souenzzo/hiete.clj | clojure | (ns br.com.souenzzo.hiete
(:require [io.pedestal.http.route :as route]
[br.com.souenzzo.dvm :as dvm]
[io.pedestal.http.csrf :as csrf]
[ring.util.mime-type :as mime]
[clojure.string :as string])
(:import (java.nio.charset StandardCharsets)))
(def ^String utf-8 (str (StandardCharsets/UTF_8)))
(set! *warn-on-reflection* true)
(def ^:dynamic *route* nil)
(defn href
[route-name & opts]
(apply route/url-for route-name opts))
(defn mutation
[{::csrf/keys [anti-forgery-token]} sym]
(prn anti-forgery-token)
{:method "POST"
:action (href :conduit.api/mutation
:params {:sym sym
(keyword csrf/anti-forgery-token-str) anti-forgery-token})})
(defn read-token
[{:keys [query-params]}]
(prn query-params)
(get query-params (keyword csrf/anti-forgery-token-str)))
(def render-hiccup
{:name ::render-hiccup
:enter (fn [{:keys [route]
:as ctx}]
(assoc-in ctx [:bindings #'*route*] route))
:leave (fn [{:keys [response request]
:as ctx}]
(if-let [body (:html response)]
(-> ctx
(assoc-in [:response :body] (->> (dvm/render-to-string request body)
(conj ["<!DOCTYPE html>"])
(string/join "\n")))
(assoc-in [:response :headers "Content-Type"] (mime/default-mime-types "html")))
ctx))})
|
|
023ec0a7c06bce48e98924420b905069ec64072962d870ce721cbfb7348c57e3 | afiskon/simple-neural-networks | MainXorLogistic.hs | import AI.NeuralNetworks.Simple
import Text.Printf
import System.Random
import Control.Monad
calcXor net x y =
let [r] = runNeuralNetwork net [x, y]
in r
mse net =
let square x = x * x
e1 = square $ calcXor net 0 0
e2 = square $ calcXor net 1 0 - 1
e3 = square $ calcXor net 0 1 - 1
e4 = square $ calcXor net 1 1
in 0.5 * (e1 + e2 + e3 + e4)
stopf best gnum = do
let e = mse best
when (gnum `rem` 100 == 0) $
printf "Generation: %02d, MSE: %.4f\n" gnum e
return $ e < 0.002 || gnum >= 10000
main = do
gen <- newStdGen
let (randomNet, _) = randomNeuralNetwork gen [2,2,1] [Logistic, Logistic] 0.45
examples = [ ([0,0],[0]), ([0,1],[1]), ([1,0],[1]), ([1,1],[0]) ]
net <- backpropagationBatchParallel randomNet examples 0.4 stopf :: IO (NeuralNetwork Double)
putStrLn ""
putStrLn $ "Result: " ++ show net
_ <- printf "0 xor 0 = %.4f\n" (calcXor net 0 0)
_ <- printf "1 xor 0 = %.4f\n" (calcXor net 1 0)
_ <- printf "0 xor 1 = %.4f\n" (calcXor net 0 1)
printf "1 xor 1 = %.4f" (calcXor net 1 1)
| null | https://raw.githubusercontent.com/afiskon/simple-neural-networks/04856f5b47de170017405eadc0b71504b296dbaf/src/MainXorLogistic.hs | haskell | import AI.NeuralNetworks.Simple
import Text.Printf
import System.Random
import Control.Monad
calcXor net x y =
let [r] = runNeuralNetwork net [x, y]
in r
mse net =
let square x = x * x
e1 = square $ calcXor net 0 0
e2 = square $ calcXor net 1 0 - 1
e3 = square $ calcXor net 0 1 - 1
e4 = square $ calcXor net 1 1
in 0.5 * (e1 + e2 + e3 + e4)
stopf best gnum = do
let e = mse best
when (gnum `rem` 100 == 0) $
printf "Generation: %02d, MSE: %.4f\n" gnum e
return $ e < 0.002 || gnum >= 10000
main = do
gen <- newStdGen
let (randomNet, _) = randomNeuralNetwork gen [2,2,1] [Logistic, Logistic] 0.45
examples = [ ([0,0],[0]), ([0,1],[1]), ([1,0],[1]), ([1,1],[0]) ]
net <- backpropagationBatchParallel randomNet examples 0.4 stopf :: IO (NeuralNetwork Double)
putStrLn ""
putStrLn $ "Result: " ++ show net
_ <- printf "0 xor 0 = %.4f\n" (calcXor net 0 0)
_ <- printf "1 xor 0 = %.4f\n" (calcXor net 1 0)
_ <- printf "0 xor 1 = %.4f\n" (calcXor net 0 1)
printf "1 xor 1 = %.4f" (calcXor net 1 1)
|
|
47871d8415bfc7253c6423b4e608f71c10248c1956bcfce539351c62ceeccf39 | mythical-linux/rktfetch | user.rkt | #!/usr/bin/env racket
#lang racket/base
(provide get-user)
(define (get-user)
(or (getenv "USER")
(getenv "USERNAME")
"nobody"
)
)
| null | https://raw.githubusercontent.com/mythical-linux/rktfetch/1f635e47274ff50d59d04d88a6509ee7170f4e33/rktfetch/private/get/user.rkt | racket | #!/usr/bin/env racket
#lang racket/base
(provide get-user)
(define (get-user)
(or (getenv "USER")
(getenv "USERNAME")
"nobody"
)
)
|
|
f664d7bce7953fb96c3bc53a8f1d86173bdd03f962a2b5b84d32bbc122100696 | paurkedal/viz | ocaml_unicode.mli | Copyright ( C ) 2011 - -2016 Petter A. Urkedal < >
*
* This file is part of the Viz Standard Library < / > .
*
* The Viz Standard Library ( VSL ) is free software : you can redistribute it
* and/or modify it under the terms of the GNU Lesser General Public License
* as published by the Free Software Foundation , either version 3 of the
* License , or ( at your option ) any later version .
*
* The VSL is distributed in the hope that it will be useful , but WITHOUT ANY
* WARRANTY ; without even the implied warranty of MERCHANTABILITY or FITNESS
* FOR A PARTICULAR PURPOSE . See the GNU Lesser General Public License for
* more details .
*
* You should have received a copy of the GNU Lesser General Public License
* along with the VSL . If not , see < / > .
*
* This file is part of the Viz Standard Library </>.
*
* The Viz Standard Library (VSL) is free software: you can redistribute it
* and/or modify it under the terms of the GNU Lesser General Public License
* as published by the Free Software Foundation, either version 3 of the
* License, or (at your option) any later version.
*
* The VSL is distributed in the hope that it will be useful, but WITHOUT ANY
* WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
* FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for
* more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with the VSL. If not, see </>.
*)
open Ocaml_prereq
open CamomileLibraryDefault.Camomile
module Pervasive : sig
type char = UChar.t
type utf8_string = string
type string = UText.t
val __char_of_code : int -> char
val __char_code : char -> int
val __string_of_utf8 : utf8_string -> string
end
open Pervasive
module Char_ : sig
val of_int : int -> char
val as_int : char -> int
end
module String_ : sig
val length : string -> int
val get : int -> string -> char
val init : int -> (int -> char) -> string
val eq : string -> string -> bool
val cmp : string -> string -> torder
val of_utf8 : utf8_string -> string
val as_utf8 : string -> utf8_string
end
module String_buf : sig
type 'f r
val create : ('f, 'f r) effect
val contents : 'f r -> ('f, string) effect
val length : 'f r -> ('f, int) effect
val clear : 'f r -> ('f, unit) effect
val put_char : 'f r -> char -> ('f, unit) effect
val put_string : 'f r -> string -> ('f, unit) effect
end
module Utf8_string : sig
val of_string : string -> utf8_string
val as_string : utf8_string -> string
val length : utf8_string -> int
end
| null | https://raw.githubusercontent.com/paurkedal/viz/ab1f1071fafdc51eae69185ec55d7a6e7bb94ea9/vsl/compat/ocaml_unicode.mli | ocaml | Copyright ( C ) 2011 - -2016 Petter A. Urkedal < >
*
* This file is part of the Viz Standard Library < / > .
*
* The Viz Standard Library ( VSL ) is free software : you can redistribute it
* and/or modify it under the terms of the GNU Lesser General Public License
* as published by the Free Software Foundation , either version 3 of the
* License , or ( at your option ) any later version .
*
* The VSL is distributed in the hope that it will be useful , but WITHOUT ANY
* WARRANTY ; without even the implied warranty of MERCHANTABILITY or FITNESS
* FOR A PARTICULAR PURPOSE . See the GNU Lesser General Public License for
* more details .
*
* You should have received a copy of the GNU Lesser General Public License
* along with the VSL . If not , see < / > .
*
* This file is part of the Viz Standard Library </>.
*
* The Viz Standard Library (VSL) is free software: you can redistribute it
* and/or modify it under the terms of the GNU Lesser General Public License
* as published by the Free Software Foundation, either version 3 of the
* License, or (at your option) any later version.
*
* The VSL is distributed in the hope that it will be useful, but WITHOUT ANY
* WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
* FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for
* more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with the VSL. If not, see </>.
*)
open Ocaml_prereq
open CamomileLibraryDefault.Camomile
module Pervasive : sig
type char = UChar.t
type utf8_string = string
type string = UText.t
val __char_of_code : int -> char
val __char_code : char -> int
val __string_of_utf8 : utf8_string -> string
end
open Pervasive
module Char_ : sig
val of_int : int -> char
val as_int : char -> int
end
module String_ : sig
val length : string -> int
val get : int -> string -> char
val init : int -> (int -> char) -> string
val eq : string -> string -> bool
val cmp : string -> string -> torder
val of_utf8 : utf8_string -> string
val as_utf8 : string -> utf8_string
end
module String_buf : sig
type 'f r
val create : ('f, 'f r) effect
val contents : 'f r -> ('f, string) effect
val length : 'f r -> ('f, int) effect
val clear : 'f r -> ('f, unit) effect
val put_char : 'f r -> char -> ('f, unit) effect
val put_string : 'f r -> string -> ('f, unit) effect
end
module Utf8_string : sig
val of_string : string -> utf8_string
val as_string : utf8_string -> string
val length : utf8_string -> int
end
|
|
92962531bd463e5e40181d1b0117e85ba982cf3c17f4a3390c71c11a8115609e | racket/frtime | frtime.rkt | #lang s-exp "lang-utils.rkt"
(provide value-nowable? behaviorof
(all-from-out "lang-ext.rkt")
(all-from-out "lang-utils.rkt")
(all-from-out "frp-snip.rkt"))
(require "frp-snip.rkt"
(as-is:unchecked (except-in frtime/core/frp undefined undefined?)
event-set? signal-value)
(except-in "lang-ext.rkt" lift deep-value-now))
(define (value-nowable? x)
(or (not (signal? x))
(not (event-set? (signal-value x)))))
(define ((behaviorof pred) x)
(let ([v (value-now x)])
(or (undefined? v)
(pred v))))
| null | https://raw.githubusercontent.com/racket/frtime/9b9db67581107f4d7b995541c70f2d08f03ae89e/frtime.rkt | racket | #lang s-exp "lang-utils.rkt"
(provide value-nowable? behaviorof
(all-from-out "lang-ext.rkt")
(all-from-out "lang-utils.rkt")
(all-from-out "frp-snip.rkt"))
(require "frp-snip.rkt"
(as-is:unchecked (except-in frtime/core/frp undefined undefined?)
event-set? signal-value)
(except-in "lang-ext.rkt" lift deep-value-now))
(define (value-nowable? x)
(or (not (signal? x))
(not (event-set? (signal-value x)))))
(define ((behaviorof pred) x)
(let ([v (value-now x)])
(or (undefined? v)
(pred v))))
|
|
56e69709eb0da90b7da2b4c11586a3a247345180a68f8b679fb3803382f487f9 | joinr/spork | math.clj | ;;TODO->port this to core.matrix or something official.
;;Note: hollowed out, most stuff is in vecmath.
;I've been getting some crazy ideas from computational geometry....namely the
ability to unify 2d and 3d ops under a single set of rules and operations ..
;It's pretty cool...In any case, this is currently a library based on the
canonical matrix - based linear algebra routines for defining transformations .
(ns spork.geometry.math
(:require [spork.util.vectors :refer :all]
[spork.util [vecmath :as math]]))
;nice macros to have...
;with-coordinate-system
;with-basis [x y z]
;with-projection
;if I want to project a vec2 [x,y] onto a vec3, then...
(declare make-matrix)
(defprotocol IDimensioned
(get-dimension [x])
(drop-dimension [x])
(add-dimension [x]))
(defn make-vector
"Creates 2D and 3D vectors."
([^double x ^double y] (->vec2 x y))
([^double x ^double y ^double z] (->vec3 x y z))
([^double x ^double y ^double z ^double w] (->vec4 x y z w)))
(defn ^double negate [^double x] (* -1.0 x))
;;if we have homogenous coordinates, our basis changes.
;an orthonormal set of basis vectors.
(def x-axis (->vec4 1.0 0.0 0.0 0.0))
(def y-axis (->vec4 0.0 1.0 0.0 0.0))
(def z-axis (->vec4 0.0 0.0 1.0 0.0))
(def w-axis (->vec4 0.0 0.0 0.0 1.0))
| null | https://raw.githubusercontent.com/joinr/spork/bb80eddadf90bf92745bf5315217e25a99fbf9d6/obe/geometry/math.clj | clojure | TODO->port this to core.matrix or something official.
Note: hollowed out, most stuff is in vecmath.
I've been getting some crazy ideas from computational geometry....namely the
It's pretty cool...In any case, this is currently a library based on the
nice macros to have...
with-coordinate-system
with-basis [x y z]
with-projection
if I want to project a vec2 [x,y] onto a vec3, then...
if we have homogenous coordinates, our basis changes.
an orthonormal set of basis vectors. | ability to unify 2d and 3d ops under a single set of rules and operations ..
canonical matrix - based linear algebra routines for defining transformations .
(ns spork.geometry.math
(:require [spork.util.vectors :refer :all]
[spork.util [vecmath :as math]]))
(declare make-matrix)
(defprotocol IDimensioned
(get-dimension [x])
(drop-dimension [x])
(add-dimension [x]))
(defn make-vector
"Creates 2D and 3D vectors."
([^double x ^double y] (->vec2 x y))
([^double x ^double y ^double z] (->vec3 x y z))
([^double x ^double y ^double z ^double w] (->vec4 x y z w)))
(defn ^double negate [^double x] (* -1.0 x))
(def x-axis (->vec4 1.0 0.0 0.0 0.0))
(def y-axis (->vec4 0.0 1.0 0.0 0.0))
(def z-axis (->vec4 0.0 0.0 1.0 0.0))
(def w-axis (->vec4 0.0 0.0 0.0 1.0))
|
3151cb53e88e32b468a86a2dd6aa26fce3be7fcc9818fac28ae39d0948a44add | reborg/clojure-essential-reference | 5.clj | (require '[clojure.core.reducers :refer [fold]])
(require '[clojure.string :refer [blank? split split-lines lower-case]])
(defn reducef [freqs line] ; <1>
(if (blank? line)
freqs
(let [words (split (lower-case line) #"\s+")]
(reduce #(update %1 %2 (fnil inc 0)) freqs words))))
(defn combinef ; <2>
([] {})
([m1 m2] (merge-with + m1 m2)))
(def war-and-peace "")
(def book (slurp war-and-peace))
(defn freq-used-words [s] ; <3>
(->> (split-lines s)
(fold 512 combinef reducef)
(sort-by last >)
(take 5)))
(freq-used-words book)
;; (["the" 34258] ["and" 21396] ["to" 16500] ["of" 14904] ["a" 10388])
(require '[clojure.string :refer [blank? split split-lines lower-case]])
< 1 >
(if (blank? line)
freqs
(let [words (split (lower-case line) #"\s+")]
(reduce #(update %1 %2 (fnil inc 0)) freqs words))))
< 2 >
([] {})
([m1 m2] (merge-with + m1 m2)))
(def war-and-peace "")
(def book (slurp war-and-peace))
(->> (split-lines s)
(fold 512 combinef reducef)
(sort-by last >)
(take 5)))
(freq-used-words book)
( [ " the " 34258 ] [ " and " 21396 ] [ " to " 16500 ] [ " of " 14904 ] [ " a " 10388 ] ) |
0b1479af6f111dbf82b8e8461a6582755cf8bd747021d816945b9f5a06ed3e02 | skynet-gh/skylobby | data.cljc | (ns skylobby.data
(:require
[clojure.string :as string]))
(defn filter-battles
[battles {:keys [filter-battles
hide-empty-battles
hide-locked-battles
hide-passworded-battles
hide-running-battles
users]}]
(let [
filter-battles (when (string? filter-battles)
filter-battles)
filter-lc (when-not (string/blank? filter-battles)
(string/lower-case filter-battles))]
(->> battles
vals
(filter :battle-title)
(filter
(fn [{:keys [battle-map battle-modname battle-title]}]
(if filter-lc
(or (and battle-map (string/includes? (string/lower-case battle-map) filter-lc))
(and battle-modname (string/includes? (string/lower-case battle-modname) filter-lc))
(string/includes? (string/lower-case battle-title) filter-lc))
true)))
(remove
(fn [{:keys [battle-passworded]}]
(if hide-passworded-battles
(= "1" battle-passworded)
false)))
(remove
(fn [{:keys [battle-locked]}]
(if hide-locked-battles
(= "1" battle-locked)
false)))
(remove
(fn [{:keys [users]}]
(if hide-empty-battles
TODO bot vs human hosts
false)))
(remove
(fn [{:keys [host-username]}]
(if hide-running-battles
(boolean (get-in users [host-username :client-status :ingame]))
false)))
(sort-by (juxt (comp count :users) :battle-spectators))
reverse
doall)))
| null | https://raw.githubusercontent.com/skynet-gh/skylobby/1d6b5f222e138512f8c974f86b97eada3e579508/graal/cljc/skylobby/data.cljc | clojure | (ns skylobby.data
(:require
[clojure.string :as string]))
(defn filter-battles
[battles {:keys [filter-battles
hide-empty-battles
hide-locked-battles
hide-passworded-battles
hide-running-battles
users]}]
(let [
filter-battles (when (string? filter-battles)
filter-battles)
filter-lc (when-not (string/blank? filter-battles)
(string/lower-case filter-battles))]
(->> battles
vals
(filter :battle-title)
(filter
(fn [{:keys [battle-map battle-modname battle-title]}]
(if filter-lc
(or (and battle-map (string/includes? (string/lower-case battle-map) filter-lc))
(and battle-modname (string/includes? (string/lower-case battle-modname) filter-lc))
(string/includes? (string/lower-case battle-title) filter-lc))
true)))
(remove
(fn [{:keys [battle-passworded]}]
(if hide-passworded-battles
(= "1" battle-passworded)
false)))
(remove
(fn [{:keys [battle-locked]}]
(if hide-locked-battles
(= "1" battle-locked)
false)))
(remove
(fn [{:keys [users]}]
(if hide-empty-battles
TODO bot vs human hosts
false)))
(remove
(fn [{:keys [host-username]}]
(if hide-running-battles
(boolean (get-in users [host-username :client-status :ingame]))
false)))
(sort-by (juxt (comp count :users) :battle-spectators))
reverse
doall)))
|
|
d1ea3fcae908d2263ad8e07a55ea8988a81913e387d2938b19ad9012403d7c04 | dabrady/LittleLogicLangs | mk-stx.rkt | (module mk-stx racket
(require syntax/parse)
(provide
symbol
goal-cons
goal-expr
goal-seq
relation
builtin-relation?
relation-id?
valid-goal-cons?)
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
; List of built-in relations. Put any identifier here that you wish to sneak by the macro system.
(define BUILTIN-RELATIONS '(quote quasiquote fresh conde run run* == =/= disj+ conj+))
(define builtin-relation?
(lambda (id)
(memq id BUILTIN-RELATIONS)))
(define relation-id?
(lambda (id)
(let* ([id-string (symbol->string id)]
[last (string-ref id-string (sub1 (string-length id-string)))])
(char=? last #\o)))) ; Relations conventionally end in -o, and we are enforcing that condition
(define valid-goal-cons?
(lambda (id)
(or (builtin-relation? id)
(relation-id? id))))
I do n't know why this is n't already a thing . They 've got one for all the other primitives .
(define-syntax-class symbol
#:description "a symbol"
#:datum-literals (quote)
(pattern (quote x:id)))
; (define-syntax-class proc
; #:description "a procedure"
; (pattern x:expr
; #:fail-unless (procedure? x)))
; (define-syntax-class literal-expr
; #:description "an expression that evaluates to a non-function literal"
; (pattern (~or x:boolean x:str x:char x:number x:symbol x:proc)))
(define-syntax-class goal-cons
#:description "a goal constructor"
(pattern proc:id
#:fail-unless (valid-goal-cons? (syntax-e #'proc))
(let ([id (syntax-e #'proc)])
(cond
; Someday, maybe these will get replaced by something smarter.
[(eq? id 'cond)
(format "did you mean \"conde\"?")]
[(eq? id 'freshe)
(format "did you mean \"fresh\"?")]
[else (format "~a may not be a goal constructor (identifier doesn't end in -o)" id)]))))
(define-syntax-class goal-expr
#:description "a goal-expression"
one or more args to constructor enforced ; replace ... + with ... if 0 or more is desired
(define-syntax-class goal-seq
#:description "a sequence of goals"
(pattern (g:goal-expr ...+)))
; A freshmen relation is a lambda of >0 arguments whose body reduces to either another relation or a goal.
(define-syntax-class relation
#:description "a relation of one or more arguments"
#:datum-literals (lambda)
(pattern (lambda (x ...+)
(~or body:relation body:goal-expr))))
) | null | https://raw.githubusercontent.com/dabrady/LittleLogicLangs/9735f17281e2362d36ad245a1558cce3f93437a7/mk/lib/mk-stx.rkt | racket |
List of built-in relations. Put any identifier here that you wish to sneak by the macro system.
Relations conventionally end in -o, and we are enforcing that condition
(define-syntax-class proc
#:description "a procedure"
(pattern x:expr
#:fail-unless (procedure? x)))
(define-syntax-class literal-expr
#:description "an expression that evaluates to a non-function literal"
(pattern (~or x:boolean x:str x:char x:number x:symbol x:proc)))
Someday, maybe these will get replaced by something smarter.
replace ... + with ... if 0 or more is desired
A freshmen relation is a lambda of >0 arguments whose body reduces to either another relation or a goal. | (module mk-stx racket
(require syntax/parse)
(provide
symbol
goal-cons
goal-expr
goal-seq
relation
builtin-relation?
relation-id?
valid-goal-cons?)
(define BUILTIN-RELATIONS '(quote quasiquote fresh conde run run* == =/= disj+ conj+))
(define builtin-relation?
(lambda (id)
(memq id BUILTIN-RELATIONS)))
(define relation-id?
(lambda (id)
(let* ([id-string (symbol->string id)]
[last (string-ref id-string (sub1 (string-length id-string)))])
(define valid-goal-cons?
(lambda (id)
(or (builtin-relation? id)
(relation-id? id))))
I do n't know why this is n't already a thing . They 've got one for all the other primitives .
(define-syntax-class symbol
#:description "a symbol"
#:datum-literals (quote)
(pattern (quote x:id)))
(define-syntax-class goal-cons
#:description "a goal constructor"
(pattern proc:id
#:fail-unless (valid-goal-cons? (syntax-e #'proc))
(let ([id (syntax-e #'proc)])
(cond
[(eq? id 'cond)
(format "did you mean \"conde\"?")]
[(eq? id 'freshe)
(format "did you mean \"fresh\"?")]
[else (format "~a may not be a goal constructor (identifier doesn't end in -o)" id)]))))
(define-syntax-class goal-expr
#:description "a goal-expression"
(define-syntax-class goal-seq
#:description "a sequence of goals"
(pattern (g:goal-expr ...+)))
(define-syntax-class relation
#:description "a relation of one or more arguments"
#:datum-literals (lambda)
(pattern (lambda (x ...+)
(~or body:relation body:goal-expr))))
) |
f7125c95589bf3244cd9856dfad9e46689c0803d93f25d60fa03a21627eed129 | radicle-dev/radicle-alpha | CounterAppTest.hs | -- | Test the @example/radicle-counter@ app.
--
Requires access to an IPFS daemon .
module CounterAppTest
( test_counter_app
) where
import Protolude
import System.FilePath
import Test.E2ESupport
test_counter_app :: TestTree
test_counter_app = testCaseSteps "counter app" $ \step -> using RadDaemon1 $ do
machineId <- runTestCommand "rad-machines" ["create"]
step "Initialize machine"
void $ runCounter [machineId, "init"]
initialValue <- runCounter [machineId, "get-value"]
assertEqual "(get-value) on counter chain" "0" initialValue
forM_ [(1::Int)..3] $ \i -> do
step $ "Increment to " <> show i
valueInc <- using RadDaemon2 $ runCounter [machineId, "increment"]
assertEqual "(increment) on counter chain" (show i) valueInc
valueGet <- runCounter [machineId, "get-value"]
assertEqual "(get-value) on counter chain" (show i) valueGet
where
runCounter :: [Text] -> TestM Text
runCounter args = do
p <- asks projectDir
let bin = p </> "examples" </> "radicle-counter"
runTestCommand bin (map toS args)
| null | https://raw.githubusercontent.com/radicle-dev/radicle-alpha/b38a360d9830a938fa83fc066c1d131ec903b5e1/test/e2e/CounterAppTest.hs | haskell | | Test the @example/radicle-counter@ app.
| Requires access to an IPFS daemon .
module CounterAppTest
( test_counter_app
) where
import Protolude
import System.FilePath
import Test.E2ESupport
test_counter_app :: TestTree
test_counter_app = testCaseSteps "counter app" $ \step -> using RadDaemon1 $ do
machineId <- runTestCommand "rad-machines" ["create"]
step "Initialize machine"
void $ runCounter [machineId, "init"]
initialValue <- runCounter [machineId, "get-value"]
assertEqual "(get-value) on counter chain" "0" initialValue
forM_ [(1::Int)..3] $ \i -> do
step $ "Increment to " <> show i
valueInc <- using RadDaemon2 $ runCounter [machineId, "increment"]
assertEqual "(increment) on counter chain" (show i) valueInc
valueGet <- runCounter [machineId, "get-value"]
assertEqual "(get-value) on counter chain" (show i) valueGet
where
runCounter :: [Text] -> TestM Text
runCounter args = do
p <- asks projectDir
let bin = p </> "examples" </> "radicle-counter"
runTestCommand bin (map toS args)
|
3b07e52d2b8145f1d06b15b35d7b247d39f4188a7626225eead36b4d11f9d741 | reborg/clojure-essential-reference | 6.clj | < 1 >
(defn rad [x] (Math/toRadians x))
(defn cos [x] (Math/cos (rad x)))
(defn sq-diff [x y] (sq (Math/sin (/ (rad (- x y)) 2))))
< 2 >
(let [earth-radius-km 6372.8
dlat (sq-diff lat2 lat1)
dlon (sq-diff lon2 lon1)
a (+ dlat (* dlon (cos lat1) (cos lat1)))]
(* earth-radius-km 2 (Math/asin (Math/sqrt a)))))
(defn closest [geos geo] ; <3>
(->> geos
(map (juxt (partial haversine geo) identity))
(apply min-key first)))
(def post-offices
[[51.74836 -0.32237]
[51.75958 -0.22920]
[51.72064 -0.33353]
[51.77781 -0.37057]
[51.77133 -0.29398]
[51.81622 -0.35177]
[51.83104 -0.19737]
[51.79669 -0.18569]
[51.80334 -0.20863]
[51.74472 -0.19791]])
(def residence [51.75049331 -0.34248299])
< 4 >
[ 2.2496423923820656 [ 51.74836 -0.32237 ] ] | null | https://raw.githubusercontent.com/reborg/clojure-essential-reference/c37fa19d45dd52b2995a191e3e96f0ebdc3f6d69/OperationsonNumbers/max-keyandmin-key/6.clj | clojure | <3> | < 1 >
(defn rad [x] (Math/toRadians x))
(defn cos [x] (Math/cos (rad x)))
(defn sq-diff [x y] (sq (Math/sin (/ (rad (- x y)) 2))))
< 2 >
(let [earth-radius-km 6372.8
dlat (sq-diff lat2 lat1)
dlon (sq-diff lon2 lon1)
a (+ dlat (* dlon (cos lat1) (cos lat1)))]
(* earth-radius-km 2 (Math/asin (Math/sqrt a)))))
(->> geos
(map (juxt (partial haversine geo) identity))
(apply min-key first)))
(def post-offices
[[51.74836 -0.32237]
[51.75958 -0.22920]
[51.72064 -0.33353]
[51.77781 -0.37057]
[51.77133 -0.29398]
[51.81622 -0.35177]
[51.83104 -0.19737]
[51.79669 -0.18569]
[51.80334 -0.20863]
[51.74472 -0.19791]])
(def residence [51.75049331 -0.34248299])
< 4 >
[ 2.2496423923820656 [ 51.74836 -0.32237 ] ] |
0df4e02b78340b0beb84120b7649629319f296c7ddaa06d28737ec2c062156bf | stathissideris/positano | fun.clj | (ns positano.integration-test1.fun
(:require [positano.trace :as trace]))
(trace/deftrace baz [x]
(inc x))
(trace/deftrace bar [x]
(* (baz (/ x 2.0)) 3))
(trace/deftrace foo
"I don't do a whole lot."
[x]
(println "Hello World!")
(bar (first x)))
| null | https://raw.githubusercontent.com/stathissideris/positano/ca5126714b4bcf108726d930ab61a875759214ae/test/positano/integration_test1/fun.clj | clojure | (ns positano.integration-test1.fun
(:require [positano.trace :as trace]))
(trace/deftrace baz [x]
(inc x))
(trace/deftrace bar [x]
(* (baz (/ x 2.0)) 3))
(trace/deftrace foo
"I don't do a whole lot."
[x]
(println "Hello World!")
(bar (first x)))
|
|
61d06ff828eb1ffa8ddf7fc78400882bede5e5d64b6d16b80020a2026039ae1c | htm-community/comportex | algo_graph.cljc | ;; Copyright (c) Jeffrey Straszheim. All rights reserved. The use and
;; distribution terms for this software are covered by the Eclipse Public
;; License 1.0 (-1.0.php) which can
;; be found in the file epl-v10.html at the root of this distribution. By
;; using this software in any fashion, you are agreeing to be bound by the
;; terms of this license. You must not remove this notice, or any other,
;; from this software.
;;
;; graph
;;
;; Basic Graph Theory Algorithms
;;
;; straszheimjeffrey (gmail)
;; Created 23 June 2009
(ns
^{:author "Jeffrey Straszheim",
:doc "Basic graph theory algorithms.
Copied from
because its available snapshot release does not have clojurescript support.
Updated to use records not structs."}
org.nfrac.comportex.util.algo-graph
(:require [clojure.set :refer [union]]))
(defrecord DirectedGraph
[nodes neighbors])
(defn directed-graph
"`nodes` - The nodes of the graph, a collection.
`neighbors` - A function from a node to its neighbor nodes collection."
[nodes neighbors]
(->DirectedGraph nodes neighbors))
(defn get-neighbors
"Get the neighbors of a node."
[g n]
((:neighbors g) n))
;; Graph Modification
(defn reverse-graph
"Given a directed graph, return another directed graph with the
order of the edges reversed."
[g]
(let [op (fn [rna idx]
(let [ns (get-neighbors g idx)
am (fn [m val]
(assoc m val (conj (get m val #{}) idx)))]
(reduce am rna ns)))
rn (reduce op {} (:nodes g))]
(directed-graph (:nodes g) rn)))
(defn add-loops
"For each node n, add the edge n->n if not already present."
[g]
(directed-graph
(:nodes g)
(into {} (map (fn [n]
[n (conj (set (get-neighbors g n)) n)]) (:nodes g)))))
(defn remove-loops
"For each node n, remove any edges n->n."
[g]
(directed-graph
(:nodes g)
(into {} (map (fn [n]
[n (disj (set (get-neighbors g n)) n)]) (:nodes g)))))
;; Graph Walk
(defn lazy-walk
"Return a lazy sequence of the nodes of a graph starting a node n. Optionally,
provide a set of visited notes (v) and a collection of nodes to
visit (ns)."
([g n]
(lazy-walk g [n] #{}))
([g ns v]
(lazy-seq (let [s (seq (drop-while v ns))
n (first s)
ns (rest s)]
(when s
(cons n (lazy-walk g (concat (get-neighbors g n) ns) (conj v n))))))))
(defn transitive-closure
"Returns the transitive closure of a graph. The neighbors are lazily computed.
Note: some version of this algorithm return all edges a->a
regardless of whether such loops exist in the original graph. This
version does not. Loops will be included only if produced by
cycles in the graph. If you have code that depends on such
behavior, call (-> g transitive-closure add-loops)"
[g]
(let [nns (fn [n]
[n (delay (lazy-walk g (get-neighbors g n) #{}))])
nbs (into {} (map nns (:nodes g)))]
(directed-graph
(:nodes g)
(fn [n] (force (nbs n))))))
;; Strongly Connected Components
(defn- post-ordered-visit
"Starting at node n, perform a post-ordered walk."
[g n [visited acc :as state]]
(if (visited n)
state
(let [[v2 acc2] (reduce (fn [st nd] (post-ordered-visit g nd st))
[(conj visited n) acc]
(get-neighbors g n))]
[v2 (conj acc2 n)])))
(defn post-ordered-nodes
"Return a sequence of indexes of a post-ordered walk of the graph."
[g]
(fnext (reduce #(post-ordered-visit g %2 %1)
[#{} []]
(:nodes g))))
(defn scc
"Returns, as a sequence of sets, the strongly connected components
of g."
[g]
(let [po (reverse (post-ordered-nodes g))
rev (reverse-graph g)
step (fn [stack visited acc]
(if (empty? stack)
acc
(let [[nv comp] (post-ordered-visit rev
(first stack)
[visited #{}])
ns (remove nv stack)]
(recur ns nv (conj acc comp)))))]
(step po #{} [])))
(defn component-graph
"Given a graph, perhaps with cycles, return a reduced graph that is acyclic.
Each node in the new graph will be a set of nodes from the old.
These sets are the strongly connected components. Each edge will
be the union of the corresponding edges of the prior graph."
([g]
(component-graph g (scc g)))
([g sccs]
(let [find-node-set (fn [n]
(some #(if (% n) % nil) sccs))
find-neighbors (fn [ns]
(let [nbs1 (map (partial get-neighbors g) ns)
nbs2 (map set nbs1)
nbs3 (apply union nbs2)]
(set (map find-node-set nbs3))))
nm (into {} (map (fn [ns] [ns (find-neighbors ns)]) sccs))]
(directed-graph (set sccs) nm))))
(defn recursive-component?
"Is the component (recieved from scc) self recursive?"
[g ns]
(or (> (count ns) 1)
(let [n (first ns)]
(some #(= % n) (get-neighbors g n)))))
(defn self-recursive-sets
"Returns, as a sequence of sets, the components of a graph that are
self-recursive."
[g]
(filter (partial recursive-component? g) (scc g)))
;; Dependency Lists
(defn fixed-point
"Repeatedly apply fun to data until (equal old-data new-data)
returns true. If max iterations occur, it will throw an
exception. Set max to nil for unlimited iterations."
[data fun max equal]
(let [step (fn step [data idx]
(when (and idx (= 0 idx))
(assert false "Fixed point overflow"))
(let [new-data (fun data)]
(if (equal data new-data)
new-data
(recur new-data (and idx (dec idx))))))]
(step data max)))
(defn- fold-into-sets
[priorities]
(let [max (inc (apply max 0 (vals priorities)))
step (fn [acc [n dep]]
(assoc acc dep (conj (acc dep) n)))]
(reduce step
(vec (replicate max #{}))
priorities)))
(defn dependency-list
"Similar to a topological sort, this returns a vector of sets. The
set of nodes at index 0 are independent. The set at index 1 depend
those at 2 depend on 0 and 1 , and so on . Those withing
a set have no mutual dependencies. Assume the input graph (which
much be acyclic) has an edge a->b when a depends on b."
[g]
(let [step (fn [d]
(let [update (fn [n]
(inc (apply max -1 (map d (get-neighbors g n)))))]
(into {} (map (fn [[k v]] [k (update k)]) d))))
counts (fixed-point (zipmap (:nodes g) (repeat 0))
step
(inc (count (:nodes g)))
=)]
(fold-into-sets counts)))
(defn stratification-list
"Similar to dependency-list (see doc), except two graphs are
provided. The first is as dependency-list. The second (which may
have cycles) provides a partial-dependency relation. If node a
depends on node b (meaning an edge a->b exists) in the second
graph, node a must be equal or later in the sequence."
[g1 g2]
(assert (= (-> g1 :nodes set) (-> g2 :nodes set)))
(let [step (fn [d]
(let [update (fn [n]
(max (inc (apply max -1
(map d (get-neighbors g1 n))))
(apply max -1 (map d (get-neighbors g2 n)))))]
(into {} (map (fn [[k v]] [k (update k)]) d))))
counts (fixed-point (zipmap (:nodes g1) (repeat 0))
step
(inc (count (:nodes g1)))
=)]
(fold-into-sets counts)))
;; End of file
| null | https://raw.githubusercontent.com/htm-community/comportex/cd318492cf2e43cb7f25238b116b90b8195ec5aa/src/org/nfrac/comportex/util/algo_graph.cljc | clojure | distribution terms for this software are covered by the Eclipse Public
License 1.0 (-1.0.php) which can
be found in the file epl-v10.html at the root of this distribution. By
using this software in any fashion, you are agreeing to be bound by the
terms of this license. You must not remove this notice, or any other,
from this software.
graph
Basic Graph Theory Algorithms
Graph Modification
Graph Walk
Strongly Connected Components
Dependency Lists
End of file | Copyright ( c ) . All rights reserved . The use and
straszheimjeffrey ( gmail )
Created 23 June 2009
(ns
^{:author "Jeffrey Straszheim",
:doc "Basic graph theory algorithms.
Copied from
because its available snapshot release does not have clojurescript support.
Updated to use records not structs."}
org.nfrac.comportex.util.algo-graph
(:require [clojure.set :refer [union]]))
(defrecord DirectedGraph
[nodes neighbors])
(defn directed-graph
"`nodes` - The nodes of the graph, a collection.
`neighbors` - A function from a node to its neighbor nodes collection."
[nodes neighbors]
(->DirectedGraph nodes neighbors))
(defn get-neighbors
"Get the neighbors of a node."
[g n]
((:neighbors g) n))
(defn reverse-graph
"Given a directed graph, return another directed graph with the
order of the edges reversed."
[g]
(let [op (fn [rna idx]
(let [ns (get-neighbors g idx)
am (fn [m val]
(assoc m val (conj (get m val #{}) idx)))]
(reduce am rna ns)))
rn (reduce op {} (:nodes g))]
(directed-graph (:nodes g) rn)))
(defn add-loops
"For each node n, add the edge n->n if not already present."
[g]
(directed-graph
(:nodes g)
(into {} (map (fn [n]
[n (conj (set (get-neighbors g n)) n)]) (:nodes g)))))
(defn remove-loops
"For each node n, remove any edges n->n."
[g]
(directed-graph
(:nodes g)
(into {} (map (fn [n]
[n (disj (set (get-neighbors g n)) n)]) (:nodes g)))))
(defn lazy-walk
"Return a lazy sequence of the nodes of a graph starting a node n. Optionally,
provide a set of visited notes (v) and a collection of nodes to
visit (ns)."
([g n]
(lazy-walk g [n] #{}))
([g ns v]
(lazy-seq (let [s (seq (drop-while v ns))
n (first s)
ns (rest s)]
(when s
(cons n (lazy-walk g (concat (get-neighbors g n) ns) (conj v n))))))))
(defn transitive-closure
"Returns the transitive closure of a graph. The neighbors are lazily computed.
Note: some version of this algorithm return all edges a->a
regardless of whether such loops exist in the original graph. This
version does not. Loops will be included only if produced by
cycles in the graph. If you have code that depends on such
behavior, call (-> g transitive-closure add-loops)"
[g]
(let [nns (fn [n]
[n (delay (lazy-walk g (get-neighbors g n) #{}))])
nbs (into {} (map nns (:nodes g)))]
(directed-graph
(:nodes g)
(fn [n] (force (nbs n))))))
(defn- post-ordered-visit
"Starting at node n, perform a post-ordered walk."
[g n [visited acc :as state]]
(if (visited n)
state
(let [[v2 acc2] (reduce (fn [st nd] (post-ordered-visit g nd st))
[(conj visited n) acc]
(get-neighbors g n))]
[v2 (conj acc2 n)])))
(defn post-ordered-nodes
"Return a sequence of indexes of a post-ordered walk of the graph."
[g]
(fnext (reduce #(post-ordered-visit g %2 %1)
[#{} []]
(:nodes g))))
(defn scc
"Returns, as a sequence of sets, the strongly connected components
of g."
[g]
(let [po (reverse (post-ordered-nodes g))
rev (reverse-graph g)
step (fn [stack visited acc]
(if (empty? stack)
acc
(let [[nv comp] (post-ordered-visit rev
(first stack)
[visited #{}])
ns (remove nv stack)]
(recur ns nv (conj acc comp)))))]
(step po #{} [])))
(defn component-graph
"Given a graph, perhaps with cycles, return a reduced graph that is acyclic.
Each node in the new graph will be a set of nodes from the old.
These sets are the strongly connected components. Each edge will
be the union of the corresponding edges of the prior graph."
([g]
(component-graph g (scc g)))
([g sccs]
(let [find-node-set (fn [n]
(some #(if (% n) % nil) sccs))
find-neighbors (fn [ns]
(let [nbs1 (map (partial get-neighbors g) ns)
nbs2 (map set nbs1)
nbs3 (apply union nbs2)]
(set (map find-node-set nbs3))))
nm (into {} (map (fn [ns] [ns (find-neighbors ns)]) sccs))]
(directed-graph (set sccs) nm))))
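;; Sketch only (assumed example data): collapsing the cycle above yields an
;; acyclic graph whose nodes are the component sets themselves.
(comment
  (:nodes (component-graph (directed-graph [:a :b :c] {:a [:b], :b [:a :c], :c []})))
  ;; => #{#{:a :b} #{:c}}
  )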
(defn recursive-component?
"Is the component (recieved from scc) self recursive?"
[g ns]
(or (> (count ns) 1)
(let [n (first ns)]
(some #(= % n) (get-neighbors g n)))))
(defn self-recursive-sets
"Returns, as a sequence of sets, the components of a graph that are
self-recursive."
[g]
(filter (partial recursive-component? g) (scc g)))
(defn fixed-point
"Repeatedly apply fun to data until (equal old-data new-data)
returns true. If the iteration limit max is reached, it throws an
assertion error. Set max to nil for unlimited iterations.
[data fun max equal]
(let [step (fn step [data idx]
(when (and idx (= 0 idx))
(assert false "Fixed point overflow"))
(let [new-data (fun data)]
(if (equal data new-data)
new-data
(recur new-data (and idx (dec idx))))))]
(step data max)))
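;; Sketch only (arbitrary numbers): repeatedly halving 100 with integer
;; division converges to 0 well inside the 50-iteration bound.
(comment
  (fixed-point 100 #(quot % 2) 50 =)
  ;; => 0
  )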
(defn- fold-into-sets
[priorities]
(let [max (inc (apply max 0 (vals priorities)))
step (fn [acc [n dep]]
(assoc acc dep (conj (acc dep) n)))]
(reduce step
(vec (replicate max #{}))
priorities)))
(defn dependency-list
"Similar to a topological sort, this returns a vector of sets. The
set of nodes at index 0 are independent. The set at index 1 depend
those at 2 depend on 0 and 1 , and so on . Those withing
a set have no mutual dependencies. Assume the input graph (which
much be acyclic) has an edge a->b when a depends on b."
[g]
(let [step (fn [d]
(let [update (fn [n]
(inc (apply max -1 (map d (get-neighbors g n)))))]
(into {} (map (fn [[k v]] [k (update k)]) d))))
counts (fixed-point (zipmap (:nodes g) (repeat 0))
step
(inc (count (:nodes g)))
=)]
(fold-into-sets counts)))
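;; Sketch only (assumed example data): reading the edge :a -> :b as
;; ":a depends on :b", :b is independent (index 0) and :a follows it (index 1).
(comment
  (dependency-list (directed-graph [:a :b] {:a [:b], :b []}))
  ;; => [#{:b} #{:a}]
  )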
(defn stratification-list
"Similar to dependency-list (see doc), except two graphs are
provided. The first is as dependency-list. The second (which may
have cycles) provides a partial-dependency relation. If node a
depends on node b (meaning an edge a->b exists) in the second
graph, node a must appear at the same index or later in the sequence."
[g1 g2]
(assert (= (-> g1 :nodes set) (-> g2 :nodes set)))
(let [step (fn [d]
(let [update (fn [n]
(max (inc (apply max -1
(map d (get-neighbors g1 n))))
(apply max -1 (map d (get-neighbors g2 n)))))]
(into {} (map (fn [[k v]] [k (update k)]) d))))
counts (fixed-point (zipmap (:nodes g1) (repeat 0))
step
(inc (count (:nodes g1)))
=)]
(fold-into-sets counts)))
|
0e7de3631c064c28bea14480694484b539c936496d12f5666fd236c000784889 | alanz/ghc-exactprint | T6018Bfail.hs | # LANGUAGE TypeFamilies #
module T6018Bfail where
type family H a b c = (result :: *) | result -> a b c
| null | https://raw.githubusercontent.com/alanz/ghc-exactprint/b6b75027811fa4c336b34122a7a7b1a8df462563/tests/examples/ghc80/T6018Bfail.hs | haskell | # LANGUAGE TypeFamilies #
|
|
a2e6347c253535f98e61e6344f2d431b886663cc43196c65a1b61710af90f451 | twosigma/waiter | async_request_test.clj | ;;
;; Copyright (c) Two Sigma Open Source, LLC
;;
;; Licensed under the Apache License, Version 2.0 (the "License");
;; you may not use this file except in compliance with the License.
;; You may obtain a copy of the License at
;;
;; -2.0
;;
;; Unless required by applicable law or agreed to in writing, software
;; distributed under the License is distributed on an "AS IS" BASIS,
;; WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
;; See the License for the specific language governing permissions and
;; limitations under the License.
;;
(ns waiter.async-request-test
(:require [clojure.core.async :as async]
[clojure.string :as str]
[clojure.test :refer :all]
[plumbing.core :as pc]
[waiter.async-request :refer :all]
[waiter.auth.authentication :as auth]
[waiter.scheduler :as scheduler]
[waiter.service :as service]
[waiter.status-codes :refer :all]
[waiter.test-helpers :refer :all]
[waiter.util.utils :as utils])
(:import java.net.URLDecoder))
(deftest test-monitor-async-request
(let [check-interval-ms 10
request-timeout-ms 105
correlation-id "test-monitor-async-request-cid"
status-endpoint "-host.com/waiter-async-status"]
(testing "error-in-response-from-make-request"
(let [release-status-atom (atom [])
complete-async-request (fn [status] (swap! release-status-atom conj status))
exception-message "exception for testing purposes"
make-http-request (fn [] (async/go {:body (async/chan 1), :error (Exception. exception-message)}))
request-still-active? (constantly true)
exit-chan (async/chan 1)
response-chan (monitor-async-request make-http-request complete-async-request request-still-active? status-endpoint
check-interval-ms request-timeout-ms correlation-id exit-chan)
monitor-result (async/<!! response-chan)]
(is (= [error-cause-instance-error] @release-status-atom))
(is (= :make-request-error monitor-result))))
(testing "status-check-timed-out"
(let [release-status-atom (atom [])
complete-async-request (fn [status] (swap! release-status-atom conj status))
make-request-counter (atom 0)
make-http-request (fn []
(swap! make-request-counter inc)
(async/go {:body (async/chan 1), :status http-200-ok}))
request-still-active? (constantly true)
exit-chan (async/chan 1)
response-chan (monitor-async-request make-http-request complete-async-request request-still-active? status-endpoint
check-interval-ms request-timeout-ms correlation-id exit-chan)
monitor-result (async/<!! response-chan)]
(is (= (int (Math/ceil (/ (double request-timeout-ms) check-interval-ms))) @make-request-counter))
(is (= [:success] @release-status-atom))
(is (= :monitor-timed-out monitor-result))))
(testing "status-request-terminated"
(let [release-status-atom (atom [])
complete-async-request (fn [status] (swap! release-status-atom conj status))
make-request-counter (atom 0)
make-http-request (fn []
(swap! make-request-counter inc)
(async/go {:body (async/chan 1), :status http-200-ok}))
request-still-active? (constantly true)
exit-chan (async/chan 1)
response-chan (monitor-async-request make-http-request complete-async-request request-still-active? status-endpoint
check-interval-ms request-timeout-ms correlation-id exit-chan)
_ (async/go
(async/<! (async/timeout check-interval-ms))
(async/put! exit-chan :exit))
monitor-result (async/<!! response-chan)]
(is (> (int (Math/ceil (/ (double request-timeout-ms) check-interval-ms))) @make-request-counter))
(is (= [:success] @release-status-atom))
(is (= :request-terminated monitor-result))))
(testing "status-check-request-no-longer-active"
(let [release-status-atom (atom [])
complete-async-request (fn [status] (swap! release-status-atom conj status))
make-request-counter (atom 0)
make-http-request (fn []
(swap! make-request-counter inc)
(async/go {:body (async/chan 1), :status http-200-ok}))
calls-to-in-active 6
request-still-active? (fn [] (< @make-request-counter calls-to-in-active))
exit-chan (async/chan 1)
response-chan (monitor-async-request make-http-request complete-async-request request-still-active? status-endpoint
check-interval-ms request-timeout-ms correlation-id exit-chan)
monitor-result (async/<!! response-chan)]
(is (= calls-to-in-active @make-request-counter))
(is (= [:success] @release-status-atom))
(is (= :request-no-longer-active monitor-result))))
(testing "status-check-eventually-201-created"
(let [release-status-atom (atom [])
complete-async-request (fn [status] (swap! release-status-atom conj status))
calls-to-non-http-200-ok 6
make-request-counter (atom 0)
make-http-request (fn []
(swap! make-request-counter inc)
(async/go
(if (= calls-to-non-http-200-ok @make-request-counter)
{:body (async/chan 1), :status http-201-created, :headers {"location" "/result"}}
{:body (async/chan 1), :status http-200-ok})))
request-still-active? (constantly true)
exit-chan (async/chan 1)
response-chan (monitor-async-request make-http-request complete-async-request request-still-active? status-endpoint
check-interval-ms request-timeout-ms correlation-id exit-chan)
monitor-result (async/<!! response-chan)]
(is (= calls-to-non-http-200-ok @make-request-counter))
(is (= [:success] @release-status-atom))
(is (= :unknown-status-code monitor-result))))
(testing "status-check-eventually-303-see-other-repeated"
(let [release-status-atom (atom [])
complete-async-request (fn [status] (swap! release-status-atom conj status))
calls-to-non-http-200-ok 6
make-request-counter (atom 0)
make-http-request (fn []
(swap! make-request-counter inc)
(async/go
(if (> calls-to-non-http-200-ok @make-request-counter)
{:body (async/chan 1), :status http-303-see-other, :headers {"location" "/result"}}
{:body (async/chan 1), :status http-200-ok})))
request-still-active? (constantly true)
exit-chan (async/chan 1)
response-chan (monitor-async-request make-http-request complete-async-request request-still-active? status-endpoint
check-interval-ms request-timeout-ms correlation-id exit-chan)
monitor-result (async/<!! response-chan)]
(is (> @make-request-counter calls-to-non-http-200-ok))
(is (= [:success] @release-status-atom))
(is (= :monitor-timed-out monitor-result))))
(testing "status-check-eventually-303-relative-url-in-location"
(let [release-status-atom (atom [])
complete-async-request (fn [status] (swap! release-status-atom conj status))
calls-to-non-http-200-ok 6
make-request-counter (atom 0)
make-http-request (fn []
(swap! make-request-counter inc)
(async/go
(if (= calls-to-non-http-200-ok @make-request-counter)
{:body (async/chan 1), :status http-303-see-other, :headers {"location" "../result"}}
{:body (async/chan 1), :status http-200-ok})))
request-still-active? (constantly true)
exit-chan (async/chan 1)
response-chan (monitor-async-request make-http-request complete-async-request request-still-active? status-endpoint
check-interval-ms request-timeout-ms correlation-id exit-chan)
monitor-result (async/<!! response-chan)]
(is (> @make-request-counter calls-to-non-http-200-ok))
(is (= [:success] @release-status-atom))
(is (= :monitor-timed-out monitor-result))))
(testing "status-check-eventually-303-absolute-url-in-location"
(let [release-status-atom (atom [])
complete-async-request (fn [status] (swap! release-status-atom conj status))
calls-to-non-http-200-ok 6
make-request-counter (atom 0)
make-http-request (fn []
(swap! make-request-counter inc)
(async/go
(if (= calls-to-non-http-200-ok @make-request-counter)
{:body (async/chan 1), :status http-303-see-other, :headers {"location" ""}}
{:body (async/chan 1), :status http-200-ok})))
request-still-active? (constantly true)
exit-chan (async/chan 1)
response-chan (monitor-async-request make-http-request complete-async-request request-still-active? status-endpoint
check-interval-ms request-timeout-ms correlation-id exit-chan)
monitor-result (async/<!! response-chan)]
(is (= @make-request-counter calls-to-non-http-200-ok))
(is (= [:success] @release-status-atom))
(is (= :status-see-other monitor-result))))
(testing "status-check-eventually-303-see-other-evetually-410-gone"
(let [release-status-atom (atom [])
complete-async-request (fn [status] (swap! release-status-atom conj status))
calls-to-non-http-200-ok 6
calls-to-non-200-ok-and-non-303-see-other 10
make-request-counter (atom 0)
make-http-request (fn []
(swap! make-request-counter inc)
(async/go
(if (> calls-to-non-http-200-ok @make-request-counter)
{:body (async/chan 1), :status http-200-ok}
(if (> calls-to-non-200-ok-and-non-303-see-other @make-request-counter)
{:body (async/chan 1), :status http-303-see-other, :headers {"location" "/result"}}
{:body (async/chan 1), :status http-410-gone}))))
request-still-active? (constantly true)
exit-chan (async/chan 1)
response-chan (monitor-async-request make-http-request complete-async-request request-still-active? status-endpoint
check-interval-ms request-timeout-ms correlation-id exit-chan)
monitor-result (async/<!! response-chan)]
(is (= @make-request-counter calls-to-non-200-ok-and-non-303-see-other))
(is (= [:success] @release-status-atom))
(is (= :status-gone monitor-result))))
(testing "status-check-eventually-410-gone"
(let [release-status-atom (atom [])
complete-async-request (fn [status] (swap! release-status-atom conj status))
calls-to-non-http-200-ok 6
make-request-counter (atom 0)
make-http-request (fn []
(swap! make-request-counter inc)
(async/go
(if (= calls-to-non-http-200-ok @make-request-counter)
{:body (async/chan 1), :status http-410-gone}
{:body (async/chan 1), :status http-200-ok})))
request-still-active? (constantly true)
exit-chan (async/chan 1)
response-chan (monitor-async-request make-http-request complete-async-request request-still-active? status-endpoint
check-interval-ms request-timeout-ms correlation-id exit-chan)
monitor-result (async/<!! response-chan)]
(is (= calls-to-non-http-200-ok @make-request-counter))
(is (= [:success] @release-status-atom))
(is (= :status-gone monitor-result))))))
(deftest test-complete-async-request-locally
(testing "valid-request-id"
(let [request-id "test-req-0"
initial-state {request-id :pending, "req-1" :pending, "req-2" :pending}
async-request-store-atom (atom initial-state)
release-status-atom (atom [])
release-instance-fn (fn [status] (swap! release-status-atom conj status))]
(complete-async-request-locally async-request-store-atom release-instance-fn request-id error-cause-instance-error)
(is (= [error-cause-instance-error] @release-status-atom))
(is (= (dissoc initial-state request-id) @async-request-store-atom))))
(testing "invalid-request-id"
(let [request-id "test-req-0"
initial-state {"req-0" :pending, "req-1" :pending, "req-2" :pending}
async-request-store-atom (atom initial-state)
release-instance-fn (fn [_] (throw (Exception. "Unexpected call!")))]
(complete-async-request-locally async-request-store-atom release-instance-fn request-id error-cause-instance-error)
(is (= initial-state @async-request-store-atom)))))
(deftest test-terminate-request
(testing "valid-request-id"
(let [request-id "test-req-0"
exit-chan (async/chan 1)
initial-state {request-id {:exit-chan exit-chan}, "req-1" :pending, "req-2" :pending}
async-request-store-atom (atom initial-state)]
(async-request-terminate async-request-store-atom request-id)
(is (= :exit (async/<!! exit-chan)))
(is (= initial-state @async-request-store-atom))))
(testing "invalid-request-id"
(let [request-id "test-req-0"
exit-chan (async/chan 1)
initial-state {"req-0" :pending, "req-1" :pending, "req-2" :pending}
async-request-store-atom (atom initial-state)]
(async-request-terminate async-request-store-atom request-id)
(async/>!! exit-chan :test-value)
(is (= :test-value (async/<!! exit-chan)))
(is (= initial-state @async-request-store-atom)))))
(deftest test-trigger-terminate
(let [local-router-id "local-router-id"
remote-router-id "remote-router-id"
service-id "test-service-id"
request-id "req-123456"
terminate-call-atom (atom false)
async-request-terminate-fn (fn [in-request-id]
(reset! terminate-call-atom "local")
(is (= request-id in-request-id)))
make-inter-router-requests-fn (fn [endpoint fn-key acceptable-router? method-key method-val]
(reset! terminate-call-atom "remote")
(is (= (str "waiter-async/complete/" request-id "/" service-id) endpoint))
(is (= :acceptable-router? fn-key))
(is (= :method method-key))
(is (= :get method-val))
(is (acceptable-router? remote-router-id)))]
(testing "local-trigger-terminate"
(reset! terminate-call-atom "")
(async-trigger-terminate async-request-terminate-fn make-inter-router-requests-fn local-router-id local-router-id service-id request-id)
(is (= "local" @terminate-call-atom)))
(testing "remote-trigger-terminate"
(reset! terminate-call-atom "")
(async-trigger-terminate async-request-terminate-fn make-inter-router-requests-fn local-router-id remote-router-id service-id request-id)
(is (= "remote" @terminate-call-atom)))))
(deftest test-post-process-async-request-response
(doseq [version ["v1" "v2"]]
(testing (str "creating async " version " request")
(let [v2? (= version "v2")
instance-host "www.example.com"
{:keys [host port] :as instance} {:host instance-host :port 1234}
router-id "my-router-id"
service-id "test-service-id"
metric-group "test-metric-group"
backend-proto "http"
user-agent "waiter-async-status-check/1234"
async-request-store-atom (atom {})
request-id "request-2394613984619"
reason-map {:request-id request-id}
request-properties {:async-check-interval-ms 100 :async-request-max-status-checks 50 :async-request-timeout-ms 200}
location (str "/location/" request-id)
query-string "a=b&c=d|e"
auth-params-map (auth/build-auth-params-map :internal "")
make-http-request-fn (fn [in-instance in-request end-route metric-group request-proto]
(is (= instance in-instance))
(is (contains? in-request :request-id))
(is (str/starts-with? (str (:request-id in-request)) "waiter-async-status-check-"))
(is (contains? in-request :request-time))
(is (= (assoc auth-params-map
:body nil
:client-protocol "HTTP/1.1"
:headers {"host" instance-host
"user-agent" "waiter-async-status-check/1234"
"x-cid" "UNKNOWN"}
:internal-protocol "HTTP/1.1"
:query-string "a=b&c=d|e"
:request-method :get
:scheme (if v2? "https" "http")
:uri location)
(dissoc in-request :request-id :request-time)))
(is (= "/location/request-2394613984619" end-route))
(is (= "test-metric-group" metric-group))
(is (= (if v2? "https" "http") request-proto))
(async/go {}))
instance-rpc-chan (async/chan 1)
populate-maintainer-chan! (make-populate-maintainer-chan! instance-rpc-chan)
complete-async-request-atom (atom nil)
response {}
b64-url-json-encode* utils/b64-url-json-encode]
(with-redefs [service/release-instance-go (constantly nil)
utils/b64-url-json-encode #(->> % (into (sorted-map)) b64-url-json-encode*)
monitor-async-request
(fn [make-get-request-fn complete-async-request-fn request-still-active? _
async-check-interval-ms async-request-timeout-ms correlation-id exit-chan]
(is (request-still-active?))
(is (= 100 async-check-interval-ms))
(is (= 200 async-request-timeout-ms))
(is correlation-id)
(is exit-chan)
(make-get-request-fn)
(reset! complete-async-request-atom complete-async-request-fn))]
(let [descriptor {:service-description {"backend-proto" backend-proto
"metric-group" metric-group}
:service-id service-id}
scheduler (reify scheduler/ServiceScheduler
(request-protocol [_ _ i sd]
(if v2? "https" (scheduler/retrieve-protocol i sd))))
{:keys [headers]} (post-process-async-request-response
scheduler router-id async-request-store-atom make-http-request-fn auth-params-map
populate-maintainer-chan! user-agent response descriptor instance reason-map
request-properties location query-string)]
(is (get @async-request-store-atom request-id))
(if v2?
(let [params {:router-id router-id :service-id service-id :host host :port port :proto "https"}
encoded-params (utils/b64-url-json-encode params)]
(is (= (str "/waiter-async/v2/status/" request-id "/" encoded-params location "?" query-string)
(get headers "location"))))
(is (= (str "/waiter-async/status/" request-id "/" router-id "/" service-id "/" host "/" port location "?" query-string)
(get headers "location"))))
(let [complete-async-request-fn @complete-async-request-atom]
(is complete-async-request-fn)
(complete-async-request-fn :success)
(is (nil? (get @async-request-store-atom request-id))))))))))
(deftest test-post-process-async-request-response-sanitized-check-interval
(let [instance-host "www.example.com"
{:keys [host port] :as instance} {:host instance-host :port 1234}
router-id "my-router-id"
service-id "test-service-id"
metric-group "test-metric-group"
backend-proto "http"
user-agent "waiter-async-status-check/1234"
async-request-store-atom (atom {})
request-id "request-2394613984619"
reason-map {:request-id request-id}
async-check-interval-ms 200
async-request-max-status-checks 50
async-request-timeout-ms 100000
sanitized-check-interval-ms (sanitize-check-interval async-request-timeout-ms async-check-interval-ms async-request-max-status-checks)
location (str "/location/" request-id)
query-string "a=b&c=d|e"
auth-params-map (auth/build-auth-params-map :internal "")
make-http-request-fn (fn [in-instance in-request end-route metric-group backend-proto]
(is (= instance in-instance))
(is (contains? in-request :request-id))
(is (str/starts-with? (str (:request-id in-request)) "waiter-async-status-check-"))
(is (contains? in-request :request-time))
(is (= (assoc auth-params-map
:body nil
:client-protocol "HTTP/1.1"
:headers {"host" instance-host
"user-agent" "waiter-async-status-check/1234"
"x-cid" "UNKNOWN"}
:internal-protocol "HTTP/1.1"
:query-string "a=b&c=d|e"
:request-method :get
:scheme "http"
:uri location)
(dissoc in-request :request-id :request-time)))
(is (= "/location/request-2394613984619" end-route))
(is (= "test-metric-group" metric-group))
(is (= "http" backend-proto))
(async/go {}))
instance-rpc-chan (async/chan 1)
populate-maintainer-chan! (make-populate-maintainer-chan! instance-rpc-chan)
complete-async-request-atom (atom nil)
response {}]
(with-redefs [service/release-instance-go (constantly nil)
monitor-async-request
(fn [make-get-request-fn complete-async-request-fn request-still-active? _
in-async-check-interval-ms in-async-request-timeout-ms correlation-id exit-chan]
(is (request-still-active?))
(is (= sanitized-check-interval-ms in-async-check-interval-ms))
(is (= async-request-timeout-ms in-async-request-timeout-ms))
(is correlation-id)
(is exit-chan)
(make-get-request-fn)
(reset! complete-async-request-atom complete-async-request-fn))]
(let [request-properties {:async-check-interval-ms async-check-interval-ms
:async-request-max-status-checks async-request-max-status-checks
:async-request-timeout-ms async-request-timeout-ms}
descriptor {:service-description {"backend-proto" backend-proto
"metric-group" metric-group}
:service-id service-id}
scheduler (reify scheduler/ServiceScheduler
(request-protocol [_ _ i sd]
(scheduler/retrieve-protocol i sd)))
{:keys [headers]} (post-process-async-request-response
scheduler router-id async-request-store-atom make-http-request-fn auth-params-map
populate-maintainer-chan! user-agent response descriptor instance
reason-map request-properties location query-string)]
(is (get @async-request-store-atom request-id))
(is (= (str "/waiter-async/status/" request-id "/" router-id "/" service-id "/" host "/" port location "?" query-string)
(get headers "location")))
(let [complete-async-request-fn @complete-async-request-atom]
(is complete-async-request-fn)
(complete-async-request-fn :success)
(is (nil? (get @async-request-store-atom request-id))))))))
(deftest test-route-params-and-uri-generation
(let [uri->route-params (fn [prefix uri]
(when (str/starts-with? (str uri) prefix)
(let [decode #(URLDecoder/decode % "UTF-8")
route-uri (subs (str uri) (count prefix))
[request-id & remaining-segments] (str/split (str route-uri) #"/")]
(if (str/includes? prefix "/v2/")
(->> {:location (str "/" (str/join "/" (rest remaining-segments)))
:request-id request-id
:request-data (first remaining-segments)}
unpack-async-v2-request-data
(pc/map-vals (comp #(when-not (str/blank? %) %) str)))
(let [[router-id service-id host port & location-parts] remaining-segments]
{:host (when-not (str/blank? host) host)
:location (when (seq location-parts) (str "/" (str/join "/" location-parts)))
:port (when-not (str/blank? port) port)
:request-id (when-not (str/blank? request-id) (decode request-id))
:router-id (when-not (str/blank? router-id) (decode router-id))
:service-id (when-not (str/blank? service-id) service-id)})))))
execute-test (fn [params]
(let [action :action
v2? (contains? params :proto)
prefix (str "/waiter-async/" (when v2? "v2/") (name action) "/")
uri (route-params->uri action params)
decoded-params (uri->route-params prefix uri)
v1-param-keys [:host :location :port :request-id :router-id :service-id]
default-nil-v1-params (pc/map-from-keys (constantly nil) v1-param-keys)]
(is (str/starts-with? uri prefix))
(when-not v2?
(doseq [[_ v] params] (is (str/includes? uri (str v)))))
(is (= (merge default-nil-v1-params
(pc/map-vals #(if (integer? %1) (str %1) %1) params))
decoded-params))))]
(testing "empty-params" (execute-test {}))
(testing "only-host" (execute-test {:host "105.123.025.36"}))
(testing "only-location" (execute-test {:location "/status-location"}))
(testing "only-port" (execute-test {:port 3254}))
(testing "only-request-id" (execute-test {:request-id "6546540.6406460"}))
(testing "only-router-id" (execute-test {:router-id "6546540.6406460"}))
(testing "only-service-id" (execute-test {:service-id "test-service-id"}))
(testing "all-but-proto" (execute-test {:host "105.123.025.36"
:location "/status-location"
:port 3254
:request-id "6546540.6406460"
:router-id "6546540.6406460"
:service-id "test-service-id"}))
(testing "all-params" (execute-test {:host "105.123.025.36"
:location "/status-location"
:port 3254
:proto "h2"
:request-id "6546540.6406460"
:router-id "6546540.6406460"
:service-id "test-service-id"}))))
(deftest test-normalize-location-header
(is (= ""
(normalize-location-header ":1234/path/to/status/1234.html"
"")))
(is (= "/path/to/status/result"
(normalize-location-header ":1234/path/to/status/1234.html"
":1234/path/to/status/result")))
(is (= "/path/to/status/result?a=b&c=d"
(normalize-location-header ":1234/path/to/status/1234.html"
":1234/path/to/status/result?a=b&c=d")))
(is (= ":1234/path/to/status/result"
(normalize-location-header ":1234/path/to/status/1234.html"
":1234/path/to/status/result")))
(is (= ":1234/path/to/status/result?a=b&c=d"
(normalize-location-header ":1234/path/to/status/1234.html"
":1234/path/to/status/result?a=b&c=d")))
(is (= ":3456/path/to/status/result"
(normalize-location-header ":1234/path/to/status/1234.html"
":3456/path/to/status/result")))
(is (= ""
(normalize-location-header ":1234/path/to/status/1234.html"
"")))
(is (= ""
(normalize-location-header ":1234/path/to/status/1234.html"
"")))
(is (= "/path/to/status/result"
(normalize-location-header ":1234/path/to/status/1234.html" "result")))
(is (= "/result"
(normalize-location-header ":1234/path/to/status/1234.html" "/result")))
(is (= "/path/to/result"
(normalize-location-header ":1234/path/to/status/1234.html" "../result")))
(is (= "/path/result"
(normalize-location-header ":1234/path/to/status/1234.html" "../../result")))
(is (= "/result"
(normalize-location-header ":1234/path/to/status/1234.html" "../../../result")))
(is (= "/path/to/retrieve/result"
(normalize-location-header ":1234/path/to/status/1234.html" "../retrieve/result"))))
| null | https://raw.githubusercontent.com/twosigma/waiter/fa1d028f61f92c8be15ddb45cfa743b92eeb4058/waiter/test/waiter/async_request_test.clj | clojure |
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
-2.0
Unless required by applicable law or agreed to in writing, software
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
| Copyright ( c ) Two Sigma Open Source , LLC
distributed under the License is distributed on an " AS IS " BASIS ,
(ns waiter.async-request-test
(:require [clojure.core.async :as async]
[clojure.string :as str]
[clojure.test :refer :all]
[plumbing.core :as pc]
[waiter.async-request :refer :all]
[waiter.auth.authentication :as auth]
[waiter.scheduler :as scheduler]
[waiter.service :as service]
[waiter.status-codes :refer :all]
[waiter.test-helpers :refer :all]
[waiter.util.utils :as utils])
(:import java.net.URLDecoder))
(deftest test-monitor-async-request
(let [check-interval-ms 10
request-timeout-ms 105
correlation-id "test-monitor-async-request-cid"
status-endpoint "-host.com/waiter-async-status"]
(testing "error-in-response-from-make-request"
(let [release-status-atom (atom [])
complete-async-request (fn [status] (swap! release-status-atom conj status))
exception-message "exception for testing purposes"
make-http-request (fn [] (async/go {:body (async/chan 1), :error (Exception. exception-message)}))
request-still-active? (constantly true)
exit-chan (async/chan 1)
response-chan (monitor-async-request make-http-request complete-async-request request-still-active? status-endpoint
check-interval-ms request-timeout-ms correlation-id exit-chan)
monitor-result (async/<!! response-chan)]
(is (= [error-cause-instance-error] @release-status-atom))
(is (= :make-request-error monitor-result))))
(testing "status-check-timed-out"
(let [release-status-atom (atom [])
complete-async-request (fn [status] (swap! release-status-atom conj status))
make-request-counter (atom 0)
make-http-request (fn []
(swap! make-request-counter inc)
(async/go {:body (async/chan 1), :status http-200-ok}))
request-still-active? (constantly true)
exit-chan (async/chan 1)
response-chan (monitor-async-request make-http-request complete-async-request request-still-active? status-endpoint
check-interval-ms request-timeout-ms correlation-id exit-chan)
monitor-result (async/<!! response-chan)]
(is (= (int (Math/ceil (/ (double request-timeout-ms) check-interval-ms))) @make-request-counter))
(is (= [:success] @release-status-atom))
(is (= :monitor-timed-out monitor-result))))
(testing "status-request-terminated"
(let [release-status-atom (atom [])
complete-async-request (fn [status] (swap! release-status-atom conj status))
make-request-counter (atom 0)
make-http-request (fn []
(swap! make-request-counter inc)
(async/go {:body (async/chan 1), :status http-200-ok}))
request-still-active? (constantly true)
exit-chan (async/chan 1)
response-chan (monitor-async-request make-http-request complete-async-request request-still-active? status-endpoint
check-interval-ms request-timeout-ms correlation-id exit-chan)
_ (async/go
(async/<! (async/timeout check-interval-ms))
(async/put! exit-chan :exit))
monitor-result (async/<!! response-chan)]
(is (> (int (Math/ceil (/ (double request-timeout-ms) check-interval-ms))) @make-request-counter))
(is (= [:success] @release-status-atom))
(is (= :request-terminated monitor-result))))
(testing "status-check-request-no-longer-active"
(let [release-status-atom (atom [])
complete-async-request (fn [status] (swap! release-status-atom conj status))
make-request-counter (atom 0)
make-http-request (fn []
(swap! make-request-counter inc)
(async/go {:body (async/chan 1), :status http-200-ok}))
calls-to-in-active 6
request-still-active? (fn [] (< @make-request-counter calls-to-in-active))
exit-chan (async/chan 1)
response-chan (monitor-async-request make-http-request complete-async-request request-still-active? status-endpoint
check-interval-ms request-timeout-ms correlation-id exit-chan)
monitor-result (async/<!! response-chan)]
(is (= calls-to-in-active @make-request-counter))
(is (= [:success] @release-status-atom))
(is (= :request-no-longer-active monitor-result))))
(testing "status-check-eventually-201-created"
(let [release-status-atom (atom [])
complete-async-request (fn [status] (swap! release-status-atom conj status))
calls-to-non-http-200-ok 6
make-request-counter (atom 0)
make-http-request (fn []
(swap! make-request-counter inc)
(async/go
(if (= calls-to-non-http-200-ok @make-request-counter)
{:body (async/chan 1), :status http-201-created, :headers {"location" "/result"}}
{:body (async/chan 1), :status http-200-ok})))
request-still-active? (constantly true)
exit-chan (async/chan 1)
response-chan (monitor-async-request make-http-request complete-async-request request-still-active? status-endpoint
check-interval-ms request-timeout-ms correlation-id exit-chan)
monitor-result (async/<!! response-chan)]
(is (= calls-to-non-http-200-ok @make-request-counter))
(is (= [:success] @release-status-atom))
(is (= :unknown-status-code monitor-result))))
(testing "status-check-eventually-303-see-other-repeated"
(let [release-status-atom (atom [])
complete-async-request (fn [status] (swap! release-status-atom conj status))
calls-to-non-http-200-ok 6
make-request-counter (atom 0)
make-http-request (fn []
(swap! make-request-counter inc)
(async/go
(if (> calls-to-non-http-200-ok @make-request-counter)
{:body (async/chan 1), :status http-303-see-other, :headers {"location" "/result"}}
{:body (async/chan 1), :status http-200-ok})))
request-still-active? (constantly true)
exit-chan (async/chan 1)
response-chan (monitor-async-request make-http-request complete-async-request request-still-active? status-endpoint
check-interval-ms request-timeout-ms correlation-id exit-chan)
monitor-result (async/<!! response-chan)]
(is (> @make-request-counter calls-to-non-http-200-ok))
(is (= [:success] @release-status-atom))
(is (= :monitor-timed-out monitor-result))))
(testing "status-check-eventually-303-relative-url-in-location"
(let [release-status-atom (atom [])
complete-async-request (fn [status] (swap! release-status-atom conj status))
calls-to-non-http-200-ok 6
make-request-counter (atom 0)
make-http-request (fn []
(swap! make-request-counter inc)
(async/go
(if (= calls-to-non-http-200-ok @make-request-counter)
{:body (async/chan 1), :status http-303-see-other, :headers {"location" "../result"}}
{:body (async/chan 1), :status http-200-ok})))
request-still-active? (constantly true)
exit-chan (async/chan 1)
response-chan (monitor-async-request make-http-request complete-async-request request-still-active? status-endpoint
check-interval-ms request-timeout-ms correlation-id exit-chan)
monitor-result (async/<!! response-chan)]
(is (> @make-request-counter calls-to-non-http-200-ok))
(is (= [:success] @release-status-atom))
(is (= :monitor-timed-out monitor-result))))
(testing "status-check-eventually-303-absolute-url-in-location"
(let [release-status-atom (atom [])
complete-async-request (fn [status] (swap! release-status-atom conj status))
calls-to-non-http-200-ok 6
make-request-counter (atom 0)
make-http-request (fn []
(swap! make-request-counter inc)
(async/go
(if (= calls-to-non-http-200-ok @make-request-counter)
{:body (async/chan 1), :status http-303-see-other, :headers {"location" ""}}
{:body (async/chan 1), :status http-200-ok})))
request-still-active? (constantly true)
exit-chan (async/chan 1)
response-chan (monitor-async-request make-http-request complete-async-request request-still-active? status-endpoint
check-interval-ms request-timeout-ms correlation-id exit-chan)
monitor-result (async/<!! response-chan)]
(is (= @make-request-counter calls-to-non-http-200-ok))
(is (= [:success] @release-status-atom))
(is (= :status-see-other monitor-result))))
(testing "status-check-eventually-303-see-other-evetually-410-gone"
(let [release-status-atom (atom [])
complete-async-request (fn [status] (swap! release-status-atom conj status))
calls-to-non-http-200-ok 6
calls-to-non-200-ok-and-non-303-see-other 10
make-request-counter (atom 0)
make-http-request (fn []
(swap! make-request-counter inc)
(async/go
(if (> calls-to-non-http-200-ok @make-request-counter)
{:body (async/chan 1), :status http-200-ok}
(if (> calls-to-non-200-ok-and-non-303-see-other @make-request-counter)
{:body (async/chan 1), :status http-303-see-other, :headers {"location" "/result"}}
{:body (async/chan 1), :status http-410-gone}))))
request-still-active? (constantly true)
exit-chan (async/chan 1)
response-chan (monitor-async-request make-http-request complete-async-request request-still-active? status-endpoint
check-interval-ms request-timeout-ms correlation-id exit-chan)
monitor-result (async/<!! response-chan)]
(is (= @make-request-counter calls-to-non-200-ok-and-non-303-see-other))
(is (= [:success] @release-status-atom))
(is (= :status-gone monitor-result))))
(testing "status-check-eventually-410-gone"
(let [release-status-atom (atom [])
complete-async-request (fn [status] (swap! release-status-atom conj status))
calls-to-non-http-200-ok 6
make-request-counter (atom 0)
make-http-request (fn []
(swap! make-request-counter inc)
(async/go
(if (= calls-to-non-http-200-ok @make-request-counter)
{:body (async/chan 1), :status http-410-gone}
{:body (async/chan 1), :status http-200-ok})))
request-still-active? (constantly true)
exit-chan (async/chan 1)
response-chan (monitor-async-request make-http-request complete-async-request request-still-active? status-endpoint
check-interval-ms request-timeout-ms correlation-id exit-chan)
monitor-result (async/<!! response-chan)]
(is (= calls-to-non-http-200-ok @make-request-counter))
(is (= [:success] @release-status-atom))
(is (= :status-gone monitor-result))))))
(deftest test-complete-async-request-locally
(testing "valid-request-id"
(let [request-id "test-req-0"
initial-state {request-id :pending, "req-1" :pending, "req-2" :pending}
async-request-store-atom (atom initial-state)
release-status-atom (atom [])
release-instance-fn (fn [status] (swap! release-status-atom conj status))]
(complete-async-request-locally async-request-store-atom release-instance-fn request-id error-cause-instance-error)
(is (= [error-cause-instance-error] @release-status-atom))
(is (= (dissoc initial-state request-id) @async-request-store-atom))))
(testing "invalid-request-id"
(let [request-id "test-req-0"
initial-state {"req-0" :pending, "req-1" :pending, "req-2" :pending}
async-request-store-atom (atom initial-state)
release-instance-fn (fn [_] (throw (Exception. "Unexpected call!")))]
(complete-async-request-locally async-request-store-atom release-instance-fn request-id error-cause-instance-error)
(is (= initial-state @async-request-store-atom)))))
(deftest test-terminate-request
(testing "valid-request-id"
(let [request-id "test-req-0"
exit-chan (async/chan 1)
initial-state {request-id {:exit-chan exit-chan}, "req-1" :pending, "req-2" :pending}
async-request-store-atom (atom initial-state)]
(async-request-terminate async-request-store-atom request-id)
(is (= :exit (async/<!! exit-chan)))
(is (= initial-state @async-request-store-atom))))
(testing "invalid-request-id"
(let [request-id "test-req-0"
exit-chan (async/chan 1)
initial-state {"req-0" :pending, "req-1" :pending, "req-2" :pending}
async-request-store-atom (atom initial-state)]
(async-request-terminate async-request-store-atom request-id)
(async/>!! exit-chan :test-value)
(is (= :test-value (async/<!! exit-chan)))
(is (= initial-state @async-request-store-atom)))))
(deftest test-trigger-terminate
(let [local-router-id "local-router-id"
remote-router-id "remote-router-id"
service-id "test-service-id"
request-id "req-123456"
terminate-call-atom (atom false)
async-request-terminate-fn (fn [in-request-id]
(reset! terminate-call-atom "local")
(is (= request-id in-request-id)))
make-inter-router-requests-fn (fn [endpoint fn-key acceptable-router? method-key method-val]
(reset! terminate-call-atom "remote")
(is (= (str "waiter-async/complete/" request-id "/" service-id) endpoint))
(is (= :acceptable-router? fn-key))
(is (= :method method-key))
(is (= :get method-val))
(is (acceptable-router? remote-router-id)))]
(testing "local-trigger-terminate"
(reset! terminate-call-atom "")
(async-trigger-terminate async-request-terminate-fn make-inter-router-requests-fn local-router-id local-router-id service-id request-id)
(is (= "local" @terminate-call-atom)))
(testing "remote-trigger-terminate"
(reset! terminate-call-atom "")
(async-trigger-terminate async-request-terminate-fn make-inter-router-requests-fn local-router-id remote-router-id service-id request-id)
(is (= "remote" @terminate-call-atom)))))
(deftest test-post-process-async-request-response
(doseq [version ["v1" "v2"]]
(testing (str "creating async " version " request")
(let [v2? (= version "v2")
instance-host "www.example.com"
{:keys [host port] :as instance} {:host instance-host :port 1234}
router-id "my-router-id"
service-id "test-service-id"
metric-group "test-metric-group"
backend-proto "http"
user-agent "waiter-async-status-check/1234"
async-request-store-atom (atom {})
request-id "request-2394613984619"
reason-map {:request-id request-id}
request-properties {:async-check-interval-ms 100 :async-request-max-status-checks 50 :async-request-timeout-ms 200}
location (str "/location/" request-id)
query-string "a=b&c=d|e"
auth-params-map (auth/build-auth-params-map :internal "")
make-http-request-fn (fn [in-instance in-request end-route metric-group request-proto]
(is (= instance in-instance))
(is (contains? in-request :request-id))
(is (str/starts-with? (str (:request-id in-request)) "waiter-async-status-check-"))
(is (contains? in-request :request-time))
(is (= (assoc auth-params-map
:body nil
:client-protocol "HTTP/1.1"
:headers {"host" instance-host
"user-agent" "waiter-async-status-check/1234"
"x-cid" "UNKNOWN"}
:internal-protocol "HTTP/1.1"
:query-string "a=b&c=d|e"
:request-method :get
:scheme (if v2? "https" "http")
:uri location)
(dissoc in-request :request-id :request-time)))
(is (= "/location/request-2394613984619" end-route))
(is (= "test-metric-group" metric-group))
(is (= (if v2? "https" "http") request-proto))
(async/go {}))
instance-rpc-chan (async/chan 1)
populate-maintainer-chan! (make-populate-maintainer-chan! instance-rpc-chan)
complete-async-request-atom (atom nil)
response {}
b64-url-json-encode* utils/b64-url-json-encode]
(with-redefs [service/release-instance-go (constantly nil)
utils/b64-url-json-encode #(->> % (into (sorted-map)) b64-url-json-encode*)
monitor-async-request
(fn [make-get-request-fn complete-async-request-fn request-still-active? _
async-check-interval-ms async-request-timeout-ms correlation-id exit-chan]
(is (request-still-active?))
(is (= 100 async-check-interval-ms))
(is (= 200 async-request-timeout-ms))
(is correlation-id)
(is exit-chan)
(make-get-request-fn)
(reset! complete-async-request-atom complete-async-request-fn))]
(let [descriptor {:service-description {"backend-proto" backend-proto
"metric-group" metric-group}
:service-id service-id}
scheduler (reify scheduler/ServiceScheduler
(request-protocol [_ _ i sd]
(if v2? "https" (scheduler/retrieve-protocol i sd))))
{:keys [headers]} (post-process-async-request-response
scheduler router-id async-request-store-atom make-http-request-fn auth-params-map
populate-maintainer-chan! user-agent response descriptor instance reason-map
request-properties location query-string)]
(is (get @async-request-store-atom request-id))
(if v2?
(let [params {:router-id router-id :service-id service-id :host host :port port :proto "https"}
encoded-params (utils/b64-url-json-encode params)]
(is (= (str "/waiter-async/v2/status/" request-id "/" encoded-params location "?" query-string)
(get headers "location"))))
(is (= (str "/waiter-async/status/" request-id "/" router-id "/" service-id "/" host "/" port location "?" query-string)
(get headers "location"))))
(let [complete-async-request-fn @complete-async-request-atom]
(is complete-async-request-fn)
(complete-async-request-fn :success)
(is (nil? (get @async-request-store-atom request-id))))))))))
(deftest test-post-process-async-request-response-sanitized-check-interval
(let [instance-host "www.example.com"
{:keys [host port] :as instance} {:host instance-host :port 1234}
router-id "my-router-id"
service-id "test-service-id"
metric-group "test-metric-group"
backend-proto "http"
user-agent "waiter-async-status-check/1234"
async-request-store-atom (atom {})
request-id "request-2394613984619"
reason-map {:request-id request-id}
async-check-interval-ms 200
async-request-max-status-checks 50
async-request-timeout-ms 100000
sanitized-check-interval-ms (sanitize-check-interval async-request-timeout-ms async-check-interval-ms async-request-max-status-checks)
location (str "/location/" request-id)
query-string "a=b&c=d|e"
auth-params-map (auth/build-auth-params-map :internal "")
make-http-request-fn (fn [in-instance in-request end-route metric-group backend-proto]
(is (= instance in-instance))
(is (contains? in-request :request-id))
(is (str/starts-with? (str (:request-id in-request)) "waiter-async-status-check-"))
(is (contains? in-request :request-time))
(is (= (assoc auth-params-map
:body nil
:client-protocol "HTTP/1.1"
:headers {"host" instance-host
"user-agent" "waiter-async-status-check/1234"
"x-cid" "UNKNOWN"}
:internal-protocol "HTTP/1.1"
:query-string "a=b&c=d|e"
:request-method :get
:scheme "http"
:uri location)
(dissoc in-request :request-id :request-time)))
(is (= "/location/request-2394613984619" end-route))
(is (= "test-metric-group" metric-group))
(is (= "http" backend-proto))
(async/go {}))
instance-rpc-chan (async/chan 1)
populate-maintainer-chan! (make-populate-maintainer-chan! instance-rpc-chan)
complete-async-request-atom (atom nil)
response {}]
(with-redefs [service/release-instance-go (constantly nil)
monitor-async-request
(fn [make-get-request-fn complete-async-request-fn request-still-active? _
in-async-check-interval-ms in-async-request-timeout-ms correlation-id exit-chan]
(is (request-still-active?))
(is (= sanitized-check-interval-ms in-async-check-interval-ms))
(is (= async-request-timeout-ms in-async-request-timeout-ms))
(is correlation-id)
(is exit-chan)
(make-get-request-fn)
(reset! complete-async-request-atom complete-async-request-fn))]
(let [request-properties {:async-check-interval-ms async-check-interval-ms
:async-request-max-status-checks async-request-max-status-checks
:async-request-timeout-ms async-request-timeout-ms}
descriptor {:service-description {"backend-proto" backend-proto
"metric-group" metric-group}
:service-id service-id}
scheduler (reify scheduler/ServiceScheduler
(request-protocol [_ _ i sd]
(scheduler/retrieve-protocol i sd)))
{:keys [headers]} (post-process-async-request-response
scheduler router-id async-request-store-atom make-http-request-fn auth-params-map
populate-maintainer-chan! user-agent response descriptor instance
reason-map request-properties location query-string)]
(is (get @async-request-store-atom request-id))
(is (= (str "/waiter-async/status/" request-id "/" router-id "/" service-id "/" host "/" port location "?" query-string)
(get headers "location")))
(let [complete-async-request-fn @complete-async-request-atom]
(is complete-async-request-fn)
(complete-async-request-fn :success)
(is (nil? (get @async-request-store-atom request-id))))))))
(deftest test-route-params-and-uri-generation
(let [uri->route-params (fn [prefix uri]
(when (str/starts-with? (str uri) prefix)
(let [decode #(URLDecoder/decode % "UTF-8")
route-uri (subs (str uri) (count prefix))
[request-id & remaining-segments] (str/split (str route-uri) #"/")]
(if (str/includes? prefix "/v2/")
(->> {:location (str "/" (str/join "/" (rest remaining-segments)))
:request-id request-id
:request-data (first remaining-segments)}
unpack-async-v2-request-data
(pc/map-vals (comp #(when-not (str/blank? %) %) str)))
(let [[router-id service-id host port & location-parts] remaining-segments]
{:host (when-not (str/blank? host) host)
:location (when (seq location-parts) (str "/" (str/join "/" location-parts)))
:port (when-not (str/blank? port) port)
:request-id (when-not (str/blank? request-id) (decode request-id))
:router-id (when-not (str/blank? router-id) (decode router-id))
:service-id (when-not (str/blank? service-id) service-id)})))))
execute-test (fn [params]
(let [action :action
v2? (contains? params :proto)
prefix (str "/waiter-async/" (when v2? "v2/") (name action) "/")
uri (route-params->uri action params)
decoded-params (uri->route-params prefix uri)
v1-param-keys [:host :location :port :request-id :router-id :service-id]
default-nil-v1-params (pc/map-from-keys (constantly nil) v1-param-keys)]
(is (str/starts-with? uri prefix))
(when-not v2?
(doseq [[_ v] params] (is (str/includes? uri (str v)))))
(is (= (merge default-nil-v1-params
(pc/map-vals #(if (integer? %1) (str %1) %1) params))
decoded-params))))]
(testing "empty-params" (execute-test {}))
(testing "only-host" (execute-test {:host "105.123.025.36"}))
(testing "only-location" (execute-test {:location "/status-location"}))
(testing "only-port" (execute-test {:port 3254}))
(testing "only-request-id" (execute-test {:request-id "6546540.6406460"}))
(testing "only-router-id" (execute-test {:router-id "6546540.6406460"}))
(testing "only-service-id" (execute-test {:service-id "test-service-id"}))
(testing "all-but-proto" (execute-test {:host "105.123.025.36"
:location "/status-location"
:port 3254
:request-id "6546540.6406460"
:router-id "6546540.6406460"
:service-id "test-service-id"}))
(testing "all-params" (execute-test {:host "105.123.025.36"
:location "/status-location"
:port 3254
:proto "h2"
:request-id "6546540.6406460"
:router-id "6546540.6406460"
:service-id "test-service-id"}))))
(deftest test-normalize-location-header
(is (= ""
(normalize-location-header ":1234/path/to/status/1234.html"
"")))
(is (= "/path/to/status/result"
(normalize-location-header ":1234/path/to/status/1234.html"
":1234/path/to/status/result")))
(is (= "/path/to/status/result?a=b&c=d"
(normalize-location-header ":1234/path/to/status/1234.html"
":1234/path/to/status/result?a=b&c=d")))
(is (= ":1234/path/to/status/result"
(normalize-location-header ":1234/path/to/status/1234.html"
":1234/path/to/status/result")))
(is (= ":1234/path/to/status/result?a=b&c=d"
(normalize-location-header ":1234/path/to/status/1234.html"
":1234/path/to/status/result?a=b&c=d")))
(is (= ":3456/path/to/status/result"
(normalize-location-header ":1234/path/to/status/1234.html"
":3456/path/to/status/result")))
(is (= ""
(normalize-location-header ":1234/path/to/status/1234.html"
"")))
(is (= ""
(normalize-location-header ":1234/path/to/status/1234.html"
"")))
(is (= "/path/to/status/result"
(normalize-location-header ":1234/path/to/status/1234.html" "result")))
(is (= "/result"
(normalize-location-header ":1234/path/to/status/1234.html" "/result")))
(is (= "/path/to/result"
(normalize-location-header ":1234/path/to/status/1234.html" "../result")))
(is (= "/path/result"
(normalize-location-header ":1234/path/to/status/1234.html" "../../result")))
(is (= "/result"
(normalize-location-header ":1234/path/to/status/1234.html" "../../../result")))
(is (= "/path/to/retrieve/result"
(normalize-location-header ":1234/path/to/status/1234.html" "../retrieve/result"))))
|
f94da1b74eb0ef4dbf4587d4a8de04b5272800575cc89253bc72186405eafa5c | INRIA/zelus | zlsolve.mli |
(* Abstract types for the vectors passed to the model functions. Elements
are accessed or changed by the functions given below. *)
module type ZELUS_SOLVER =
sig
(** Interface for compiled functions *)
(** Configuring and calling the D-C solver *)
(* Log simulation steps and continuous state values. *)
val enable_logging : unit -> unit
(* The solver's minimum and maximum step sizes. *)
val min_step_size : float option ref
val max_step_size : float option ref
(* The maximum simulation time. *)
val max_sim_time : float option ref
(* A factor relating simulation and wall clock times. *)
val speedup : float ref
val step : 's Zls.f_alloc
-> 's Zls.f_csize
-> 's Zls.f_zsize
-> 's Zls.f_horizon
-> 's Zls.f_maxsize
-> 's Zls.f_ders
-> ('s, 'o) Zls.f_step
-> 's Zls.f_zero
-> 's Zls.f_reset
-> (unit -> 'o option * bool * float) (* result, is_done, delta *)
end
module Make : functor (_ : Zls.STATE_SOLVER)
-> functor (_ : Zls.ZEROC_SOLVER)
-> ZELUS_SOLVER
| null | https://raw.githubusercontent.com/INRIA/zelus/685428574b0f9100ad5a41bbaa416cd7a2506d5e/lib/std/zlsolve.mli | ocaml | Abstract types for the vectors passed to the model functions. Elements
are accessed or changed by the functions given below.
|
e26f26f5189170b519a512925b22531cdfd887ebb78176e40c02e4d0bfda5d08 | ds-wizard/engine-backend | Usages.hs | module Wizard.Database.Migration.Development.Usage.Data.Usages where
import Wizard.Api.Resource.Usage.UsageDTO
defaultUsage :: UsageDTO
defaultUsage =
UsageDTO
{ users = defaultUsageUsers
, activeUsers = defaultUsageActiveUsers
, knowledgeModels = defaultUsageKnowledgeModels
, branches = defaultUsageBranches
, documentTemplates = defaultUsageDocumentTemplates
, documentTemplateDrafts = defaultUsageDocumentTemplateDrafts
, questionnaires = defaultUsageQuestionnaires
, documents = defaultUsageDocuments
, locales = defaultUsageLocales
, storage = defaultUsageStorage
}
defaultUsageUsers :: UsageEntryDTO
defaultUsageUsers = UsageEntryDTO {current = 1, max = Nothing}
defaultUsageActiveUsers :: UsageEntryDTO
defaultUsageActiveUsers = UsageEntryDTO {current = 1, max = Nothing}
defaultUsageKnowledgeModels :: UsageEntryDTO
defaultUsageKnowledgeModels = UsageEntryDTO {current = 2, max = Nothing}
defaultUsageBranches :: UsageEntryDTO
defaultUsageBranches = UsageEntryDTO {current = 0, max = Nothing}
defaultUsageDocumentTemplates :: UsageEntryDTO
defaultUsageDocumentTemplates = UsageEntryDTO {current = 0, max = Nothing}
defaultUsageDocumentTemplateDrafts :: UsageEntryDTO
defaultUsageDocumentTemplateDrafts = UsageEntryDTO {current = 0, max = Nothing}
defaultUsageQuestionnaires :: UsageEntryDTO
defaultUsageQuestionnaires = UsageEntryDTO {current = 0, max = Nothing}
defaultUsageDocuments :: UsageEntryDTO
defaultUsageDocuments = UsageEntryDTO {current = 0, max = Nothing}
defaultUsageLocales :: UsageEntryDTO
defaultUsageLocales = UsageEntryDTO {current = 0, max = Nothing}
defaultUsageStorage :: UsageEntryDTO
defaultUsageStorage = UsageEntryDTO {current = 0, max = Nothing}
| null | https://raw.githubusercontent.com/ds-wizard/engine-backend/d392b751192a646064305d3534c57becaa229f28/engine-wizard/src/Wizard/Database/Migration/Development/Usage/Data/Usages.hs | haskell | module Wizard.Database.Migration.Development.Usage.Data.Usages where
|
|
df339df1bda8ddaaf4f3705febdbb96d0cfeb2b4a63a559ceddf7c4dff479e8d | softwarelanguageslab/maf | R5RS_scp1_polynome-3.scm | ; Changes:
; * removed: 0
; * added: 0
; * swaps: 0
; * negated predicates: 1
; * swapped branches: 2
; * calls to id fun: 0
(letrec ((make-point (lambda (x y)
(letrec ((dispatch (lambda (msg)
(if (eq? msg 'x-value)
x
(if (eq? msg 'y-value) y (error "wrong message"))))))
dispatch)))
(make-segment (lambda (start end)
(letrec ((midpoint (lambda ()
(make-point (/ (+ (start 'x-value) (end 'x-value)) 2) (/ (+ (start 'y-value) (end 'y-value)) 2))))
(dispatch (lambda (msg)
(if (eq? msg 'start-point)
start
(if (eq? msg 'end-point)
end
(if (eq? msg 'midpoint)
(midpoint)
(error "wrong message")))))))
dispatch)))
(make-w-vector (lambda args
(letrec ((dimension (lambda ()
(length args)))
(coordinate (lambda (n)
(if (let ((__or_res (< n 1))) (if __or_res __or_res (> n (dimension))))
(error "coordinate is out of range")
(list-ref args (- n 1)))))
(add (lambda (w-vector)
(letrec ((loop (lambda (ctr res)
(if (= ctr 0)
(apply make-w-vector res)
(loop (- ctr 1) (cons (+ (coordinate ctr) ((w-vector 'coordinate) ctr)) res))))))
(loop (dimension) ()))))
(dispatch (lambda (msg)
(if (eq? msg 'dimension)
(dimension)
(if (eq? msg 'coordinate)
coordinate
(if (eq? msg 'add) add (error "wrong message")))))))
dispatch)))
(make-polynome (lambda coefficients
(let ((polynome (apply make-w-vector coefficients)))
(letrec ((coefficient (lambda (index)
((polynome 'coordinate) index)))
(order (lambda ()
(- (polynome 'dimension) 1)))
(dispatch (lambda (msg)
(if (eq? msg 'order)
(order)
(if (eq? msg 'coefficient)
coefficient
(error "wrong message"))))))
dispatch))))
(point1 (make-point 6 10))
(point2 (make-point 10 20))
(segment (make-segment point1 point2))
(midpoint (segment 'midpoint))
(w-vector1 (make-w-vector 1 2 3))
(w-vector2 (make-w-vector 4 5 6))
(polynome (make-polynome 1 2 3)))
(if (= (point1 'x-value) 6)
(if (= ((segment 'start-point) 'y-value) 10)
(if (= (midpoint 'x-value) 8)
(if (<change> (= ((w-vector1 'coordinate) 2) 2) (not (= ((w-vector1 'coordinate) 2) 2)))
(if (= ((w-vector2 'coordinate) 1) 4)
(<change>
(if (= ((((w-vector1 'add) w-vector2) 'coordinate) 1) 5)
(if (= (polynome 'order) 2)
(= ((polynome 'coefficient) 2) 2)
#f)
#f)
#f)
(<change>
#f
(if (= ((((w-vector1 'add) w-vector2) 'coordinate) 1) 5)
(if (= (polynome 'order) 2)
#f
(= ((polynome 'coefficient) 2) 2))
#f)))
#f)
#f)
#f)
#f)) | null | https://raw.githubusercontent.com/softwarelanguageslab/maf/11acedf56b9bf0c8e55ddb6aea754b6766d8bb40/test/changes/scheme/generated/R5RS_scp1_polynome-3.scm | scheme | Changes:
* calls to id fun: 0 | * removed : 0
* added : 0
* swaps : 0
* negated predicates : 1
* swapped branches : 2
(letrec ((make-point (lambda (x y)
(letrec ((dispatch (lambda (msg)
(if (eq? msg 'x-value)
x
(if (eq? msg 'y-value) y (error "wrong message"))))))
dispatch)))
(make-segment (lambda (start end)
(letrec ((midpoint (lambda ()
(make-point (/ (+ (start 'x-value) (end 'x-value)) 2) (/ (+ (start 'y-value) (end 'y-value)) 2))))
(dispatch (lambda (msg)
(if (eq? msg 'start-point)
start
(if (eq? msg 'end-point)
end
(if (eq? msg 'midpoint)
(midpoint)
(error "wrong message")))))))
dispatch)))
(make-w-vector (lambda args
(letrec ((dimension (lambda ()
(length args)))
(coordinate (lambda (n)
(if (let ((__or_res (< n 1))) (if __or_res __or_res (> n (dimension))))
(error "coordinate is out of range")
(list-ref args (- n 1)))))
(add (lambda (w-vector)
(letrec ((loop (lambda (ctr res)
(if (= ctr 0)
(apply make-w-vector res)
(loop (- ctr 1) (cons (+ (coordinate ctr) ((w-vector 'coordinate) ctr)) res))))))
(loop (dimension) ()))))
(dispatch (lambda (msg)
(if (eq? msg 'dimension)
(dimension)
(if (eq? msg 'coordinate)
coordinate
(if (eq? msg 'add) add (error "wrong message")))))))
dispatch)))
(make-polynome (lambda coefficients
(let ((polynome (apply make-w-vector coefficients)))
(letrec ((coefficient (lambda (index)
((polynome 'coordinate) index)))
(order (lambda ()
(- (polynome 'dimension) 1)))
(dispatch (lambda (msg)
(if (eq? msg 'order)
(order)
(if (eq? msg 'coefficient)
coefficient
(error "wrong message"))))))
dispatch))))
(point1 (make-point 6 10))
(point2 (make-point 10 20))
(segment (make-segment point1 point2))
(midpoint (segment 'midpoint))
(w-vector1 (make-w-vector 1 2 3))
(w-vector2 (make-w-vector 4 5 6))
(polynome (make-polynome 1 2 3)))
(if (= (point1 'x-value) 6)
(if (= ((segment 'start-point) 'y-value) 10)
(if (= (midpoint 'x-value) 8)
(if (<change> (= ((w-vector1 'coordinate) 2) 2) (not (= ((w-vector1 'coordinate) 2) 2)))
(if (= ((w-vector2 'coordinate) 1) 4)
(<change>
(if (= ((((w-vector1 'add) w-vector2) 'coordinate) 1) 5)
(if (= (polynome 'order) 2)
(= ((polynome 'coefficient) 2) 2)
#f)
#f)
#f)
(<change>
#f
(if (= ((((w-vector1 'add) w-vector2) 'coordinate) 1) 5)
(if (= (polynome 'order) 2)
#f
(= ((polynome 'coefficient) 2) 2))
#f)))
#f)
#f)
#f)
#f)) |
d710543dc278677a97fcc8c2dfd5999f6bc2084988fdf38c89adee94473e9148 | finnishtransportagency/harja | yllapitokohteet.cljs | (ns harja.tiedot.urakka.yllapitokohteet
"Ylläpitokohteiden tiedot"
(:require
[harja.loki :refer [log tarkkaile!]]
[cljs.core.async :refer [<!]]
[harja.asiakas.kommunikaatio :as k]
[harja.tiedot.urakka :as u]
[harja.domain.yllapitokohde :as yllapitokohteet-domain]
[harja.ui.kartta.esitettavat-asiat :refer [kartalla-esitettavaan-muotoon]]
[harja.tiedot.navigaatio :as nav]
[harja.ui.viesti :as viesti]
[clojure.string :as str]
[harja.tyokalut.local-storage :as local-storage])
(:require-macros [reagent.ratom :refer [reaction]]
[cljs.core.async.macros :refer [go]]
[harja.atom :refer [reaction<!]]))
(def hint-pilko-osoitevali "Pilko paalu\u00ADväli kahdeksi eri kohteeksi")
(def hint-poista-rivi "Poista rivi")
(def hint-lisaa-osa "Lisää osa")
(defn yha-kohde? [kohde]
(some? (:yhaid kohde)))
(defn suodata-yllapitokohteet
"Suodatusoptiot on map, jolla voi valita halutut suodatusperusteet:
:tienumero int
:yha-kohde? boolean
:yllapitokohdetyotyyppi keyword (:paallystys / :paikkaus)
:kohdenumero int
Jos jotain arvoa ei anneta, sitä ei huomioida suodatuksessa"
[kohteet suodatusoptiot]
(let [yha-kohde-fn yha-kohde?
{:keys [tienumero yha-kohde? yllapitokohdetyotyyppi kohdenumero]} suodatusoptiot]
(filterv
#(and (or (nil? yha-kohde?) (if yha-kohde? (yha-kohde-fn %) (not (yha-kohde-fn %))))
(or (nil? tienumero) (= (:tr-numero %) tienumero))
(or (nil? yllapitokohdetyotyyppi) (= (:yllapitokohdetyotyyppi %) yllapitokohdetyotyyppi))
(or (str/blank? kohdenumero)
(and (:kohdenumero %)
(str/starts-with? (str/lower-case (:kohdenumero %))
(str/lower-case kohdenumero)))))
kohteet)))
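;; A usage sketch with hypothetical data (not from the original source):
;; (suodata-yllapitokohteet [{:tr-numero 20 :yhaid 123} {:tr-numero 4}]
;;                          {:tienumero 20 :yha-kohde? true})
;; ;=> [{:tr-numero 20 :yhaid 123}]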
(defn hae-yllapitokohteet [urakka-id sopimus-id vuosi]
(k/post! :urakan-yllapitokohteet {:urakka-id urakka-id
:sopimus-id sopimus-id
:vuosi vuosi}))
(defn tallenna-yllapitokohteet! [urakka-id sopimus-id vuosi kohteet]
(k/post! :tallenna-yllapitokohteet {:urakka-id urakka-id
:sopimus-id sopimus-id
:vuosi vuosi
:kohteet kohteet}))
(defn tallenna-yllapitokohdeosat! [{:keys [urakka-id sopimus-id vuosi yllapitokohde-id osat osatyyppi]}]
(k/post! :tallenna-yllapitokohdeosat {:urakka-id urakka-id
:sopimus-id sopimus-id
:vuosi vuosi
:yllapitokohde-id yllapitokohde-id
:osat osat
:osatyyppi osatyyppi}))
(defn hae-maaramuutokset [urakka-id yllapitokohde-id]
(k/post! :hae-maaramuutokset {:urakka-id urakka-id
:yllapitokohde-id yllapitokohde-id}))
(defn tallenna-maaramuutokset! [{:keys [urakka-id yllapitokohde-id maaramuutokset
sopimus-id vuosi]}]
(k/post! :tallenna-maaramuutokset {:urakka-id urakka-id
:sopimus-id sopimus-id
:vuosi vuosi
:yllapitokohde-id yllapitokohde-id
:maaramuutokset maaramuutokset}))
(def alku (juxt :tr-alkuosa :tr-alkuetaisyys))
(def loppu (juxt :tr-loppuosa :tr-loppuetaisyys))
(defn lisaa-uusi-pot2-alustarivi
"Lisää uuden POT2-alustarivin annetussa indeksissä olevan kohteen perään (alapuolelle). Muuttaa kaikkien
jälkeen tulevien osien avaimia yhdellä suuremmaksi."
[kohdeosat key yllapitokohde]
(let [rivi (get kohdeosat key)
;; jos rivillä ei ole vielä yhtään kohdeosaa, täytetään ajorata ja kaista pääkohteelta
rivi (if rivi
rivi
{:tr-numero (:tr-numero yllapitokohde)
:tr-ajorata (:tr-ajorata yllapitokohde)
:tr-kaista (:tr-kaista yllapitokohde)})
avaimet-jalkeen (filter #(> % key) (keys kohdeosat))
uusi-rivi {:tr-numero (:tr-numero rivi)
:tr-alkuosa nil
:tr-alkuetaisyys nil
:tr-loppuosa (:tr-loppuosa rivi)
:tr-loppuetaisyys (:tr-loppuetaisyys rivi)
:tr-ajorata (:tr-ajorata rivi)
:tr-kaista (:tr-kaista rivi)
:toimenpide nil}]
(if (empty? kohdeosat)
{key uusi-rivi}
(-> kohdeosat
(assoc-in [key :tr-loppuosa] nil)
(assoc-in [key :tr-loppuetaisyys] nil)
(assoc (inc key) uusi-rivi)
(merge (zipmap (map inc avaimet-jalkeen)
(map #(get kohdeosat %) avaimet-jalkeen)))))))
(defn lisaa-paallystekohdeosa
"Lisää uuden kohteen annetussa indeksissä olevan kohteen perään (alapuolelle)."
[kohdeosat key yllapitokohde]
(-> kohdeosat
(assoc (inc key) {:tr-numero (:tr-numero yllapitokohde)})))
(defn pilko-paallystekohdeosa
"Lisää uuden kohteen annetussa indeksissä olevan kohteen perään (alapuolelle). Muuttaa kaikkien
jälkeen tulevien osien avaimia yhdellä suuremmaksi."
[kohdeosat key yllapitokohde]
(let [rivi (get kohdeosat key)
;; jos rivillä ei ole vielä yhtään kohdeosaa, täytetään ajorata ja kaista pääkohteelta
rivi (if rivi
rivi
{:tr-numero (:tr-numero yllapitokohde)
:tr-ajorata (:tr-ajorata yllapitokohde)
:tr-kaista (:tr-kaista yllapitokohde)})
avaimet-jalkeen (filter #(> % key) (keys kohdeosat))
uusi-rivi {:nimi ""
:tr-numero (:tr-numero rivi)
:tr-alkuosa nil
:tr-alkuetaisyys nil
:tr-loppuosa (:tr-loppuosa rivi)
:tr-loppuetaisyys (:tr-loppuetaisyys rivi)
:tr-ajorata (:tr-ajorata rivi)
:tr-kaista (:tr-kaista rivi)
:toimenpide ""}]
(if (empty? kohdeosat)
{key uusi-rivi}
(-> kohdeosat
(assoc-in [key :tr-loppuosa] nil)
(assoc-in [key :tr-loppuetaisyys] nil)
(assoc (inc key) uusi-rivi)
(merge (zipmap (map inc avaimet-jalkeen)
(map #(get kohdeosat %) avaimet-jalkeen)))))))
(defn poista-kohdeosa
"Poistaa valitun kohdeosan annetulla avaimella. Huolehtii siitä, että osat pysyvät järjestyksessä
eikä väliin jää puuttumaan avaimia."
[kohdeosat key]
(let [kohdeosat (into (sorted-map)
(dissoc kohdeosat key))
kohdeosat-uusilla-avaimilla (map-indexed (fn [index [vanha-avain rivi]]
[(inc index) rivi])
kohdeosat)
tulos (reduce (fn [tulos [avain arvo]]
(assoc tulos avain arvo))
{}
kohdeosat-uusilla-avaimilla)]
tulos))
(defn kasittele-tallennettavat-kohteet!
([kohteet kohdetyyppi onnistui-fn epaonnistui-fn] (kasittele-tallennettavat-kohteet! kohteet kohdetyyppi onnistui-fn epaonnistui-fn true true))
([kohteet kohdetyyppi onnistui-fn epaonnistui-fn nayta-onnistui? nayta-epaonnistui?]
(go (let [urakka-id (:id @nav/valittu-urakka)
vuosi @u/valittu-urakan-vuosi
[sopimus-id _] @u/valittu-sopimusnumero
_ (log "[YLLÄPITOKOHTEET] Tallennetaan kohteet: " (pr-str kohteet))
vastaus (<! (tallenna-yllapitokohteet!
urakka-id sopimus-id vuosi
(mapv #(assoc % :yllapitokohdetyotyyppi kohdetyyppi)
kohteet)))]
(if (k/virhe? vastaus)
(viesti/nayta! "Kohteiden tallentaminen epännistui" :warning viesti/viestin-nayttoaika-keskipitka)
(do (log "[YLLÄPITOKOHTEET] Kohteet tallennettu: " (pr-str vastaus))
(if (= (:status vastaus) :ok)
(do
(when nayta-onnistui?
(viesti/nayta! "Tallennus onnistui. Tarkista myös muokkaamiesi tieosoitteiden alikohteet."
:success viesti/viestin-nayttoaika-keskipitka))
(onnistui-fn (:yllapitokohteet vastaus)))
(do
(when nayta-epaonnistui?
(viesti/nayta! "Tallennus epäonnistui!"
:danger viesti/viestin-nayttoaika-keskipitka))
(epaonnistui-fn vastaus)))))))))
(defn yllapitokohteet-kartalle
"Ylläpitokohde näytetään kartalla 'kohdeosina'.
Ottaa vectorin ylläpitokohteita ja palauttaa ylläpitokohteiden kohdeosat valmiina näytettäväksi kartalle.
Palautuneilla kohdeosilla on pääkohteen tiedot :yllapitokohde avaimen takana.
yllapitokohteet Vector ylläpitokohteita, joilla on mukana ylläpitokohteen kohdeosat (:kohdeosat avaimessa)
lomakedata Päällystys- tai paikkausilmoituksen lomakkeen tiedot"
([yllapitokohteet] (yllapitokohteet-kartalle yllapitokohteet nil))
([yllapitokohteet lomakedata]
(let [id #(or (:paallystyskohde-id %)
(:paikkauskohde-id %)
(:yllapitokohde-id %))
karttamuodossa (kartalla-esitettavaan-muotoon
yllapitokohteet
#(= (id lomakedata) (id %))
yllapitokohteet-domain/yllapitokohde-kartalle-xf)]
karttamuodossa)))
| null | https://raw.githubusercontent.com/finnishtransportagency/harja/4c440de1d426efe3e39d069d33119ea25e52562d/src/cljs/harja/tiedot/urakka/yllapitokohteet.cljs | clojure | (ns harja.tiedot.urakka.yllapitokohteet
"Ylläpitokohteiden tiedot"
(:require
[harja.loki :refer [log tarkkaile!]]
[cljs.core.async :refer [<!]]
[harja.asiakas.kommunikaatio :as k]
[harja.tiedot.urakka :as u]
[harja.domain.yllapitokohde :as yllapitokohteet-domain]
[harja.ui.kartta.esitettavat-asiat :refer [kartalla-esitettavaan-muotoon]]
[harja.tiedot.navigaatio :as nav]
[harja.ui.viesti :as viesti]
[clojure.string :as str]
[harja.tyokalut.local-storage :as local-storage])
(:require-macros [reagent.ratom :refer [reaction]]
[cljs.core.async.macros :refer [go]]
[harja.atom :refer [reaction<!]]))
(def hint-pilko-osoitevali "Pilko paalu\u00ADväli kahdeksi eri kohteeksi")
(def hint-poista-rivi "Poista rivi")
(def hint-lisaa-osa "Lisää osa")
(defn yha-kohde? [kohde]
(some? (:yhaid kohde)))
(defn suodata-yllapitokohteet
"Suodatusoptiot on map, jolla voi valita halutut suodatusperusteet:
:tienumero int
:yha-kohde? boolean
:yllapitokohdetyotyyppi keyword (:paallystys / :paikkaus)
:kohdenumero int
Jos jotain arvoa ei anneta, sitä ei huomioida suodatuksessa"
[kohteet suodatusoptiot]
(let [yha-kohde-fn yha-kohde?
{:keys [tienumero yha-kohde? yllapitokohdetyotyyppi kohdenumero]} suodatusoptiot]
(filterv
#(and (or (nil? yha-kohde?) (if yha-kohde? (yha-kohde-fn %) (not (yha-kohde-fn %))))
(or (nil? tienumero) (= (:tr-numero %) tienumero))
(or (nil? yllapitokohdetyotyyppi) (= (:yllapitokohdetyotyyppi %) yllapitokohdetyotyyppi))
(or (str/blank? kohdenumero)
(and (:kohdenumero %)
(str/starts-with? (str/lower-case (:kohdenumero %))
(str/lower-case kohdenumero)))))
kohteet)))
(defn hae-yllapitokohteet [urakka-id sopimus-id vuosi]
(k/post! :urakan-yllapitokohteet {:urakka-id urakka-id
:sopimus-id sopimus-id
:vuosi vuosi}))
(defn tallenna-yllapitokohteet! [urakka-id sopimus-id vuosi kohteet]
(k/post! :tallenna-yllapitokohteet {:urakka-id urakka-id
:sopimus-id sopimus-id
:vuosi vuosi
:kohteet kohteet}))
(defn tallenna-yllapitokohdeosat! [{:keys [urakka-id sopimus-id vuosi yllapitokohde-id osat osatyyppi]}]
(k/post! :tallenna-yllapitokohdeosat {:urakka-id urakka-id
:sopimus-id sopimus-id
:vuosi vuosi
:yllapitokohde-id yllapitokohde-id
:osat osat
:osatyyppi osatyyppi}))
(defn hae-maaramuutokset [urakka-id yllapitokohde-id]
(k/post! :hae-maaramuutokset {:urakka-id urakka-id
:yllapitokohde-id yllapitokohde-id}))
(defn tallenna-maaramuutokset! [{:keys [urakka-id yllapitokohde-id maaramuutokset
sopimus-id vuosi]}]
(k/post! :tallenna-maaramuutokset {:urakka-id urakka-id
:sopimus-id sopimus-id
:vuosi vuosi
:yllapitokohde-id yllapitokohde-id
:maaramuutokset maaramuutokset}))
(def alku (juxt :tr-alkuosa :tr-alkuetaisyys))
(def loppu (juxt :tr-loppuosa :tr-loppuetaisyys))
(defn lisaa-uusi-pot2-alustarivi
"Lisää uuden POT2-alustarivin annetussa indeksissä olevan kohteen perään (alapuolelle). Muuttaa kaikkien
jälkeen tulevien osien avaimia yhdellä suuremmaksi."
[kohdeosat key yllapitokohde]
(let [rivi (get kohdeosat key)
;; jos rivillä ei ole vielä yhtään kohdeosaa, täytetään ajorata ja kaista pääkohteelta
rivi (if rivi
rivi
{:tr-numero (:tr-numero yllapitokohde)
:tr-ajorata (:tr-ajorata yllapitokohde)
:tr-kaista (:tr-kaista yllapitokohde)})
avaimet-jalkeen (filter #(> % key) (keys kohdeosat))
uusi-rivi {:tr-numero (:tr-numero rivi)
:tr-alkuosa nil
:tr-alkuetaisyys nil
:tr-loppuosa (:tr-loppuosa rivi)
:tr-loppuetaisyys (:tr-loppuetaisyys rivi)
:tr-ajorata (:tr-ajorata rivi)
:tr-kaista (:tr-kaista rivi)
:toimenpide nil}]
(if (empty? kohdeosat)
{key uusi-rivi}
(-> kohdeosat
(assoc-in [key :tr-loppuosa] nil)
(assoc-in [key :tr-loppuetaisyys] nil)
(assoc (inc key) uusi-rivi)
(merge (zipmap (map inc avaimet-jalkeen)
(map #(get kohdeosat %) avaimet-jalkeen)))))))
(defn lisaa-paallystekohdeosa
"Lisää uuden kohteen annetussa indeksissä olevan kohteen perään (alapuolelle)."
[kohdeosat key yllapitokohde]
(-> kohdeosat
(assoc (inc key) {:tr-numero (:tr-numero yllapitokohde)})))
(defn pilko-paallystekohdeosa
"Lisää uuden kohteen annetussa indeksissä olevan kohteen perään (alapuolelle). Muuttaa kaikkien
jälkeen tulevien osien avaimia yhdellä suuremmaksi."
[kohdeosat key yllapitokohde]
(let [rivi (get kohdeosat key)
;; jos rivillä ei ole vielä yhtään kohdeosaa, täytetään ajorata ja kaista pääkohteelta
rivi (if rivi
rivi
{:tr-numero (:tr-numero yllapitokohde)
:tr-ajorata (:tr-ajorata yllapitokohde)
:tr-kaista (:tr-kaista yllapitokohde)})
avaimet-jalkeen (filter #(> % key) (keys kohdeosat))
uusi-rivi {:nimi ""
:tr-numero (:tr-numero rivi)
:tr-alkuosa nil
:tr-alkuetaisyys nil
:tr-loppuosa (:tr-loppuosa rivi)
:tr-loppuetaisyys (:tr-loppuetaisyys rivi)
:tr-ajorata (:tr-ajorata rivi)
:tr-kaista (:tr-kaista rivi)
:toimenpide ""}]
(if (empty? kohdeosat)
{key uusi-rivi}
(-> kohdeosat
(assoc-in [key :tr-loppuosa] nil)
(assoc-in [key :tr-loppuetaisyys] nil)
(assoc (inc key) uusi-rivi)
(merge (zipmap (map inc avaimet-jalkeen)
(map #(get kohdeosat %) avaimet-jalkeen)))))))
(defn poista-kohdeosa
"Poistaa valitun kohdeosan annetulla avaimella. Huolehtii siitä, että osat pysyvät järjestyksessä
eikä väliin jää puuttumaan avaimia."
[kohdeosat key]
(let [kohdeosat (into (sorted-map)
(dissoc kohdeosat key))
kohdeosat-uusilla-avaimilla (map-indexed (fn [index [vanha-avain rivi]]
[(inc index) rivi])
kohdeosat)
tulos (reduce (fn [tulos [avain arvo]]
(assoc tulos avain arvo))
{}
kohdeosat-uusilla-avaimilla)]
tulos))
(defn kasittele-tallennettavat-kohteet!
([kohteet kohdetyyppi onnistui-fn epaonnistui-fn] (kasittele-tallennettavat-kohteet! kohteet kohdetyyppi onnistui-fn epaonnistui-fn true true))
([kohteet kohdetyyppi onnistui-fn epaonnistui-fn nayta-onnistui? nayta-epaonnistui?]
(go (let [urakka-id (:id @nav/valittu-urakka)
vuosi @u/valittu-urakan-vuosi
[sopimus-id _] @u/valittu-sopimusnumero
_ (log "[YLLÄPITOKOHTEET] Tallennetaan kohteet: " (pr-str kohteet))
vastaus (<! (tallenna-yllapitokohteet!
urakka-id sopimus-id vuosi
(mapv #(assoc % :yllapitokohdetyotyyppi kohdetyyppi)
kohteet)))]
(if (k/virhe? vastaus)
(viesti/nayta! "Kohteiden tallentaminen epännistui" :warning viesti/viestin-nayttoaika-keskipitka)
(do (log "[YLLÄPITOKOHTEET] Kohteet tallennettu: " (pr-str vastaus))
(if (= (:status vastaus) :ok)
(do
(when nayta-onnistui?
(viesti/nayta! "Tallennus onnistui. Tarkista myös muokkaamiesi tieosoitteiden alikohteet."
:success viesti/viestin-nayttoaika-keskipitka))
(onnistui-fn (:yllapitokohteet vastaus)))
(do
(when nayta-epaonnistui?
(viesti/nayta! "Tallennus epäonnistui!"
:danger viesti/viestin-nayttoaika-keskipitka))
(epaonnistui-fn vastaus)))))))))
(defn yllapitokohteet-kartalle
"Ylläpitokohde näytetään kartalla 'kohdeosina'.
Ottaa vectorin ylläpitokohteita ja palauttaa ylläpitokohteiden kohdeosat valmiina näytettäväksi kartalle.
Palautuneilla kohdeosilla on pääkohteen tiedot :yllapitokohde avaimen takana.
yllapitokohteet Vector ylläpitokohteita, joilla on mukana ylläpitokohteen kohdeosat (:kohdeosat avaimessa)
lomakedata Päällystys- tai paikkausilmoituksen lomakkeen tiedot"
([yllapitokohteet] (yllapitokohteet-kartalle yllapitokohteet nil))
([yllapitokohteet lomakedata]
(let [id #(or (:paallystyskohde-id %)
(:paikkauskohde-id %)
(:yllapitokohde-id %))
karttamuodossa (kartalla-esitettavaan-muotoon
yllapitokohteet
#(= (id lomakedata) (id %))
yllapitokohteet-domain/yllapitokohde-kartalle-xf)]
karttamuodossa)))
|
|
f3c17d61ae720d542a7dbc3a1ce169cf315f71d63a5826a5df7b9a7c85123fb5 | c-cube/ocaml-containers | CCBV.mli | * Imperative Bitvectors .
A bitvector is stored in some form of internal array ( on the heap ) .
Is it a bit similar to a more storage - efficient version of [ bool
CCVector.vector ] , with additional operations .
{ b BREAKING CHANGES } since 1.2 :
size is now stored along with the bitvector . Some functions have
a new signature .
The size of the bitvector used to be rounded up to the multiple of 30 or 62 .
In other words some functions such as { ! } would iterate on more
bits than what was originally asked for . This is not the case anymore .
A bitvector is stored in some form of internal array (on the heap).
Is it a bit similar to a more storage-efficient version of [bool
CCVector.vector], with additional operations.
{b BREAKING CHANGES} since 1.2:
size is now stored along with the bitvector. Some functions have
a new signature.
The size of the bitvector used to be rounded up to the multiple of 30 or 62.
In other words some functions such as {!val-iter} would iterate on more
bits than what was originally asked for. This is not the case anymore.
*)
type t
(** A resizable bitvector *)
val empty : unit -> t
(** Empty bitvector. Length is 0. *)
val create : size:int -> bool -> t
(** Create a bitvector of given size, with given default value.
Length of result is [size]. *)
val init : int -> (int -> bool) -> t
* [ init len f ] initializes a bitvector of length [ len ] , where bit [ i ]
is true iff [ f i ] is .
@since 3.9
is true iff [f i] is.
@since 3.9 *)
val copy : t -> t
(** Copy of bitvector. *)
val cardinal : t -> int
(** Number of bits set to one, seen as a set of bits. *)
val length : t -> int
* Size of underlying bitvector .
This is not related to the underlying implementation .
Changed at 1.2
This is not related to the underlying implementation.
Changed at 1.2
*)
val capacity : t -> int
* The number of bits this bitvector can store without resizing .
@since 1.2
@since 1.2 *)
val resize : t -> int -> unit
* Resize the BV so that it has the specified length . This can grow
the underlying array , but it will not shrink it , to minimize
memory traffic .
@raise Invalid_argument on negative sizes .
the underlying array, but it will not shrink it, to minimize
memory traffic.
@raise Invalid_argument on negative sizes. *)
val resize_minimize_memory : t -> int -> unit
* Same as { ! resize } , but this can also shrink the underlying
array if this reduces the size .
@raise Invalid_argument on negative sizes .
@since 3.9
array if this reduces the size.
@raise Invalid_argument on negative sizes.
@since 3.9 *)
val is_empty : t -> bool
(** Are there any true bits? *)
val set : t -> int -> unit
(** Set i-th bit, extending the bitvector if needed. *)
val get : t -> int -> bool
(** Is the i-th bit true? Return false if the index is too high. *)
val reset : t -> int -> unit
(** Set i-th bit to 0, extending the bitvector if needed. *)
val set_bool : t -> int -> bool -> unit
* Set or reset [ i]-th bit .
@since 3.9
@since 3.9 *)
val flip : t -> int -> unit
(** Flip i-th bit, extending the bitvector if needed. *)
val clear : t -> unit
(** Set every bit to 0. Does not change the length. *)
val clear_and_shrink : t -> unit
* Set every bit to 0 , and set length to 0 .
@since 3.9
@since 3.9 *)
val iter : t -> (int -> bool -> unit) -> unit
(** Iterate on all bits. *)
val iter_true : t -> (int -> unit) -> unit
* Iterate on bits set to 1 .
val to_list : t -> int list
(** List of indexes that are true. *)
val to_sorted_list : t -> int list
(** Same as {!to_list}, but also guarantees the list is sorted in
increasing order. *)
val of_list : int list -> t
* From a list of true bits .
The bits are interpreted as indices into the returned bitvector , so the final
bitvector [ bv ] will have [ length bv ] equal to 1 more than max of list indices .
The bits are interpreted as indices into the returned bitvector, so the final
bitvector [bv] will have [length bv] equal to 1 more than max of list indices.
*)
val first : t -> int option
* First set bit , or return [ None ] .
Changed type at 1.2
Changed type at 1.2 *)
val first_exn : t -> int
* First set bit , or
@raise Not_found if all bits are 0 .
@since 1.2
@raise Not_found if all bits are 0.
@since 1.2 *)
val filter : t -> (int -> bool) -> unit
(** [filter bv p] only keeps the true bits of [bv] whose [index]
satisfies [p index].
Length is unchanged. *)
val negate_self : t -> unit
* [ negate_self t ] flips all of the bits in [ t ] . Length is unchanged .
@since 1.2
@since 1.2 *)
val negate : t -> t
(** [negate t] returns a copy of [t] with all of the bits flipped.
Length is unchanged. *)
val union_into : into:t -> t -> unit
(** [union_into ~into bv] sets [into] to the union of itself and [bv].
Also updates the length of [into] to be at least [length bv]. *)
val inter_into : into:t -> t -> unit
(** [inter_into ~into bv] sets [into] to the intersection of itself and [bv].
Also updates the length of [into] to be at most [length bv].
After executing:
- [length ~into' = min (length into) (length bv)].
- [for all i: get into' ==> get into i /\ get bv i]
*)
val union : t -> t -> t
* [ union bv1 bv2 ] returns the union of the two sets . The length
of the result is the max of the inputs ' lengths .
of the result is the max of the inputs' lengths. *)
val inter : t -> t -> t
* [ inter bv1 bv2 ] returns the intersection of the two sets . The length
of the result is the min of the inputs ' lengths .
of the result is the min of the inputs' lengths. *)
val diff_into : into:t -> t -> unit
* [ diff_into ~into t ] modifies [ into ] with only the bits set but not in [ t ] .
@since 1.2
@since 1.2 *)
val diff : t -> t -> t
* [ diff t1 t2 ] returns those bits found in [ t1 ] but not in [ t2 ] .
@since 1.2
@since 1.2 *)
val select : t -> 'a array -> 'a list
(** [select arr bv] selects the elements of [arr] whose index
corresponds to a true bit in [bv]. If [bv] is too short, elements of [arr]
with too high an index cannot be selected and are therefore not
selected. *)
val selecti : t -> 'a array -> ('a * int) list
(** Same as {!select}, but selected elements are paired with their indexes. *)
val equal : t -> t -> bool
* Bitwise comparison , including the size ( [ equal a b ] implies [ length a = length b ] ) .
@since 3.5
@since 3.5 *)
type 'a iter = ('a -> unit) -> unit
val to_iter : t -> int iter
(** Iterate over the true bits. *)
val of_iter : int iter -> t
(** Build from true bits. *)
val pp : Format.formatter -> t -> unit
* Print the bitvector as a string of bits .
@since 0.13
@since 0.13 *)
(**/**)
module Internal_ : sig
val __to_word_l : t -> char list
val __popcount8 : int -> int
val __lsb_mask : int -> int
val __check_invariant : t -> unit
end
(**/**)
| null | https://raw.githubusercontent.com/c-cube/ocaml-containers/a30e471a6fb6e59fcdbcfef406e577a0b46d69c1/src/data/CCBV.mli | ocaml | * A resizable bitvector
* Empty bitvector. Length is 0.
* Create a bitvector of given size, with given default value.
Length of result is [size].
* Copy of bitvector.
* Number of bits set to one, seen as a set of bits.
* Are there any true bits?
* Set i-th bit, extending the bitvector if needed.
* Is the i-th bit true? Return false if the index is too high.
* Set i-th bit to 0, extending the bitvector if needed.
* Flip i-th bit, extending the bitvector if needed.
* Set every bit to 0. Does not change the length.
* Iterate on all bits.
* List of indexes that are true.
* Same as {!to_list}, but also guarantees the list is sorted in
increasing order.
* [filter bv p] only keeps the true bits of [bv] whose [index]
satisfies [p index].
Length is unchanged.
* [negate t] returns a copy of [t] with all of the bits flipped.
Length is unchanged.
* [union_into ~into bv] sets [into] to the union of itself and [bv].
Also updates the length of [into] to be at least [length bv].
* [inter_into ~into bv] sets [into] to the intersection of itself and [bv].
Also updates the length of [into] to be at most [length bv].
After executing:
- [length ~into' = min (length into) (length bv)].
- [for all i: get into' ==> get into i /\ get bv i]
* [select arr bv] selects the elements of [arr] whose index
corresponds to a true bit in [bv]. If [bv] is too short, elements of [arr]
with too high an index cannot be selected and are therefore not
selected.
* Same as {!select}, but selected elements are paired with their indexes.
* Iterate over the true bits.
* Build from true bits.
*/*
*/* | * Imperative Bitvectors .
A bitvector is stored in some form of internal array ( on the heap ) .
Is it a bit similar to a more storage - efficient version of [ bool
CCVector.vector ] , with additional operations .
{ b BREAKING CHANGES } since 1.2 :
size is now stored along with the bitvector . Some functions have
a new signature .
The size of the bitvector used to be rounded up to the multiple of 30 or 62 .
In other words some functions such as { ! } would iterate on more
bits than what was originally asked for . This is not the case anymore .
A bitvector is stored in some form of internal array (on the heap).
Is it a bit similar to a more storage-efficient version of [bool
CCVector.vector], with additional operations.
{b BREAKING CHANGES} since 1.2:
size is now stored along with the bitvector. Some functions have
a new signature.
The size of the bitvector used to be rounded up to the multiple of 30 or 62.
In other words some functions such as {!val-iter} would iterate on more
bits than what was originally asked for. This is not the case anymore.
*)
type t
val empty : unit -> t
val create : size:int -> bool -> t
val init : int -> (int -> bool) -> t
* [ init len f ] initializes a bitvector of length [ len ] , where bit [ i ]
is true iff [ f i ] is .
@since 3.9
is true iff [f i] is.
@since 3.9 *)
val copy : t -> t
val cardinal : t -> int
val length : t -> int
* Size of underlying bitvector .
This is not related to the underlying implementation .
Changed at 1.2
This is not related to the underlying implementation.
Changed at 1.2
*)
val capacity : t -> int
* The number of bits this bitvector can store without resizing .
@since 1.2
@since 1.2 *)
val resize : t -> int -> unit
* Resize the BV so that it has the specified length . This can grow
the underlying array , but it will not shrink it , to minimize
memory traffic .
@raise Invalid_argument on negative sizes .
the underlying array, but it will not shrink it, to minimize
memory traffic.
@raise Invalid_argument on negative sizes. *)
val resize_minimize_memory : t -> int -> unit
* Same as { ! resize } , but this can also shrink the underlying
array if this reduces the size .
@raise Invalid_argument on negative sizes .
@since 3.9
array if this reduces the size.
@raise Invalid_argument on negative sizes.
@since 3.9 *)
val is_empty : t -> bool
val set : t -> int -> unit
val get : t -> int -> bool
val reset : t -> int -> unit
val set_bool : t -> int -> bool -> unit
* Set or reset [ i]-th bit .
@since 3.9
@since 3.9 *)
val flip : t -> int -> unit
val clear : t -> unit
val clear_and_shrink : t -> unit
* Set every bit to 0 , and set length to 0 .
@since 3.9
@since 3.9 *)
val iter : t -> (int -> bool -> unit) -> unit
val iter_true : t -> (int -> unit) -> unit
* Iterate on bits set to 1 .
val to_list : t -> int list
val to_sorted_list : t -> int list
val of_list : int list -> t
* From a list of true bits .
The bits are interpreted as indices into the returned bitvector , so the final
bitvector [ bv ] will have [ length bv ] equal to 1 more than max of list indices .
The bits are interpreted as indices into the returned bitvector, so the final
bitvector [bv] will have [length bv] equal to 1 more than max of list indices.
*)
val first : t -> int option
* First set bit , or return [ None ] .
Changed type at 1.2
Changed type at 1.2 *)
val first_exn : t -> int
* First set bit , or
@raise Not_found if all bits are 0 .
@since 1.2
@raise Not_found if all bits are 0.
@since 1.2 *)
val filter : t -> (int -> bool) -> unit
val negate_self : t -> unit
* [ negate_self t ] flips all of the bits in [ t ] . Length is unchanged .
@since 1.2
@since 1.2 *)
val negate : t -> t
val union_into : into:t -> t -> unit
val inter_into : into:t -> t -> unit
val union : t -> t -> t
* [ union bv1 bv2 ] returns the union of the two sets . The length
of the result is the max of the inputs ' lengths .
of the result is the max of the inputs' lengths. *)
val inter : t -> t -> t
* [ inter bv1 bv2 ] returns the intersection of the two sets . The length
of the result is the min of the inputs ' lengths .
of the result is the min of the inputs' lengths. *)
val diff_into : into:t -> t -> unit
* [ diff_into ~into t ] modifies [ into ] with only the bits set but not in [ t ] .
@since 1.2
@since 1.2 *)
val diff : t -> t -> t
* [ diff t1 t2 ] returns those bits found in [ t1 ] but not in [ t2 ] .
@since 1.2
@since 1.2 *)
val select : t -> 'a array -> 'a list
val selecti : t -> 'a array -> ('a * int) list
val equal : t -> t -> bool
* Bitwise comparison , including the size ( [ equal a b ] implies [ length a = length b ] ) .
@since 3.5
@since 3.5 *)
type 'a iter = ('a -> unit) -> unit
val to_iter : t -> int iter
val of_iter : int iter -> t
val pp : Format.formatter -> t -> unit
* Print the bitvector as a string of bits .
@since 0.13
@since 0.13 *)
module Internal_ : sig
val __to_word_l : t -> char list
val __popcount8 : int -> int
val __lsb_mask : int -> int
val __check_invariant : t -> unit
end
|
44aa0b27e938716649bfbb107da40d8d42b05257611f2ee3b29037d9e14820b2 | Lambda-Logan/faker | Utils.hs | # LANGUAGE CPP #
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE RecordWildCards #-}
{-|
Module : Faker . Utils
Description : Module with helper functions for all other ' Faker ' modules
Copyright : ( c ) , 2014 - 2018
License : MIT
Maintainer :
Stability : experimental
Portability : POSIX
Fake data
Module : Faker.Utils
Description : Module with helper functions for all other 'Faker' modules
Copyright : (c) Alexey Gaziev, 2014-2018
License : MIT
Maintainer :
Stability : experimental
Portability : POSIX
Fake data
-}
module Faker.Utils
(
-- * Data types
Faker(..)
, Locale(..)
, FakerConfig(..)
-- * Helper functions for other 'Faker' modules
, runFaker
, runFakerWith
, runFakerWithSeed
, randomValue
, randomInt
, replaceSymbols
, evalRegex
) where
import Control.Monad.State
import Data.List (intercalate)
import Data.List.Split (splitOn)
import Gimlh
import System.Random (StdGen, mkStdGen, newStdGen, randomR)
#if !MIN_VERSION_base(4,8,0)
import Control.Applicative
#endif
import Paths_faker
-- | Value represent locales
data Locale = US      -- ^ Default locale, US
            | Russian -- ^ Russian locale
deriving (Show)
-- | Config for faker functions
newtype FakerConfig = FakerConfig
{ fakerLocale :: Locale -- ^ Contains locale for 'Faker' functions
} deriving (Show)
-- | Fake data storage, contains default and requested locales data and
-- stdGen
data FakerData = FakerData
{ defaultLocaleData :: SimpleGiml -- ^ Fake data for default locale (for fallbacks)
  , localeData :: SimpleGiml -- ^ Fake data for locale stored in 'FakerConfig' provided (same as 'defaultLocaleData' if none)
, stdGen :: StdGen -- ^ Generator for fetching random values from data
}
-- | Stateful type for faker values
newtype Faker a = Faker (State FakerData a)
deriving (Functor, Monad, Applicative, MonadState FakerData)
localeFileName :: Locale -> String
localeFileName Russian = "ru"
localeFileName _ = "en"
loadGimlData :: FilePath -> IO SimpleGiml
loadGimlData fname = do
filePath <- getDataFileName fname
contents <- parseFile filePath
return $ simplifyGiml contents
-- | Function to run 'Faker' functions with a specific seed and the default 'US'
-- locale.
runFakerWithSeed :: Int -> Faker a -> IO a
runFakerWithSeed seed (Faker action) = do
defaultLocaleData <- getDefaultLocaleData
let localeData = defaultLocaleData
stdGen = mkStdGen seed
return $ evalState action FakerData{..}
-- | Function to run 'Faker' functions with a 'FakerConfig' (currently with
-- specified locale in it)
runFakerWith :: FakerConfig -> Faker a -> IO a
runFakerWith config (Faker action) = do
defaultLocaleData <- getDefaultLocaleData
localeData <- getLocaleData $ fakerLocale config
stdGen <- newStdGen
return $ evalState action FakerData{..}
-- | Function to run 'Faker' functions with the default 'US' locale
runFaker :: Faker a -> IO a
runFaker (Faker action) = do
defaultLocaleData <- getDefaultLocaleData
stdGen <- newStdGen
let localeData = defaultLocaleData
return $ evalState action FakerData{..}
getDefaultLocaleData :: IO SimpleGiml
getDefaultLocaleData = getLocaleData US
getLocaleData :: Locale -> IO SimpleGiml
getLocaleData locale =
loadGimlData $ "data/" ++ (localeFileName locale) ++ ".giml"
readFromGiml :: String -> Faker [String]
readFromGiml thing = do
d <- gets localeData
defaultData <- gets defaultLocaleData
case fetch d thing of
Just x -> return $ val2List x
Nothing -> case fetch defaultData thing of
Just x -> return $ val2List x
Nothing -> error "no element and sucky error handling"
-- | Internal function, used in other 'Faker' modules
-- to fetch specific value from data storage by namespace and
-- value type:
--
-- >>> runFaker $ randomValue "name" "first_name"
" "
randomValue :: String -> String -> Faker String
randomValue namespace valType = do
valList <- readFromGiml (namespace ++ "$" ++ valType)
ind <- randomInt (0, length valList - 1)
return $ valList !! ind
-- | Internal function, used in other 'Faker' modules
-- to get random number inside provided bounds:
--
-- >>> runFaker $ randomInt (1,4)
-- 3
randomInt :: (Int, Int) -> Faker Int
randomInt bounds = do
fakerData <- get
let (int, newGen) = randomR bounds (stdGen fakerData)
put (fakerData { stdGen = newGen })
return int
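-- A small usage sketch (assumption, not from the original docs): the StdGen is
-- threaded through FakerData, so two draws in one action use successive states:
--
-- >>> runFaker $ (,) <$> randomInt (1,6) <*> randomInt (1,6)
-- (4,2)   -- actual values depend on the generator seed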
-- | Internal function, used in other 'Faker' modules
-- to replace special chars '#' with random numbers
--
-- >>> runFaker $ replaceSymbols "##-##"
-- "12-48"
replaceSymbols :: String -> Faker String
replaceSymbols [] = return ""
replaceSymbols (x:xs) = do
restOfLine <- replaceSymbols xs
randInt <- randomInt (0,9)
return $ case x of
'#' -> show randInt ++ restOfLine
_ -> x : restOfLine
-- | Internal function, used in other 'Faker' modules
to eval special regex and turn them into ' Faker String '
--
-- >>> runFaker $ evalRegex "/5[1-5]-#{3,5}/"
-- "555-6384"
--
-- >>> runFaker $ evalRegex "/5[1-5]-#{3,5}/"
" 5555 - 177 "
evalRegex :: String -> Faker String
evalRegex regex = do
let preparedRegex =
if head regex == '/' && last regex == '/'
then init $ tail regex
else regex
replaceExpressions preparedRegex >>= replaceSymbols
replaceExpressions :: String -> Faker String
replaceExpressions [] = return ""
replaceExpressions [a] = return [a]
replaceExpressions (x:y:xs) = case y of
'{' -> replicateChars x (y:xs) >>= replaceExpressions
_ -> case x of
'[' -> randomizeChar (x:y:xs) >>= replaceExpressions
_ -> do
rest <- replaceExpressions (y:xs)
return $ x : rest
replicateChars :: Char -> String -> Faker String
replicateChars char rest = do
let splittedLine = splitOn "}" rest
range = read $ "(" ++ tail (head splittedLine) ++ ")" :: (Int, Int)
randInt <- randomInt range
return $ replicate randInt char ++ intercalate "}" (tail splittedLine)
randomizeChar :: String -> Faker String
randomizeChar rest = do
let splittedLine = splitOn "]" rest
rangeNumbers = intercalate "," (splitOn "-" (tail $ head splittedLine))
range = read $ "(" ++ rangeNumbers ++ ")" :: (Int, Int)
randInt <- randomInt range
return $ show randInt ++ intercalate "]" (tail splittedLine)
| null | https://raw.githubusercontent.com/Lambda-Logan/faker/8935346192e67631b97c1a52f6644ba5ed48a1a2/src/Faker/Utils.hs | haskell | * Data types
* Helper functions for other 'Faker' modules
| Value represent locales
| Config for faker functions
^ Contains locale for 'Faker' functions
| Fake data storage, contains default and requested locales data and
stdGen
^ Fake data for default locale (for fallbacks)
^ Generator for fetching random values from data
| Stateful type for faker values
locale.
specified locale in it)
| Internal function, used in other 'Faker' modules
to fetch specific value from data storage by namespace and
value type:
>>> runFaker $ randomValue "name" "first_name"
| Internal function, used in other 'Faker' modules
to get random number inside provided bounds:
>>> runFaker $ randomInt (1,4)
| Internal function, used in other 'Faker' modules
to replace special chars '#' with random numbers
"12-48"
| Internal function, used in other 'Faker' modules
>>> runFaker $ evalRegex "/5[1-5]-#{3,5}/"
"555-6384"
>>> runFaker $ evalRegex "/5[1-5]-#{3,5}/" | # LANGUAGE CPP #
# LANGUAGE GeneralizedNewtypeDeriving #
# LANGUAGE RecordWildCards #
|
Module : Faker . Utils
Description : Module with helper functions for all other ' Faker ' modules
Copyright : ( c ) , 2014 - 2018
License : MIT
Maintainer :
Stability : experimental
Portability : POSIX
Fake data
Module : Faker.Utils
Description : Module with helper functions for all other 'Faker' modules
Copyright : (c) Alexey Gaziev, 2014-2018
License : MIT
Maintainer :
Stability : experimental
Portability : POSIX
Fake data
-}
module Faker.Utils
(
Faker(..)
, Locale(..)
, FakerConfig(..)
, runFaker
, runFakerWith
, runFakerWithSeed
, randomValue
, randomInt
, replaceSymbols
, evalRegex
) where
import Control.Monad.State
import Data.List (intercalate)
import Data.List.Split (splitOn)
import Gimlh
import System.Random (StdGen, mkStdGen, newStdGen, randomR)
#if !MIN_VERSION_base(4,8,0)
import Control.Applicative
#endif
import Paths_faker
^ Default locale , US
^ Russian locale
deriving (Show)
newtype FakerConfig = FakerConfig
} deriving (Show)
data FakerData = FakerData
^ Fake data for locale stored in ' FakerConfig ' provided ( same as ' defaultLocaleData ' if none )
}
newtype Faker a = Faker (State FakerData a)
deriving (Functor, Monad, Applicative, MonadState FakerData)
localeFileName :: Locale -> String
localeFileName Russian = "ru"
localeFileName _ = "en"
loadGimlData :: FilePath -> IO SimpleGiml
loadGimlData fname = do
filePath <- getDataFileName fname
contents <- parseFile filePath
return $ simplifyGiml contents
| Function for run ' Faker ' functions with specific seed and default ' US '
runFakerWithSeed :: Int -> Faker a -> IO a
runFakerWithSeed seed (Faker action) = do
defaultLocaleData <- getDefaultLocaleData
let localeData = defaultLocaleData
stdGen = mkStdGen seed
return $ evalState action FakerData{..}
| Function for run ' Faker ' functions with ' FakerConfig ' ( currently with
runFakerWith :: FakerConfig -> Faker a -> IO a
runFakerWith config (Faker action) = do
defaultLocaleData <- getDefaultLocaleData
localeData <- getLocaleData $ fakerLocale config
stdGen <- newStdGen
return $ evalState action FakerData{..}
| Function for run ' Faker ' functions with default ' US ' locale
runFaker :: Faker a -> IO a
runFaker (Faker action) = do
defaultLocaleData <- getDefaultLocaleData
stdGen <- newStdGen
let localeData = defaultLocaleData
return $ evalState action FakerData{..}
getDefaultLocaleData :: IO SimpleGiml
getDefaultLocaleData = getLocaleData US
getLocaleData :: Locale -> IO SimpleGiml
getLocaleData locale =
loadGimlData $ "data/" ++ (localeFileName locale) ++ ".giml"
readFromGiml :: String -> Faker [String]
readFromGiml thing = do
d <- gets localeData
defaultData <- gets defaultLocaleData
case fetch d thing of
Just x -> return $ val2List x
Nothing -> case fetch defaultData thing of
Just x -> return $ val2List x
Nothing -> error "no element and sucky error handling"
" "
randomValue :: String -> String -> Faker String
randomValue namespace valType = do
valList <- readFromGiml (namespace ++ "$" ++ valType)
ind <- randomInt (0, length valList - 1)
return $ valList !! ind
3
randomInt :: (Int, Int) -> Faker Int
randomInt bounds = do
fakerData <- get
let (int, newGen) = randomR bounds (stdGen fakerData)
put (fakerData { stdGen = newGen })
return int
> > > runFaker $ replaceSymbols " # # - # # "
replaceSymbols :: String -> Faker String
replaceSymbols [] = return ""
replaceSymbols (x:xs) = do
restOfLine <- replaceSymbols xs
randInt <- randomInt (0,9)
return $ case x of
'#' -> show randInt ++ restOfLine
_ -> x : restOfLine
to eval special regex and turn them into ' Faker String '
" 5555 - 177 "
evalRegex :: String -> Faker String
evalRegex regex = do
let preparedRegex =
if head regex == '/' && last regex == '/'
then init $ tail regex
else regex
replaceExpressions preparedRegex >>= replaceSymbols
replaceExpressions :: String -> Faker String
replaceExpressions [] = return ""
replaceExpressions [a] = return [a]
replaceExpressions (x:y:xs) = case y of
'{' -> replicateChars x (y:xs) >>= replaceExpressions
_ -> case x of
'[' -> randomizeChar (x:y:xs) >>= replaceExpressions
_ -> do
rest <- replaceExpressions (y:xs)
return $ x : rest
replicateChars :: Char -> String -> Faker String
replicateChars char rest = do
let splittedLine = splitOn "}" rest
range = read $ "(" ++ tail (head splittedLine) ++ ")" :: (Int, Int)
randInt <- randomInt range
return $ replicate randInt char ++ intercalate "}" (tail splittedLine)
randomizeChar :: String -> Faker String
randomizeChar rest = do
let splittedLine = splitOn "]" rest
rangeNumbers = intercalate "," (splitOn "-" (tail $ head splittedLine))
range = read $ "(" ++ rangeNumbers ++ ")" :: (Int, Int)
randInt <- randomInt range
return $ show randInt ++ intercalate "]" (tail splittedLine)
|
d1976230a3c73d8ac4a433db12569c31aa5a3395672a52f72ef9d9b05ee5bd8c | rudymatela/tankode | html-palette.hs | -- |
-- Program   : html-palette.hs
-- Copyright : (c) 2017
-- License   : LGPL 2.1 (see the file LICENSE)
--
-- Generate an HTML with the colour palette for Tankode.
import Colour (Colour, showRGB)
import Colour (red,green,blue,cyan,magenta,yellow,black)
import Tankode.Palette
import Prelude hiding (head,break)
import Data.List (transpose)
break :: String -> String
break s = "\n" ++ s ++ "\n"
field0 :: String -> String -> String
field0 n = field n []
field :: String -> [String] -> String -> String
field n as "" = "<" ++ unwords (n:as) ++ " />"
field n as s = "<" ++ unwords (n:as) ++ ">"
++ s
++ "</" ++ n ++ ">"
html :: String -> String
html = field0 "html" . break
head :: String -> String
head = field0 "head"
title :: String -> String
title = field0 "title"
body :: String -> String -> String
body bgcolor = field "body" ["bgcolor=" ++ bgcolor] . break
table :: String -> String
table = field "table" ["width=100%","height=100%","cellspacing=24"] . break
tr :: String -> String
tr = field0 "tr"
td :: String -> String
td bgcolor = field "td" ["bgcolor=" ++ bgcolor, "style=\"border-radius:6px\""] ""
main = putStrLn $ html $ h ++ "\n" ++ b
where
h = head . title $ "Color table"
b = body (showRGB bgcolour) . table . init . unlines $ rs
rs :: [String]
rs = map (tr . concat) $ css
css :: [[String]]
css = map (map (td . showRGB)) colors
bgcolour :: Colour
bgcolour = Tankode.Palette.black
-- partial palette: all primary colors, some secondary and tertiary
colors :: [[Colour]]
colors = transpose
[ [grey1,red1,orange1,yellow1,green1,blue1,magenta1,white1,black1]
, [grey2,red2,orange2,yellow2,green2,blue2,magenta2,white2,black2]
, [grey3,red3,orange3,yellow3,green3,blue3,magenta3,white3,black3]
, [grey4,red4,orange4,yellow4,green4,blue4,magenta4,white4,black4]
, [grey5,red5,orange5,yellow5,green5,blue5,magenta5,white5,black5]
, [grey6,red6,orange6,yellow6,green6,blue6,magenta6,white6,black6]
, [grey7,red7,orange7,yellow7,green7,blue7,magenta7,white7,black7]
, [grey8,red8,orange8,yellow8,green8,blue8,magenta8,white8,black8]
, [grey9,red9,orange9,yellow9,green9,blue9,magenta9,white9,black9]
]
-- full palette : all primary , secondary and tertiary colors
colors : : [ [ Colour ] ]
colors = transpose
[ [ grey1,red1,orange1,yellow1,lime1,green1,aquamarine1,cyan1,azure1,blue1,violet1,magenta1,rose1 ]
, [ grey2,red2,orange2,yellow2,lime2,green2,aquamarine2,cyan2,azure2,blue2,violet2,magenta2,rose2 ]
, [ grey3,red3,orange3,yellow3,lime3,green3,aquamarine3,cyan3,azure3,blue3,violet3,magenta3,rose3 ]
, [ grey4,red4,orange4,yellow4,lime4,green4,aquamarine4,cyan4,azure4,blue4,violet4,magenta4,rose4 ]
, [ grey5,red5,orange5,yellow5,lime5,green5,aquamarine5,cyan5,azure5,blue5,violet5,magenta5,rose5 ]
, [ grey6,red6,orange6,yellow6,lime6,green6,aquamarine6,cyan6,azure6,blue6,violet6,magenta6,rose6 ]
, [ grey7,red7,orange7,yellow7,lime7,green7,aquamarine7,cyan7,azure7,blue7,violet7,magenta7,rose7 ]
, [ grey8,red8,orange8,yellow8,lime8,green8,aquamarine8,cyan8,azure8,blue8,violet8,magenta8,rose8 ]
, [ grey9,red9,orange9,yellow9,lime9,green9,aquamarine9,cyan9,azure9,blue9,violet9,magenta9,rose9 ]
]
-- full palette: all primary, secondary and tertiary colors
colors :: [[Colour]]
colors = transpose
[ [grey1,red1,orange1,yellow1,lime1,green1,aquamarine1,cyan1,azure1,blue1,violet1,magenta1,rose1]
, [grey2,red2,orange2,yellow2,lime2,green2,aquamarine2,cyan2,azure2,blue2,violet2,magenta2,rose2]
, [grey3,red3,orange3,yellow3,lime3,green3,aquamarine3,cyan3,azure3,blue3,violet3,magenta3,rose3]
, [grey4,red4,orange4,yellow4,lime4,green4,aquamarine4,cyan4,azure4,blue4,violet4,magenta4,rose4]
, [grey5,red5,orange5,yellow5,lime5,green5,aquamarine5,cyan5,azure5,blue5,violet5,magenta5,rose5]
, [grey6,red6,orange6,yellow6,lime6,green6,aquamarine6,cyan6,azure6,blue6,violet6,magenta6,rose6]
, [grey7,red7,orange7,yellow7,lime7,green7,aquamarine7,cyan7,azure7,blue7,violet7,magenta7,rose7]
, [grey8,red8,orange8,yellow8,lime8,green8,aquamarine8,cyan8,azure8,blue8,violet8,magenta8,rose8]
, [grey9,red9,orange9,yellow9,lime9,green9,aquamarine9,cyan9,azure9,blue9,violet9,magenta9,rose9]
]
-}
| null | https://raw.githubusercontent.com/rudymatela/tankode/299ec6f78a9a18a8fc902be911a556d0497c30e1/runner/src/html-palette.hs | haskell | |
full palette : all primary , secondary and tertiary colors
full palette: all primary, secondary and tertiary colors | Program : html-palette.hs
Copyright : ( c ) 2017
License : LGPL 2.1 ( see the file LICENSE )
Generate an HTML with the colour pallete for Tankode .
import Colour (Colour, showRGB)
import Colour (red,green,blue,cyan,magenta,yellow,black)
import Tankode.Palette
import Prelude hiding (head,break)
import Data.List (transpose)
break :: String -> String
break s = "\n" ++ s ++ "\n"
field0 :: String -> String -> String
field0 n = field n []
field :: String -> [String] -> String -> String
field n as "" = "<" ++ unwords (n:as) ++ " />"
field n as s = "<" ++ unwords (n:as) ++ ">"
++ s
++ "</" ++ n ++ ">"
html :: String -> String
html = field0 "html" . break
head :: String -> String
head = field0 "head"
title :: String -> String
title = field0 "title"
body :: String -> String -> String
body bgcolor = field "body" ["bgcolor=" ++ bgcolor] . break
table :: String -> String
table = field "table" ["width=100%","height=100%","cellspacing=24"] . break
tr :: String -> String
tr = field0 "tr"
td :: String -> String
td bgcolor = field "td" ["bgcolor=" ++ bgcolor, "style=\"border-radius:6px\""] ""
main = putStrLn $ html $ h ++ "\n" ++ b
where
h = head . title $ "Color table"
b = body (showRGB bgcolour) . table . init . unlines $ rs
rs :: [String]
rs = map (tr . concat) $ css
css :: [[String]]
css = map (map (td . showRGB)) colors
bgcolour :: Colour
bgcolour = Tankode.Palette.black
partial palette : all primary colors , some secondary and tertiary
colors :: [[Colour]]
colors = transpose
[ [grey1,red1,orange1,yellow1,green1,blue1,magenta1,white1,black1]
, [grey2,red2,orange2,yellow2,green2,blue2,magenta2,white2,black2]
, [grey3,red3,orange3,yellow3,green3,blue3,magenta3,white3,black3]
, [grey4,red4,orange4,yellow4,green4,blue4,magenta4,white4,black4]
, [grey5,red5,orange5,yellow5,green5,blue5,magenta5,white5,black5]
, [grey6,red6,orange6,yellow6,green6,blue6,magenta6,white6,black6]
, [grey7,red7,orange7,yellow7,green7,blue7,magenta7,white7,black7]
, [grey8,red8,orange8,yellow8,green8,blue8,magenta8,white8,black8]
, [grey9,red9,orange9,yellow9,green9,blue9,magenta9,white9,black9]
]
colors : : [ [ Colour ] ]
colors = transpose
[ [ grey1,red1,orange1,yellow1,lime1,green1,aquamarine1,cyan1,azure1,blue1,violet1,magenta1,rose1 ]
, [ grey2,red2,orange2,yellow2,lime2,green2,aquamarine2,cyan2,azure2,blue2,violet2,magenta2,rose2 ]
, [ grey3,red3,orange3,yellow3,lime3,green3,aquamarine3,cyan3,azure3,blue3,violet3,magenta3,rose3 ]
, [ grey4,red4,orange4,yellow4,lime4,green4,aquamarine4,cyan4,azure4,blue4,violet4,magenta4,rose4 ]
, [ grey5,red5,orange5,yellow5,lime5,green5,aquamarine5,cyan5,azure5,blue5,violet5,magenta5,rose5 ]
, [ grey6,red6,orange6,yellow6,lime6,green6,aquamarine6,cyan6,azure6,blue6,violet6,magenta6,rose6 ]
, [ grey7,red7,orange7,yellow7,lime7,green7,aquamarine7,cyan7,azure7,blue7,violet7,magenta7,rose7 ]
, [ grey8,red8,orange8,yellow8,lime8,green8,aquamarine8,cyan8,azure8,blue8,violet8,magenta8,rose8 ]
, [ grey9,red9,orange9,yellow9,lime9,green9,aquamarine9,cyan9,azure9,blue9,violet9,magenta9,rose9 ]
]
colors :: [[Colour]]
colors = transpose
[ [grey1,red1,orange1,yellow1,lime1,green1,aquamarine1,cyan1,azure1,blue1,violet1,magenta1,rose1]
, [grey2,red2,orange2,yellow2,lime2,green2,aquamarine2,cyan2,azure2,blue2,violet2,magenta2,rose2]
, [grey3,red3,orange3,yellow3,lime3,green3,aquamarine3,cyan3,azure3,blue3,violet3,magenta3,rose3]
, [grey4,red4,orange4,yellow4,lime4,green4,aquamarine4,cyan4,azure4,blue4,violet4,magenta4,rose4]
, [grey5,red5,orange5,yellow5,lime5,green5,aquamarine5,cyan5,azure5,blue5,violet5,magenta5,rose5]
, [grey6,red6,orange6,yellow6,lime6,green6,aquamarine6,cyan6,azure6,blue6,violet6,magenta6,rose6]
, [grey7,red7,orange7,yellow7,lime7,green7,aquamarine7,cyan7,azure7,blue7,violet7,magenta7,rose7]
, [grey8,red8,orange8,yellow8,lime8,green8,aquamarine8,cyan8,azure8,blue8,violet8,magenta8,rose8]
, [grey9,red9,orange9,yellow9,lime9,green9,aquamarine9,cyan9,azure9,blue9,violet9,magenta9,rose9]
]
-}
|
2232868d8e20f2447920844294aa5f26571cc0a4187eeae662b1322571a938b1 | kaznum/programming_in_ocaml_exercise | queue1.ml | module type QUEUE =
sig
type 'a t
val empty: 'a t
val add: 'a t -> 'a -> 'a t
val take: 'a t -> 'a * 'a t
val peek: 'a t -> 'a
exception Empty
end
;;
module Queue1 : QUEUE =
struct
type 'a t = 'a list
exception Empty
let empty = []
let peek = function [] -> raise Empty | x :: rest -> x
let add table x = table @ [x]
let take table =
match table with
[] -> raise Empty
| x :: rest -> (x, rest)
end
;;
let ( <<< ) table x = Queue1.add table x;;
let table = Queue1.empty <<< 1 <<< 5 <<< 4 <<< 3 <<< 2;;
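(* Sanity sketch (comment only): the queue above holds [1; 5; 4; 3; 2] in FIFO
   order, so peek returns 1 and the takes below yield 1, 5, 4, 3 and 2; the
   final take raises Empty. *)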
Queue1.peek table;;
let (q, rest) = Queue1.take table;;
let (q, rest) = Queue1.take rest;;
let (q, rest) = Queue1.take rest;;
let (q, rest) = Queue1.take rest;;
let (q, rest) = Queue1.take rest;;
let (q, rest) = Queue1.take rest;;
| null | https://raw.githubusercontent.com/kaznum/programming_in_ocaml_exercise/6f6a5d62a7a87a1c93561db88f08ae4e445b7d4e/ex9.3/queue1.ml | ocaml | module type QUEUE =
sig
type 'a t
val empty: 'a t
val add: 'a t -> 'a -> 'a t
val take: 'a t -> 'a * 'a t
val peek: 'a t -> 'a
exception Empty
end
;;
module Queue1 : QUEUE =
struct
type 'a t = 'a list
let empty = []
let peek = function [] -> raise Empty | x :: rest -> x
let add table x = table @ [x]
let take table =
match table with
[] -> raise Empty
| x :: rest -> (x, rest)
exception Empty
end
;;
let ( <<< ) table x = Queue1.add table x;;
let table = Queue1.empty <<< 1 <<< 5 <<< 4 <<< 3 <<< 2;;
Queue1.peek table;;
let (q, rest) = Queue1.take table;;
let (q, rest) = Queue1.take rest;;
let (q, rest) = Queue1.take rest;;
let (q, rest) = Queue1.take rest;;
let (q, rest) = Queue1.take rest;;
let (q, rest) = Queue1.take rest;;
|
|
422215ab0e3ead2b47aeda8c7252c2024e16ee402b41e9227d07ca03e57b7c05 | joinr/spork | sparse.clj | ;;Namespace for sparse persistent data structures, and
;;flyweight data structures, particularly for use with
;;spork.util.table.
(ns spork.data.sparse)
;;lookup the item associated with the nested vectors in xs.
;; (definline row-col [row col xs]
;; (let [xs (with-meta xs {:tag 'clojure.lang.PersistentVector})
c ( with - meta ( " c " ) { : tag ' clojure.lang . PersistentVector } ) ]
;; `(let [~c (.nth ~xs ~col)]
;; (.nth ~c ~row))))
(defn row-col [row col ^clojure.lang.IPersistentVector cols]
(.nth ^clojure.lang.Indexed (.nth cols col) row))
(definline row-col! [row col cols]
(let [column (with-meta (gensym "column") {:tag 'clojure.lang.IPersistentVector})
columns (with-meta (gensym "columns") {:tag 'clojure.lang.PersistentVector})]
`(let [~columns ~cols
~column (.nth ~columns (int ~col))]
(.nth ~column (int ~row)))))
(defn assoc-row-col [^clojure.lang.IPersistentVector cols row col v]
(let [^clojure.lang.IPersistentVector c (.nth cols col)]
(.assocN cols col (.assocN c row v))))
(definline assoc-row-col! [cols row col v]
(let [column (with-meta (gensym "column") {:tag 'clojure.lang.IPersistentVector})
columns (with-meta (gensym "columns") {:tag 'clojure.lang.PersistentVector})]
`(let [~col (int ~col)
~row (int ~row)
~columns ~cols
~column (.nth ~columns ~col)]
(.assocN ~columns (int ~col)
(.assocN ~column ~row ~v)))))
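;; A minimal usage sketch (hypothetical data): a table is a vector of column
;; vectors, so row/col access is just nested nth/assocN.
(comment
  (def cols [[:a :b] [:c :d]])   ;; two columns of two rows
  (row-col 1 0 cols)             ;;=> :b
  (assoc-row-col cols 1 0 :B))   ;;=> [[:a :B] [:c :d]]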
;;since we're only allowing fields to be keywords, numbers, or
;;strings, we can utilize some optimizations, namely
;;identity equality vs. generic clojure.util/equiv
;;checks.
;;We might even be able to lean on identity checks directly; index-of! below does exactly that.
(defn index-of [itm xs]
(reduce-kv (fn [acc idx x]
(if (= x itm)
(reduced idx)
acc)) nil xs))
;;This is way faster....
(definline index-of! [itm xs]
(let [arr (with-meta (gensym "arr") {:tag 'objects})
vec (with-meta xs {:tag 'clojure.lang.PersistentVector})
]
`(let [~arr (.arrayFor ~vec 0)
bound# (.count ~vec)]
(loop [idx# 0]
(if (== idx# bound#) nil
(if (identical? (aget ~arr idx#) ~itm) idx#
(recur (unchecked-inc idx#))))))))
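;; Usage sketch (assumptions: the vector fits in its first 32-element chunk,
;; since .arrayFor only fetches that chunk, and the items are interned
;; keywords/strings so identical? is a valid equality check):
(comment
  (index-of  :b [:a :b :c])   ;;=> 1
  (index-of! :b [:a :b :c]))  ;;=> 1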
(defn drop-indices [idxs v]
(reduce-kv (fn [acc idx x]
(if (contains? idxs idx)
acc
(conj acc x)))
[] v))
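;; e.g. dropping a set of indices (hypothetical data):
(comment
  (drop-indices #{1 3} [:a :b :c :d :e]))  ;;=> [:a :c :e]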
(defn sparse-seq
([idx rows length]
(when (< idx length)
(if-let [l (first rows)]
(if (< idx (first l))
(concat (repeat (- (first l) idx) nil)
(sparse-seq
(first l) rows length))
(let [r (or (second rows) [length nil])
[lidx lval] l
[ridx rval] r
len (dec (- ridx lidx))]
(cons lval (concat (repeat len nil)
(sparse-seq
ridx (rest rows) length)))))
(if (== idx (dec length)) nil
(repeat (- length idx) nil)))))
([rows length] (sparse-seq 0 rows length)))
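;; rows is assumed to be a seq of [index value] pairs in increasing index order,
;; e.g. a sorted-map; indices with no entry read as nil:
(comment
  (sparse-seq (sorted-map 1 :a 3 :b) 5))  ;;=> (nil :a nil :b nil)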
;;__Hashing__
;;So...we'd like our sparse columns to be considered
;;structurally equal to normal vectors. They just
;;take far fewer entries.
;;The hashing scheme clojure uses, with some tweaks, is this:
(defn simple-hash [xs]
(reduce (fn [acc x]
(unchecked-add-int
(unchecked-multiply-int acc 31)
(hash x)))
1
xs))
;;After which we pass it to another function for better hashing,
;;called mix-collection-hash.
;;Some ground rules: nil, like 0, hashes to 0.
;;So... if we have a vector of 10 nils, we get
;;(simple-hash (vec (repeat 10 nil))) => 31^10, although
;;with intentional integer overflow, we end up with
;;something negative....
;;(= (simple-hash (vec (repeat 10 nil))) (int-pow 31 10))
(defn int-pow [base exp]
(cond (zero? exp) 1
(neg? exp) (throw (Exception. "no negative powers at the moment"))
:else
(loop [i 1
acc base]
(if (== i exp) acc
(recur (unchecked-inc i)
(unchecked-multiply-int acc base))))))
;;Since we're working with multiplication and addition, we
;;can use int-pow to compute the factor for our zeroes
;;in one step.
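;; Illustrative check of the claim above (a sketch; assumes simple-hash and
;; int-pow as defined in this namespace, the length 10 is arbitrary):
(comment
  (== (simple-hash (vec (repeat 10 nil)))
      (int-pow 31 10))
  ;; expected => true, since (hash nil) is 0 and only the 31x factor accumulates
  )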
;;__Data Structures__
;;Simulates a vector with one or more rows. Backed by a hashmap.
;;No equality checks at the moment, so we can't use it for .
;;A sparse vector is no different than a vector, so we should be
;;able to simulate hash-codes easily enough.
;;We know the formula for hashing... multiply the hash of the value
;;by 31 and sum.
;;If every other value is nil, then the contribution is....
(deftype sparserows [^clojure.lang.IPersistentMap rows length]
clojure.lang.ILookup
;;gives us (Get pm key) ... behavior
(valAt [this k]
(if-let [v (.valAt rows k)]
v
(cond (and (>= k 0) (< k length)) nil
:else (throw (Exception.
(str "Index " k " out of bounds for sparserows"))))))
(valAt [this k not-found]
(if-let [v (.valAt rows k)]
v
(cond (and (>= k 0) (< k length)) nil
:else not-found)))
clojure.lang.IPersistentVector
(count [this] length)
(assocN [this k v]
(if (>= k 0)
(sparserows. (assoc rows k v) (max (unchecked-inc k) length))
(throw (Exception. (str "cannot assoc a negative index in sparserows! " k)))))
(empty [this] (sparserows. {} 0))
;;cons defines conj behavior
  (cons [this e] (.assocN this length e))
(equiv [this o] (throw (Exception.
(str "Equiv not implemented for sparserows!"))))
(hashCode [this] (* (hash rows) length))
(equals [this o] (identical? this o))
;;containsKey implements (contains? pm k) behavior
(containsKey [this k] (and (pos? k) (< k length)))
(entryAt [this k]
(if-let [e (.entryAt rows k)] e
(if (< k length) nil
(throw (Exception. (str "Entry " k " out of bounds in sparserows!"))))))
(seq [this] (sparse-seq 0 rows length))
;;without implements (dissoc pm k) behavior
;; (without [this k]
;; (if (< k length)
;;      (sparserows. (dissoc rows k) ((unchecked-inc k)
;; (unchecked-dec length)))))
clojure.lang.Indexed
(nth [this i] (.valAt this i))
(nth [this i not-found] (.valAt this i not-found))
)
;;Simulates a persistent vector. Only stores one value,
;;however will act like a persistent vector in that
;;calls will return nil for values within the range
;;defined by length.
;;Since we know the count, and the number of nils, we
;;can hash this as if it were a bunch of entries.
(deftype sparsecolumn [row val length]
clojure.lang.ILookup
(valAt [this k]
(cond (== k row) val
(and (pos? k) (< k length)) nil
:else (throw (Exception.
(str "Index " k " out of bounds for sparserows")))))
(valAt [this k not-found]
(cond (== k row) val
(and (pos? k) (< k length)) nil
:else not-found))
clojure.lang.IPersistentVector
(count [this] length)
(assocN [this k v]
(cond (== k row) (sparsecolumn. row v length)
          ;;convert to sparserows
:else (throw (Exception. (str "Cannot assoc a negative index in sparserows! " k)))))
(empty [this] (sparsecolumn. 0 nil 0))
;cons defines conj behavior
(cons [this e] (.assocN this length e))
(equiv [this o] (throw (Exception.
(str "Equiv not implemented for sparsecolumn!"))))
(hashCode [this] (* (hash val) row length))
;containsKey implements (contains? pm k) behavior
(containsKey [this k] (and (pos? k) (< k length)))
(entryAt [this k]
(cond (== k row) (clojure.lang.MapEntry. row val)
(< k length) nil
:else (throw (Exception. (str "Entry " k " out of bounds in sparsecolumn")))))
(seq [this] (concat (take row (repeat nil))
(cons val (take (dec (- length row))
(repeat nil)))))
clojure.lang.Indexed
(nth [this i] (if (== i row) val
(.valAt this i)))
(nth [this i not-found] (if (== i row) val
(.valAt this i not-found)))
)
(defn ->sparsecolumn
"This simulates a vector with a single entry, surrounded by nil values.
Length is known, so it's indexed. Really, we only have a value at index row.
This is specifically meant to support flyrecords, and
to allow assoc and dissoc behaviors on them rather than building maps."
([row val length]
(sparsecolumn. row val length))
([rows length] (sparserows. rows length)))
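;; Rough usage sketch (values invented for illustration):
(comment
  (seq (->sparsecolumn 2 :x 5))    ;=> (nil nil :x nil nil)
  (get (->sparsecolumn 2 :x 5) 2)  ;=> :x
  )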
;;Easiest thing to do for efficient assoc/dissoc behavior...
;;is to use the existing columns and fields (as we would a table).
;;Assoc => add a sparse column.
(defprotocol ICursor
(set-cursor [obj n]))
(defn mapeq [l r]
(reduce-kv (fn [acc k v]
(if-let [other (get r k)]
(if (= other v)
acc
(reduced nil))
(reduced nil)))
true l))
;;__Flyrecord__
;;a flyrecord can have hashequivalence, based in its fields.
;;We can have a read-only recordset....
;;A subtable, if you will, that shares structure with the parent
;;table.
;;There might be an argument to move out tables to this format in the
;;future. For now, it's an optimization on the underlying naive
;;vector-based table implementation.
(deftype flyrecord [^:unsynchronized-mutable ^long n
^clojure.lang.PersistentVector fields
^clojure.lang.PersistentVector columns
^:unsynchronized-mutable ^int _hasheq
^:unsynchronized-mutable ^int _hash]
ICursor
(set-cursor [obj idx] (do (set! n (long idx))
(set! _hasheq (int -1))
(set! _hash (int -1))
obj))
clojure.core.protocols.IKVReduce
(kv-reduce [amap f init]
(reduce-kv (fn [acc col fld]
(f acc fld (row-col! n col columns)))
init fields))
clojure.core.protocols.CollReduce
(coll-reduce [coll f]
(reduce-kv (fn [acc idx fld]
(f acc [fld (row-col! n idx columns)]))
[0 (.nth fields 0)]
(subvec fields 1)))
(coll-reduce [coll f val]
(reduce-kv (fn [acc idx fld]
(f acc [fld (row-col! n idx columns)]))
val
fields))
Object
(toString [this] (str (.seq this)))
clojure.lang.ILookup
; valAt gives (get pm key) and (get pm key not-found) behavior
(valAt [this k] (row-col! n (index-of! k fields) columns))
(valAt [this k not-found]
(if-let [col (index-of! k fields)]
(row-col! n col columns)
not-found))
clojure.lang.IHashEq
(hasheq [this]
(if (== _hasheq (int -1))
(let [h (hash-unordered-coll (seq this))]
(do (set! _hasheq (int h))
h))
_hasheq))
clojure.lang.IPersistentMap
(count [this] (.count fields))
(assoc [this k v]
(if-let [idx (index-of! k fields)]
      ;;(flyrecord. n fields (assoc-row-col! columns n idx v) -1 -1)
;; it's faster to just conj the new column/cell rather than updating the backing vector.
(flyrecord. n fields (.assocN ^clojure.lang.PersistentVector
columns (int idx)
(->sparsecolumn n v (.count ^clojure.lang.PersistentVector (.nth columns 0))))
-1 -1)
(let [new-column (->sparsecolumn n v (.count ^clojure.lang.PersistentVector (.nth columns 0)))]
(flyrecord. n (.cons fields k) (.cons columns new-column) -1 -1))))
(empty [this] (flyrecord. 0 [] [] -1 -1))
;cons defines conj behavior
(cons [this e] (.assoc this (first e) (second e)))
(equiv [this o]
(cond (identical? this o) true
(instance? clojure.lang.IHashEq o) (== (hash this) (hash o))
(instance? clojure.lang.IPersistentMap o) (and (== (count this) (count o))
(mapeq this o))
(or (instance? clojure.lang.Sequential o)
(instance? java.util.List o)) (clojure.lang.Util/equiv (seq this) (seq o))
:else nil))
(hashCode [this]
(if (== _hash (int -1))
(let [h (hash-unordered-coll (seq this))]
(do (set! _hash (int h))
h))
_hash))
(equals [this o] (identical? this o))
;containsKey implements (contains? pm k) behavior
  (containsKey [this k] (reduce (fn [acc fld]
                                  (if (= fld k) (reduced true)
                                      acc)) nil fields))
(entryAt [this k]
(reduce-kv
(fn [acc idx fld]
(if (= k fld)
(let [^clojure.lang.PersistentVector col (.nth columns idx)]
             (reduced (clojure.lang.MapEntry. fld (.nth col n))))
           acc)) nil fields))
(seq [this]
(map-indexed (fn [idx fld]
(clojure.lang.MapEntry. fld (row-col! n idx columns)))
fields))
;without implements (dissoc pm k) behavior
(without [this k]
(if-let [idx (index-of! k fields)]
(flyrecord. n (drop-indices #{idx} fields )
(drop-indices #{idx} columns) -1 -1)))
clojure.lang.Indexed
(nth [this i] (row-col! n i columns))
(nth [this i not-found] (if (<= i (.count fields))
(row-col! n i columns)
not-found))
Iterable
(iterator [this] (clojure.lang.SeqIterator. (.seq this)))
clojure.lang.IFn
;makes lex map usable as a function
(invoke [this k] (.valAt this k))
(invoke [this k not-found] (.valAt this k not-found))
clojure.lang .
;; ;adds metadata support
;; (meta [this] (meta entries))
;; (withMeta [this m] (avec. (.withMeta entries m)))
clojure.lang.Reversible
(rseq [this] (reverse (.seq this )))
java.io.Serializable ;Serialization comes for free with the other things implemented
clojure.lang .
)
(defn ->flyrecord
"Creates a lightweight record abstraction on top of a columnar table with a
matching vector of fields. Each field corresponds to a column in the table.
Fly records, for flyweight records, will provide what looks like a
hashmap, but is actually a view onto the underlying fields and column
vectors. For computational tasks, this may not be preferable to other methods,
   but for memory intensive traversal and queries, we should see definite
   benefits over copying record sequences as we currently do. These are
   particularly useful when we retain large tables in memory and wish
to compute views on them, or view them as records instead of columns."
[row flds cols]
(flyrecord. row flds cols -1 -1))
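;; Hypothetical usage sketch (field names and column data invented for illustration):
(comment
  (let [fields  [:name :age]
        columns [["ann" "bob"] [30 40]]
        r       (->flyrecord 0 fields columns)]
    [(:age r)                      ;=> 30, reads row 0 of the :age column
     (:name (set-cursor r 1))]))   ;=> "bob" after moving the cursor to row 1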
| null | https://raw.githubusercontent.com/joinr/spork/bb80eddadf90bf92745bf5315217e25a99fbf9d6/src/spork/data/sparse.clj | clojure | Namespace for sparse persistent data structures, and
flyweight data structures, particularly for use with
spork.util.table.
lookup the item associated with the nested vectors in xs.
(definline row-col [row col xs]
(let [xs (with-meta xs {:tag 'clojure.lang.PersistentVector})
`(let [~c (.nth ~xs ~col)]
(.nth ~c ~row))))
since we're only allowing fields to be keywords, numbers, or
strings, we can utilize some optimizations, namely
identity equality vs. generic clojure.util/equiv
checks.
We might even be able to
This is way faster....
So...we'd like our sparse columns to be considered
structurally equal to normal vectors. They just
take far fewer entries.
The hashing scheme clojure uses, with some tweaks, is this:
After which we pass it to another function for better hashing,
called mix-collection-hash.
Some ground rules: nil, like 0, hashes to 0.
with intentional integer overflow, we end up with
something negative....
Since we're working with multiplication and addition, we
__Data Structures__
A sparse vector is no different than a vector, so we should be
able to simulate hash-codes easily enough.
We know the formula for hashing... multiply the hash of the value
If every other value is nil, then the contribution is....
gives us (Get pm key) ... behavior
cons defines conj behavior
containsKey implements (contains? pm k) behavior
without implements (dissoc pm k) behavior
(without [this k]
(if (< k length)
(unchecked-dec length)))))
however will act like a persistent vector in that
calls will return nil for values within the range
defined by length.
can hash this as if it were a bunch of entries.
cons defines conj behavior
containsKey implements (contains? pm k) behavior
Easiest thing to do for efficient assoc/dissoc behavior...
is to use the existing columns and fields (as we would a table).
Assoc => add a sparse column.
__Flyrecord__
a flyrecord can have hashequivalence, based in its fields.
We can have a read-only recordset....
A subtable, if you will, that shares structure with the parent
table.
There might be an argument to move out tables to this format in the
future. For now, it's an optimization on the underlying naive
vector-based table implementation.
valAt gives (get pm key) and (get pm key not-found) behavior
it's faster to just conj the new column/cell rather than updating the backing vector.
cons defines conj behavior
containsKey implements (contains? pm k) behavior
without implements (dissoc pm k) behavior
makes lex map usable as a function
;adds metadata support
(meta [this] (meta entries))
(withMeta [this m] (avec. (.withMeta entries m)))
Serialization comes for free with the other things implemented | (ns spork.data.sparse)
c ( with - meta ( " c " ) { : tag ' clojure.lang . PersistentVector } ) ]
(defn row-col [row col ^clojure.lang.IPersistentVector cols]
(.nth ^clojure.lang.Indexed (.nth cols col) row))
(definline row-col! [row col cols]
(let [column (with-meta (gensym "column") {:tag 'clojure.lang.IPersistentVector})
columns (with-meta (gensym "columns") {:tag 'clojure.lang.PersistentVector})]
`(let [~columns ~cols
~column (.nth ~columns (int ~col))]
(.nth ~column (int ~row)))))
(defn assoc-row-col [^clojure.lang.IPersistentVector cols row col v]
(let [^clojure.lang.IPersistentVector c (.nth cols col)]
(.assocN cols col (.assocN c row v))))
(definline assoc-row-col! [cols row col v]
(let [column (with-meta (gensym "column") {:tag 'clojure.lang.IPersistentVector})
columns (with-meta (gensym "columns") {:tag 'clojure.lang.PersistentVector})]
`(let [~col (int ~col)
~row (int ~row)
~columns ~cols
~column (.nth ~columns ~col)]
(.assocN ~columns (int ~col)
(.assocN ~column ~row ~v)))))
(defn index-of [itm xs]
(reduce-kv (fn [acc idx x]
(if (= x itm)
(reduced idx)
acc)) nil xs))
(definline index-of! [itm xs]
(let [arr (with-meta (gensym "arr") {:tag 'objects})
vec (with-meta xs {:tag 'clojure.lang.PersistentVector})
]
`(let [~arr (.arrayFor ~vec 0)
bound# (.count ~vec)]
(loop [idx# 0]
(if (== idx# bound#) nil
(if (identical? (aget ~arr idx#) ~itm) idx#
(recur (unchecked-inc idx#))))))))
(defn drop-indices [idxs v]
(reduce-kv (fn [acc idx x]
(if (contains? idxs idx)
acc
(conj acc x)))
[] v))
(defn sparse-seq
([idx rows length]
(when (< idx length)
(if-let [l (first rows)]
(if (< idx (first l))
(concat (repeat (- (first l) idx) nil)
(sparse-seq
(first l) rows length))
(let [r (or (second rows) [length nil])
[lidx lval] l
[ridx rval] r
len (dec (- ridx lidx))]
(cons lval (concat (repeat len nil)
(sparse-seq
ridx (rest rows) length)))))
(if (== idx (dec length)) nil
(repeat (- length idx) nil)))))
([rows length] (sparse-seq 0 rows length)))
;;__Hashing__
(defn simple-hash [xs]
(reduce (fn [acc x]
(unchecked-add-int
(unchecked-multiply-int acc 31)
(hash x)))
1
xs))
;;So... if we have a vector of 10 nils, we get
;;(simple-hash (vec (repeat 10 nil))) => 31^10, although
;;(= (simple-hash (vec (repeat 10 nil))) (int-pow 31 10))
(defn int-pow [base exp]
(cond (zero? exp) 1
(neg? exp) (throw (Exception. "no negative powers at the moment"))
:else
(loop [i 1
acc base]
(if (== i exp) acc
(recur (unchecked-inc i)
(unchecked-multiply-int acc base))))))
;;can use int-pow to compute the factor for our zeroes
;;in one step.
;;Simulates a vector with one or more rows. Backed by a hashmap.
;;No equality checks at the moment, so we can't use it for .
;;by 31 and sum.
(deftype sparserows [^clojure.lang.IPersistentMap rows length]
clojure.lang.ILookup
(valAt [this k]
(if-let [v (.valAt rows k)]
v
(cond (and (>= k 0) (< k length)) nil
:else (throw (Exception.
(str "Index " k " out of bounds for sparserows"))))))
(valAt [this k not-found]
(if-let [v (.valAt rows k)]
v
(cond (and (>= k 0) (< k length)) nil
:else not-found)))
clojure.lang.IPersistentVector
(count [this] length)
(assocN [this k v]
(if (>= k 0)
(sparserows. (assoc rows k v) (max (unchecked-inc k) length))
(throw (Exception. (str "cannot assoc a negative index in sparserows! " k)))))
(empty [this] (sparserows. {} 0))
  (cons [this e] (.assocN this length e))
(equiv [this o] (throw (Exception.
(str "Equiv not implemented for sparserows!"))))
(hashCode [this] (* (hash rows) length))
(equals [this o] (identical? this o))
(containsKey [this k] (and (pos? k) (< k length)))
(entryAt [this k]
(if-let [e (.entryAt rows k)] e
(if (< k length) nil
(throw (Exception. (str "Entry " k " out of bounds in sparserows!"))))))
(seq [this] (sparse-seq 0 rows length))
;;  (sparserows. (dissoc rows k) ((unchecked-inc k)
clojure.lang.Indexed
(nth [this i] (.valAt this i))
(nth [this i not-found] (.valAt this i not-found))
)
;;Simulates a persistent vector. Only stores one value,
;;Since we know the count, and the number of nils, we
(deftype sparsecolumn [row val length]
clojure.lang.ILookup
(valAt [this k]
(cond (== k row) val
(and (pos? k) (< k length)) nil
:else (throw (Exception.
(str "Index " k " out of bounds for sparserows")))))
(valAt [this k not-found]
(cond (== k row) val
(and (pos? k) (< k length)) nil
:else not-found))
clojure.lang.IPersistentVector
(count [this] length)
(assocN [this k v]
(cond (== k row) (sparsecolumn. row v length)
          ;;convert to sparserows
:else (throw (Exception. (str "Cannot assoc a negative index in sparserows! " k)))))
(empty [this] (sparsecolumn. 0 nil 0))
(cons [this e] (.assocN this length e))
(equiv [this o] (throw (Exception.
(str "Equiv not implemented for sparsecolumn!"))))
(hashCode [this] (* (hash val) row length))
(containsKey [this k] (and (pos? k) (< k length)))
(entryAt [this k]
(cond (== k row) (clojure.lang.MapEntry. row val)
(< k length) nil
:else (throw (Exception. (str "Entry " k " out of bounds in sparsecolumn")))))
(seq [this] (concat (take row (repeat nil))
(cons val (take (dec (- length row))
(repeat nil)))))
clojure.lang.Indexed
(nth [this i] (if (== i row) val
(.valAt this i)))
(nth [this i not-found] (if (== i row) val
(.valAt this i not-found)))
)
(defn ->sparsecolumn
"This simulates a vector with a single entry, surrounded by nil values.
Length is known, so it's indexed. Really, we only have a value at index row.
This is specifically meant to support flyrecords, and
to allow assoc and dissoc behaviors on them rather than building maps."
([row val length]
(sparsecolumn. row val length))
([rows length] (sparserows. rows length)))
(defprotocol ICursor
(set-cursor [obj n]))
(defn mapeq [l r]
(reduce-kv (fn [acc k v]
(if-let [other (get r k)]
(if (= other v)
acc
(reduced nil))
(reduced nil)))
true l))
(deftype flyrecord [^:unsynchronized-mutable ^long n
^clojure.lang.PersistentVector fields
^clojure.lang.PersistentVector columns
^:unsynchronized-mutable ^int _hasheq
^:unsynchronized-mutable ^int _hash]
ICursor
(set-cursor [obj idx] (do (set! n (long idx))
(set! _hasheq (int -1))
(set! _hash (int -1))
obj))
clojure.core.protocols.IKVReduce
(kv-reduce [amap f init]
(reduce-kv (fn [acc col fld]
(f acc fld (row-col! n col columns)))
init fields))
clojure.core.protocols.CollReduce
(coll-reduce [coll f]
(reduce-kv (fn [acc idx fld]
(f acc [fld (row-col! n idx columns)]))
[0 (.nth fields 0)]
(subvec fields 1)))
(coll-reduce [coll f val]
(reduce-kv (fn [acc idx fld]
(f acc [fld (row-col! n idx columns)]))
val
fields))
Object
(toString [this] (str (.seq this)))
clojure.lang.ILookup
(valAt [this k] (row-col! n (index-of! k fields) columns))
(valAt [this k not-found]
(if-let [col (index-of! k fields)]
(row-col! n col columns)
not-found))
clojure.lang.IHashEq
(hasheq [this]
(if (== _hasheq (int -1))
(let [h (hash-unordered-coll (seq this))]
(do (set! _hasheq (int h))
h))
_hasheq))
clojure.lang.IPersistentMap
(count [this] (.count fields))
(assoc [this k v]
(if-let [idx (index-of! k fields)]
      ;;(flyrecord. n fields (assoc-row-col! columns n idx v) -1 -1)
(flyrecord. n fields (.assocN ^clojure.lang.PersistentVector
columns (int idx)
(->sparsecolumn n v (.count ^clojure.lang.PersistentVector (.nth columns 0))))
-1 -1)
(let [new-column (->sparsecolumn n v (.count ^clojure.lang.PersistentVector (.nth columns 0)))]
(flyrecord. n (.cons fields k) (.cons columns new-column) -1 -1))))
(empty [this] (flyrecord. 0 [] [] -1 -1))
(cons [this e] (.assoc this (first e) (second e)))
(equiv [this o]
(cond (identical? this o) true
(instance? clojure.lang.IHashEq o) (== (hash this) (hash o))
(instance? clojure.lang.IPersistentMap o) (and (== (count this) (count o))
(mapeq this o))
(or (instance? clojure.lang.Sequential o)
(instance? java.util.List o)) (clojure.lang.Util/equiv (seq this) (seq o))
:else nil))
(hashCode [this]
(if (== _hash (int -1))
(let [h (hash-unordered-coll (seq this))]
(do (set! _hash (int h))
h))
_hash))
(equals [this o] (identical? this o))
  (containsKey [this k] (reduce (fn [acc fld]
                                  (if (= fld k) (reduced true)
                                      acc)) nil fields))
(entryAt [this k]
(reduce-kv
(fn [acc idx fld]
(if (= k fld)
(let [^clojure.lang.PersistentVector col (.nth columns idx)]
             (reduced (clojure.lang.MapEntry. fld (.nth col n))))
           acc)) nil fields))
(seq [this]
(map-indexed (fn [idx fld]
(clojure.lang.MapEntry. fld (row-col! n idx columns)))
fields))
(without [this k]
(if-let [idx (index-of! k fields)]
(flyrecord. n (drop-indices #{idx} fields )
(drop-indices #{idx} columns) -1 -1)))
clojure.lang.Indexed
(nth [this i] (row-col! n i columns))
(nth [this i not-found] (if (<= i (.count fields))
(row-col! n i columns)
not-found))
Iterable
(iterator [this] (clojure.lang.SeqIterator. (.seq this)))
clojure.lang.IFn
(invoke [this k] (.valAt this k))
(invoke [this k not-found] (.valAt this k not-found))
clojure.lang .
clojure.lang.Reversible
(rseq [this] (reverse (.seq this )))
clojure.lang .
)
(defn ->flyrecord
"Creates a lightweight record abstraction on top of a columnar table with a
matching vector of fields. Each field corresponds to a column in the table.
Fly records, for flyweight records, will provide what looks like a
hashmap, but is actually a view onto the underlying fields and column
vectors. For computational tasks, this may not be preferable to other methods,
   but for memory intensive traversal and queries, we should see definite
   benefits over copying record sequences as we currently do. These are
   particularly useful when we retain large tables in memory and wish
to compute views on them, or view them as records instead of columns."
[row flds cols]
(flyrecord. row flds cols -1 -1))
|
6401d0df3c5b7f3e913ecf975ccee20c0cc58dc5fd887d58c795a5944d06b7ad | IBM/openai-gym-ocaml | json.mli |
(*
* This file is part of the gym-http-api OCaml binding project.
*
* Copyright 2016-2017 IBM Corporation
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
 * http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*)
open Json_t
(** Json utilities. *)
val read_json_file : (Yojson.lexer_state -> Lexing.lexbuf -> 'a) -> string -> 'a
(**
[read_json_file reader fname] reads the file [fname] using the
reader [reader] generated by atdgen.
*)
(** {3 Builders} *)
val null : json
(** The [null] value of JSON. *)
val int : int -> json
(** [int n] builds the value of JSON [n]. *)
val bool : bool -> json
(** [bool b] builds the value of JSON [b]. *)
val string : string -> json
(** [string s] builds the value of JSON [s]. *)
val assoc : (string * json) list -> json
(** [assoc o] builds the JSON object [o]. *)
val list : json list -> json
(** [list l] builds the JSON list [l]. *)
(** {3 Manipulation functions} *)
val set : json -> string -> json -> json
(**
   [set o x v] adds (or replaces) the field [x] of the object [o] with
   the value [v].
*)
val get : json -> string -> json option
(**
[get o x] gets the value of the field [x] of the object [o].
*)
val take : json -> string -> json * json option
(**
[take o x] gets the value of the field [x] of the object [o] and
   removes the field from the object. The left part of the return value
is the modified object and the right part is the value of the
field.
*)
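(* Rough sketch of the intended behaviour (values invented for illustration):
   if [o] is [assoc [("a", int 1); ("b", int 2)]], then [take o "a"] is
   expected to yield [(assoc [("b", int 2)], Some (int 1))]. *)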
val assign : json list -> json
(**
   [assign [o1; ...; on]] creates a json object that contains all the
   fields of the objects [o1], ..., [on]. It is similar to the JavaScript
   function [Object.assign({}, o1, ... on)].
*)
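(* Rough sketch (values invented for illustration): applying [assign] to
   [[assoc [("a", int 1)]; assoc [("b", bool true)]]] is expected to build one
   object carrying both the "a" and "b" fields, much like [Object.assign] in
   JavaScript. *)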
val push : json -> string -> json -> json
(**
   [push o x v] adds the value [v] to the list stored in the field [x]
   of the object [o]. If the field [x] doesn't exist, it is created.
*)
val pop : json -> string -> json * json option
(**
   [pop o x] takes a value from the list stored in the field [x] of [o].
*)
(** {3 Setters and getters} *)
(** {4 Boolean fields} *)
val set_bool : json -> string -> bool -> json
(**
   [set_bool o x b] sets the field [x] of the object [o] to the value
[b].
*)
val get_bool : json -> string -> bool option
(**
[get_bool o x] gets the value of the field [x] of the object [o].
*)
(** {4 String fields} *)
val set_string : json -> string -> string -> json
(**
   [set_string o x s] sets the field [x] of the object [o] to the string
[s].
*)
val get_string : json -> string -> string option
(**
[get_string o x] gets the value of the field [x] of the object [o].
*)
val take_string : json -> string -> json * string option
(**
[take_string o x] takes the value of the field [x] of the object [o].
*)
| null | https://raw.githubusercontent.com/IBM/openai-gym-ocaml/7556dea6a189b0502e6832034104a3aff22a8ec1/openai-gym/json.mli | ocaml | * Json utilities.
* {3 Builders}
* The [null] value of JSON.
* [int n] build the value of JSON [n].
* [bool b] build the value of JSON [b].
* [string s] build the value of JSON [s].
* [assoc o] build the JSON object [o].
* [list l] build the JSON list [l].
*
[set o x v] add (or replace) the a field [x] of the object [o] with
value [v].
*
[get o x] gets the value of the field [x] of the object [o].
*
[take o x] gets the value of the field [x] of the object [o] and
remove the field from the object. The left part of the return value
is the modified object and the right part is the value of the
field.
*
[push o x v] add the value [v] in the list stored in a field [x]
of the object [o]. It the field [x] doesn't exists, it creates it.
*
[pop o x] take a value in a list stored in the field [x] of [o].
*
[set_bool o x b] sets the a field [x] of the object [o] with value
[b].
*
[get_bool o x] gets the value of the field [x] of the object [o].
*
[set_string o x x] sets the a field [x] of the object [o] with string
[s].
*
[get_string o x] gets the value of the field [x] of the object [o].
*
[take_string o x] takes the value of the field [x] of the object [o].
|
* This file is part of the gym - http - api OCaml binding project .
*
* Copyright 2016 - 2017 IBM Corporation
*
* Licensed under the Apache License , Version 2.0 ( the " License " ) ;
* you may not use this file except in compliance with the License .
* You may obtain a copy of the License at
*
* -2.0
*
* Unless required by applicable law or agreed to in writing , software
* distributed under the License is distributed on an " AS IS " BASIS ,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND , either express or implied .
* See the License for the specific language governing permissions and
* limitations under the License .
* This file is part of the gym-http-api OCaml binding project.
*
* Copyright 2016-2017 IBM Corporation
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* -2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*)
open Json_t
val read_json_file : (Yojson.lexer_state -> Lexing.lexbuf -> 'a) -> string -> 'a
*
[ read_json_file reader fname ] reads the file [ fname ] using the
reader [ reader ] generated by atdgen .
[read_json_file reader fname] reads the file [fname] using the
reader [reader] generated by atdgen.
*)
val null : json
val int : int -> json
val bool : bool -> json
val string : string -> json
val assoc : (string * json) list -> json
val list : json list -> json
* { 3 Manipulation functions }
val set : json -> string -> json -> json
val get : json -> string -> json option
val take : json -> string -> json * json option
val assign : json list -> json
(**
   [assign [o1; ...; on]] creates a json object that contains all the
   fields of the objects [o1], ..., [on]. It is similar to the JavaScript
   function [Object.assign({}, o1, ... on)].
*)
val push : json -> string -> json -> json
val pop : json -> string -> json * json option
* { 3 Setters and getters }
* { 4 Boolean fields }
val set_bool : json -> string -> bool -> json
val get_bool : json -> string -> bool option
* { 4 String fields }
val set_string : json -> string -> string -> json
val get_string : json -> string -> string option
val take_string : json -> string -> json * string option
|
02c247983c45db28ea7c3abb277704170b4f83e477d0dd7c251e1453939a2616 | alanz/ghc-exactprint | T9233a.hs | module T9233a where
data X = X {
f1 :: String,
f2 :: !Bool,
f3 :: !Bool,
f4 :: !Bool,
f5 :: !Bool,
f6 :: !Bool,
f7 :: !Bool,
f8 :: !Bool,
f9 :: !Bool,
f10 :: !Bool,
f11 :: !Bool,
f12 :: !Bool,
f13 :: !Bool,
f14 :: !Bool,
f15 :: !Bool,
f16 :: !Bool,
f17 :: !Bool,
f18 :: !Bool,
f19 :: !Bool,
f20 :: !Bool,
f21 :: !Bool,
f22 :: !Bool,
f23 :: !Bool,
f24 :: !Bool,
f25 :: !Bool,
f26 :: !Bool,
f27 :: !Bool,
f28 :: !Bool,
f29 :: !Bool,
f30 :: !Bool,
f31 :: !Bool,
f32 :: !Bool,
f33 :: !Bool,
f34 :: !Bool,
f35 :: !Bool,
f36 :: !Bool,
f37 :: !Bool,
f38 :: !Bool,
f39 :: !Bool,
f40 :: !Bool,
f41 :: !Bool,
f42 :: !Bool,
f43 :: !Bool,
f44 :: !Bool,
f45 :: !Bool,
f46 :: !Bool,
f47 :: !Bool,
f48 :: !Bool,
f49 :: !Bool,
f50 :: !Bool,
f51 :: !Bool,
f52 :: !Bool,
f53 :: !Bool,
f54 :: !Bool,
f55 :: !Bool,
f56 :: !Bool,
f57 :: !Bool,
f58 :: !Bool,
f59 :: !Bool,
f60 :: !Bool,
f61 :: !Bool,
f62 :: !Bool,
f63 :: !Bool,
f64 :: !Bool,
f65 :: !Bool,
f66 :: !Bool,
f67 :: !Bool,
f68 :: !Bool,
f69 :: !Bool,
f70 :: !Bool,
f71 :: !Bool,
f72 :: !Bool,
f73 :: !Bool,
f74 :: !Bool,
f75 :: !Bool,
f76 :: !Bool,
f77 :: !Bool,
f78 :: !Bool,
f79 :: !Bool,
f80 :: !Bool,
f81 :: !Bool,
f82 :: !Bool,
f83 :: !Bool,
f84 :: !Bool,
f85 :: !Bool,
f86 :: !Bool,
f87 :: !Bool,
f88 :: !Bool,
f89 :: !Bool,
f90 :: !Bool,
f91 :: !Bool,
f92 :: !Bool,
f93 :: !Bool,
f94 :: !Bool,
f95 :: !Bool,
f96 :: !Bool,
f97 :: !Bool,
f98 :: !Bool,
f99 :: !Bool,
f100 :: !Bool
}
data Options = Options {
flags :: !X,
o2 :: !Bool,
o3 :: !Bool,
o4 :: !Bool,
o5 :: !Bool,
o6 :: !Bool,
o7 :: !Bool,
o8 :: !Bool,
o9 :: !Bool,
o10 :: !Bool,
o11 :: !Bool,
o12 :: !Bool
}
splitComma :: String -> String
splitComma _ = "a"
{-# NOINLINE splitComma #-}
getOpt :: Monad m => [String -> Options -> m Options] -> m ()
getOpt _ = return ()
{-# NOINLINE getOpt #-}
| null | https://raw.githubusercontent.com/alanz/ghc-exactprint/b6b75027811fa4c336b34122a7a7b1a8df462563/tests/examples/ghc80/T9233a.hs | haskell | module T9233a where
data X = X {
f1 :: String,
f2 :: !Bool,
f3 :: !Bool,
f4 :: !Bool,
f5 :: !Bool,
f6 :: !Bool,
f7 :: !Bool,
f8 :: !Bool,
f9 :: !Bool,
f10 :: !Bool,
f11 :: !Bool,
f12 :: !Bool,
f13 :: !Bool,
f14 :: !Bool,
f15 :: !Bool,
f16 :: !Bool,
f17 :: !Bool,
f18 :: !Bool,
f19 :: !Bool,
f20 :: !Bool,
f21 :: !Bool,
f22 :: !Bool,
f23 :: !Bool,
f24 :: !Bool,
f25 :: !Bool,
f26 :: !Bool,
f27 :: !Bool,
f28 :: !Bool,
f29 :: !Bool,
f30 :: !Bool,
f31 :: !Bool,
f32 :: !Bool,
f33 :: !Bool,
f34 :: !Bool,
f35 :: !Bool,
f36 :: !Bool,
f37 :: !Bool,
f38 :: !Bool,
f39 :: !Bool,
f40 :: !Bool,
f41 :: !Bool,
f42 :: !Bool,
f43 :: !Bool,
f44 :: !Bool,
f45 :: !Bool,
f46 :: !Bool,
f47 :: !Bool,
f48 :: !Bool,
f49 :: !Bool,
f50 :: !Bool,
f51 :: !Bool,
f52 :: !Bool,
f53 :: !Bool,
f54 :: !Bool,
f55 :: !Bool,
f56 :: !Bool,
f57 :: !Bool,
f58 :: !Bool,
f59 :: !Bool,
f60 :: !Bool,
f61 :: !Bool,
f62 :: !Bool,
f63 :: !Bool,
f64 :: !Bool,
f65 :: !Bool,
f66 :: !Bool,
f67 :: !Bool,
f68 :: !Bool,
f69 :: !Bool,
f70 :: !Bool,
f71 :: !Bool,
f72 :: !Bool,
f73 :: !Bool,
f74 :: !Bool,
f75 :: !Bool,
f76 :: !Bool,
f77 :: !Bool,
f78 :: !Bool,
f79 :: !Bool,
f80 :: !Bool,
f81 :: !Bool,
f82 :: !Bool,
f83 :: !Bool,
f84 :: !Bool,
f85 :: !Bool,
f86 :: !Bool,
f87 :: !Bool,
f88 :: !Bool,
f89 :: !Bool,
f90 :: !Bool,
f91 :: !Bool,
f92 :: !Bool,
f93 :: !Bool,
f94 :: !Bool,
f95 :: !Bool,
f96 :: !Bool,
f97 :: !Bool,
f98 :: !Bool,
f99 :: !Bool,
f100 :: !Bool
}
data Options = Options {
flags :: !X,
o2 :: !Bool,
o3 :: !Bool,
o4 :: !Bool,
o5 :: !Bool,
o6 :: !Bool,
o7 :: !Bool,
o8 :: !Bool,
o9 :: !Bool,
o10 :: !Bool,
o11 :: !Bool,
o12 :: !Bool
}
splitComma :: String -> String
splitComma _ = "a"
{-# NOINLINE splitComma #-}
getOpt :: Monad m => [String -> Options -> m Options] -> m ()
getOpt _ = return ()
{-# NOINLINE getOpt #-}
|
|
37ea6126479e33e51db97462c81c22fa99527c81bffbe28cf52791fde49408a6 | bjornbm/astro | CroppedSpec.hs | module Astro.Trajectory.CroppedSpec where
import Test.Hspec
--import Test.QuickCheck (property, (==>))
import TestInstances
import Astro.Trajectory
import Astro.Trajectory.Cropped
import Astro.Trajectory.EphemTrajectory
import qualified Prelude
import Numeric.Units.Dimensional.Prelude
import Astro.Time
import Astro.Time.At
import Data.AEq
import Data.List
import Astro.Orbit.MEOE
import Astro.Orbit.Types
main = hspec spec
spec = do
spec_uncropped
spec_croppedStartTime
spec_croppedEndTime
spec_badValidity
spec_croppedEphemeris
spec_croppedEphemeris'
-- Don't think using QuickCheck is warranted in this case as
-- the test MEOEs are very random. We implement our own 'property'
-- instead, which feeds the test MEOEs.
property f = f testM1 testM2
-- ----------------------------------------------------------------------
-- TODO move this spec elsewhere!
spec_uncropped = describe "Uncropped trajectory" $ do
it "does not change startTime when not cropping"
(let t = ET [undefined `At` t2, undefined `At` t4] in
startTime (crop' Nothing Nothing t) `shouldBe` t2)
it "does not change endTime when not cropping"
(let t = ET [undefined `At` t2, undefined `At` t4] in
endTime (crop' Nothing Nothing t) `shouldBe` t4)
it "does not change ephemeris' when not cropping"
(property $ \m m' -> let t = ET [m `At` t2, m' `At` t3] in
ephemeris' (crop' Nothing Nothing t) t0 t5 dt
== ephemeris' t t0 t5 dt)
it "returns no ephemeris' beyond lower validity"
(property $ \m m' -> let t = ET [m `At` t2, m' `At` t3] in
ephemeris' (crop' Nothing Nothing t) t0 t1 dt == [])
it "returns no ephemeris' beyond upper validity"
(property $ \m m' -> let t = ET [m `At` t2, m' `At` t3] in
ephemeris' (crop' Nothing Nothing t) t4 t5 dt == [])
it "does not change ephemeris when not cropping"
(property $ \m m' ->
let t = ET [m `At` t2, m' `At` t3]; ts = takeWhile (<= t5) $ iterate (`addTime` dt) t0 in
      -- let t = ET [m `At` t2, m' `At` t3]; ts = [t0, t0 `addTime` dt..t5] in
ephemeris (crop' Nothing Nothing t) ts == ephemeris t ts)
it "returns no ephemeris beyond lower validity"
(property $ \m m' ->
let t = ET [m `At` t2, m' `At` t3]; ts = takeWhile (<= t1) $ iterate (`addTime` dt) t0 in
      -- let t = ET [m `At` t2, m' `At` t3]; ts = [t0, t0 `addTime` dt..t1] in
ephemeris (crop' Nothing Nothing t) ts == [])
it "returns no ephemeris beyond upper validity"
(property $ \m m' ->
let t = ET [m `At` t2, m' `At` t3]; ts = takeWhile (<= t5) $ iterate (`addTime` dt) t4 in
      -- let t = ET [m `At` t2, m' `At` t3]; ts = [t4, t4 `addTime` dt..t5] in
ephemeris (crop' Nothing Nothing t) ts == [])
-- ----------------------------------------------------------------------
spec_croppedStartTime = describe "Cropped trajectory startTime" $ do
it "does not change when cropping before validity"
(let t = ET [undefined `At` t2, undefined `At` t4] in
startTime (cropStart t1 t) `shouldBe` t2)
it "does not change when cropping endTime"
(let t = ET [undefined `At` t2, undefined `At` t4] in
startTime (cropEnd t3 t) `shouldBe` t2)
it "changes when cropping startTime"
(let t = ET [undefined `At` t2, undefined `At` t4] in
startTime (cropStart t3 t) `shouldBe` t3)
-- ----------------------------------------------------------------------
spec_croppedEndTime = describe "Cropped trajectory endTime" $ do
it "does not change when cropping after validity"
(let t = ET [undefined `At` t2, undefined `At` t4] in
endTime (crop t1 t5 t) `shouldBe` t4)
it "does not change when cropping startTime"
(let t = ET [undefined `At` t2, undefined `At` t4] in
endTime (cropStart t3 t) `shouldBe` t4)
it "changes when cropping endTime"
(let t = ET [undefined `At` t2, undefined `At` t4] in
endTime (cropEnd t3 t) `shouldBe` t3)
spec_badValidity = describe "Trajectory with cropping disjunct from validity" $ do
it "doesn't generate ephemeris when cropped early"
(let t = crop t1 t2 $ ET [undefined `At` t3, undefined `At` t4] in
ephemeris t [startTime t, endTime t] `shouldBe` [])
it "doesn't generate ephemeris when cropped late"
(let t = crop t3 t4 $ ET [undefined `At` t1, undefined `At` t2] in
ephemeris t [startTime t, endTime t] `shouldBe` [])
it "doesn't generate ephemeris' when cropped early"
(let t = crop t1 t2 $ ET [undefined `At` t3, undefined `At` t4] in
ephemeris' t (startTime t) (endTime t) dt `shouldBe` [])
it "doesn't generate ephemeris' when cropped late"
(let t = crop t3 t4 $ ET [undefined `At` t1, undefined `At` t2] in
ephemeris' t (startTime t) (endTime t) dt `shouldBe` [])
-- ----------------------------------------------------------------------
spec_croppedEphemeris' = describe "Cropped trajectory (ephemeris')" $ do
it "does not change when cropping beyond validity"
(property $ \m m' -> let t = ET [m `At` t2, m' `At` t3] in
ephemeris' t t0 t5 dt
== ephemeris' (crop t1 t4 t) t0 t5 dt)
it "with cropped startTime is not equal to uncropped trajectory"
(property $ \m m' -> let t = ET [m `At` t1, m' `At` t3] in
(ephemeris' (crop t2 t4 t) t0 t5 dt
/= ephemeris' t t0 t5 dt))
it "with cropped startTime is suffix of uncropped trajectory"
(property $ \m m' -> let t = ET [m `At` t1, m' `At` t3] in
(ephemeris' (crop t2 t4 t) t0 t5 dt
`isSuffixOf` ephemeris' t t0 t5 dt))
it "with cropped endTime is not equal to of uncropped trajectory"
(property $ \m m' -> let t = ET [m `At` t1, m' `At` t3] in
(ephemeris' (cropEnd t2 t) t0 t5 dt
/= ephemeris' t t0 t5 dt))
it "with cropped endTime is prefix of uncropped trajectory"
(property $ \m m' -> let t = ET [m `At` t1, m' `At` t3] in
(ephemeris' (cropEnd t2 t) t0 t5 dt
`isPrefixOf` ephemeris' t t0 t5 dt))
it "with cropped start and end is not prefix of uncropped trajectory"
(property $ \m m' -> let t = ET [m `At` t1, m' `At` t4] in
not (ephemeris' (crop t2 t3 t) t0 t5 dt
`isPrefixOf` ephemeris' t t0 t5 dt))
it "with cropped start and end is not suffix of uncropped trajectory"
(property $ \m m' -> let t = ET [m `At` t1, m' `At` t4] in
not (ephemeris' (crop t2 t3 t) t0 t5 dt
`isSuffixOf` ephemeris' t t0 t5 dt))
it "with cropped start and end is infix of uncropped trajectory"
(property $ \m m' -> let t = ET [m `At` t1, m' `At` t4] in
(ephemeris' (crop t2 t3 t) t0 t5 dt
`isInfixOf` ephemeris' t t0 t5 dt))
it "returns no ephemeris' beyond lower validity"
(property $ \m m' -> let t = ET [m `At` t0, m' `At` t5] in
ephemeris' (crop t2 t3 t) t0 t1 dt == [])
it "returns no ephemeris' beyond upper validity"
(let t = ET [undefined `At` t0, undefined `At` t5] in
ephemeris' (crop t2 t3 t) t4 t5 dt `shouldBe` [])
-- ----------------------------------------------------------------------
spec_croppedEphemeris = describe "Cropped trajectory (ephemeris)" $ do
it "does not change when cropping beyond validity"
(property $ \m m' ->
let t = ET [m `At` t2, m' `At` t3]; ts = takeWhile (<= t5) $ iterate (`addTime` dt) t0 in
      -- let t = ET [m `At` t2, m' `At` t3]; ts = [t0, t0 `addTime` dt..t5] in
ephemeris t ts == ephemeris (crop t1 t4 t) ts)
it "with cropped startTime is not equal to uncropped trajectory"
(property $ \m m' ->
let t = ET [m `At` t1, m' `At` t3]; ts = takeWhile (<= t5) $ iterate (`addTime` dt) t0 in
      -- let t = ET [m `At` t1, m' `At` t3]; ts = [t0, t0 `addTime` dt..t5] in
ephemeris (crop t2 t4 t) ts /= ephemeris t ts)
it "with cropped startTime is suffix of uncropped trajectory"
(property $ \m m' ->
let t = ET [m `At` t1, m' `At` t3]; ts = takeWhile (<= t5) $ iterate (`addTime` dt) t0 in
      -- let t = ET [m `At` t1, m' `At` t3]; ts = [t0, t0 `addTime` dt..t5] in
(ephemeris (crop t2 t4 t) ts
`isSuffixOf` ephemeris t ts))
it "with cropped endTime is not equal to of uncropped trajectory"
(property $ \m m' ->
let t = ET [m `At` t1, m' `At` t3]; ts = takeWhile (<= t5) $ iterate (`addTime` dt) t0 in
      -- let t = ET [m `At` t1, m' `At` t3]; ts = [t0, t0 `addTime` dt..t5] in
ephemeris (cropEnd t2 t) ts /= ephemeris t ts)
it "with cropped endTime is prefix of uncropped trajectory"
(property $ \m m' ->
let t = ET [m `At` t1, m' `At` t3]; ts = takeWhile (<= t5) $ iterate (`addTime` dt) t0 in
      -- let t = ET [m `At` t1, m' `At` t3]; ts = [t0, t0 `addTime` dt..t5] in
(ephemeris (cropEnd t2 t) ts
`isPrefixOf` ephemeris t ts))
it "with cropped start and end is not prefix of uncropped trajectory"
(property $ \m m' ->
let t = ET [m `At` t1, m' `At` t4]; ts = takeWhile (<= t5) $ iterate (`addTime` dt) t0 in
-- let t = ET [m `At` t1, m' `At` t4]; ts = [t0, t0 `addTime` dt..t5] in
not (ephemeris (crop t2 t3 t) ts
`isPrefixOf` ephemeris t ts))
it "with cropped start and end is not suffix of uncropped trajectory"
(property $ \m m' ->
let t = ET [m `At` t1, m' `At` t4]; ts = takeWhile (<= t5) $ iterate (`addTime` dt) t0 in
-- let t = ET [m `At` t1, m' `At` t4]; ts = [t0, t0 `addTime` dt..t5] in
not (ephemeris (crop t2 t3 t) ts
`isSuffixOf` ephemeris t ts))
it "with cropped start and end is infix of uncropped trajectory (ephemeris')"
(property $ \m m' ->
let t = ET [m `At` t1, m' `At` t4]; ts = takeWhile (<= t5) $ iterate (`addTime` dt) t0 in
-- let t = ET [m `At` t1, m' `At` t4]; ts = [t0, t0 `addTime` dt..t5] in
(ephemeris (crop t2 t3 t) ts
`isInfixOf` ephemeris t ts))
it "returns no ephemeris beyond lower validity"
(let t = ET [undefined `At` t0, undefined `At` t5]
ts = takeWhile (<= t1) $ iterate (`addTime` dt) t0
-- ts = [t0, t0 `addTime` dt..t1]
in ephemeris (crop t2 t3 t) ts `shouldBe` [])
it "returns no ephemeris beyond upper validity"
(let t = ET [undefined `At` t0, undefined `At` t5]
ts = takeWhile (<= t5) $ iterate (`addTime` dt) t4
       -- ts = [t4, t4 `addTime` dt..t5]
in ephemeris (crop t2 t3 t) ts `shouldBe` [])
-- ----------------------------------------------------------------------
t0 = mjd 0.0 UT1
t1 = mjd 1 UT1
t2 = mjd 2 UT1
t3 = mjd 3 UT1
t4 = mjd 4 UT1
t5 = mjd 5 UT1
dt = 1 *~ hour :: Time Double
-- Test data, essentially randomized.
testM1 = MEOE { mu = 5.5017577174388266e9 *~ (meter^pos3/second^pos2)
, p = 0.7865893064609859 *~meter, f = 0.6398323179864169*~one
, g = 0.0996399428802211 *~one, h = (-0.7813921023837359)*~one
, k = 0.7396666870016642 *~one
, longitude = Long { long = 0.811762241416502*~one }
}
testM2 = MEOE { mu = 4.5017577174388266e9 *~ (meter^pos3/second^pos2)
, p = 0.6865893064609859 *~meter, f = 0.2398323179864169*~one
, g = 0.1996399428802211 *~one, h = (-0.0813921023837359)*~one
, k = 0.1396666870016642 *~one
, longitude = Long { long = 2.811762241416502*~one }
}
| null | https://raw.githubusercontent.com/bjornbm/astro/f4fb2c4b739a0a8f68f51aa154285120d2230c30/test/Astro/Trajectory/CroppedSpec.hs | haskell | import Test.QuickCheck (property, (==>))
the test MEOEs are very random. We implement our own 'property'
instead which feed the test MEOEs.
----------------------------------------------------------------------
----------------------------------------------------------------------
----------------------------------------------------------------------
----------------------------------------------------------------------
----------------------------------------------------------------------
let t = ET [m `At` t1, m' `At` t4]; ts = [t0, t0 `addTime` dt..t5] in
let t = ET [m `At` t1, m' `At` t4]; ts = [t0, t0 `addTime` dt..t5] in
let t = ET [m `At` t1, m' `At` t4]; ts = [t0, t0 `addTime` dt..t5] in
ts = [t0, t0 `addTime` dt..t1]
----------------------------------------------------------------------
Test data, essentially randomized. | module Astro.Trajectory.CroppedSpec where
import Test.Hspec
import TestInstances
import Astro.Trajectory
import Astro.Trajectory.Cropped
import Astro.Trajectory.EphemTrajectory
import qualified Prelude
import Numeric.Units.Dimensional.Prelude
import Astro.Time
import Astro.Time.At
import Data.AEq
import Data.List
import Astro.Orbit.MEOE
import Astro.Orbit.Types
main = hspec spec
spec = do
spec_uncropped
spec_croppedStartTime
spec_croppedEndTime
spec_badValidity
spec_croppedEphemeris
spec_croppedEphemeris'
Do n't think using QuickCheck is warranted in this case as
property f = f testM1 testM2
TODO move this spec elsewhere !
spec_uncropped = describe "Uncropped trajectory" $ do
it "does not change startTime when not cropping"
(let t = ET [undefined `At` t2, undefined `At` t4] in
startTime (crop' Nothing Nothing t) `shouldBe` t2)
it "does not change endTime when not cropping"
(let t = ET [undefined `At` t2, undefined `At` t4] in
endTime (crop' Nothing Nothing t) `shouldBe` t4)
it "does not change ephemeris' when not cropping"
(property $ \m m' -> let t = ET [m `At` t2, m' `At` t3] in
ephemeris' (crop' Nothing Nothing t) t0 t5 dt
== ephemeris' t t0 t5 dt)
it "returns no ephemeris' beyond lower validity"
(property $ \m m' -> let t = ET [m `At` t2, m' `At` t3] in
ephemeris' (crop' Nothing Nothing t) t0 t1 dt == [])
it "returns no ephemeris' beyond upper validity"
(property $ \m m' -> let t = ET [m `At` t2, m' `At` t3] in
ephemeris' (crop' Nothing Nothing t) t4 t5 dt == [])
it "does not change ephemeris when not cropping"
(property $ \m m' ->
let t = ET [m `At` t2, m' `At` t3]; ts = takeWhile (<= t5) $ iterate (`addTime` dt) t0 in
let t = ET [ m ` At ` t2 , m ' ` At ` t3 ] ; ts = [ t0 , t0 ` addTime ` dt .. t5 ] in
ephemeris (crop' Nothing Nothing t) ts == ephemeris t ts)
it "returns no ephemeris beyond lower validity"
(property $ \m m' ->
let t = ET [m `At` t2, m' `At` t3]; ts = takeWhile (<= t1) $ iterate (`addTime` dt) t0 in
let t = ET [ m ` At ` t2 , m ' ` At ` t3 ] ; ts = [ t0 , t0 ` addTime ` dt .. t1 ] in
ephemeris (crop' Nothing Nothing t) ts == [])
it "returns no ephemeris beyond upper validity"
(property $ \m m' ->
let t = ET [m `At` t2, m' `At` t3]; ts = takeWhile (<= t5) $ iterate (`addTime` dt) t4 in
let t = ET [ m ` At ` t2 , m ' ` At ` t3 ] ; ts = [ t4 , ` addTime ` dt .. t5 ] in
ephemeris (crop' Nothing Nothing t) ts == [])
spec_croppedStartTime = describe "Cropped trajectory startTime" $ do
it "does not change when cropping before validity"
(let t = ET [undefined `At` t2, undefined `At` t4] in
startTime (cropStart t1 t) `shouldBe` t2)
it "does not change when cropping endTime"
(let t = ET [undefined `At` t2, undefined `At` t4] in
startTime (cropEnd t3 t) `shouldBe` t2)
it "changes when cropping startTime"
(let t = ET [undefined `At` t2, undefined `At` t4] in
startTime (cropStart t3 t) `shouldBe` t3)
spec_croppedEndTime = describe "Cropped trajectory endTime" $ do
it "does not change when cropping after validity"
(let t = ET [undefined `At` t2, undefined `At` t4] in
endTime (crop t1 t5 t) `shouldBe` t4)
it "does not change when cropping startTime"
(let t = ET [undefined `At` t2, undefined `At` t4] in
endTime (cropStart t3 t) `shouldBe` t4)
it "changes when cropping endTime"
(let t = ET [undefined `At` t2, undefined `At` t4] in
endTime (cropEnd t3 t) `shouldBe` t3)
spec_badValidity = describe "Trajectory with cropping disjunct from validity" $ do
it "doesn't generate ephemeris when cropped early"
(let t = crop t1 t2 $ ET [undefined `At` t3, undefined `At` t4] in
ephemeris t [startTime t, endTime t] `shouldBe` [])
it "doesn't generate ephemeris when cropped late"
(let t = crop t3 t4 $ ET [undefined `At` t1, undefined `At` t2] in
ephemeris t [startTime t, endTime t] `shouldBe` [])
it "doesn't generate ephemeris' when cropped early"
(let t = crop t1 t2 $ ET [undefined `At` t3, undefined `At` t4] in
ephemeris' t (startTime t) (endTime t) dt `shouldBe` [])
it "doesn't generate ephemeris' when cropped late"
(let t = crop t3 t4 $ ET [undefined `At` t1, undefined `At` t2] in
ephemeris' t (startTime t) (endTime t) dt `shouldBe` [])
spec_croppedEphemeris' = describe "Cropped trajectory (ephemeris')" $ do
it "does not change when cropping beyond validity"
(property $ \m m' -> let t = ET [m `At` t2, m' `At` t3] in
ephemeris' t t0 t5 dt
== ephemeris' (crop t1 t4 t) t0 t5 dt)
it "with cropped startTime is not equal to uncropped trajectory"
(property $ \m m' -> let t = ET [m `At` t1, m' `At` t3] in
(ephemeris' (crop t2 t4 t) t0 t5 dt
/= ephemeris' t t0 t5 dt))
it "with cropped startTime is suffix of uncropped trajectory"
(property $ \m m' -> let t = ET [m `At` t1, m' `At` t3] in
(ephemeris' (crop t2 t4 t) t0 t5 dt
`isSuffixOf` ephemeris' t t0 t5 dt))
it "with cropped endTime is not equal to of uncropped trajectory"
(property $ \m m' -> let t = ET [m `At` t1, m' `At` t3] in
(ephemeris' (cropEnd t2 t) t0 t5 dt
/= ephemeris' t t0 t5 dt))
it "with cropped endTime is prefix of uncropped trajectory"
(property $ \m m' -> let t = ET [m `At` t1, m' `At` t3] in
(ephemeris' (cropEnd t2 t) t0 t5 dt
`isPrefixOf` ephemeris' t t0 t5 dt))
it "with cropped start and end is not prefix of uncropped trajectory"
(property $ \m m' -> let t = ET [m `At` t1, m' `At` t4] in
not (ephemeris' (crop t2 t3 t) t0 t5 dt
`isPrefixOf` ephemeris' t t0 t5 dt))
it "with cropped start and end is not suffix of uncropped trajectory"
(property $ \m m' -> let t = ET [m `At` t1, m' `At` t4] in
not (ephemeris' (crop t2 t3 t) t0 t5 dt
`isSuffixOf` ephemeris' t t0 t5 dt))
it "with cropped start and end is infix of uncropped trajectory"
(property $ \m m' -> let t = ET [m `At` t1, m' `At` t4] in
(ephemeris' (crop t2 t3 t) t0 t5 dt
`isInfixOf` ephemeris' t t0 t5 dt))
it "returns no ephemeris' beyond lower validity"
(property $ \m m' -> let t = ET [m `At` t0, m' `At` t5] in
ephemeris' (crop t2 t3 t) t0 t1 dt == [])
it "returns no ephemeris' beyond upper validity"
(let t = ET [undefined `At` t0, undefined `At` t5] in
ephemeris' (crop t2 t3 t) t4 t5 dt `shouldBe` [])
spec_croppedEphemeris = describe "Cropped trajectory (ephemeris)" $ do
it "does not change when cropping beyond validity"
(property $ \m m' ->
let t = ET [m `At` t2, m' `At` t3]; ts = takeWhile (<= t5) $ iterate (`addTime` dt) t0 in
let t = ET [ m ` At ` t2 , m ' ` At ` t3 ] ; ts = [ t0 , t0 ` addTime ` dt .. t5 ] in
ephemeris t ts == ephemeris (crop t1 t4 t) ts)
it "with cropped startTime is not equal to uncropped trajectory"
(property $ \m m' ->
let t = ET [m `At` t1, m' `At` t3]; ts = takeWhile (<= t5) $ iterate (`addTime` dt) t0 in
let t = ET [ m ` At ` t1 , m ' ` At ` t3 ] ; ts = [ t0 , t0 ` addTime ` dt .. t5 ] in
ephemeris (crop t2 t4 t) ts /= ephemeris t ts)
it "with cropped startTime is suffix of uncropped trajectory"
(property $ \m m' ->
let t = ET [m `At` t1, m' `At` t3]; ts = takeWhile (<= t5) $ iterate (`addTime` dt) t0 in
let t = ET [ m ` At ` t1 , m ' ` At ` t3 ] ; ts = [ t0 , t0 ` addTime ` dt .. t5 ] in
(ephemeris (crop t2 t4 t) ts
`isSuffixOf` ephemeris t ts))
it "with cropped endTime is not equal to of uncropped trajectory"
(property $ \m m' ->
let t = ET [m `At` t1, m' `At` t3]; ts = takeWhile (<= t5) $ iterate (`addTime` dt) t0 in
let t = ET [ m ` At ` t1 , m ' ` At ` t3 ] ; ts = [ t0 , t0 ` addTime ` dt .. t5 ] in
ephemeris (cropEnd t2 t) ts /= ephemeris t ts)
it "with cropped endTime is prefix of uncropped trajectory"
(property $ \m m' ->
let t = ET [m `At` t1, m' `At` t3]; ts = takeWhile (<= t5) $ iterate (`addTime` dt) t0 in
let t = ET [ m ` At ` t1 , m ' ` At ` t3 ] ; ts = [ t0 , t0 ` addTime ` dt .. t5 ] in
(ephemeris (cropEnd t2 t) ts
`isPrefixOf` ephemeris t ts))
it "with cropped start and end is not prefix of uncropped trajectory"
(property $ \m m' ->
let t = ET [m `At` t1, m' `At` t4]; ts = takeWhile (<= t5) $ iterate (`addTime` dt) t0 in
not (ephemeris (crop t2 t3 t) ts
`isPrefixOf` ephemeris t ts))
it "with cropped start and end is not suffix of uncropped trajectory"
(property $ \m m' ->
let t = ET [m `At` t1, m' `At` t4]; ts = takeWhile (<= t5) $ iterate (`addTime` dt) t0 in
not (ephemeris (crop t2 t3 t) ts
`isSuffixOf` ephemeris t ts))
it "with cropped start and end is infix of uncropped trajectory (ephemeris')"
(property $ \m m' ->
let t = ET [m `At` t1, m' `At` t4]; ts = takeWhile (<= t5) $ iterate (`addTime` dt) t0 in
(ephemeris (crop t2 t3 t) ts
`isInfixOf` ephemeris t ts))
it "returns no ephemeris beyond lower validity"
(let t = ET [undefined `At` t0, undefined `At` t5]
ts = takeWhile (<= t1) $ iterate (`addTime` dt) t0
in ephemeris (crop t2 t3 t) ts `shouldBe` [])
it "returns no ephemeris beyond upper validity"
(let t = ET [undefined `At` t0, undefined `At` t5]
ts = takeWhile (<= t5) $ iterate (`addTime` dt) t4
ts = [ t4 , ` addTime ` dt .. t5 ]
in ephemeris (crop t2 t3 t) ts `shouldBe` [])
t0 = mjd 0.0 UT1
t1 = mjd 1 UT1
t2 = mjd 2 UT1
t3 = mjd 3 UT1
t4 = mjd 4 UT1
t5 = mjd 5 UT1
dt = 1 *~ hour :: Time Double
testM1 = MEOE { mu = 5.5017577174388266e9 *~ (meter^pos3/second^pos2)
, p = 0.7865893064609859 *~meter, f = 0.6398323179864169*~one
, g = 0.0996399428802211 *~one, h = (-0.7813921023837359)*~one
, k = 0.7396666870016642 *~one
, longitude = Long { long = 0.811762241416502*~one }
}
testM2 = MEOE { mu = 4.5017577174388266e9 *~ (meter^pos3/second^pos2)
, p = 0.6865893064609859 *~meter, f = 0.2398323179864169*~one
, g = 0.1996399428802211 *~one, h = (-0.0813921023837359)*~one
, k = 0.1396666870016642 *~one
, longitude = Long { long = 2.811762241416502*~one }
}
|
da688cd0e962e863537dd0fa4bbedb152d882c35cb4d6fa449e7eaf20028b558 | esl/MongooseIM | mod_inbox_utils.erl | %%%-------------------------------------------------------------------
( C ) 2018 , Erlang - Solutions
%%% @doc
%%%
%%% @end
%%% Created : 30. Jan 2018 13:22
%%%-------------------------------------------------------------------
-module(mod_inbox_utils).
-include("mod_inbox.hrl").
-include("jlib.hrl").
-type inbox_fun() :: fun((mongooseim:host_type(),
jid:jid(),
jid:jid(),
exml:element(),
mongoose_acc:t()) -> mod_inbox:count_res()).
%%%%%%%%%%%%%%%%%%%
%%% DB Operations shared by mod_inbox_one2one and
-export([maybe_reset_unread_count/5,
reset_unread_count_to_zero/3,
maybe_write_to_inbox/6,
write_to_sender_inbox/5,
write_to_receiver_inbox/5,
clear_inbox/3,
get_reset_markers/1,
if_chat_marker_get_id/2,
has_chat_marker/1,
get_option_write_aff_changes/1,
get_option_remove_on_kicked/1,
extract_attr_jid/1,
maybe_binary_to_positive_integer/1,
encode_rsm_id/2,
decode_rsm_id/1,
binary_to_bool/1,
bool_to_binary/1,
build_inbox_entry_key/2,
build_inbox_result_elements/2,
build_entry_result_elements/2,
all_valid_boxes_for_query/1,
list_single_form_field/3,
calculate_ts_from/2
]).
-ignore_xref([get_reset_markers/1, if_chat_marker_get_id/2]).
-spec maybe_reset_unread_count(HostType :: mongooseim:host_type(),
User :: jid:jid(),
Remote :: jid:jid(),
Packet :: exml:element(),
Acc :: mongoose_acc:t()) -> ok.
maybe_reset_unread_count(HostType, User, Remote, Packet, Acc) ->
ResetMarkers = get_reset_markers(HostType),
case if_chat_marker_get_id(Packet, ResetMarkers) of
undefined ->
ok;
Id ->
TS = mongoose_acc:timestamp(Acc),
reset_unread_count(HostType, User, Remote, Id, TS)
end.
-spec reset_unread_count_to_zero(mongoose_acc:t(), jid:jid(), jid:jid()) -> ok.
reset_unread_count_to_zero(Acc, From, Remote) ->
TS = mongoose_acc:timestamp(Acc),
HostType = mongoose_acc:host_type(Acc),
InboxEntryKey = build_inbox_entry_key(From, Remote),
ok = mod_inbox_backend:reset_unread(HostType, InboxEntryKey, undefined, TS).
-spec reset_unread_count(HostType ::mongooseim:host_type(),
From :: jid:jid(),
Remote :: jid:jid(),
MsgId :: id(),
TS :: integer()) -> ok.
reset_unread_count(HostType, From, Remote, MsgId, TS) ->
InboxEntryKey = build_inbox_entry_key(From, Remote),
ok = mod_inbox_backend:reset_unread(HostType, InboxEntryKey, MsgId, TS).
-spec write_to_sender_inbox(HostType :: mongooseim:host_type(),
Sender :: jid:jid(),
Receiver :: jid:jid(),
Packet :: exml:element(),
Acc :: mongoose_acc:t()) -> ok.
write_to_sender_inbox(HostType, Sender, Receiver, Packet, Acc) ->
MsgId = get_msg_id(Packet),
Timestamp = mongoose_acc:timestamp(Acc),
%% No unread count for the sender: by writing a new message they are assumed to have read all previous ones.
Count = 0,
InboxEntryKey = build_inbox_entry_key(Sender, Receiver),
mod_inbox_backend:set_inbox(HostType, InboxEntryKey, Packet, Count, MsgId, Timestamp).
-spec write_to_receiver_inbox(HostType :: mongooseim:host_type(),
Sender :: jid:jid(),
Receiver :: jid:jid(),
Packet :: exml:element(),
Acc :: mongoose_acc:t()) -> ok | {ok, integer()}.
write_to_receiver_inbox(HostType, Sender, Receiver, Packet, Acc) ->
MsgId = get_msg_id(Packet),
Timestamp = mongoose_acc:timestamp(Acc),
InboxEntryKey = build_inbox_entry_key(Receiver, Sender),
mod_inbox_backend:set_inbox_incr_unread(HostType, InboxEntryKey,
Packet, MsgId, Timestamp).
-spec clear_inbox(HostType :: mongooseim:host_type(),
User :: jid:user(),
Server :: jid:server()) -> mod_inbox:write_res().
clear_inbox(HostType, User, Server) when is_binary(User) ->
LUser = jid:nodeprep(User),
LServer = jid:nameprep(Server),
ok = mod_inbox_backend:clear_inbox(HostType, LUser, LServer).
%%%%%%%%%%%%%%%%%%%
%% Helpers
-spec get_reset_markers(HostType :: mongooseim:host_type()) -> list(marker()).
get_reset_markers(HostType) ->
gen_mod:get_module_opt(HostType, mod_inbox, reset_markers).
-spec if_chat_marker_get_id(Packet :: exml:element(),
Markers :: list(marker())) -> undefined | id().
if_chat_marker_get_id(Packet, Markers) when is_list(Markers) ->
Ids = [if_chat_marker_get_id(Packet, M) || M <- Markers],
Filtered = [El || El <- Ids, El /= undefined],
case Filtered of
[] ->
undefined;
[H | _] ->
H
end;
if_chat_marker_get_id(Packet, Marker) ->
case exml_query:paths(Packet, [{element, Marker}, {attr, <<"id">>}]) of
[Id] ->
Id;
_ ->
undefined
end.
-spec has_chat_marker(Packet :: exml:element()) -> boolean().
has_chat_marker(Packet) ->
mongoose_chat_markers:has_chat_markers(Packet).
-spec maybe_write_to_inbox(HostType, User, Remote, Packet, Acc, WriteF) ->
mod_inbox:count_res() when
HostType ::mongooseim:host_type(),
User :: jid:jid(),
Remote :: jid:jid(),
Packet :: exml:element(),
Acc :: mongoose_acc:t(),
%% WriteF is write_to_receiver_inbox/5 or write_to_sender_inbox/5
WriteF :: inbox_fun().
maybe_write_to_inbox(HostType, User, Remote, Packet, Acc, WriteF) ->
case has_chat_marker(Packet) of
true ->
ok;
false ->
Packet2 = fill_from_attr(Packet, User),
WriteF(HostType, User, Remote, Packet2, Acc)
end.
-spec get_msg_id(Msg :: exml:element()) -> binary().
get_msg_id(#xmlel{name = <<"message">>} = Msg) ->
exml_query:attr(Msg, <<"id">>, <<>>).
-spec fill_from_attr(Msg :: exml:element(), From :: jid:jid()) -> exml:element().
fill_from_attr(Msg = #xmlel{attrs = Attrs}, From) ->
case exml_query:attr(Msg, <<"from">>, undefined) of
undefined ->
FromBin = jid:to_binary(From),
Msg#xmlel{attrs = [{<<"from">>, FromBin} | Attrs]};
_ ->
Msg
end.
-spec get_option_write_aff_changes(HostType :: mongooseim:host_type()) -> boolean().
get_option_write_aff_changes(HostType) ->
gen_mod:get_module_opt(HostType, mod_inbox, aff_changes).
-spec get_option_remove_on_kicked(HostType :: mongooseim:host_type()) -> boolean().
get_option_remove_on_kicked(HostType) ->
gen_mod:get_module_opt(HostType, mod_inbox, remove_on_kicked).
extract_attr_jid(ResetStanza) ->
case exml_query:attr(ResetStanza, <<"jid">>) of
undefined ->
{error, <<"jid-required">>};
Value ->
case jid:from_binary(Value) of
error ->
{error, <<"invalid-jid">>};
JID -> JID
end
end.
-spec maybe_binary_to_positive_integer(binary()) -> non_neg_integer() | {error, atom()}.
maybe_binary_to_positive_integer(Bin) ->
try erlang:binary_to_integer(Bin) of
N when N >= 0 -> N;
_ -> {error, non_positive_integer}
catch error:badarg -> {error, 'NaN'}
end.
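%% Illustrative examples (a sketch added for clarity, not in the upstream module):
%%   maybe_binary_to_positive_integer(<<"7">>)   -> 7
%%   maybe_binary_to_positive_integer(<<"-3">>)  -> {error, non_positive_integer}
%%   maybe_binary_to_positive_integer(<<"abc">>) -> {error, 'NaN'}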
-spec encode_rsm_id(integer(), binary()) -> binary().
encode_rsm_id(Int, BinJid) ->
BinInt = integer_to_binary(Int),
EncodedJid = base64:encode(BinJid),
<<BinInt/binary, "/", EncodedJid/binary>>.
-spec decode_rsm_id(binary()) -> {integer(), binary()} | error.
decode_rsm_id(Bin) ->
case binary:split(Bin, <<"/">>) of
[BinInt, BinJid] ->
Int = maybe_binary_to_positive_integer(BinInt),
case Int of
Int when is_integer(Int) ->
Jid = base64:decode(BinJid),
{Int, Jid};
_ -> error
end;
_ -> error
end.
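%% Illustrative round-trip (a sketch added for clarity, not in the upstream
%% module; the JID value is invented):
%%   1> mod_inbox_utils:encode_rsm_id(42, <<"alice@localhost">>).
%%   <<"42/YWxpY2VAbG9jYWxob3N0">>
%%   2> mod_inbox_utils:decode_rsm_id(<<"42/YWxpY2VAbG9jYWxob3N0">>).
%%   {42,<<"alice@localhost">>}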
-spec binary_to_bool(binary()) -> true | false | error.
binary_to_bool(<<"true">>) -> true;
binary_to_bool(<<"false">>) -> false;
binary_to_bool(_) -> error.
-spec bool_to_binary(integer() | boolean()) -> binary() | error.
bool_to_binary(1) -> <<"true">>;
bool_to_binary(0) -> <<"false">>;
bool_to_binary(true) -> <<"true">>;
bool_to_binary(false) -> <<"false">>;
bool_to_binary(_) -> error.
build_inbox_entry_key(FromJid, ToJid) ->
{LUser, LServer} = jid:to_lus(FromJid),
ToBareJid = jid:nameprep(jid:to_bare_binary(ToJid)),
{LUser, LServer, ToBareJid}.
-spec build_inbox_result_elements(inbox_res(), integer()) -> [exml:element()].
build_inbox_result_elements(#{msg := Content, timestamp := Timestamp, unread_count := UnreadCount,
box := Box, muted_until := MutedUntil,
extra := Extra}, AccTS) ->
[ #xmlel{name = <<"forwarded">>, attrs = [{<<"xmlns">>, ?NS_FORWARD}],
children = [build_delay_el(Timestamp), Content]},
kv_to_el(<<"read">>, mod_inbox_utils:bool_to_binary(0 =:= UnreadCount)),
kv_to_el(<<"box">>, Box),
kv_to_el(<<"archive">>, is_archive(Box)),
kv_to_el(<<"mute">>, maybe_muted_until(MutedUntil, AccTS))
| Extra ].
-spec build_entry_result_elements(entry_properties(), integer()) -> [exml:element()].
build_entry_result_elements(#{box := Box, muted_until := MutedUntil,
unread_count := UnreadCount, extra := Extra}, AccTS) ->
[ kv_to_el(<<"read">>, mod_inbox_utils:bool_to_binary(0 =:= UnreadCount)),
kv_to_el(<<"box">>, Box), kv_to_el(<<"archive">>, is_archive(Box)),
kv_to_el(<<"mute">>, maybe_muted_until(MutedUntil, AccTS))
| Extra ].
-spec kv_to_el(binary(), binary()) -> exml:element().
kv_to_el(Key, Value) ->
#xmlel{name = Key, children = [#xmlcdata{content = Value}]}.
-spec is_archive(binary()) -> binary().
is_archive(<<"archive">>) -> <<"true">>;
is_archive(_) -> <<"false">>.
-spec maybe_muted_until(integer(), integer()) -> binary().
maybe_muted_until(0, _) -> <<"0">>;
maybe_muted_until(MutedUntil, CurrentTS) ->
case CurrentTS =< MutedUntil of
true -> list_to_binary(calendar:system_time_to_rfc3339(MutedUntil, [{offset, "Z"}, {unit, microsecond}]));
false -> <<"0">>
end.
-spec build_delay_el(Timestamp :: integer()) -> exml:element().
build_delay_el(Timestamp) ->
TS = calendar:system_time_to_rfc3339(Timestamp, [{offset, "Z"}, {unit, microsecond}]),
jlib:timestamp_to_xml(TS, undefined, undefined).
all_valid_boxes_for_query(HostType) ->
[<<"all">> | gen_mod:get_module_opt(HostType, mod_inbox, boxes)].
-spec list_single_form_field(Var :: binary(),
Default :: binary(),
Options :: [ Option | {Label, Value}]) -> exml:element() when
Option :: binary(), Label :: binary(), Value :: binary().
list_single_form_field(Var, Default, Options) ->
Value = form_field_value(Default),
#xmlel{
name = <<"field">>,
attrs = [{<<"var">>, Var}, {<<"type">>, <<"list-single">>}],
children = [Value | [ form_field_option(Option) || Option <- Options ]]
}.
-spec form_field_option(Option | {Label, Value}) -> exml:element() when
Option :: binary(), Label :: binary(), Value :: binary().
form_field_option({Label, Value}) ->
#xmlel{
name = <<"option">>,
attrs = [{<<"label">>, Label}],
children = [form_field_value(Value)]
};
form_field_option(Option) ->
form_field_option({Option, Option}).
-spec form_field_value(Value :: binary()) -> exml:element().
form_field_value(Value) ->
#xmlel{name = <<"value">>, children = [#xmlcdata{content = Value}]}.
-spec calculate_ts_from(integer(), non_neg_integer()) -> integer().
calculate_ts_from(Now, Days) ->
DaysInMicroSeconds = 86400000000 * Days, % 8.64e+10 microseconds in a day
Now - DaysInMicroSeconds.
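%% Worked example (a sketch, assuming DaysInMicroSeconds = 86400000000 * Days as
%% reconstructed above): with Now = 1600000000000000 and Days = 2,
%% the result is 1600000000000000 - 172800000000 = 1599827200000000.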
| null | https://raw.githubusercontent.com/esl/MongooseIM/55233591ce2cac2e6c050b4828083bd1e3983117/src/inbox/mod_inbox_utils.erl | erlang | -------------------------------------------------------------------
@doc
@end
-------------------------------------------------------------------
no unread for a user because he writes new messages which assumes he read all previous messages.
Helpers | ( C ) 2018 , Erlang - Solutions
Created : 30 . Jan 2018 13:22
-module(mod_inbox_utils).
-include("mod_inbox.hrl").
-include("jlib.hrl").
-type inbox_fun() :: fun((mongooseim:host_type(),
jid:jid(),
jid:jid(),
exml:element(),
mongoose_acc:t()) -> mod_inbox:count_res()).
DB Operations shared by mod_inbox_one2one and
-export([maybe_reset_unread_count/5,
reset_unread_count_to_zero/3,
maybe_write_to_inbox/6,
write_to_sender_inbox/5,
write_to_receiver_inbox/5,
clear_inbox/3,
get_reset_markers/1,
if_chat_marker_get_id/2,
has_chat_marker/1,
get_option_write_aff_changes/1,
get_option_remove_on_kicked/1,
extract_attr_jid/1,
maybe_binary_to_positive_integer/1,
encode_rsm_id/2,
decode_rsm_id/1,
binary_to_bool/1,
bool_to_binary/1,
build_inbox_entry_key/2,
build_inbox_result_elements/2,
build_entry_result_elements/2,
all_valid_boxes_for_query/1,
list_single_form_field/3,
calculate_ts_from/2
]).
-ignore_xref([get_reset_markers/1, if_chat_marker_get_id/2]).
-spec maybe_reset_unread_count(HostType :: mongooseim:host_type(),
User :: jid:jid(),
Remote :: jid:jid(),
Packet :: exml:element(),
Acc :: mongoose_acc:t()) -> ok.
maybe_reset_unread_count(HostType, User, Remote, Packet, Acc) ->
ResetMarkers = get_reset_markers(HostType),
case if_chat_marker_get_id(Packet, ResetMarkers) of
undefined ->
ok;
Id ->
TS = mongoose_acc:timestamp(Acc),
reset_unread_count(HostType, User, Remote, Id, TS)
end.
-spec reset_unread_count_to_zero(mongoose_acc:t(), jid:jid(), jid:jid()) -> ok.
reset_unread_count_to_zero(Acc, From, Remote) ->
TS = mongoose_acc:timestamp(Acc),
HostType = mongoose_acc:host_type(Acc),
InboxEntryKey = build_inbox_entry_key(From, Remote),
ok = mod_inbox_backend:reset_unread(HostType, InboxEntryKey, undefined, TS).
-spec reset_unread_count(HostType ::mongooseim:host_type(),
From :: jid:jid(),
Remote :: jid:jid(),
MsgId :: id(),
TS :: integer()) -> ok.
reset_unread_count(HostType, From, Remote, MsgId, TS) ->
InboxEntryKey = build_inbox_entry_key(From, Remote),
ok = mod_inbox_backend:reset_unread(HostType, InboxEntryKey, MsgId, TS).
-spec write_to_sender_inbox(HostType :: mongooseim:host_type(),
Sender :: jid:jid(),
Receiver :: jid:jid(),
Packet :: exml:element(),
Acc :: mongoose_acc:t()) -> ok.
write_to_sender_inbox(HostType, Sender, Receiver, Packet, Acc) ->
MsgId = get_msg_id(Packet),
Timestamp = mongoose_acc:timestamp(Acc),
Count = 0,
InboxEntryKey = build_inbox_entry_key(Sender, Receiver),
mod_inbox_backend:set_inbox(HostType, InboxEntryKey, Packet, Count, MsgId, Timestamp).
-spec write_to_receiver_inbox(HostType :: mongooseim:host_type(),
Sender :: jid:jid(),
Receiver :: jid:jid(),
Packet :: exml:element(),
Acc :: mongoose_acc:t()) -> ok | {ok, integer()}.
write_to_receiver_inbox(HostType, Sender, Receiver, Packet, Acc) ->
MsgId = get_msg_id(Packet),
Timestamp = mongoose_acc:timestamp(Acc),
InboxEntryKey = build_inbox_entry_key(Receiver, Sender),
mod_inbox_backend:set_inbox_incr_unread(HostType, InboxEntryKey,
Packet, MsgId, Timestamp).
-spec clear_inbox(HostType :: mongooseim:host_type(),
User :: jid:user(),
Server :: jid:server()) -> mod_inbox:write_res().
clear_inbox(HostType, User, Server) when is_binary(User) ->
LUser = jid:nodeprep(User),
LServer = jid:nameprep(Server),
ok = mod_inbox_backend:clear_inbox(HostType, LUser, LServer).
-spec get_reset_markers(HostType :: mongooseim:host_type()) -> list(marker()).
get_reset_markers(HostType) ->
gen_mod:get_module_opt(HostType, mod_inbox, reset_markers).
-spec if_chat_marker_get_id(Packet :: exml:element(),
Markers :: list(marker())) -> undefined | id().
if_chat_marker_get_id(Packet, Markers) when is_list(Markers) ->
Ids = [if_chat_marker_get_id(Packet, M) || M <- Markers],
Filtered = [El || El <- Ids, El /= undefined],
case Filtered of
[] ->
undefined;
[H | _] ->
H
end;
if_chat_marker_get_id(Packet, Marker) ->
case exml_query:paths(Packet, [{element, Marker}, {attr, <<"id">>}]) of
[Id] ->
Id;
_ ->
undefined
end.
-spec has_chat_marker(Packet :: exml:element()) -> boolean().
has_chat_marker(Packet) ->
mongoose_chat_markers:has_chat_markers(Packet).
-spec maybe_write_to_inbox(HostType, User, Remote, Packet, Acc, WriteF) ->
mod_inbox:count_res() when
HostType ::mongooseim:host_type(),
User :: jid:jid(),
Remote :: jid:jid(),
Packet :: exml:element(),
Acc :: mongoose_acc:t(),
is write_to_receiver_inbox/5 or write_to_sender_inbox/5
WriteF :: inbox_fun().
maybe_write_to_inbox(HostType, User, Remote, Packet, Acc, WriteF) ->
case has_chat_marker(Packet) of
true ->
ok;
false ->
Packet2 = fill_from_attr(Packet, User),
WriteF(HostType, User, Remote, Packet2, Acc)
end.
-spec get_msg_id(Msg :: exml:element()) -> binary().
get_msg_id(#xmlel{name = <<"message">>} = Msg) ->
exml_query:attr(Msg, <<"id">>, <<>>).
-spec fill_from_attr(Msg :: exml:element(), From :: jid:jid()) -> exml:element().
fill_from_attr(Msg = #xmlel{attrs = Attrs}, From) ->
case exml_query:attr(Msg, <<"from">>, undefined) of
undefined ->
FromBin = jid:to_binary(From),
Msg#xmlel{attrs = [{<<"from">>, FromBin} | Attrs]};
_ ->
Msg
end.
-spec get_option_write_aff_changes(HostType :: mongooseim:host_type()) -> boolean().
get_option_write_aff_changes(HostType) ->
gen_mod:get_module_opt(HostType, mod_inbox, aff_changes).
-spec get_option_remove_on_kicked(HostType :: mongooseim:host_type()) -> boolean().
get_option_remove_on_kicked(HostType) ->
gen_mod:get_module_opt(HostType, mod_inbox, remove_on_kicked).
extract_attr_jid(ResetStanza) ->
case exml_query:attr(ResetStanza, <<"jid">>) of
undefined ->
{error, <<"jid-required">>};
Value ->
case jid:from_binary(Value) of
error ->
{error, <<"invalid-jid">>};
JID -> JID
end
end.
-spec maybe_binary_to_positive_integer(binary()) -> non_neg_integer() | {error, atom()}.
maybe_binary_to_positive_integer(Bin) ->
try erlang:binary_to_integer(Bin) of
N when N >= 0 -> N;
_ -> {error, non_positive_integer}
catch error:badarg -> {error, 'NaN'}
end.
-spec encode_rsm_id(integer(), binary()) -> binary().
encode_rsm_id(Int, BinJid) ->
BinInt = integer_to_binary(Int),
EncodedJid = base64:encode(BinJid),
<<BinInt/binary, "/", EncodedJid/binary>>.
-spec decode_rsm_id(binary()) -> {integer(), binary()} | error.
decode_rsm_id(Bin) ->
case binary:split(Bin, <<"/">>) of
[BinInt, BinJid] ->
Int = maybe_binary_to_positive_integer(BinInt),
case Int of
Int when is_integer(Int) ->
Jid = base64:decode(BinJid),
{Int, Jid};
_ -> error
end;
_ -> error
end.
-spec binary_to_bool(binary()) -> true | false | error.
binary_to_bool(<<"true">>) -> true;
binary_to_bool(<<"false">>) -> false;
binary_to_bool(_) -> error.
-spec bool_to_binary(integer() | boolean()) -> binary() | error.
bool_to_binary(1) -> <<"true">>;
bool_to_binary(0) -> <<"false">>;
bool_to_binary(true) -> <<"true">>;
bool_to_binary(false) -> <<"false">>;
bool_to_binary(_) -> error.
build_inbox_entry_key(FromJid, ToJid) ->
{LUser, LServer} = jid:to_lus(FromJid),
ToBareJid = jid:nameprep(jid:to_bare_binary(ToJid)),
{LUser, LServer, ToBareJid}.
-spec build_inbox_result_elements(inbox_res(), integer()) -> [exml:element()].
build_inbox_result_elements(#{msg := Content, timestamp := Timestamp, unread_count := UnreadCount,
box := Box, muted_until := MutedUntil,
extra := Extra}, AccTS) ->
[ #xmlel{name = <<"forwarded">>, attrs = [{<<"xmlns">>, ?NS_FORWARD}],
children = [build_delay_el(Timestamp), Content]},
kv_to_el(<<"read">>, mod_inbox_utils:bool_to_binary(0 =:= UnreadCount)),
kv_to_el(<<"box">>, Box),
kv_to_el(<<"archive">>, is_archive(Box)),
kv_to_el(<<"mute">>, maybe_muted_until(MutedUntil, AccTS))
| Extra ].
-spec build_entry_result_elements(entry_properties(), integer()) -> [exml:element()].
build_entry_result_elements(#{box := Box, muted_until := MutedUntil,
unread_count := UnreadCount, extra := Extra}, AccTS) ->
[ kv_to_el(<<"read">>, mod_inbox_utils:bool_to_binary(0 =:= UnreadCount)),
kv_to_el(<<"box">>, Box), kv_to_el(<<"archive">>, is_archive(Box)),
kv_to_el(<<"mute">>, maybe_muted_until(MutedUntil, AccTS))
| Extra ].
-spec kv_to_el(binary(), binary()) -> exml:element().
kv_to_el(Key, Value) ->
#xmlel{name = Key, children = [#xmlcdata{content = Value}]}.
-spec is_archive(binary()) -> binary().
is_archive(<<"archive">>) -> <<"true">>;
is_archive(_) -> <<"false">>.
-spec maybe_muted_until(integer(), integer()) -> binary().
maybe_muted_until(0, _) -> <<"0">>;
maybe_muted_until(MutedUntil, CurrentTS) ->
case CurrentTS =< MutedUntil of
true -> list_to_binary(calendar:system_time_to_rfc3339(MutedUntil, [{offset, "Z"}, {unit, microsecond}]));
false -> <<"0">>
end.
-spec build_delay_el(Timestamp :: integer()) -> exml:element().
build_delay_el(Timestamp) ->
TS = calendar:system_time_to_rfc3339(Timestamp, [{offset, "Z"}, {unit, microsecond}]),
jlib:timestamp_to_xml(TS, undefined, undefined).
all_valid_boxes_for_query(HostType) ->
[<<"all">> | gen_mod:get_module_opt(HostType, mod_inbox, boxes)].
-spec list_single_form_field(Var :: binary(),
Default :: binary(),
Options :: [ Option | {Label, Value}]) -> exml:element() when
Option :: binary(), Label :: binary(), Value :: binary().
list_single_form_field(Var, Default, Options) ->
Value = form_field_value(Default),
#xmlel{
name = <<"field">>,
attrs = [{<<"var">>, Var}, {<<"type">>, <<"list-single">>}],
children = [Value | [ form_field_option(Option) || Option <- Options ]]
}.
-spec form_field_option(Option | {Label, Value}) -> exml:element() when
Option :: binary(), Label :: binary(), Value :: binary().
form_field_option({Label, Value}) ->
#xmlel{
name = <<"option">>,
attrs = [{<<"label">>, Label}],
children = [form_field_value(Value)]
};
form_field_option(Option) ->
form_field_option({Option, Option}).
-spec form_field_value(Value :: binary()) -> exml:element().
form_field_value(Value) ->
#xmlel{name = <<"value">>, children = [#xmlcdata{content = Value}]}.
-spec calculate_ts_from(integer(), non_neg_integer()) -> integer().
calculate_ts_from(Now, Days) ->
8.64e+10 microseconds in a day
Now - DaysInMicroSeconds.
|
78e47a546532e4309c5ce0f51e29cf45799944363d8c13f247cb5dde0e2dc347 | achirkin/vulkan | VK_AMD_shader_ballot.hs | {-# OPTIONS_HADDOCK not-home #-}
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE MagicHash #-}
{-# LANGUAGE PatternSynonyms #-}
{-# LANGUAGE Strict #-}
{-# LANGUAGE ViewPatterns #-}
module Graphics.Vulkan.Ext.VK_AMD_shader_ballot
(-- * Vulkan extension: @VK_AMD_shader_ballot@
-- |
--
-- supported: @vulkan@
--
-- contact:
--
-- author:
--
-- type: @device@
--
-- Extension number: @38@
VK_AMD_SHADER_BALLOT_SPEC_VERSION,
pattern VK_AMD_SHADER_BALLOT_SPEC_VERSION,
VK_AMD_SHADER_BALLOT_EXTENSION_NAME,
pattern VK_AMD_SHADER_BALLOT_EXTENSION_NAME)
where
import GHC.Ptr (Ptr (..))
import Graphics.Vulkan.Marshal
pattern VK_AMD_SHADER_BALLOT_SPEC_VERSION :: (Num a, Eq a) => a
pattern VK_AMD_SHADER_BALLOT_SPEC_VERSION = 1
type VK_AMD_SHADER_BALLOT_SPEC_VERSION = 1
pattern VK_AMD_SHADER_BALLOT_EXTENSION_NAME :: CString
pattern VK_AMD_SHADER_BALLOT_EXTENSION_NAME <-
(is_VK_AMD_SHADER_BALLOT_EXTENSION_NAME -> True)
where
VK_AMD_SHADER_BALLOT_EXTENSION_NAME
= _VK_AMD_SHADER_BALLOT_EXTENSION_NAME
{-# INLINE _VK_AMD_SHADER_BALLOT_EXTENSION_NAME #-}
_VK_AMD_SHADER_BALLOT_EXTENSION_NAME :: CString
_VK_AMD_SHADER_BALLOT_EXTENSION_NAME
= Ptr "VK_AMD_shader_ballot\NUL"#
{-# INLINE is_VK_AMD_SHADER_BALLOT_EXTENSION_NAME #-}
is_VK_AMD_SHADER_BALLOT_EXTENSION_NAME :: CString -> Bool
is_VK_AMD_SHADER_BALLOT_EXTENSION_NAME
= (EQ ==) . cmpCStrings _VK_AMD_SHADER_BALLOT_EXTENSION_NAME
type VK_AMD_SHADER_BALLOT_EXTENSION_NAME = "VK_AMD_shader_ballot"
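-- A minimal usage sketch (not part of the generated module); 'someName' is an
-- assumed CString obtained elsewhere, e.g. from an extension-properties query:
--
-- > isShaderBallot :: CString -> Bool
-- > isShaderBallot someName = case someName of
-- >   VK_AMD_SHADER_BALLOT_EXTENSION_NAME -> True
-- >   _                                   -> False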
| null | https://raw.githubusercontent.com/achirkin/vulkan/b2e0568c71b5135010f4bba939cd8dcf7a05c361/vulkan-api/src-gen/Graphics/Vulkan/Ext/VK_AMD_shader_ballot.hs | haskell | # LANGUAGE DataKinds #
# LANGUAGE MagicHash #
# LANGUAGE Strict #
# LANGUAGE ViewPatterns #
|
supported: @vulkan@
type: @device@
Extension number: @38@ | # OPTIONS_HADDOCK not - home #
# LANGUAGE PatternSynonyms #
module Graphics.Vulkan.Ext.VK_AMD_shader_ballot
* Vulkan extension : @VK_AMD_shader_ballot@
contact :
author :
VK_AMD_SHADER_BALLOT_SPEC_VERSION,
pattern VK_AMD_SHADER_BALLOT_SPEC_VERSION,
VK_AMD_SHADER_BALLOT_EXTENSION_NAME,
pattern VK_AMD_SHADER_BALLOT_EXTENSION_NAME)
where
import GHC.Ptr (Ptr (..))
import Graphics.Vulkan.Marshal
pattern VK_AMD_SHADER_BALLOT_SPEC_VERSION :: (Num a, Eq a) => a
pattern VK_AMD_SHADER_BALLOT_SPEC_VERSION = 1
type VK_AMD_SHADER_BALLOT_SPEC_VERSION = 1
pattern VK_AMD_SHADER_BALLOT_EXTENSION_NAME :: CString
pattern VK_AMD_SHADER_BALLOT_EXTENSION_NAME <-
(is_VK_AMD_SHADER_BALLOT_EXTENSION_NAME -> True)
where
VK_AMD_SHADER_BALLOT_EXTENSION_NAME
= _VK_AMD_SHADER_BALLOT_EXTENSION_NAME
# INLINE _ VK_AMD_SHADER_BALLOT_EXTENSION_NAME #
_VK_AMD_SHADER_BALLOT_EXTENSION_NAME :: CString
_VK_AMD_SHADER_BALLOT_EXTENSION_NAME
= Ptr "VK_AMD_shader_ballot\NUL"#
# INLINE is_VK_AMD_SHADER_BALLOT_EXTENSION_NAME #
is_VK_AMD_SHADER_BALLOT_EXTENSION_NAME :: CString -> Bool
is_VK_AMD_SHADER_BALLOT_EXTENSION_NAME
= (EQ ==) . cmpCStrings _VK_AMD_SHADER_BALLOT_EXTENSION_NAME
type VK_AMD_SHADER_BALLOT_EXTENSION_NAME = "VK_AMD_shader_ballot"
|
c1faa6355527959e93b24f2ffec8e6aa30b744a0b84bcb690154a998298fb84e | shortishly/haystack | haystack_balance_random.erl | Copyright ( c ) 2016 < >
%%
Licensed under the Apache License , Version 2.0 ( the " License " ) ;
%% you may not use this file except in compliance with the License.
%% You may obtain a copy of the License at
%%
%%     http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing, software
%% distributed under the License is distributed on an "AS IS" BASIS,
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
-module(haystack_balance_random).
-export([pick/2]).
pick(Hostname, Path) ->
haystack_metric:increment(
#{hostname => Hostname,
module => ?MODULE,
path => Path}),
case dns_node:find(dns_name:labels(Hostname), in, srv) of
not_found ->
not_found;
Matches ->
random:seed(
erlang:phash2(node()),
erlang:monotonic_time(),
erlang:unique_integer()),
(pick_one_from(Matches))#{path => Path}
end.
pick_one_from(#{data := #{target := Target, port := Port}}) ->
[#{data := Address} | _] = dns_node:find(Target, in, a),
#{host => inet:ntoa(Address), port => Port};
pick_one_from(Matches) ->
pick_one_from(lists:nth(random:uniform(length(Matches)), Matches)).
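%% Expected shape (a sketch with invented values, not part of the original
%% module): when an SRV record and a matching A record are registered,
%%   pick(<<"example.haystack">>, <<"/metrics">>)
%% returns a map like #{host => "10.0.0.1", port => 8080, path => <<"/metrics">>},
%% and not_found when no SRV record matches the hostname.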
| null | https://raw.githubusercontent.com/shortishly/haystack/7ff0d737dcd90adf60c861b2cf755aee1355e555/src/haystack_balance_random.erl | erlang |
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
-2.0
Unless required by applicable law or agreed to in writing, software
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License. | Copyright ( c ) 2016 < >
Licensed under the Apache License , Version 2.0 ( the " License " ) ;
distributed under the License is distributed on an " AS IS " BASIS ,
-module(haystack_balance_random).
-export([pick/2]).
pick(Hostname, Path) ->
haystack_metric:increment(
#{hostname => Hostname,
module => ?MODULE,
path => Path}),
case dns_node:find(dns_name:labels(Hostname), in, srv) of
not_found ->
not_found;
Matches ->
random:seed(
erlang:phash2(node()),
erlang:monotonic_time(),
erlang:unique_integer()),
(pick_one_from(Matches))#{path => Path}
end.
pick_one_from(#{data := #{target := Target, port := Port}}) ->
[#{data := Address} | _] = dns_node:find(Target, in, a),
#{host => inet:ntoa(Address), port => Port};
pick_one_from(Matches) ->
pick_one_from(lists:nth(random:uniform(length(Matches)), Matches)).
|
49d25ee6586b5fa75e6e41e71b480bcea509454a803349cbd4afc3fbf960b131 | kayceesrk/ocaml5-tutorial | msg_passing.ml | let r = Atomic.make None
let sender () = Atomic.set r (Some "Hello")
let rec receiver () =
match Atomic.get r with
| None -> Domain.cpu_relax (); receiver ()
| Some m -> print_endline m
let main () =
let s = Domain.spawn sender in
let d = Domain.spawn receiver in
Domain.join s;
Domain.join d
let _ = main ()
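(* A brief note on this sketch (not part of the tutorial file): the receiver
   spins on the atomic with Domain.cpu_relax until the sender's write becomes
   visible, so the program prints "Hello" and exits. Assuming an OCaml 5.x
   compiler, it should build with a plain
     ocamlopt msg_passing.ml -o msg_passing
   since Domain and Atomic live in the standard library. *)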
| null | https://raw.githubusercontent.com/kayceesrk/ocaml5-tutorial/36989cd5ffe42d7fc0b2710443b612f27bc9af5d/src/msg_passing.ml | ocaml | let r = Atomic.make None
let sender () = Atomic.set r (Some "Hello")
let rec receiver () =
match Atomic.get r with
| None -> Domain.cpu_relax (); receiver ()
| Some m -> print_endline m
let main () =
let s = Domain.spawn sender in
let d = Domain.spawn receiver in
Domain.join s;
Domain.join d
let _ = main ()
|
|
f26790bcc99a5176b521c019094519cbde4e7aab271c31111f5af607cbb702e6 | hipsleek/hipsleek | ex1.ml | type point2d = float * float
[@@deriving show]
let x = (2.0,3.0);;
print_endline (show_point2d x);;
(* ocamlfind ocamlc -package ppx_deriving.std -o ex1 ppx/ex1.ml *)
| null | https://raw.githubusercontent.com/hipsleek/hipsleek/596f7fa7f67444c8309da2ca86ba4c47d376618c/ppx/ex1.ml | ocaml | ocamlfind ocamlc -package ppx_deriving.std -o ex1 ppx/ex1.ml | type point2d = float * float
[@@deriving show]
let x = (2.0,3.0);;
print_endline (show_point2d x);;
|
528e65a3348a4a66eacdea00ff6db03b977c6933f6e884c0e092038ffcd3d409 | sadiqj/ocaml-esp32 | odoc_misc.ml | (**************************************************************************)
(* *)
(* OCaml *)
(* *)
(*             , projet Cristal, INRIA Rocquencourt                      *)
(* *)
(*   Copyright 2001 Institut National de Recherche en Informatique et     *)
(* en Automatique. *)
(* *)
(* All rights reserved. This file is distributed under the terms of *)
(*   the GNU Lesser General Public License version 2.1, with the          *)
(* special exception on linking described in the file LICENSE. *)
(* *)
(**************************************************************************)
let no_blanks s =
let len = String.length s in
let buf = Buffer.create len in
for i = 0 to len - 1 do
match s.[i] with
' ' | '\n' | '\t' | '\r' -> ()
| c -> Buffer.add_char buf c
done;
Buffer.contents buf
let input_file_as_string nom =
let chanin = open_in_bin nom in
let len = 1024 in
let s = Bytes.create len in
let buf = Buffer.create len in
let rec iter () =
try
let n = input chanin s 0 len in
if n = 0 then
()
else
(
Buffer.add_subbytes buf s 0 n;
iter ()
)
with
End_of_file -> ()
in
iter ();
close_in chanin;
Buffer.contents buf
let split_string s chars =
let len = String.length s in
let rec iter acc pos =
if pos >= len then
match acc with
"" -> []
| _ -> [acc]
else
if List.mem s.[pos] chars then
match acc with
"" -> iter "" (pos + 1)
| _ -> acc :: (iter "" (pos + 1))
else
iter (Printf.sprintf "%s%c" acc s.[pos]) (pos + 1)
in
iter "" 0
let split_with_blanks s = split_string s [' ' ; '\n' ; '\r' ; '\t' ]
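(* Illustrative examples (not in the original source): [split_string] drops
   empty fields, so leading or consecutive separators yield no "" entries:
     split_string "a,,b c" [','; ' ']   = ["a"; "b"; "c"]
     split_with_blanks "  foo\tbar\n"   = ["foo"; "bar"] *)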
let list_concat sep =
let rec iter = function
[] -> []
| [h] -> [h]
| h :: q -> h :: sep :: iter q
in
iter
let rec string_of_longident li =
match li with
| Longident.Lident s -> s
| Longident.Ldot(li, s) -> string_of_longident li ^ "." ^ s
| Longident.Lapply(l1, l2) ->
string_of_longident l1 ^ "(" ^ string_of_longident l2 ^ ")"
let get_fields type_expr =
let (fields, _) = Ctype.flatten_fields (Ctype.object_fields type_expr) in
List.fold_left
(fun acc -> fun (label, field_kind, typ) ->
match field_kind with
Types.Fabsent ->
acc
| _ ->
if label = "*dummy method*" then
acc
else
acc @ [label, typ]
)
[]
fields
let rec string_of_text t =
let rec iter t_ele =
match t_ele with
| Odoc_types.Raw s
| Odoc_types.Code s
| Odoc_types.CodePre s
| Odoc_types.Verbatim s -> s
| Odoc_types.Bold t
| Odoc_types.Italic t
| Odoc_types.Center t
| Odoc_types.Left t
| Odoc_types.Right t
| Odoc_types.Emphasize t -> string_of_text t
| Odoc_types.List l ->
(String.concat ""
(List.map (fun t -> "\n- "^(string_of_text t)) l))^
"\n"
| Odoc_types.Enum l ->
let rec f n = function
[] -> "\n"
| t :: q ->
"\n"^(string_of_int n)^". "^(string_of_text t)^
(f (n + 1) q)
in
f 1 l
| Odoc_types.Newline -> "\n"
| Odoc_types.Block t -> "\t"^(string_of_text t)^"\n"
| Odoc_types.Title (_, _, t) -> "\n"^(string_of_text t)^"\n"
| Odoc_types.Latex s -> "{% "^s^" %}"
| Odoc_types.Link (s, t) ->
"["^s^"]"^(string_of_text t)
| Odoc_types.Ref (_name, _, Some text) ->
Printf.sprintf "[%s]" (string_of_text text)
| Odoc_types.Ref (name, _, None) ->
iter (Odoc_types.Code name)
| Odoc_types.Superscript t ->
"^{"^(string_of_text t)^"}"
| Odoc_types.Subscript t ->
"^{"^(string_of_text t)^"}"
| Odoc_types.Module_list l ->
string_of_text
(list_concat (Odoc_types.Raw ", ")
(List.map (fun s -> Odoc_types.Code s) l)
)
| Odoc_types.Index_list ->
""
| Odoc_types.Custom (_, t) -> string_of_text t
| Odoc_types.Target _ -> ""
in
String.concat "" (List.map iter t)
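(* Illustrative example (not in the original source): text elements are
   flattened to plain strings, e.g.
     string_of_text [Odoc_types.Raw "see "; Odoc_types.Code "List.map"]
   evaluates to "see List.map". *)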
let string_of_author_list l =
match l with
[] ->
""
| _ ->
"* "^Odoc_messages.authors^":\n"^
(String.concat ", " l)^
"\n"
let string_of_version_opt v_opt =
match v_opt with
None -> ""
| Some v -> Odoc_messages.version^": "^v^"\n"
let string_of_since_opt s_opt =
match s_opt with
None -> ""
| Some s -> Odoc_messages.since^" "^s^"\n"
let string_of_raised_exceptions l =
match l with
[] -> ""
| (s, t) :: [] -> Odoc_messages.raises^" "^s^" "^(string_of_text t)^"\n"
| _ ->
Odoc_messages.raises^"\n"^
(String.concat ""
(List.map
(fun (ex, desc) -> "- "^ex^" "^(string_of_text desc)^"\n")
l
)
)^"\n"
let string_of_see (see_ref, t) =
let t_ref =
match see_ref with
Odoc_types.See_url s -> [ Odoc_types.Link (s, t) ]
| Odoc_types.See_file s -> (Odoc_types.Code s) :: (Odoc_types.Raw " ") :: t
| Odoc_types.See_doc s -> (Odoc_types.Italic [Odoc_types.Raw s]) :: (Odoc_types.Raw " ") :: t
in
string_of_text t_ref
let string_of_sees l =
match l with
[] -> ""
| see :: [] -> Odoc_messages.see_also^" "^(string_of_see see)^" \n"
| _ ->
Odoc_messages.see_also^"\n"^
(String.concat ""
(List.map
(fun see -> "- "^(string_of_see see)^"\n")
l
)
)^"\n"
let string_of_return_opt return_opt =
match return_opt with
None -> ""
| Some s -> Odoc_messages.returns^" "^(string_of_text s)^"\n"
let string_of_info i =
let module M = Odoc_types in
(match i.M.i_deprecated with
None -> ""
| Some d -> Odoc_messages.deprecated^"! "^(string_of_text d)^"\n")^
(match i.M.i_desc with
None -> ""
| Some d when d = [Odoc_types.Raw ""] -> ""
| Some d -> (string_of_text d)^"\n"
)^
(string_of_author_list i.M.i_authors)^
(string_of_version_opt i.M.i_version)^
(string_of_since_opt i.M.i_since)^
(string_of_raised_exceptions i.M.i_raised_exceptions)^
(string_of_return_opt i.M.i_return_value)
let apply_opt f v_opt =
match v_opt with
None -> None
| Some v -> Some (f v)
let string_of_date ?(absolute=false) ?(hour=true) d =
let add_0 s = if String.length s < 2 then "0"^s else s in
let t = (if absolute then Unix.gmtime else Unix.localtime) d in
(string_of_int (t.Unix.tm_year + 1900))^"-"^
(add_0 (string_of_int (t.Unix.tm_mon + 1)))^"-"^
(add_0 (string_of_int t.Unix.tm_mday))^
(
if hour then
" "^
(add_0 (string_of_int t.Unix.tm_hour))^":"^
(add_0 (string_of_int t.Unix.tm_min))
else
""
)
let current_date =
let time =
try
float_of_string (Sys.getenv "SOURCE_DATE_EPOCH")
with
Not_found -> Unix.time ()
in string_of_date ~absolute: true ~hour: false time
let rec text_list_concat sep l =
match l with
[] -> []
| [t] -> t
| t :: q ->
t @ (sep :: (text_list_concat sep q))
let rec text_no_title_no_list t =
let iter t_ele =
match t_ele with
| Odoc_types.Title (_,_,t) -> text_no_title_no_list t
| Odoc_types.List l
| Odoc_types.Enum l ->
(Odoc_types.Raw " ") ::
(text_list_concat
(Odoc_types.Raw ", ")
(List.map text_no_title_no_list l))
| Odoc_types.Raw _
| Odoc_types.Code _
| Odoc_types.CodePre _
| Odoc_types.Verbatim _
| Odoc_types.Ref _
| Odoc_types.Target _ -> [t_ele]
| Odoc_types.Newline -> [Odoc_types.Newline]
| Odoc_types.Block t -> [Odoc_types.Block (text_no_title_no_list t)]
| Odoc_types.Bold t -> [Odoc_types.Bold (text_no_title_no_list t)]
| Odoc_types.Italic t -> [Odoc_types.Italic (text_no_title_no_list t)]
| Odoc_types.Center t -> [Odoc_types.Center (text_no_title_no_list t)]
| Odoc_types.Left t -> [Odoc_types.Left (text_no_title_no_list t)]
| Odoc_types.Right t -> [Odoc_types.Right (text_no_title_no_list t)]
| Odoc_types.Emphasize t -> [Odoc_types.Emphasize (text_no_title_no_list t)]
| Odoc_types.Latex s -> [Odoc_types.Latex s]
| Odoc_types.Link (s, t) -> [Odoc_types.Link (s, (text_no_title_no_list t))]
| Odoc_types.Superscript t -> [Odoc_types.Superscript (text_no_title_no_list t)]
| Odoc_types.Subscript t -> [Odoc_types.Subscript (text_no_title_no_list t)]
| Odoc_types.Module_list l ->
list_concat (Odoc_types.Raw ", ")
(List.map
(fun s -> Odoc_types.Ref (s, Some Odoc_types.RK_module, None))
l
)
| Odoc_types.Index_list -> []
| Odoc_types.Custom (s,t) -> [Odoc_types.Custom (s, text_no_title_no_list t)]
in
List.flatten (List.map iter t)
let get_titles_in_text t =
let l = ref [] in
let rec iter_ele ele =
match ele with
| Odoc_types.Title (n,lopt,t) -> l := (n,lopt,t) :: !l
| Odoc_types.List l
| Odoc_types.Enum l -> List.iter iter_text l
| Odoc_types.Raw _
| Odoc_types.Code _
| Odoc_types.CodePre _
| Odoc_types.Verbatim _
| Odoc_types.Ref _ -> ()
| Odoc_types.Newline -> ()
| Odoc_types.Block t
| Odoc_types.Bold t
| Odoc_types.Italic t
| Odoc_types.Center t
| Odoc_types.Left t
| Odoc_types.Right t
| Odoc_types.Emphasize t -> iter_text t
| Odoc_types.Latex _ -> ()
| Odoc_types.Link (_, t)
| Odoc_types.Superscript t
| Odoc_types.Subscript t -> iter_text t
| Odoc_types.Module_list _ -> ()
| Odoc_types.Index_list -> ()
| Odoc_types.Custom (_, t) -> iter_text t
| Odoc_types.Target _ -> ()
and iter_text txt =
List.iter iter_ele txt
in
iter_text t;
List.rev !l
let text_concat (sep : Odoc_types.text) l =
let rec iter = function
[] -> []
| [last] -> last
| h :: q -> h @ sep @ (iter q)
in
iter l
(*********************************************************)
let rec get_before_dot s =
try
let len = String.length s in
let n = String.index s '.' in
if n + 1 >= len then
(* The dot is the last character *)
(true, s, "")
else
match s.[n+1] with
' ' | '\n' | '\r' | '\t' ->
(true, String.sub s 0 (n+1),
String.sub s (n+1) (len - n - 1))
| _ ->
let b, s2, s_after = get_before_dot (String.sub s (n + 1) (len - n - 1)) in
(b, (String.sub s 0 (n+1))^s2, s_after)
with
Not_found -> (false, s, "")
let rec first_sentence_text t =
match t with
[] -> (false, [], [])
| ele :: q ->
let (stop, ele2, ele3_opt) = first_sentence_text_ele ele in
if stop then
(stop, [ele2],
match ele3_opt with None -> q | Some e -> e :: q)
else
let (stop2, q2, rest) = first_sentence_text q in
(stop2, ele2 :: q2, rest)
and first_sentence_text_ele text_ele =
match text_ele with
| Odoc_types.Raw s ->
let b, s2, s_after = get_before_dot s in
(b, Odoc_types.Raw s2, Some (Odoc_types.Raw s_after))
| Odoc_types.Code _
| Odoc_types.CodePre _
| Odoc_types.Verbatim _ -> (false, text_ele, None)
| Odoc_types.Bold t ->
let (b, t2, t3) = first_sentence_text t in
(b, Odoc_types.Bold t2, Some (Odoc_types.Bold t3))
| Odoc_types.Italic t ->
let (b, t2, t3) = first_sentence_text t in
(b, Odoc_types.Italic t2, Some (Odoc_types.Italic t3))
| Odoc_types.Center t ->
let (b, t2, t3) = first_sentence_text t in
(b, Odoc_types.Center t2, Some (Odoc_types.Center t3))
| Odoc_types.Left t ->
let (b, t2, t3) = first_sentence_text t in
(b, Odoc_types.Left t2, Some (Odoc_types.Left t3))
| Odoc_types.Right t ->
let (b, t2, t3) = first_sentence_text t in
(b, Odoc_types.Right t2, Some (Odoc_types.Right t3))
| Odoc_types.Emphasize t ->
let (b, t2, t3) = first_sentence_text t in
(b, Odoc_types.Emphasize t2, Some (Odoc_types.Emphasize t3))
| Odoc_types.Block t ->
let (b, t2, t3) = first_sentence_text t in
(b, Odoc_types.Block t2, Some (Odoc_types.Block t3))
| Odoc_types.Title (n, l_opt, t) ->
let (b, t2, t3) = first_sentence_text t in
(b,
Odoc_types.Title (n, l_opt, t2),
Some (Odoc_types.Title (n, l_opt, t3)))
| Odoc_types.Newline ->
(true, Odoc_types.Raw "", Some Odoc_types.Newline)
| Odoc_types.List _
| Odoc_types.Enum _
| Odoc_types.Latex _
| Odoc_types.Link _
| Odoc_types.Ref _
| Odoc_types.Superscript _
| Odoc_types.Subscript _
| Odoc_types.Module_list _
| Odoc_types.Index_list -> (false, text_ele, None)
| Odoc_types.Custom _
| Odoc_types.Target _ -> (false, text_ele, None)
let first_sentence_of_text t =
let (_,t2,_) = first_sentence_text t in
t2
let first_sentence_and_rest_of_text t =
let (_,t1, t2) = first_sentence_text t in
(t1, t2)
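(* Illustrative example (not in the original source): the sentence boundary is
   a dot followed by a blank, so
     first_sentence_of_text [Odoc_types.Raw "Hello. World"]
   evaluates to [Odoc_types.Raw "Hello."], and
     first_sentence_and_rest_of_text [Odoc_types.Raw "Hello. World"]
   evaluates to ([Odoc_types.Raw "Hello."], [Odoc_types.Raw " World"]). *)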
let remove_ending_newline s =
let len = String.length s in
if len <= 0 then
s
else
match s.[len-1] with
'\n' -> String.sub s 0 (len-1)
| _ -> s
let search_string_backward ~pat =
let lenp = String.length pat in
let rec iter s =
let len = String.length s in
match compare len lenp with
-1 -> raise Not_found
| 0 -> if pat = s then 0 else raise Not_found
| _ ->
let pos = len - lenp in
let s2 = String.sub s pos lenp in
if s2 = pat then
pos
else
iter (String.sub s 0 pos)
in
fun ~s -> iter s
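(* Illustrative example (not in the original source): the scan starts from the
   end of the string, so when [pat] is a suffix of [s] the returned index is
   where that suffix starts:
     search_string_backward ~pat:"ab" ~s:"zab" = 1
   and Not_found is raised when the pattern cannot be found that way. *)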
(*********************************************************)
let create_index_lists elements string_of_ele =
let rec f current acc0 acc1 acc2 = function
[] -> (acc0 :: acc1) @ [acc2]
| ele :: q ->
let s = string_of_ele ele in
match s with
"" -> f current acc0 acc1 (acc2 @ [ele]) q
| _ ->
let first = Char.uppercase_ascii s.[0] in
match first with
'A' .. 'Z' ->
if current = first then
f current acc0 acc1 (acc2 @ [ele]) q
else
f first acc0 (acc1 @ [acc2]) [ele] q
| _ ->
f current (acc0 @ [ele]) acc1 acc2 q
in
f '_' [] [] [] elements
(*** for labels *)
let is_optional = Btype.is_optional
let label_name = Btype.label_name
let remove_option typ =
let rec iter t =
match t with
| Types.Tconstr(path, [ty], _) when Path.same path Predef.path_option -> ty.Types.desc
| Types.Tconstr _
| Types.Tvar _
| Types.Tunivar _
| Types.Tpoly _
| Types.Tarrow _
| Types.Ttuple _
| Types.Tobject _
| Types.Tfield _
| Types.Tnil
| Types.Tvariant _
| Types.Tpackage _ -> t
| Types.Tlink t2
| Types.Tsubst t2 -> iter t2.Types.desc
in
{ typ with Types.desc = iter typ.Types.desc }
| null | https://raw.githubusercontent.com/sadiqj/ocaml-esp32/33aad4ca2becb9701eb90d779c1b1183aefeb578/ocamldoc/odoc_misc.ml | ocaml | ************************************************************************
OCaml
en Automatique.
All rights reserved. This file is distributed under the terms of
special exception on linking described in the file LICENSE.
************************************************************************
*******************************************************
The dot is the last character
*******************************************************
** for labels | , projet Cristal , INRIA Rocquencourt
Copyright 2001 Institut National de Recherche en Informatique et
the GNU Lesser General Public License version 2.1 , with the
let no_blanks s =
let len = String.length s in
let buf = Buffer.create len in
for i = 0 to len - 1 do
match s.[i] with
' ' | '\n' | '\t' | '\r' -> ()
| c -> Buffer.add_char buf c
done;
Buffer.contents buf
let input_file_as_string nom =
let chanin = open_in_bin nom in
let len = 1024 in
let s = Bytes.create len in
let buf = Buffer.create len in
let rec iter () =
try
let n = input chanin s 0 len in
if n = 0 then
()
else
(
Buffer.add_subbytes buf s 0 n;
iter ()
)
with
End_of_file -> ()
in
iter ();
close_in chanin;
Buffer.contents buf
let split_string s chars =
let len = String.length s in
let rec iter acc pos =
if pos >= len then
match acc with
"" -> []
| _ -> [acc]
else
if List.mem s.[pos] chars then
match acc with
"" -> iter "" (pos + 1)
| _ -> acc :: (iter "" (pos + 1))
else
iter (Printf.sprintf "%s%c" acc s.[pos]) (pos + 1)
in
iter "" 0
let split_with_blanks s = split_string s [' ' ; '\n' ; '\r' ; '\t' ]
let list_concat sep =
let rec iter = function
[] -> []
| [h] -> [h]
| h :: q -> h :: sep :: iter q
in
iter
let rec string_of_longident li =
match li with
| Longident.Lident s -> s
| Longident.Ldot(li, s) -> string_of_longident li ^ "." ^ s
| Longident.Lapply(l1, l2) ->
string_of_longident l1 ^ "(" ^ string_of_longident l2 ^ ")"
let get_fields type_expr =
let (fields, _) = Ctype.flatten_fields (Ctype.object_fields type_expr) in
List.fold_left
(fun acc -> fun (label, field_kind, typ) ->
match field_kind with
Types.Fabsent ->
acc
| _ ->
if label = "*dummy method*" then
acc
else
acc @ [label, typ]
)
[]
fields
let rec string_of_text t =
let rec iter t_ele =
match t_ele with
| Odoc_types.Raw s
| Odoc_types.Code s
| Odoc_types.CodePre s
| Odoc_types.Verbatim s -> s
| Odoc_types.Bold t
| Odoc_types.Italic t
| Odoc_types.Center t
| Odoc_types.Left t
| Odoc_types.Right t
| Odoc_types.Emphasize t -> string_of_text t
| Odoc_types.List l ->
(String.concat ""
(List.map (fun t -> "\n- "^(string_of_text t)) l))^
"\n"
| Odoc_types.Enum l ->
let rec f n = function
[] -> "\n"
| t :: q ->
"\n"^(string_of_int n)^". "^(string_of_text t)^
(f (n + 1) q)
in
f 1 l
| Odoc_types.Newline -> "\n"
| Odoc_types.Block t -> "\t"^(string_of_text t)^"\n"
| Odoc_types.Title (_, _, t) -> "\n"^(string_of_text t)^"\n"
| Odoc_types.Latex s -> "{% "^s^" %}"
| Odoc_types.Link (s, t) ->
"["^s^"]"^(string_of_text t)
| Odoc_types.Ref (_name, _, Some text) ->
Printf.sprintf "[%s]" (string_of_text text)
| Odoc_types.Ref (name, _, None) ->
iter (Odoc_types.Code name)
| Odoc_types.Superscript t ->
"^{"^(string_of_text t)^"}"
| Odoc_types.Subscript t ->
"^{"^(string_of_text t)^"}"
| Odoc_types.Module_list l ->
string_of_text
(list_concat (Odoc_types.Raw ", ")
(List.map (fun s -> Odoc_types.Code s) l)
)
| Odoc_types.Index_list ->
""
| Odoc_types.Custom (_, t) -> string_of_text t
| Odoc_types.Target _ -> ""
in
String.concat "" (List.map iter t)
let string_of_author_list l =
match l with
[] ->
""
| _ ->
"* "^Odoc_messages.authors^":\n"^
(String.concat ", " l)^
"\n"
let string_of_version_opt v_opt =
match v_opt with
None -> ""
| Some v -> Odoc_messages.version^": "^v^"\n"
let string_of_since_opt s_opt =
match s_opt with
None -> ""
| Some s -> Odoc_messages.since^" "^s^"\n"
let string_of_raised_exceptions l =
match l with
[] -> ""
| (s, t) :: [] -> Odoc_messages.raises^" "^s^" "^(string_of_text t)^"\n"
| _ ->
Odoc_messages.raises^"\n"^
(String.concat ""
(List.map
(fun (ex, desc) -> "- "^ex^" "^(string_of_text desc)^"\n")
l
)
)^"\n"
let string_of_see (see_ref, t) =
let t_ref =
match see_ref with
Odoc_types.See_url s -> [ Odoc_types.Link (s, t) ]
| Odoc_types.See_file s -> (Odoc_types.Code s) :: (Odoc_types.Raw " ") :: t
| Odoc_types.See_doc s -> (Odoc_types.Italic [Odoc_types.Raw s]) :: (Odoc_types.Raw " ") :: t
in
string_of_text t_ref
let string_of_sees l =
match l with
[] -> ""
| see :: [] -> Odoc_messages.see_also^" "^(string_of_see see)^" \n"
| _ ->
Odoc_messages.see_also^"\n"^
(String.concat ""
(List.map
(fun see -> "- "^(string_of_see see)^"\n")
l
)
)^"\n"
let string_of_return_opt return_opt =
match return_opt with
None -> ""
| Some s -> Odoc_messages.returns^" "^(string_of_text s)^"\n"
let string_of_info i =
let module M = Odoc_types in
(match i.M.i_deprecated with
None -> ""
| Some d -> Odoc_messages.deprecated^"! "^(string_of_text d)^"\n")^
(match i.M.i_desc with
None -> ""
| Some d when d = [Odoc_types.Raw ""] -> ""
| Some d -> (string_of_text d)^"\n"
)^
(string_of_author_list i.M.i_authors)^
(string_of_version_opt i.M.i_version)^
(string_of_since_opt i.M.i_since)^
(string_of_raised_exceptions i.M.i_raised_exceptions)^
(string_of_return_opt i.M.i_return_value)
let apply_opt f v_opt =
match v_opt with
None -> None
| Some v -> Some (f v)
let string_of_date ?(absolute=false) ?(hour=true) d =
let add_0 s = if String.length s < 2 then "0"^s else s in
let t = (if absolute then Unix.gmtime else Unix.localtime) d in
(string_of_int (t.Unix.tm_year + 1900))^"-"^
(add_0 (string_of_int (t.Unix.tm_mon + 1)))^"-"^
(add_0 (string_of_int t.Unix.tm_mday))^
(
if hour then
" "^
(add_0 (string_of_int t.Unix.tm_hour))^":"^
(add_0 (string_of_int t.Unix.tm_min))
else
""
)
let current_date =
let time =
try
float_of_string (Sys.getenv "SOURCE_DATE_EPOCH")
with
Not_found -> Unix.time ()
in string_of_date ~absolute: true ~hour: false time
let rec text_list_concat sep l =
match l with
[] -> []
| [t] -> t
| t :: q ->
t @ (sep :: (text_list_concat sep q))
let rec text_no_title_no_list t =
let iter t_ele =
match t_ele with
| Odoc_types.Title (_,_,t) -> text_no_title_no_list t
| Odoc_types.List l
| Odoc_types.Enum l ->
(Odoc_types.Raw " ") ::
(text_list_concat
(Odoc_types.Raw ", ")
(List.map text_no_title_no_list l))
| Odoc_types.Raw _
| Odoc_types.Code _
| Odoc_types.CodePre _
| Odoc_types.Verbatim _
| Odoc_types.Ref _
| Odoc_types.Target _ -> [t_ele]
| Odoc_types.Newline -> [Odoc_types.Newline]
| Odoc_types.Block t -> [Odoc_types.Block (text_no_title_no_list t)]
| Odoc_types.Bold t -> [Odoc_types.Bold (text_no_title_no_list t)]
| Odoc_types.Italic t -> [Odoc_types.Italic (text_no_title_no_list t)]
| Odoc_types.Center t -> [Odoc_types.Center (text_no_title_no_list t)]
| Odoc_types.Left t -> [Odoc_types.Left (text_no_title_no_list t)]
| Odoc_types.Right t -> [Odoc_types.Right (text_no_title_no_list t)]
| Odoc_types.Emphasize t -> [Odoc_types.Emphasize (text_no_title_no_list t)]
| Odoc_types.Latex s -> [Odoc_types.Latex s]
| Odoc_types.Link (s, t) -> [Odoc_types.Link (s, (text_no_title_no_list t))]
| Odoc_types.Superscript t -> [Odoc_types.Superscript (text_no_title_no_list t)]
| Odoc_types.Subscript t -> [Odoc_types.Subscript (text_no_title_no_list t)]
| Odoc_types.Module_list l ->
list_concat (Odoc_types.Raw ", ")
(List.map
(fun s -> Odoc_types.Ref (s, Some Odoc_types.RK_module, None))
l
)
| Odoc_types.Index_list -> []
| Odoc_types.Custom (s,t) -> [Odoc_types.Custom (s, text_no_title_no_list t)]
in
List.flatten (List.map iter t)
let get_titles_in_text t =
let l = ref [] in
let rec iter_ele ele =
match ele with
| Odoc_types.Title (n,lopt,t) -> l := (n,lopt,t) :: !l
| Odoc_types.List l
| Odoc_types.Enum l -> List.iter iter_text l
| Odoc_types.Raw _
| Odoc_types.Code _
| Odoc_types.CodePre _
| Odoc_types.Verbatim _
| Odoc_types.Ref _ -> ()
| Odoc_types.Newline -> ()
| Odoc_types.Block t
| Odoc_types.Bold t
| Odoc_types.Italic t
| Odoc_types.Center t
| Odoc_types.Left t
| Odoc_types.Right t
| Odoc_types.Emphasize t -> iter_text t
| Odoc_types.Latex _ -> ()
| Odoc_types.Link (_, t)
| Odoc_types.Superscript t
| Odoc_types.Subscript t -> iter_text t
| Odoc_types.Module_list _ -> ()
| Odoc_types.Index_list -> ()
| Odoc_types.Custom (_, t) -> iter_text t
| Odoc_types.Target _ -> ()
and iter_text txt =
List.iter iter_ele txt
in
iter_text t;
List.rev !l
let text_concat (sep : Odoc_types.text) l =
let rec iter = function
[] -> []
| [last] -> last
| h :: q -> h @ sep @ (iter q)
in
iter l
let rec get_before_dot s =
try
let len = String.length s in
let n = String.index s '.' in
if n + 1 >= len then
(true, s, "")
else
match s.[n+1] with
' ' | '\n' | '\r' | '\t' ->
(true, String.sub s 0 (n+1),
String.sub s (n+1) (len - n - 1))
| _ ->
let b, s2, s_after = get_before_dot (String.sub s (n + 1) (len - n - 1)) in
(b, (String.sub s 0 (n+1))^s2, s_after)
with
Not_found -> (false, s, "")
let rec first_sentence_text t =
match t with
[] -> (false, [], [])
| ele :: q ->
let (stop, ele2, ele3_opt) = first_sentence_text_ele ele in
if stop then
(stop, [ele2],
match ele3_opt with None -> q | Some e -> e :: q)
else
let (stop2, q2, rest) = first_sentence_text q in
(stop2, ele2 :: q2, rest)
and first_sentence_text_ele text_ele =
match text_ele with
| Odoc_types.Raw s ->
let b, s2, s_after = get_before_dot s in
(b, Odoc_types.Raw s2, Some (Odoc_types.Raw s_after))
| Odoc_types.Code _
| Odoc_types.CodePre _
| Odoc_types.Verbatim _ -> (false, text_ele, None)
| Odoc_types.Bold t ->
let (b, t2, t3) = first_sentence_text t in
(b, Odoc_types.Bold t2, Some (Odoc_types.Bold t3))
| Odoc_types.Italic t ->
let (b, t2, t3) = first_sentence_text t in
(b, Odoc_types.Italic t2, Some (Odoc_types.Italic t3))
| Odoc_types.Center t ->
let (b, t2, t3) = first_sentence_text t in
(b, Odoc_types.Center t2, Some (Odoc_types.Center t3))
| Odoc_types.Left t ->
let (b, t2, t3) = first_sentence_text t in
(b, Odoc_types.Left t2, Some (Odoc_types.Left t3))
| Odoc_types.Right t ->
let (b, t2, t3) = first_sentence_text t in
(b, Odoc_types.Right t2, Some (Odoc_types.Right t3))
| Odoc_types.Emphasize t ->
let (b, t2, t3) = first_sentence_text t in
(b, Odoc_types.Emphasize t2, Some (Odoc_types.Emphasize t3))
| Odoc_types.Block t ->
let (b, t2, t3) = first_sentence_text t in
(b, Odoc_types.Block t2, Some (Odoc_types.Block t3))
| Odoc_types.Title (n, l_opt, t) ->
let (b, t2, t3) = first_sentence_text t in
(b,
Odoc_types.Title (n, l_opt, t2),
Some (Odoc_types.Title (n, l_opt, t3)))
| Odoc_types.Newline ->
(true, Odoc_types.Raw "", Some Odoc_types.Newline)
| Odoc_types.List _
| Odoc_types.Enum _
| Odoc_types.Latex _
| Odoc_types.Link _
| Odoc_types.Ref _
| Odoc_types.Superscript _
| Odoc_types.Subscript _
| Odoc_types.Module_list _
| Odoc_types.Index_list -> (false, text_ele, None)
| Odoc_types.Custom _
| Odoc_types.Target _ -> (false, text_ele, None)
let first_sentence_of_text t =
let (_,t2,_) = first_sentence_text t in
t2
let first_sentence_and_rest_of_text t =
let (_,t1, t2) = first_sentence_text t in
(t1, t2)
let remove_ending_newline s =
let len = String.length s in
if len <= 0 then
s
else
match s.[len-1] with
'\n' -> String.sub s 0 (len-1)
| _ -> s
let search_string_backward ~pat =
let lenp = String.length pat in
let rec iter s =
let len = String.length s in
match compare len lenp with
-1 -> raise Not_found
| 0 -> if pat = s then 0 else raise Not_found
| _ ->
let pos = len - lenp in
let s2 = String.sub s pos lenp in
if s2 = pat then
pos
else
iter (String.sub s 0 pos)
in
fun ~s -> iter s
let create_index_lists elements string_of_ele =
let rec f current acc0 acc1 acc2 = function
[] -> (acc0 :: acc1) @ [acc2]
| ele :: q ->
let s = string_of_ele ele in
match s with
"" -> f current acc0 acc1 (acc2 @ [ele]) q
| _ ->
let first = Char.uppercase_ascii s.[0] in
match first with
'A' .. 'Z' ->
if current = first then
f current acc0 acc1 (acc2 @ [ele]) q
else
f first acc0 (acc1 @ [acc2]) [ele] q
| _ ->
f current (acc0 @ [ele]) acc1 acc2 q
in
f '_' [] [] [] elements
let is_optional = Btype.is_optional
let label_name = Btype.label_name
let remove_option typ =
let rec iter t =
match t with
| Types.Tconstr(path, [ty], _) when Path.same path Predef.path_option -> ty.Types.desc
| Types.Tconstr _
| Types.Tvar _
| Types.Tunivar _
| Types.Tpoly _
| Types.Tarrow _
| Types.Ttuple _
| Types.Tobject _
| Types.Tfield _
| Types.Tnil
| Types.Tvariant _
| Types.Tpackage _ -> t
| Types.Tlink t2
| Types.Tsubst t2 -> iter t2.Types.desc
in
{ typ with Types.desc = iter typ.Types.desc }
|
ce4e7e860f90184cde9fc533b4809cbbed6e2de2adbf6f7461fa6aa0445b5bdb | geophf/1HaskellADay | Solution.hs | {-# LANGUAGE OverloadedStrings #-}
module Y2018.M07.D10.Solution where
{--
So, yesterday we explored JSON structure
(EVERYTHING IS A MAP! ... EXCEPT WHAT ISN'T, BUT OKAY!)
Today we're going to explore TWO (much smaller) JSON structures and transform
one to another.
Because, like XML, JSON is all about transformation, baybee!
--}
import Data.Aeson
import Data.Aeson.Encode.Pretty (encodePretty)
import qualified Data.ByteString.Lazy.Char8 as BL
import Data.Foldable (toList)
import Data.Map (Map)
import qualified Data.Map as Map
import Data.Maybe (fromJust)
import Data.Scientific (toRealFloat)
import Data.Time
-- the input JSON (being output from an analysis tool)
exDir, input :: FilePath
exDir = "Y2018/M07/D10/"
input = "output.json"
-- yeah, the input is the output of the analysis tool. Deal.
{--
The input is in the following format:
Map EntityName { wiki_info: Wiki, scores: [related articles], queryEnt: Int }
where:
--}
data Wiki = Wikt { wname, wtitle, wurl :: String,
wtext, wsum, wimg :: Maybe String }
deriving Show
instance FromJSON Wiki where
parseJSON (Object o) =
Wikt <$> o .: "Entity" <*> o .: "Page_title" <*> o .: "WikiURL"
<*> o .: "Full_text" <*> o .: "WikiSummary" <*> o .: "WikiImg"
type EntityName = String
type Input = Map EntityName Analysis
-- and our Analysis is a composition of the wiki info, scores, and query
data Analysis = Ysis { wikt :: Wiki, scores :: [Value], query :: Double }
deriving Show
{--
-
instance FromJSON Analysis where
parseJSON (Object o) =
Ysis <$> o .: "wiki_info" <*> o .: "scores" <*> o .: "query_entity_score"
-- So now we can parse our input JSON as a map
readInputJSON :: FilePath -> IO Input
readInputJSON = fmap (fromJust . decode) . BL.readFile
Now, here's a thing:
"big brother": {
"scores": "",
"query_entity_score": ""
},
wut. So much for well-structured JSON. How do we deal with this? I don't know.
I think what we have to do is to stage the parsing into ProtoAnalysis then
convert ProtoAnalysis to Analysis iff it has wiki_info. Let's try that.
--}
data ProtoAnalysis = PA { paWik :: Maybe Wiki, paScores, paQuery :: Value }
deriving Show
instance FromJSON ProtoAnalysis where
parseJSON (Object o) =
PA <$> o .:? "wiki_info" <*> o .: "scores" <*> o .: "query_entity_score"
readProto :: FilePath -> IO (Map EntityName ProtoAnalysis)
readProto = fmap (fromJust . decode) . BL.readFile
-- That worked. Now we convert a Proto to Analysis
proto2analysis :: ProtoAnalysis -> Maybe Analysis
proto2analysis (PA Nothing _ _) = Nothing
proto2analysis (PA (Just wikt) (Array arr) (Number n)) =
Just (Ysis wikt (toList arr) (toRealFloat n))
-- then we sequence the result
readInputJSON :: FilePath -> IO Input
readInputJSON = fmap (Map.mapMaybe proto2analysis) . readProto
{--
-
>>> jasn <- readInputJSON (exDir ++ input)
>>> take 4 (Map.keys jasn)
["arlington","arlington national cemetery","chuck prichard","danny russel"]
>>> length jasn
26
--}
| null | https://raw.githubusercontent.com/geophf/1HaskellADay/514792071226cd1e2ba7640af942667b85601006/exercises/HAD/Y2018/M07/D10/Solution.hs | haskell | # LANGUAGE OverloadedStrings #
}
the input JSON (being output from an analysis tool)
yeah, the input is the output of the analysis tool. Deal.
-
The input is in the following format:
Map EntityName { wiki_info: Wiki, scores: [related articles], queryEnt: Int }
where:
-
and out Analysis is a composition of the wiki info, scores, and query
So now we can parse our input JSON as a map
So now we can parse our input JSON as a map
}
then we sequence the result
} |
module Y2018.M07.D10.Solution where
-
So , yesterday we explored JSON structure
( EVERYTHING IS A MAP ! ... EXCEPT WHAT ISN'T , BUT OKAY ! )
Today we 're going to explore TWO ( much smaller ) JSON structures and transform
one to another .
Because , like XML , JSON is all about transformation , !
-
So, yesterday we explored JSON structure
(EVERYTHING IS A MAP! ... EXCEPT WHAT ISN'T, BUT OKAY!)
Today we're going to explore TWO (much smaller) JSON structures and transform
one to another.
Because, like XML, JSON is all about transformation, baybee!
import Data.Aeson
import Data.Aeson.Encode.Pretty (encodePretty)
import qualified Data.ByteString.Lazy.Char8 as BL
import Data.Foldable (toList)
import Data.Map (Map)
import qualified Data.Map as Map
import Data.Maybe (fromJust)
import Data.Scientific (toRealFloat)
import Data.Time
exDir, input :: FilePath
exDir = "Y2018/M07/D10/"
input = "output.json"
data Wiki = Wikt { wname, wtitle, wurl :: String,
wtext, wsum, wimg :: Maybe String }
deriving Show
instance FromJSON Wiki where
parseJSON (Object o) =
Wikt <$> o .: "Entity" <*> o .: "Page_title" <*> o .: "WikiURL"
<*> o .: "Full_text" <*> o .: "WikiSummary" <*> o .: "WikiImg"
type EntityName = String
type Input = Map EntityName Analysis
data Analysis = Ysis { wikt :: Wiki, scores :: [Value], query :: Double }
deriving Show
{--
instance FromJSON Analysis where
parseJSON (Object o) =
Ysis <$> o .: "wiki_info" <*> o .: "scores" <*> o .: "query_entity_score"
readInputJSON :: FilePath -> IO Input
readInputJSON = fmap (fromJust . decode) . BL.readFile
Now, here's a thing:
"big brother": {
"scores": "",
"query_entity_score": ""
},
wut. So much for well-structured JSON. How do we deal with this? I don't know.
I think what we have to do is to stage the parsing into ProtoAnalysis then
convert ProtoAnalysis to Analysis iff it has wiki_info. Let's try that.
--}
data ProtoAnalysis = PA { paWik :: Maybe Wiki, paScores, paQuery :: Value }
deriving Show
instance FromJSON ProtoAnalysis where
parseJSON (Object o) =
PA <$> o .:? "wiki_info" <*> o .: "scores" <*> o .: "query_entity_score"
readProto :: FilePath -> IO (Map EntityName ProtoAnalysis)
readProto = fmap (fromJust . decode) . BL.readFile
-- That worked. Now we convert a Proto to Analysis
proto2analysis :: ProtoAnalysis -> Maybe Analysis
proto2analysis (PA Nothing _ _) = Nothing
proto2analysis (PA (Just wikt) (Array arr) (Number n)) =
Just (Ysis wikt (toList arr) (toRealFloat n))
readInputJSON :: FilePath -> IO Input
readInputJSON = fmap (Map.mapMaybe proto2analysis) . readProto
{--
>>> jasn <- readInputJSON (exDir ++ input)
>>> take 4 (Map.keys jasn)
["arlington","arlington national cemetery","chuck prichard","danny russel"]
>>> length jasn
26
--}
|
899396d261514088c0dd9610d46f380dbff4b3260d7024ff9d279faef84f9d3f | haskell/cabal | GenUtils.hs | # LANGUAGE DeriveFoldable #
{-# LANGUAGE DeriveFunctor #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE DeriveTraversable #-}
{-# LANGUAGE FunctionalDependencies #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE ScopedTypeVariables #-}
module GenUtils where
import Control.Lens (each, ix, (%~), (&))
import Data.Char (toUpper)
import Data.Maybe (fromMaybe)
import Data.Proxy (Proxy (..))
import Data.Text (Text)
import GHC.Generics (Generic)
import qualified Data.Algorithm.Diff as Diff
import qualified Data.Map as Map
import qualified Data.Set as Set
import qualified Data.Text as T
import qualified Zinza as Z
-------------------------------------------------------------------------------
-- License List version
-------------------------------------------------------------------------------
-- | SPDX license list version
data SPDXLicenseListVersion
= SPDXLicenseListVersion_3_0
| SPDXLicenseListVersion_3_2
| SPDXLicenseListVersion_3_6
| SPDXLicenseListVersion_3_9
| SPDXLicenseListVersion_3_10
| SPDXLicenseListVersion_3_16
deriving (Eq, Ord, Show, Enum, Bounded)
allVers :: Set.Set SPDXLicenseListVersion
allVers = Set.fromList [minBound .. maxBound]
prettyVer :: SPDXLicenseListVersion -> Text
prettyVer SPDXLicenseListVersion_3_16 = "SPDX License List 3.16"
prettyVer SPDXLicenseListVersion_3_10 = "SPDX License List 3.10"
prettyVer SPDXLicenseListVersion_3_9 = "SPDX License List 3.9"
prettyVer SPDXLicenseListVersion_3_6 = "SPDX License List 3.6"
prettyVer SPDXLicenseListVersion_3_2 = "SPDX License List 3.2"
prettyVer SPDXLicenseListVersion_3_0 = "SPDX License List 3.0"
suffixVer :: SPDXLicenseListVersion -> String
suffixVer SPDXLicenseListVersion_3_16 = "_3_16"
suffixVer SPDXLicenseListVersion_3_10 = "_3_10"
suffixVer SPDXLicenseListVersion_3_9 = "_3_9"
suffixVer SPDXLicenseListVersion_3_6 = "_3_6"
suffixVer SPDXLicenseListVersion_3_2 = "_3_2"
suffixVer SPDXLicenseListVersion_3_0 = "_3_0"
-------------------------------------------------------------------------------
-- Per version
-------------------------------------------------------------------------------
data PerV a = PerV a a a a a a
deriving (Show, Functor, Foldable, Traversable)
class Functor f => Representable i f | f -> i where
index :: i -> f a -> a
tabulate :: (i -> a) -> f a
instance Representable SPDXLicenseListVersion PerV where
index SPDXLicenseListVersion_3_0 (PerV x _ _ _ _ _) = x
index SPDXLicenseListVersion_3_2 (PerV _ x _ _ _ _) = x
index SPDXLicenseListVersion_3_6 (PerV _ _ x _ _ _) = x
index SPDXLicenseListVersion_3_9 (PerV _ _ _ x _ _) = x
index SPDXLicenseListVersion_3_10 (PerV _ _ _ _ x _) = x
index SPDXLicenseListVersion_3_16 (PerV _ _ _ _ _ x) = x
tabulate f = PerV
(f SPDXLicenseListVersion_3_0)
(f SPDXLicenseListVersion_3_2)
(f SPDXLicenseListVersion_3_6)
(f SPDXLicenseListVersion_3_9)
(f SPDXLicenseListVersion_3_10)
(f SPDXLicenseListVersion_3_16)
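-- A quick illustration (added note, not from the original file) of how the
-- 'Representable' pair is meant to be used: 'tabulate' builds one value per
-- SPDX license list version and 'index' projects it back out.
--
-- >>> index SPDXLicenseListVersion_3_9 (tabulate prettyVer)
-- "SPDX License List 3.9"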
-------------------------------------------------------------------------------
-- Sorting
-------------------------------------------------------------------------------
newtype OrdT = OrdT Text deriving (Eq)
instance Ord OrdT where
compare (OrdT a) (OrdT b)
| a == b = EQ
| a `T.isPrefixOf` b = GT
| b `T.isPrefixOf` a = LT
| otherwise = compare a b
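-- Sketch of the intended ordering (illustrative, not in the original source):
-- an identifier sorts *after* any identifier that extends it, e.g.
--
-- >>> compare (OrdT "GPL-2.0") (OrdT "GPL-2.0-only")
-- GT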
-------------------------------------------------------------------------------
-- Commons
-------------------------------------------------------------------------------
header :: String
header = "-- This file is generated. See Makefile's spdx rule"
-------------------------------------------------------------------------------
-- Tools
-------------------------------------------------------------------------------
combine
:: forall a b tag. (Ord b, Ord tag, Enum tag, Bounded tag)
=> (a -> b)
-> (tag -> [a])
-> [(a, Set.Set tag)]
combine f t
= map addTags
$ foldr process [] [ minBound .. maxBound ]
where
unDiff :: Diff.Diff a -> a
unDiff (Diff.First a) = a
unDiff (Diff.Second a) = a
unDiff (Diff.Both _ a) = a -- important we prefer latter versions!
addTags :: a -> (a, Set.Set tag)
addTags a = (a, fromMaybe Set.empty (Map.lookup (f a) tags))
process :: tag -> [a] -> [a]
process tag as = map unDiff $ Diff.getDiffBy (\x y -> f x == f y) (t tag) as
tags :: Map.Map b (Set.Set tag)
tags = Map.fromListWith Set.union
[ (f a, Set.singleton tag)
| tag <- [ minBound .. maxBound ]
, a <- t tag
]
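-- Toy picture of 'combine' (added commentary; the two-constructor tag type
-- here is hypothetical): with versions A and B, where A lists ["x","y"] and
-- B lists ["y","z"], the result pairs "x" with {A}, "y" with {A,B} and "z"
-- with {B}, preferring B's spelling for shared entries.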
ordNubOn :: Ord b => (a -> b) -> [a] -> [a]
ordNubOn f = go Set.empty where
go _ [] = []
go past (a:as)
| b `Set.member` past = go past as
| otherwise = a : go (Set.insert b past) as
where
b = f a
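-- e.g. (illustrative): ordNubOn fst [(1,'a'),(1,'b'),(2,'c')] evaluates to
-- [(1,'a'),(2,'c')]; the first occurrence per key wins.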
textShow :: Text -> Text
textShow = T.pack . show
toConstructorName :: Text -> Text
toConstructorName t = t
& each %~ f
& ix 0 %~ toUpper
& special
where
f '.' = '_'
f '-' = '_'
f '+' = '\''
f c = c
special :: Text -> Text
special "0BSD" = "NullBSD"
special "389_exception" = "DS389_exception"
special u = u
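-- Examples (illustrative, derived from the rules above):
--
-- >>> toConstructorName "GPL-2.0+"
-- "GPL_2_0'"
-- >>> toConstructorName "0BSD"
-- "NullBSD"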
mkList :: [Text] -> Text
mkList [] = " []"
mkList (x:xs) =
" [ " <> x <> "\n"
<> foldMap (\x' -> " , " <> x' <> "\n") xs
<> " ]"
-------------------------------------------------------------------------------
-- Zinza inputs
-------------------------------------------------------------------------------
data Input = Input
{ inputLicenseIds :: Text
, inputLicenses :: [InputLicense]
, inputLicenseList_all :: Text
, inputLicenseList_perv :: PerV Text
}
deriving (Show, Generic)
instance Z.Zinza Input where
toType = Z.genericToTypeSFP
toValue = Z.genericToValueSFP
fromValue = Z.genericFromValueSFP
data InputLicense = InputLicense
{ ilConstructor :: Text
, ilId :: Text
, ilName :: Text
, ilIsOsiApproved :: Bool
, ilIsFsfLibre :: Bool
}
deriving (Show, Generic)
instance Z.Zinza InputLicense where
toType = Z.genericToTypeSFP
toValue = Z.genericToValueSFP
fromValue = Z.genericFromValueSFP
instance Z.Zinza a => Z.Zinza (PerV a) where
toType _ = Z.TyRecord $ Map.fromList
[ ("v" ++ suffixVer v, ("index " ++ show v, Z.toType (Proxy :: Proxy a)))
| v <- [ minBound .. maxBound ]
]
toValue x = Z.VRecord $ Map.fromList
[ ("v" ++ suffixVer v, Z.toValue (index v x))
| v <- [ minBound .. maxBound ]
]
fromValue = error "fromExpr @PerV not implemented"
| null | https://raw.githubusercontent.com/haskell/cabal/c21dbcd2a9d54962eb39f598dfce2d012ff7fd1c/cabal-dev-scripts/src/GenUtils.hs | haskell | # LANGUAGE DeriveFunctor #
# LANGUAGE DeriveGeneric #
# LANGUAGE DeriveTraversable #
# LANGUAGE OverloadedStrings #
# LANGUAGE ScopedTypeVariables #
-----------------------------------------------------------------------------
License List version
-----------------------------------------------------------------------------
-----------------------------------------------------------------------------
Per version
-----------------------------------------------------------------------------
-----------------------------------------------------------------------------
Sorting
-----------------------------------------------------------------------------
-----------------------------------------------------------------------------
-----------------------------------------------------------------------------
-----------------------------------------------------------------------------
Tools
-----------------------------------------------------------------------------
important we prefer latter versions!
-----------------------------------------------------------------------------
----------------------------------------------------------------------------- | # LANGUAGE DeriveFoldable #
{-# LANGUAGE FunctionalDependencies #-}
module GenUtils where
import Control.Lens (each, ix, (%~), (&))
import Data.Char (toUpper)
import Data.Maybe (fromMaybe)
import Data.Proxy (Proxy (..))
import Data.Text (Text)
import GHC.Generics (Generic)
import qualified Data.Algorithm.Diff as Diff
import qualified Data.Map as Map
import qualified Data.Set as Set
import qualified Data.Text as T
import qualified Zinza as Z
-- | SPDX license list version
data SPDXLicenseListVersion
= SPDXLicenseListVersion_3_0
| SPDXLicenseListVersion_3_2
| SPDXLicenseListVersion_3_6
| SPDXLicenseListVersion_3_9
| SPDXLicenseListVersion_3_10
| SPDXLicenseListVersion_3_16
deriving (Eq, Ord, Show, Enum, Bounded)
allVers :: Set.Set SPDXLicenseListVersion
allVers = Set.fromList [minBound .. maxBound]
prettyVer :: SPDXLicenseListVersion -> Text
prettyVer SPDXLicenseListVersion_3_16 = "SPDX License List 3.16"
prettyVer SPDXLicenseListVersion_3_10 = "SPDX License List 3.10"
prettyVer SPDXLicenseListVersion_3_9 = "SPDX License List 3.9"
prettyVer SPDXLicenseListVersion_3_6 = "SPDX License List 3.6"
prettyVer SPDXLicenseListVersion_3_2 = "SPDX License List 3.2"
prettyVer SPDXLicenseListVersion_3_0 = "SPDX License List 3.0"
suffixVer :: SPDXLicenseListVersion -> String
suffixVer SPDXLicenseListVersion_3_16 = "_3_16"
suffixVer SPDXLicenseListVersion_3_10 = "_3_10"
suffixVer SPDXLicenseListVersion_3_9 = "_3_9"
suffixVer SPDXLicenseListVersion_3_6 = "_3_6"
suffixVer SPDXLicenseListVersion_3_2 = "_3_2"
suffixVer SPDXLicenseListVersion_3_0 = "_3_0"
data PerV a = PerV a a a a a a
deriving (Show, Functor, Foldable, Traversable)
class Functor f => Representable i f | f -> i where
index :: i -> f a -> a
tabulate :: (i -> a) -> f a
instance Representable SPDXLicenseListVersion PerV where
index SPDXLicenseListVersion_3_0 (PerV x _ _ _ _ _) = x
index SPDXLicenseListVersion_3_2 (PerV _ x _ _ _ _) = x
index SPDXLicenseListVersion_3_6 (PerV _ _ x _ _ _) = x
index SPDXLicenseListVersion_3_9 (PerV _ _ _ x _ _) = x
index SPDXLicenseListVersion_3_10 (PerV _ _ _ _ x _) = x
index SPDXLicenseListVersion_3_16 (PerV _ _ _ _ _ x) = x
tabulate f = PerV
(f SPDXLicenseListVersion_3_0)
(f SPDXLicenseListVersion_3_2)
(f SPDXLicenseListVersion_3_6)
(f SPDXLicenseListVersion_3_9)
(f SPDXLicenseListVersion_3_10)
(f SPDXLicenseListVersion_3_16)
newtype OrdT = OrdT Text deriving (Eq)
instance Ord OrdT where
compare (OrdT a) (OrdT b)
| a == b = EQ
| a `T.isPrefixOf` b = GT
| b `T.isPrefixOf` a = LT
| otherwise = compare a b
-- Commons
header :: String
header = "-- This file is generated. See Makefile's spdx rule"
combine
:: forall a b tag. (Ord b, Ord tag, Enum tag, Bounded tag)
=> (a -> b)
-> (tag -> [a])
-> [(a, Set.Set tag)]
combine f t
= map addTags
$ foldr process [] [ minBound .. maxBound ]
where
unDiff :: Diff.Diff a -> a
unDiff (Diff.First a) = a
unDiff (Diff.Second a) = a
addTags :: a -> (a, Set.Set tag)
addTags a = (a, fromMaybe Set.empty (Map.lookup (f a) tags))
process :: tag -> [a] -> [a]
process tag as = map unDiff $ Diff.getDiffBy (\x y -> f x == f y) (t tag) as
tags :: Map.Map b (Set.Set tag)
tags = Map.fromListWith Set.union
[ (f a, Set.singleton tag)
| tag <- [ minBound .. maxBound ]
, a <- t tag
]
ordNubOn :: Ord b => (a -> b) -> [a] -> [a]
ordNubOn f = go Set.empty where
go _ [] = []
go past (a:as)
| b `Set.member` past = go past as
| otherwise = a : go (Set.insert b past) as
where
b = f a
textShow :: Text -> Text
textShow = T.pack . show
toConstructorName :: Text -> Text
toConstructorName t = t
& each %~ f
& ix 0 %~ toUpper
& special
where
f '.' = '_'
f '-' = '_'
f '+' = '\''
f c = c
special :: Text -> Text
special "0BSD" = "NullBSD"
special "389_exception" = "DS389_exception"
special u = u
mkList :: [Text] -> Text
mkList [] = " []"
mkList (x:xs) =
" [ " <> x <> "\n"
<> foldMap (\x' -> " , " <> x' <> "\n") xs
<> " ]"
-- Zinza inputs
data Input = Input
{ inputLicenseIds :: Text
, inputLicenses :: [InputLicense]
, inputLicenseList_all :: Text
, inputLicenseList_perv :: PerV Text
}
deriving (Show, Generic)
instance Z.Zinza Input where
toType = Z.genericToTypeSFP
toValue = Z.genericToValueSFP
fromValue = Z.genericFromValueSFP
data InputLicense = InputLicense
{ ilConstructor :: Text
, ilId :: Text
, ilName :: Text
, ilIsOsiApproved :: Bool
, ilIsFsfLibre :: Bool
}
deriving (Show, Generic)
instance Z.Zinza InputLicense where
toType = Z.genericToTypeSFP
toValue = Z.genericToValueSFP
fromValue = Z.genericFromValueSFP
instance Z.Zinza a => Z.Zinza (PerV a) where
toType _ = Z.TyRecord $ Map.fromList
[ ("v" ++ suffixVer v, ("index " ++ show v, Z.toType (Proxy :: Proxy a)))
| v <- [ minBound .. maxBound ]
]
toValue x = Z.VRecord $ Map.fromList
[ ("v" ++ suffixVer v, Z.toValue (index v x))
| v <- [ minBound .. maxBound ]
]
fromValue = error "fromExpr @PerV not implemented"
|
1ab4639464a427f614e7bdf6d55034ec818f6c74bfb726ae2bf0523f38cca089 | silkapp/rest | Import.hs | {-# LANGUAGE OverloadedStrings #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
module Restexample.Client.Test.Import where
import Rest.Client.Internal
import qualified Restexample.Client.Test as Test
import qualified Rest.Types.Void
type Identifier = String
readId :: Identifier -> [String]
readId x = ["it", showUrl x]
byIt ::
ApiStateC m => String -> m (ApiResponse Rest.Types.Void.Void ())
byIt string
= let rHeaders
= [(hAccept, "text/json"), (hContentType, "text/plain")]
request
= makeReq "GET" "v1.0.0"
[["test"], ["import"], ["it"], [showUrl string]]
[]
rHeaders
""
in doRequest fromJSON (const ()) request
do_ ::
ApiStateC m =>
Identifier -> m (ApiResponse Rest.Types.Void.Void ())
do_ import_
= let rHeaders
= [(hAccept, "text/json"), (hContentType, "text/plain")]
request
= makeReq "POST" "v1.0.0"
[["test"], ["import"], readId import_, ["do"]]
[]
rHeaders
""
in doRequest fromJSON (const ()) request | null | https://raw.githubusercontent.com/silkapp/rest/f0462fc36709407f236f57064d8e37c77bdf8a79/rest-example/client/src/Restexample/Client/Test/Import.hs | haskell | # LANGUAGE OverloadedStrings # | # OPTIONS_GHC -fno - warn - unused - imports #
module Restexample.Client.Test.Import where
import Rest.Client.Internal
import qualified Restexample.Client.Test as Test
import qualified Rest.Types.Void
type Identifier = String
readId :: Identifier -> [String]
readId x = ["it", showUrl x]
byIt ::
ApiStateC m => String -> m (ApiResponse Rest.Types.Void.Void ())
byIt string
= let rHeaders
= [(hAccept, "text/json"), (hContentType, "text/plain")]
request
= makeReq "GET" "v1.0.0"
[["test"], ["import"], ["it"], [showUrl string]]
[]
rHeaders
""
in doRequest fromJSON (const ()) request
do_ ::
ApiStateC m =>
Identifier -> m (ApiResponse Rest.Types.Void.Void ())
do_ import_
= let rHeaders
= [(hAccept, "text/json"), (hContentType, "text/plain")]
request
= makeReq "POST" "v1.0.0"
[["test"], ["import"], readId import_, ["do"]]
[]
rHeaders
""
in doRequest fromJSON (const ()) request |
bc2530dae642ae877293740a89c87532cacdef2107c1a58d70843bd7f640c2ae | jorinvo/letsdo.events | web.clj | (ns lde.web
(:require
[clojure.spec.alpha :as s]
[clojure.java.io :as io]
[reitit.ring :as ring]
[reitit.coercion.spec :as spec-coercion]
[reitit.ring.coercion :as ring-coericion]
[reitit.ring.middleware.parameters :refer [parameters-middleware]]
[reitit.ring.middleware.multipart :as multipart]
[ring.middleware.multipart-params.byte-array :refer [byte-array-store]]
[ring.middleware.keyword-params :refer [wrap-keyword-params]]
[ring.middleware.session :refer [wrap-session]]
[ring.middleware.session.cookie :refer [cookie-store]]
[ring.util.response :as response]
[lde.core.settings :as settings]
[lde.core.topic :as topic]
[lde.core.event :as event]
[lde.web.util :refer [image-mime-types]]
[lde.web.middleware :as middleware]
[lde.web.css :as css]
[lde.web.error :as error]
[lde.web.pages.topic :as topic-page]
[lde.web.pages.event :as event-page]
[lde.web.pages.home :as home]
[lde.web.pages.login :as login]
[lde.web.forms.topic :as topic-form]
[lde.web.forms.event :as event-form]
[clj-honeycomb.middleware.ring :refer [with-honeycomb-event]])
(:import [java.util.regex Pattern]))
(def cookie-expiration-in-seconds (* 30 24 60 60))
(defn req-str [s]
(and (string? s)
(< 0 (count s))))
(defn opt-date [s]
(and (string? s)
(or (empty? s)
(re-matches #"^\d{4}-\d{2}-\d{2}$" s))))
(defn opt-time [s]
(and (string? s)
(or (empty? s)
(re-matches #"^\d{2}:\d{2}$" s))))
(s/def ::max-attendees
(s/and string?
(s/or :empty empty?
:number #(re-matches #"^[1-9][0-9]*$" %))))
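;; Rough REPL sketch (illustrative only, not part of the original namespace):
;; (s/valid? ::max-attendees "")   ;=> true, the field may be left blank
;; (s/valid? ::max-attendees "25") ;=> true
;; (s/valid? ::max-attendees "0")  ;=> false, must be a positive integer string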
(defn file-max-size [size]
(fn [{b :bytes}]
(> size (count b))))
(s/def ::image
(s/and multipart/bytes-part
(file-max-size (* 5 1024 1024))
(s/or :empty-multipart-file (fn [{b :bytes}] (empty? b))
:multipart-img (fn [{t :content-type}] (contains? image-mime-types t)))))
(s/def ::name string?)
(s/def ::link string?)
(s/def ::password
(s/and string? (s/or :empty empty?
:strong-enough #(<= 8 (count %)))))
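;; Sketch (illustrative): an empty password string passes the spec, anything
;; else needs at least 8 characters:
;; (s/valid? ::password "")         ;=> true
;; (s/valid? ::password "hunter2")  ;=> false
;; (s/valid? ::password "correct horse battery staple") ;=> true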
(s/def ::login-form
(s/keys :req-un [:lde.config/email ::password]))
(s/def ::signup-form
(s/keys :req-un [::name :lde.config/email ::password ::link]))
(s/def ::goto string?)
(s/def ::goto-query
(s/keys :opt-un [::goto]))
(s/def ::token string?)
(s/def ::email-login-query
(s/keys :req-un [::token]
:opt-un [::goto]))
(s/def ::whats #{"upcoming" "new" "mine"})
(s/def ::overview-query (s/keys :opt-un [::whats]))
(s/def ::invite-form
(s/keys :req-un [:lde.config/email]))
(defn routes []
[["/css/main.css" {:get css/handler}]
["/js/script.js" {:get (fn [_] (-> (response/resource-response "public/js/script.js")
(update :body slurp)))}]
["/" {:get home/handler}]
["/login"
["" {:get {:handler login/handler
:parameters {:query ::goto-query}}
:post {:handler login/post-login
:parameters {:query ::goto-query
:form ::login-form}}}]
["/email" {:get {:handler login/email
:parameters {:query ::email-login-query}}}]
["/email-confirm" {:get login/email-confirm}]]
["/signup" {:get {:handler login/handler
:parameters {:query ::goto-query}}
:post {:handler login/post-signup
:parameters {:query ::goto-query
:form ::signup-form}}}]
["/logout" {:get {:handler login/logout
:parameters {:query ::goto-query}}}]
["/new" {:middleware [middleware/authorize-user]
:get topic-page/new
:post {:handler topic-form/post
:parameters {:multipart {:name req-str
:description string?
:type #(contains? topic/types (keyword %))
:visibility #(contains? topic/visibilities (keyword %))
:image ::image}}}}]
["/accept"
["" {:get (constantly (response/redirect "/" :permanent-redirect))}]
["/:topic" {:middleware [middleware/load-topic]
:get topic-form/accept-invite}]]
["/for"
["" {:get (constantly (response/redirect "/" :permanent-redirect))}]
["/:topic" {:middleware [middleware/load-topic
middleware/authorize-topic-read]}
["" {:get {:handler topic-page/overview
:parameters {:query ::overview-query}}}]
["/edit" {:middleware [middleware/authorize-topic-edit]
:get topic-page/edit
:post {:handler topic-form/post-edit
:parameters {:multipart {:name req-str
:description string?
:type #(contains? topic/types (keyword %))
:visibility #(contains? topic/visibilities (keyword %))
:image ::image
:delete-image string?}}}}]
["/delete" {:middleware [middleware/authorize-topic-edit]
:post topic-form/delete}]
["/join" {:get topic-form/accept-invite}]
["/invites"
["" {:middleware [middleware/authorize-topic-edit]
:get topic-page/list-invites
:post {:handler topic-form/post-invite
:parameters {:form ::invite-form}}}]
["/:invite/delete" {:post topic-form/post-delete-invite}]]
["/new" {:middleware [middleware/authorize-user]
:get event-page/new
:post {:handler event-form/post
:parameters {:multipart {:name req-str
:description req-str
:intention #(contains? event/intentions (keyword %))
:start-date opt-date
:start-time opt-time
:end-date opt-date
:end-time opt-time
:max-attendees ::max-attendees
:location string?
:image ::image}}}}]
["/about"
["" {:get (fn [{{t :topic} :path-params}]
(response/redirect (str "/for/" t) :permanent-redirect))}]
["/:event" {:middleware [middleware/load-event]}
["" {:get event-page/get}]
["/edit" {:middleware [middleware/authorize-event-edit]
:get event-page/edit
:post {:handler event-form/post-edit
:parameters {:multipart {:name req-str
:description req-str
:start-date opt-date
:start-time opt-time
:end-date opt-date
:end-time opt-time
:max-attendees ::max-attendees
:location string?
:image ::image
:delete-image string?}}}}]
["/organize" {:middleware [middleware/authorize-user]
:get event-form/organize
:post event-form/organize}]
["/join" {:middleware [middleware/authorize-user]
:get event-form/join
:post event-form/join}]
["/leave" {:middleware [middleware/authorize-user]
:get event-form/leave
:post event-form/leave}]
["/delete" {:middleware [middleware/authorize-event-edit]
:get event-form/delete
:post event-form/delete}]]]]]])
(defn make-context-middleware [ctx]
(fn [handler]
(fn [req]
(handler (assoc req :ctx ctx)))))
(defn make-session-middleware [ctx]
(let [store (cookie-store {:key (settings/get-cookie-secret ctx)})]
(fn [handler]
(wrap-session handler {:store store
:cookie-name "letsdoevents-session"
:cookie-attr {:domain (-> ctx :config :public-base-url io/as-url .getHost)
:secure true
:max-age cookie-expiration-in-seconds
:same-site :strict}}))))
(defn init [ctx]
(ring/ring-handler
(ring/router
(routes)
{:data {:coercion spec-coercion/coercion
:middleware [ring-coericion/coerce-exceptions-middleware
ring-coericion/coerce-request-middleware
ring-coericion/coerce-response-middleware
(multipart/create-multipart-middleware {:store (byte-array-store)})]}})
(ring/routes
(ring/redirect-trailing-slash-handler)
(ring/create-default-handler {:not-found (constantly (error/render {:status 404
:title "404 - Not found"}
ctx))
:method-not-allowed (constantly (error/render {:status 403
:title "403 - Method not allowed"}
ctx))}))
{:middleware [(when (-> ctx :config :honeycomb) with-honeycomb-event)
parameters-middleware
wrap-keyword-params
(make-session-middleware ctx)
(make-context-middleware ctx)]}))
| null | https://raw.githubusercontent.com/jorinvo/letsdo.events/4cd2a5d401d37524c0bac265f48923ab5f91b220/src/lde/web.clj | clojure | (ns lde.web
(:require
[clojure.spec.alpha :as s]
[clojure.java.io :as io]
[reitit.ring :as ring]
[reitit.coercion.spec :as spec-coercion]
[reitit.ring.coercion :as ring-coericion]
[reitit.ring.middleware.parameters :refer [parameters-middleware]]
[reitit.ring.middleware.multipart :as multipart]
[ring.middleware.multipart-params.byte-array :refer [byte-array-store]]
[ring.middleware.keyword-params :refer [wrap-keyword-params]]
[ring.middleware.session :refer [wrap-session]]
[ring.middleware.session.cookie :refer [cookie-store]]
[ring.util.response :as response]
[lde.core.settings :as settings]
[lde.core.topic :as topic]
[lde.core.event :as event]
[lde.web.util :refer [image-mime-types]]
[lde.web.middleware :as middleware]
[lde.web.css :as css]
[lde.web.error :as error]
[lde.web.pages.topic :as topic-page]
[lde.web.pages.event :as event-page]
[lde.web.pages.home :as home]
[lde.web.pages.login :as login]
[lde.web.forms.topic :as topic-form]
[lde.web.forms.event :as event-form]
[clj-honeycomb.middleware.ring :refer [with-honeycomb-event]])
(:import [java.util.regex Pattern]))
(def cookie-expiration-in-seconds (* 30 24 60 60))
(defn req-str [s]
(and (string? s)
(< 0 (count s))))
(defn opt-date [s]
(and (string? s)
(or (empty? s)
(re-matches #"^\d{4}-\d{2}-\d{2}$" s))))
(defn opt-time [s]
(and (string? s)
(or (empty? s)
(re-matches #"^\d{2}:\d{2}$" s))))
(s/def ::max-attendees
(s/and string?
(s/or :empty empty?
:number #(re-matches #"^[1-9][0-9]*$" %))))
(defn file-max-size [size]
(fn [{b :bytes}]
(> size (count b))))
(s/def ::image
(s/and multipart/bytes-part
(file-max-size (* 5 1024 1024))
(s/or :empty-multipart-file (fn [{b :bytes}] (empty? b))
:multipart-img (fn [{t :content-type}] (contains? image-mime-types t)))))
(s/def ::name string?)
(s/def ::link string?)
(s/def ::password
(s/and string? (s/or :empty empty?
:strong-enough #(<= 8 (count %)))))
(s/def ::login-form
(s/keys :req-un [:lde.config/email ::password]))
(s/def ::signup-form
(s/keys :req-un [::name :lde.config/email ::password ::link]))
(s/def ::goto string?)
(s/def ::goto-query
(s/keys :opt-un [::goto]))
(s/def ::token string?)
(s/def ::email-login-query
(s/keys :req-un [::token]
:opt-un [::goto]))
(s/def ::whats #{"upcoming" "new" "mine"})
(s/def ::overview-query (s/keys :opt-un [::whats]))
(s/def ::invite-form
(s/keys :req-un [:lde.config/email]))
(defn routes []
[["/css/main.css" {:get css/handler}]
["/js/script.js" {:get (fn [_] (-> (response/resource-response "public/js/script.js")
(update :body slurp)))}]
["/" {:get home/handler}]
["/login"
["" {:get {:handler login/handler
:parameters {:query ::goto-query}}
:post {:handler login/post-login
:parameters {:query ::goto-query
:form ::login-form}}}]
["/email" {:get {:handler login/email
:parameters {:query ::email-login-query}}}]
["/email-confirm" {:get login/email-confirm}]]
["/signup" {:get {:handler login/handler
:parameters {:query ::goto-query}}
:post {:handler login/post-signup
:parameters {:query ::goto-query
:form ::signup-form}}}]
["/logout" {:get {:handler login/logout
:parameters {:query ::goto-query}}}]
["/new" {:middleware [middleware/authorize-user]
:get topic-page/new
:post {:handler topic-form/post
:parameters {:multipart {:name req-str
:description string?
:type #(contains? topic/types (keyword %))
:visibility #(contains? topic/visibilities (keyword %))
:image ::image}}}}]
["/accept"
["" {:get (constantly (response/redirect "/" :permanent-redirect))}]
["/:topic" {:middleware [middleware/load-topic]
:get topic-form/accept-invite}]]
["/for"
["" {:get (constantly (response/redirect "/" :permanent-redirect))}]
["/:topic" {:middleware [middleware/load-topic
middleware/authorize-topic-read]}
["" {:get {:handler topic-page/overview
:parameters {:query ::overview-query}}}]
["/edit" {:middleware [middleware/authorize-topic-edit]
:get topic-page/edit
:post {:handler topic-form/post-edit
:parameters {:multipart {:name req-str
:description string?
:type #(contains? topic/types (keyword %))
:visibility #(contains? topic/visibilities (keyword %))
:image ::image
:delete-image string?}}}}]
["/delete" {:middleware [middleware/authorize-topic-edit]
:post topic-form/delete}]
["/join" {:get topic-form/accept-invite}]
["/invites"
["" {:middleware [middleware/authorize-topic-edit]
:get topic-page/list-invites
:post {:handler topic-form/post-invite
:parameters {:form ::invite-form}}}]
["/:invite/delete" {:post topic-form/post-delete-invite}]]
["/new" {:middleware [middleware/authorize-user]
:get event-page/new
:post {:handler event-form/post
:parameters {:multipart {:name req-str
:description req-str
:intention #(contains? event/intentions (keyword %))
:start-date opt-date
:start-time opt-time
:end-date opt-date
:end-time opt-time
:max-attendees ::max-attendees
:location string?
:image ::image}}}}]
["/about"
["" {:get (fn [{{t :topic} :path-params}]
(response/redirect (str "/for/" t) :permanent-redirect))}]
["/:event" {:middleware [middleware/load-event]}
["" {:get event-page/get}]
["/edit" {:middleware [middleware/authorize-event-edit]
:get event-page/edit
:post {:handler event-form/post-edit
:parameters {:multipart {:name req-str
:description req-str
:start-date opt-date
:start-time opt-time
:end-date opt-date
:end-time opt-time
:max-attendees ::max-attendees
:location string?
:image ::image
:delete-image string?}}}}]
["/organize" {:middleware [middleware/authorize-user]
:get event-form/organize
:post event-form/organize}]
["/join" {:middleware [middleware/authorize-user]
:get event-form/join
:post event-form/join}]
["/leave" {:middleware [middleware/authorize-user]
:get event-form/leave
:post event-form/leave}]
["/delete" {:middleware [middleware/authorize-event-edit]
:get event-form/delete
:post event-form/delete}]]]]]])
(defn make-context-middleware [ctx]
(fn [handler]
(fn [req]
(handler (assoc req :ctx ctx)))))
(defn make-session-middleware [ctx]
(let [store (cookie-store {:key (settings/get-cookie-secret ctx)})]
(fn [handler]
(wrap-session handler {:store store
:cookie-name "letsdoevents-session"
:cookie-attr {:domain (-> ctx :config :public-base-url io/as-url .getHost)
:secure true
:max-age cookie-expiration-in-seconds
:same-site :strict}}))))
(defn init [ctx]
(ring/ring-handler
(ring/router
(routes)
{:data {:coercion spec-coercion/coercion
:middleware [ring-coericion/coerce-exceptions-middleware
ring-coericion/coerce-request-middleware
ring-coericion/coerce-response-middleware
(multipart/create-multipart-middleware {:store (byte-array-store)})]}})
(ring/routes
(ring/redirect-trailing-slash-handler)
(ring/create-default-handler {:not-found (constantly (error/render {:status 404
:title "404 - Not found"}
ctx))
:method-not-allowed (constantly (error/render {:status 403
:title "403 - Method not allowed"}
ctx))}))
{:middleware [(when (-> ctx :config :honeycomb) with-honeycomb-event)
parameters-middleware
wrap-keyword-params
(make-session-middleware ctx)
(make-context-middleware ctx)]}))
|
|
2bd5751c327ff51e99b9ab2a49fe171d1d03c775a13f3e6bf657e6c0fefc13dd | Octachron/olivine | wayland.ml | type display
type surface
let wl_display: display Ctypes.structure Ctypes.typ =
Ctypes.structure "wl_display"
let wl_surface: surface Ctypes.structure Ctypes.typ =
Ctypes.structure "wl_surface"
| null | https://raw.githubusercontent.com/Octachron/olivine/e93df595ad1e8bad5a8af689bac7d150753ab9fb/wsl/wayland.ml | ocaml | type display
type surface
let wl_display: display Ctypes.structure Ctypes.typ =
Ctypes.structure "wl_display"
let wl_surface: surface Ctypes.structure Ctypes.typ =
Ctypes.structure "wl_surface"
|
|
76eee2d7c08621ca40aa1d8aca3c6ed3cd7c01001a406d3379af8309ab0d0604 | gedge-platform/gedge-platform | lager_handler_watcher.erl | Copyright ( c ) 2011 - 2012 Basho Technologies , Inc. All Rights Reserved .
%%
%% This file is provided to you under the Apache License,
%% Version 2.0 (the "License"); you may not use this file
%% except in compliance with the License. You may obtain
%% a copy of the License at
%%
%%   http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing,
%% software distributed under the License is distributed on an
%% "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
%% KIND, either express or implied. See the License for the
%% specific language governing permissions and limitations
%% under the License.
%% @doc A process that does a gen_event:add_sup_handler and attempts to re-add
%% event handlers when they exit.
%% @private
-module(lager_handler_watcher).
-behaviour(gen_server).
-include("lager.hrl").
-ifdef(TEST).
-include_lib("eunit/include/eunit.hrl").
-export([pop_until/2]).
-endif.
%% callbacks
-export([init/1, handle_call/3, handle_cast/2, handle_info/2, terminate/2,
code_change/3]).
-export([start_link/3, start/3]).
-record(state, {
module :: atom(),
config :: any(),
sink :: pid() | atom()
}).
start_link(Sink, Module, Config) ->
gen_server:start_link(?MODULE, [Sink, Module, Config], []).
start(Sink, Module, Config) ->
gen_server:start(?MODULE, [Sink, Module, Config], []).
init([Sink, Module, Config]) ->
process_flag(trap_exit, true),
install_handler(Sink, Module, Config),
{ok, #state{sink=Sink, module=Module, config=Config}}.
handle_call(_Call, _From, State) ->
{reply, ok, State}.
handle_cast(_Request, State) ->
{noreply, State}.
handle_info({gen_event_EXIT, Module, normal}, #state{module=Module} = State) ->
{stop, normal, State};
handle_info({gen_event_EXIT, Module, shutdown}, #state{module=Module} = State) ->
{stop, normal, State};
handle_info({gen_event_EXIT, Module, {'EXIT', {kill_me, [_KillerHWM, KillerReinstallAfter]}}},
#state{module=Module, sink=Sink, config = Config} = State) ->
%% Brutally kill the manager but stay alive to restore settings.
%%
%% SinkPid here means the gen_event process. Handlers *all* live inside the
%% same gen_event process space, so when the Pid is killed, *all* of the
%% pending log messages in its mailbox will die too.
SinkPid = whereis(Sink),
unlink(SinkPid),
{message_queue_len, Len} = process_info(SinkPid, message_queue_len),
error_logger:error_msg("Killing sink ~p, current message_queue_len:~p~n", [Sink, Len]),
exit(SinkPid, kill),
_ = timer:apply_after(KillerReinstallAfter, lager_app, start_handler, [Sink, Module, Config]),
{stop, normal, State};
handle_info({gen_event_EXIT, Module, Reason}, #state{module=Module,
config=Config, sink=Sink} = State) ->
case lager:log(error, self(), "Lager event handler ~p exited with reason ~s",
[Module, error_logger_lager_h:format_reason(Reason)]) of
ok ->
install_handler(Sink, Module, Config);
{error, _} ->
%% lager is not working, so installing a handler won't work
ok
end,
{noreply, State};
handle_info(reinstall_handler, #state{module=Module, config=Config, sink=Sink} = State) ->
install_handler(Sink, Module, Config),
{noreply, State};
handle_info({reboot, Sink}, State) ->
_ = lager_app:boot(Sink),
{noreply, State};
handle_info(stop, State) ->
{stop, normal, State};
handle_info({'EXIT', _Pid, killed}, #state{module=Module, config=Config, sink=Sink} = State) ->
Tmr = application:get_env(lager, killer_reinstall_after, 5000),
_ = timer:apply_after(Tmr, lager_app, start_handler, [Sink, Module, Config]),
{stop, normal, State};
handle_info(_Info, State) ->
{noreply, State}.
terminate(_Reason, _State) ->
ok.
code_change(_OldVsn, State, _Extra) ->
{ok, State}.
%% internal
install_handler(Sink, lager_backend_throttle, Config) ->
%% The lager_backend_throttle needs to know to which sink it is
%% attached, hence this admittedly ugly workaround. Handlers are
%% sensitive to the structure of the configuration sent to `init',
%% sadly, so it's not trivial to add a configuration item to be
%% ignored to backends without breaking 3rd party handlers.
install_handler2(Sink, lager_backend_throttle, [{sink, Sink}|Config]);
install_handler(Sink, Module, Config) ->
install_handler2(Sink, Module, Config).
%% private
install_handler2(Sink, Module, Config) ->
case gen_event:add_sup_handler(Sink, Module, Config) of
ok ->
?INT_LOG(debug, "Lager installed handler ~p into ~p", [Module, Sink]),
lager:update_loglevel_config(Sink),
ok;
{error, {fatal, Reason}} ->
?INT_LOG(error, "Lager fatally failed to install handler ~p into"
" ~p, NOT retrying: ~p", [Module, Sink, Reason]),
%% tell ourselves to stop
self() ! stop,
ok;
Error ->
%% try to reinstall it later
?INT_LOG(error, "Lager failed to install handler ~p into"
" ~p, retrying later : ~p", [Module, Sink, Error]),
erlang:send_after(5000, self(), reinstall_handler),
ok
end.
-ifdef(TEST).
from_now(Seconds) ->
{Mega, Secs, Micro} = os:timestamp(),
{Mega, Secs + Seconds, Micro}.
reinstall_on_initial_failure_test_() ->
{timeout, 60000,
[
fun() ->
error_logger:tty(false),
application:load(lager),
application:set_env(lager, handlers, [{lager_test_backend, info}, {lager_crash_backend, [from_now(2), undefined]}]),
application:set_env(lager, error_logger_redirect, false),
application:unset_env(lager, crash_log),
lager:start(),
try
{_Level, _Time, Message, _Metadata} = lager_test_backend:pop(),
?assertMatch("Lager failed to install handler lager_crash_backend into lager_event, retrying later :"++_, lists:flatten(Message)),
timer:sleep(6000),
lager_test_backend:flush(),
?assertEqual(0, lager_test_backend:count()),
?assert(lists:member(lager_crash_backend, gen_event:which_handlers(lager_event)))
after
application:stop(lager),
application:stop(goldrush),
error_logger:tty(true)
end
end
]
}.
reinstall_on_runtime_failure_test_() ->
{timeout, 60000,
[
fun() ->
error_logger:tty(false),
application:load(lager),
application:set_env(lager, handlers, [{lager_test_backend, info}, {lager_crash_backend, [undefined, from_now(5)]}]),
application:set_env(lager, error_logger_redirect, false),
application:unset_env(lager, crash_log),
lager:start(),
try
?assert(lists:member(lager_crash_backend, gen_event:which_handlers(lager_event))),
timer:sleep(6000),
pop_until("Lager event handler lager_crash_backend exited with reason crash", fun lists:flatten/1),
pop_until("Lager failed to install handler lager_crash_backend into lager_event, retrying later",
fun(Msg) -> string:substr(lists:flatten(Msg), 1, 84) end),
?assertEqual(false, lists:member(lager_crash_backend, gen_event:which_handlers(lager_event)))
after
application:stop(lager),
application:stop(goldrush),
error_logger:tty(true)
end
end
]
}.
reinstall_handlers_after_killer_hwm_test_() ->
{timeout, 60000,
[
fun() ->
error_logger:tty(false),
application:load(lager),
application:set_env(lager, handlers, [{lager_manager_killer, [1000, 5000]}]),
application:set_env(lager, error_logger_redirect, false),
application:set_env(lager, killer_reinstall_after, 5000),
application:unset_env(lager, crash_log),
lager:start(),
lager:trace_file("foo", [{foo, "bar"}], error),
L = length(gen_event:which_handlers(lager_event)),
try
lager_manager_killer:kill_me(),
timer:sleep(6000),
?assertEqual(L, length(gen_event:which_handlers(lager_event))),
file:delete("foo")
after
application:stop(lager),
application:stop(goldrush),
error_logger:tty(true)
end
end
]
}.
pop_until(String, Fun) ->
try_backend_pop(lager_test_backend:pop(), String, Fun).
try_backend_pop(undefined, String, _Fun) ->
throw("Not found: " ++ String);
try_backend_pop({_Severity, _Date, Msg, _Metadata}, String, Fun) ->
case Fun(Msg) of
String ->
ok;
_ ->
try_backend_pop(lager_test_backend:pop(), String, Fun)
end.
-endif.
| null | https://raw.githubusercontent.com/gedge-platform/gedge-platform/97c1e87faf28ba2942a77196b6be0a952bff1c3e/gs-broker/broker-server/deps/lager/src/lager_handler_watcher.erl | erlang |
Version 2.0 (the "License"); you may not use this file
a copy of the License at
-2.0
Unless required by applicable law or agreed to in writing,
KIND, either express or implied. See the License for the
specific language governing permissions and limitations
under the License.
@doc A process that does a gen_event:add_sup_handler and attempts to re-add
event handlers when they exit.
callbacks
Brutally kill the manager but stay alive to restore settings.
pending log messages in its mailbox will die too.
lager is not working, so installing a handler won't work
internal
The lager_backend_throttle needs to know to which sink it is
attached, hence this admittedly ugly workaround. Handlers are
sensitive to the structure of the configuration sent to `init',
sadly, so it's not trivial to add a configuration item to be
private
tell ourselves to stop
try to reinstall it later | Copyright ( c ) 2011 - 2012 Basho Technologies , Inc. All Rights Reserved .
This file is provided to you under the Apache License ,
except in compliance with the License . You may obtain
software distributed under the License is distributed on an
" AS IS " BASIS , WITHOUT WARRANTIES OR CONDITIONS OF ANY
@private
-module(lager_handler_watcher).
-behaviour(gen_server).
-include("lager.hrl").
-ifdef(TEST).
-include_lib("eunit/include/eunit.hrl").
-export([pop_until/2]).
-endif.
-export([init/1, handle_call/3, handle_cast/2, handle_info/2, terminate/2,
code_change/3]).
-export([start_link/3, start/3]).
-record(state, {
module :: atom(),
config :: any(),
sink :: pid() | atom()
}).
start_link(Sink, Module, Config) ->
gen_server:start_link(?MODULE, [Sink, Module, Config], []).
start(Sink, Module, Config) ->
gen_server:start(?MODULE, [Sink, Module, Config], []).
init([Sink, Module, Config]) ->
process_flag(trap_exit, true),
install_handler(Sink, Module, Config),
{ok, #state{sink=Sink, module=Module, config=Config}}.
handle_call(_Call, _From, State) ->
{reply, ok, State}.
handle_cast(_Request, State) ->
{noreply, State}.
handle_info({gen_event_EXIT, Module, normal}, #state{module=Module} = State) ->
{stop, normal, State};
handle_info({gen_event_EXIT, Module, shutdown}, #state{module=Module} = State) ->
{stop, normal, State};
handle_info({gen_event_EXIT, Module, {'EXIT', {kill_me, [_KillerHWM, KillerReinstallAfter]}}},
#state{module=Module, sink=Sink, config = Config} = State) ->
%% SinkPid here means the gen_event process. Handlers *all* live inside the
%% same gen_event process space, so when the Pid is killed, *all* of the
SinkPid = whereis(Sink),
unlink(SinkPid),
{message_queue_len, Len} = process_info(SinkPid, message_queue_len),
error_logger:error_msg("Killing sink ~p, current message_queue_len:~p~n", [Sink, Len]),
exit(SinkPid, kill),
_ = timer:apply_after(KillerReinstallAfter, lager_app, start_handler, [Sink, Module, Config]),
{stop, normal, State};
handle_info({gen_event_EXIT, Module, Reason}, #state{module=Module,
config=Config, sink=Sink} = State) ->
case lager:log(error, self(), "Lager event handler ~p exited with reason ~s",
[Module, error_logger_lager_h:format_reason(Reason)]) of
ok ->
install_handler(Sink, Module, Config);
{error, _} ->
ok
end,
{noreply, State};
handle_info(reinstall_handler, #state{module=Module, config=Config, sink=Sink} = State) ->
install_handler(Sink, Module, Config),
{noreply, State};
handle_info({reboot, Sink}, State) ->
_ = lager_app:boot(Sink),
{noreply, State};
handle_info(stop, State) ->
{stop, normal, State};
handle_info({'EXIT', _Pid, killed}, #state{module=Module, config=Config, sink=Sink} = State) ->
Tmr = application:get_env(lager, killer_reinstall_after, 5000),
_ = timer:apply_after(Tmr, lager_app, start_handler, [Sink, Module, Config]),
{stop, normal, State};
handle_info(_Info, State) ->
{noreply, State}.
terminate(_Reason, _State) ->
ok.
code_change(_OldVsn, State, _Extra) ->
{ok, State}.
install_handler(Sink, lager_backend_throttle, Config) ->
%% ignored to backends without breaking 3rd party handlers.
install_handler2(Sink, lager_backend_throttle, [{sink, Sink}|Config]);
install_handler(Sink, Module, Config) ->
install_handler2(Sink, Module, Config).
install_handler2(Sink, Module, Config) ->
case gen_event:add_sup_handler(Sink, Module, Config) of
ok ->
?INT_LOG(debug, "Lager installed handler ~p into ~p", [Module, Sink]),
lager:update_loglevel_config(Sink),
ok;
{error, {fatal, Reason}} ->
?INT_LOG(error, "Lager fatally failed to install handler ~p into"
" ~p, NOT retrying: ~p", [Module, Sink, Reason]),
self() ! stop,
ok;
Error ->
?INT_LOG(error, "Lager failed to install handler ~p into"
" ~p, retrying later : ~p", [Module, Sink, Error]),
erlang:send_after(5000, self(), reinstall_handler),
ok
end.
-ifdef(TEST).
from_now(Seconds) ->
{Mega, Secs, Micro} = os:timestamp(),
{Mega, Secs + Seconds, Micro}.
reinstall_on_initial_failure_test_() ->
{timeout, 60000,
[
fun() ->
error_logger:tty(false),
application:load(lager),
application:set_env(lager, handlers, [{lager_test_backend, info}, {lager_crash_backend, [from_now(2), undefined]}]),
application:set_env(lager, error_logger_redirect, false),
application:unset_env(lager, crash_log),
lager:start(),
try
{_Level, _Time, Message, _Metadata} = lager_test_backend:pop(),
?assertMatch("Lager failed to install handler lager_crash_backend into lager_event, retrying later :"++_, lists:flatten(Message)),
timer:sleep(6000),
lager_test_backend:flush(),
?assertEqual(0, lager_test_backend:count()),
?assert(lists:member(lager_crash_backend, gen_event:which_handlers(lager_event)))
after
application:stop(lager),
application:stop(goldrush),
error_logger:tty(true)
end
end
]
}.
reinstall_on_runtime_failure_test_() ->
{timeout, 60000,
[
fun() ->
error_logger:tty(false),
application:load(lager),
application:set_env(lager, handlers, [{lager_test_backend, info}, {lager_crash_backend, [undefined, from_now(5)]}]),
application:set_env(lager, error_logger_redirect, false),
application:unset_env(lager, crash_log),
lager:start(),
try
?assert(lists:member(lager_crash_backend, gen_event:which_handlers(lager_event))),
timer:sleep(6000),
pop_until("Lager event handler lager_crash_backend exited with reason crash", fun lists:flatten/1),
pop_until("Lager failed to install handler lager_crash_backend into lager_event, retrying later",
fun(Msg) -> string:substr(lists:flatten(Msg), 1, 84) end),
?assertEqual(false, lists:member(lager_crash_backend, gen_event:which_handlers(lager_event)))
after
application:stop(lager),
application:stop(goldrush),
error_logger:tty(true)
end
end
]
}.
reinstall_handlers_after_killer_hwm_test_() ->
{timeout, 60000,
[
fun() ->
error_logger:tty(false),
application:load(lager),
application:set_env(lager, handlers, [{lager_manager_killer, [1000, 5000]}]),
application:set_env(lager, error_logger_redirect, false),
application:set_env(lager, killer_reinstall_after, 5000),
application:unset_env(lager, crash_log),
lager:start(),
lager:trace_file("foo", [{foo, "bar"}], error),
L = length(gen_event:which_handlers(lager_event)),
try
lager_manager_killer:kill_me(),
timer:sleep(6000),
?assertEqual(L, length(gen_event:which_handlers(lager_event))),
file:delete("foo")
after
application:stop(lager),
application:stop(goldrush),
error_logger:tty(true)
end
end
]
}.
pop_until(String, Fun) ->
try_backend_pop(lager_test_backend:pop(), String, Fun).
try_backend_pop(undefined, String, _Fun) ->
throw("Not found: " ++ String);
try_backend_pop({_Severity, _Date, Msg, _Metadata}, String, Fun) ->
case Fun(Msg) of
String ->
ok;
_ ->
try_backend_pop(lager_test_backend:pop(), String, Fun)
end.
-endif.
|
4ab71ea4cc31592c79d7f0d7e327119966d02112c12e0ece20fd9d07d0d8ab50 | semilin/layoup | aptlf.lisp |
(MAKE-LAYOUT :NAME "aptlf" :MATRIX (APPLY #'KEY-MATRIX 'NIL) :SHIFT-MATRIX NIL
:KEYBOARD NIL) | null | https://raw.githubusercontent.com/semilin/layoup/27ec9ba9a9388cd944ac46206d10424e3ab45499/data/layouts/aptlf.lisp | lisp |
(MAKE-LAYOUT :NAME "aptlf" :MATRIX (APPLY #'KEY-MATRIX 'NIL) :SHIFT-MATRIX NIL
:KEYBOARD NIL) |
|
ddc44999d7a85a60b9c60191d22dfd9aecc176e4acac1f588fd819780710265b | sentenai/reinforce | CartPoleV0.hs | --------------------------------------------------------------------------------
-- |
-- Module     : Environments.Gym.ClassicControl.CartPoleV0
-- Copyright : (c) Sentenai 2017
-- License : BSD3
-- Maintainer :
-- Stability : experimental
-- Portability: non-portable
--
-- Environment description:
-- > A pole is attached by an un-actuated joint to a cart, which moves along a
-- > frictionless track. The system is controlled by applying a force of +1 or -1
-- > to the cart. The pendulum starts upright, and the goal is to prevent it from
-- > falling over. A reward of +1 is provided for every timestep that the pole
-- > remains upright. The episode ends when the pole is more than 15 degrees from
-- > vertical, or the cart moves more than 2.4 units from the center.
--
-- https://gym.openai.com/envs/CartPole-v0
--------------------------------------------------------------------------------
{-# LANGUAGE FlexibleInstances #-}
{-# OPTIONS_GHC -Wno-orphans #-}
module Environments.Gym.ClassicControl.CartPoleV0
( Action(..)
, I.Runner
, StateCP(..)
, Environment
, EnvironmentT
, Environments.Gym.ClassicControl.CartPoleV0.runEnvironment
, Environments.Gym.ClassicControl.CartPoleV0.runEnvironmentT
, Environments.Gym.ClassicControl.CartPoleV0.runDefaultEnvironment
, Environments.Gym.ClassicControl.CartPoleV0.runDefaultEnvironmentT
) where
import Control.Monad.IO.Class
import Control.Exception.Safe
import Data.CartPole
import Control.MonadEnv (MonadEnv(..), Reward)
import Environments.Gym.Internal (GymEnvironmentT)
import qualified Environments.Gym.Internal as I
import OpenAI.Gym (GymEnv(CartPoleV0))
import Servant.Client (BaseUrl)
import Network.HTTP.Client (Manager)
-- ========================================================================= --
-- | to 'Environments.Gym.Internal.GymEnvironmentT' with type dependencies
type EnvironmentT t = GymEnvironmentT StateCP Action t
-- | to 'EnvironmentT' in IO
type Environment = EnvironmentT IO
-- | to 'Environments.Gym.Internal.runEnvironmentT'
runEnvironmentT :: MonadIO t => Manager -> BaseUrl -> I.RunnerT StateCP Action t x
runEnvironmentT = I.runEnvironmentT CartPoleV0
-- | to 'Environments.Gym.Internal.runEnvironment' in IO
runEnvironment :: Manager -> BaseUrl -> I.RunnerT StateCP Action IO x
runEnvironment = I.runEnvironmentT CartPoleV0
-- | to 'Environments.Gym.Internal.runDefaultEnvironmentT'
runDefaultEnvironmentT :: MonadIO t => I.RunnerT StateCP Action t x
runDefaultEnvironmentT = I.runDefaultEnvironmentT CartPoleV0
-- | to 'Environments.Gym.Internal.runDefaultEnvironment' in IO
runDefaultEnvironment :: I.RunnerT StateCP Action IO x
runDefaultEnvironment = I.runDefaultEnvironmentT CartPoleV0
instance (MonadIO t, MonadThrow t) => MonadEnv (EnvironmentT t) StateCP Action Reward where
reset = I._reset
step = I._step
| null | https://raw.githubusercontent.com/sentenai/reinforce/03fdeea14c606f4fe2390863778c99ebe1f0a7ee/reinforce-environments-gym/src/Environments/Gym/ClassicControl/CartPoleV0.hs | haskell | ------------------------------------------------------------------------------
|
Copyright : (c) Sentenai 2017
License : BSD3
Stability : experimental
Portability: non-portable
Environment description:
> frictionless track. The system is controlled by applying a force of +1 or -1
> to the cart. The pendulum starts upright, and the goal is to prevent it from
> falling over. A reward of +1 is provided for every timestep that the pole
-v0
------------------------------------------------------------------------------
========================================================================= -- | Module : Environment .
-- Maintainer :
-- > A pole is attached by an un-actuated joint to a cart, which moves along a
-- > remains upright. The episode ends when the pole is more than 15 degrees from
-- > vertical, or the cart moves more than 2.4 units from the center.
{-# LANGUAGE FlexibleInstances #-}
{-# OPTIONS_GHC -Wno-orphans #-}
module Environments.Gym.ClassicControl.CartPoleV0
( Action(..)
, I.Runner
, StateCP(..)
, Environment
, EnvironmentT
, Environments.Gym.ClassicControl.CartPoleV0.runEnvironment
, Environments.Gym.ClassicControl.CartPoleV0.runEnvironmentT
, Environments.Gym.ClassicControl.CartPoleV0.runDefaultEnvironment
, Environments.Gym.ClassicControl.CartPoleV0.runDefaultEnvironmentT
) where
import Control.Monad.IO.Class
import Control.Exception.Safe
import Data.CartPole
import Control.MonadEnv (MonadEnv(..), Reward)
import Environments.Gym.Internal (GymEnvironmentT)
import qualified Environments.Gym.Internal as I
import OpenAI.Gym (GymEnv(CartPoleV0))
import Servant.Client (BaseUrl)
import Network.HTTP.Client (Manager)
-- | to 'Environments.Gym.Internal.GymEnvironmentT' with type dependencies
type EnvironmentT t = GymEnvironmentT StateCP Action t
-- | to 'EnvironmentT' in IO
type Environment = EnvironmentT IO
-- | to 'Environments.Gym.Internal.runEnvironmentT'
runEnvironmentT :: MonadIO t => Manager -> BaseUrl -> I.RunnerT StateCP Action t x
runEnvironmentT = I.runEnvironmentT CartPoleV0
-- | to 'Environments.Gym.Internal.runEnvironment' in IO
runEnvironment :: Manager -> BaseUrl -> I.RunnerT StateCP Action IO x
runEnvironment = I.runEnvironmentT CartPoleV0
-- | to 'Environments.Gym.Internal.runDefaultEnvironmentT'
runDefaultEnvironmentT :: MonadIO t => I.RunnerT StateCP Action t x
runDefaultEnvironmentT = I.runDefaultEnvironmentT CartPoleV0
-- | to 'Environments.Gym.Internal.runDefaultEnvironment' in IO
runDefaultEnvironment :: I.RunnerT StateCP Action IO x
runDefaultEnvironment = I.runDefaultEnvironmentT CartPoleV0
instance (MonadIO t, MonadThrow t) => MonadEnv (EnvironmentT t) StateCP Action Reward where
reset = I._reset
step = I._step
|
3a54ee4b853cec6ca5ee55bff2dd3f62ed7724021c72220d4b4b73154ae0a7fd | mzp/coq-ruby | nametab.ml | (************************************************************************)
(*   v      *   The Coq Proof Assistant  /  The Coq Development Team     *)
(* <O___,, *   CNRS - Ecole Polytechnique - INRIA Futurs - Universite Paris Sud *)
(*   \VV/  **************************************************************)
(* // * This file is distributed under the terms of the *)
(* * GNU Lesser General Public License Version 2.1 *)
(************************************************************************)
(*i $Id: nametab.ml 10664 2008-03-14 11:27:37Z soubiran $ i*)
open Util
open Pp
open Names
open Libnames
open Nameops
open Declarations
exception GlobalizationError of qualid
exception GlobalizationConstantError of qualid
let error_global_not_found_loc loc q =
Stdpp.raise_with_loc loc (GlobalizationError q)
let error_global_constant_not_found_loc loc q =
Stdpp.raise_with_loc loc (GlobalizationConstantError q)
let error_global_not_found q = raise (GlobalizationError q)
(* The visibility can be registered either
- for all suffixes not shorter than a given int - when the object
is loaded inside a module
or
- for a precise suffix, when the module containing (the module
containing ...) the object is open (imported)
*)
type visibility = Until of int | Exactly of int
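(* Illustrative reading (added note, not from the original source): with
   [push (Until 1) sp r tab] the object [r] becomes reachable through every
   suffix of [sp]'s user name, while [push (Exactly 2) sp r tab] only adds
   the spelling made of the last two components. *)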
(* Data structure for nametabs *******************************************)
(* This module type will be instantiated by [section_path] of [dir_path] *)
(* The [repr] function is assumed to return the reversed list of idents. *)
module type UserName = sig
type t
val to_string : t -> string
val repr : t -> identifier * module_ident list
end
(* A ['a t] is a map from [user_name] to ['a], with possible lookup by
partially qualified names of type [qualid]. The mapping of
partially qualified names to ['a] is determined by the [visibility]
parameter of [push].
The [shortest_qualid] function given a user_name Coq.A.B.x, tries
to find the shortest among x, B.x, A.B.x and Coq.A.B.x that denotes
the same object.
*)
module type NAMETREE = sig
type 'a t
type user_name
val empty : 'a t
val push : visibility -> user_name -> 'a -> 'a t -> 'a t
val locate : qualid -> 'a t -> 'a
val find : user_name -> 'a t -> 'a
val exists : user_name -> 'a t -> bool
val user_name : qualid -> 'a t -> user_name
val shortest_qualid : Idset.t -> user_name -> 'a t -> qualid
val find_prefixes : qualid -> 'a t -> 'a list
end
module Make(U:UserName) : NAMETREE with type user_name = U.t
=
struct
type user_name = U.t
type 'a path_status =
Nothing
| Relative of user_name * 'a
| Absolute of user_name * 'a
(* Dictionaries of short names *)
type 'a nametree = ('a path_status * 'a nametree ModIdmap.t)
type 'a t = 'a nametree Idmap.t
let empty = Idmap.empty
(* [push_until] is used to register [Until vis] visibility and
[push_exactly] to [Exactly vis] and [push_tree] chooses the right one*)
let rec push_until uname o level (current,dirmap) = function
| modid :: path ->
let mc =
try ModIdmap.find modid dirmap
with Not_found -> (Nothing, ModIdmap.empty)
in
let this =
if level <= 0 then
match current with
| Absolute (n,_) ->
(* This is an absolute name, we must keep it
otherwise it may become unaccessible forever *)
Flags.if_verbose
warning ("Trying to mask the absolute name \""
^ U.to_string n ^ "\"!");
current
| Nothing
| Relative _ -> Relative (uname,o)
else current
in
let ptab' = push_until uname o (level-1) mc path in
(this, ModIdmap.add modid ptab' dirmap)
| [] ->
match current with
| Absolute (uname',o') ->
if o'=o then begin
assert (uname=uname');
current, dirmap
(* we are putting the same thing for the second time :) *)
end
else
(* This is an absolute name, we must keep it otherwise it may
become unaccessible forever *)
(* But ours is also absolute! This is an error! *)
error ("Cannot mask the absolute name \""
^ U.to_string uname' ^ "\"!")
| Nothing
| Relative _ -> Absolute (uname,o), dirmap
let rec push_exactly uname o level (current,dirmap) = function
| modid :: path ->
let mc =
try ModIdmap.find modid dirmap
with Not_found -> (Nothing, ModIdmap.empty)
in
if level = 0 then
let this =
match current with
| Absolute (n,_) ->
(* This is an absolute name, we must keep it
otherwise it may become unaccessible forever *)
Flags.if_verbose
warning ("Trying to mask the absolute name \""
^ U.to_string n ^ "\"!");
current
| Nothing
| Relative _ -> Relative (uname,o)
in
(this, dirmap)
else (* not right level *)
let ptab' = push_exactly uname o (level-1) mc path in
(current, ModIdmap.add modid ptab' dirmap)
| [] ->
anomaly "Prefix longer than path! Impossible!"
let push visibility uname o tab =
let id,dir = U.repr uname in
let ptab =
try Idmap.find id tab
with Not_found -> (Nothing, ModIdmap.empty)
in
let ptab' = match visibility with
| Until i -> push_until uname o (i-1) ptab dir
| Exactly i -> push_exactly uname o (i-1) ptab dir
in
Idmap.add id ptab' tab
let rec search (current,modidtab) = function
| modid :: path -> search (ModIdmap.find modid modidtab) path
| [] -> current
let find_node qid tab =
let (dir,id) = repr_qualid qid in
search (Idmap.find id tab) (repr_dirpath dir)
let locate qid tab =
let o = match find_node qid tab with
| Absolute (uname,o) | Relative (uname,o) -> o
| Nothing -> raise Not_found
in
o
let user_name qid tab =
let uname = match find_node qid tab with
| Absolute (uname,o) | Relative (uname,o) -> uname
| Nothing -> raise Not_found
in
uname
let find uname tab =
let id,l = U.repr uname in
match search (Idmap.find id tab) l with
Absolute (_,o) -> o
| _ -> raise Not_found
let exists uname tab =
try
let _ = find uname tab in
true
with
Not_found -> false
let shortest_qualid ctx uname tab =
let id,dir = U.repr uname in
let hidden = Idset.mem id ctx in
let rec find_uname pos dir (path,tab) = match path with
| Absolute (u,_) | Relative (u,_)
when u=uname && not(pos=[] && hidden) -> List.rev pos
| _ ->
match dir with
[] -> raise Not_found
| id::dir -> find_uname (id::pos) dir (ModIdmap.find id tab)
in
let ptab = Idmap.find id tab in
let found_dir = find_uname [] dir ptab in
make_qualid (make_dirpath found_dir) id
let push_node node l =
match node with
| Absolute (_,o) | Relative (_,o) when not (List.mem o l) -> o::l
| _ -> l
let rec flatten_idmap tab l =
ModIdmap.fold (fun _ (current,idtab) l ->
flatten_idmap idtab (push_node current l)) tab l
let rec search_prefixes (current,modidtab) = function
| modid :: path -> search_prefixes (ModIdmap.find modid modidtab) path
| [] -> List.rev (flatten_idmap modidtab (push_node current []))
let find_prefixes qid tab =
try
let (dir,id) = repr_qualid qid in
search_prefixes (Idmap.find id tab) (repr_dirpath dir)
with Not_found -> []
end
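(* Editor's illustrative sketch -- not part of the original Coq sources.
   It only shows the intended call sequence of the functor above; the
   [DemoName] module and the [_demo_*] binding are hypothetical, and we
   assume the usual [Names.string_of_id] for printing identifiers. *)
module DemoName = struct
  type t = identifier * module_ident list
  let to_string (id, _) = string_of_id id
  let repr x = x
end
module DemoTab = Make (DemoName)
let _demo_push_then_locate (uname : DemoName.t) (obj : int) (q : qualid) =
  (* register [uname] so that every suffix of length >= 1 resolves,
     then look it up through a (possibly partial) [qualid] *)
  let tab = DemoTab.push (Until 1) uname obj DemoTab.empty in
  DemoTab.locate q tab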
(* Global name tables *************************************************)
module SpTab = Make (struct
type t = section_path
let to_string = string_of_path
let repr sp =
let dir,id = repr_path sp in
id, (repr_dirpath dir)
end)
type ccitab = extended_global_reference SpTab.t
let the_ccitab = ref (SpTab.empty : ccitab)
type kntab = kernel_name SpTab.t
let the_tactictab = ref (SpTab.empty : kntab)
type mptab = module_path SpTab.t
let the_modtypetab = ref (SpTab.empty : mptab)
type objtab = unit SpTab.t
let the_objtab = ref (SpTab.empty : objtab)
module DirTab = Make(struct
type t = dir_path
let to_string = string_of_dirpath
let repr dir = match repr_dirpath dir with
| [] -> anomaly "Empty dirpath"
| id::l -> (id,l)
end)
(* If we have a (closed) module M having a submodule N, then N does not
   have an entry in [the_dirtab]. *)
type dirtab = global_dir_reference DirTab.t
let the_dirtab = ref (DirTab.empty : dirtab)
(* Reversed name tables ***************************************************)
(* This table translates extended_global_references back to section paths *)
module Globrevtab = Map.Make(struct
type t=extended_global_reference
let compare = compare
end)
type globrevtab = section_path Globrevtab.t
let the_globrevtab = ref (Globrevtab.empty : globrevtab)
type mprevtab = dir_path MPmap.t
let the_modrevtab = ref (MPmap.empty : mprevtab)
type mptrevtab = section_path MPmap.t
let the_modtyperevtab = ref (MPmap.empty : mptrevtab)
type knrevtab = section_path KNmap.t
let the_tacticrevtab = ref (KNmap.empty : knrevtab)
(* Push functions *********************************************************)
(* This is for permanent constructions (never discharged -- but with
   possibly limited visibility, i.e. Theorem, Lemma, Definition, Axiom,
   Parameter but also Remark and Fact) *)
let push_xref visibility sp xref =
the_ccitab := SpTab.push visibility sp xref !the_ccitab;
match visibility with
| Until _ ->
if Globrevtab.mem xref !the_globrevtab then
()
else
the_globrevtab := Globrevtab.add xref sp !the_globrevtab
| _ -> ()
let push_cci visibility sp ref =
push_xref visibility sp (TrueGlobal ref)
(* This is for Syntactic Definitions *)
let push_syntactic_definition visibility sp kn =
push_xref visibility sp (SyntacticDef kn)
let push = push_cci
let push_modtype vis sp kn =
the_modtypetab := SpTab.push vis sp kn !the_modtypetab;
the_modtyperevtab := MPmap.add kn sp !the_modtyperevtab
(* This is for tactic definition names *)
let push_tactic vis sp kn =
the_tactictab := SpTab.push vis sp kn !the_tactictab;
the_tacticrevtab := KNmap.add kn sp !the_tacticrevtab
(* This is for dischargeable non-cci objects (removed at the end of the
   section -- i.e. Hints, Grammar ...) *) (* --> Unused *)
let push_object visibility sp =
the_objtab := SpTab.push visibility sp () !the_objtab
(* This is to remember absolute Section/Module names and to avoid redundancy *)
let push_dir vis dir dir_ref =
the_dirtab := DirTab.push vis dir dir_ref !the_dirtab;
match dir_ref with
DirModule (_,(mp,_)) -> the_modrevtab := MPmap.add mp dir !the_modrevtab
| _ -> ()
(* Locate functions *******************************************************)
(* This should be used when syntactic definitions are allowed *)
let extended_locate qid = SpTab.locate qid !the_ccitab
(* This should be used when no syntactic definitions is expected *)
let locate qid = match extended_locate qid with
| TrueGlobal ref -> ref
| SyntacticDef _ -> raise Not_found
let full_name_cci qid = SpTab.user_name qid !the_ccitab
let locate_syntactic_definition qid = match extended_locate qid with
| TrueGlobal _ -> raise Not_found
| SyntacticDef kn -> kn
let locate_modtype qid = SpTab.locate qid !the_modtypetab
let full_name_modtype qid = SpTab.user_name qid !the_modtypetab
let locate_obj qid = SpTab.user_name qid !the_objtab
type ltac_constant = kernel_name
let locate_tactic qid = SpTab.locate qid !the_tactictab
let full_name_tactic qid = SpTab.user_name qid !the_tactictab
let locate_dir qid = DirTab.locate qid !the_dirtab
let locate_module qid =
match locate_dir qid with
| DirModule (_,(mp,_)) -> mp
| _ -> raise Not_found
let full_name_module qid =
match locate_dir qid with
| DirModule (dir,_) -> dir
| _ -> raise Not_found
let locate_section qid =
match locate_dir qid with
| DirOpenSection (dir, _)
| DirClosedSection dir -> dir
| _ -> raise Not_found
let locate_all qid =
List.fold_right (fun a l -> match a with TrueGlobal a -> a::l | _ -> l)
(SpTab.find_prefixes qid !the_ccitab) []
let extended_locate_all qid = SpTab.find_prefixes qid !the_ccitab
(* Derived functions *)
let locate_constant qid =
match extended_locate qid with
| TrueGlobal (ConstRef kn) -> kn
| _ -> raise Not_found
let locate_mind qid =
match extended_locate qid with
| TrueGlobal (IndRef (kn,0)) -> kn
| _ -> raise Not_found
let absolute_reference sp =
match SpTab.find sp !the_ccitab with
| TrueGlobal ref -> ref
| _ -> raise Not_found
let locate_in_absolute_module dir id =
absolute_reference (make_path dir id)
let global r =
let (loc,qid) = qualid_of_reference r in
try match extended_locate qid with
| TrueGlobal ref -> ref
| SyntacticDef _ ->
user_err_loc (loc,"global",
str "Unexpected reference to a notation: " ++
pr_qualid qid)
with Not_found ->
error_global_not_found_loc loc qid
(* Exists functions ********************************************************)
let exists_cci sp = SpTab.exists sp !the_ccitab
let exists_dir dir = DirTab.exists dir !the_dirtab
let exists_section = exists_dir
let exists_module = exists_dir
let exists_modtype sp = SpTab.exists sp !the_modtypetab
let exists_tactic sp = SpTab.exists sp !the_tactictab
(* Reverse locate functions ***********************************************)
let sp_of_global ref =
match ref with
| VarRef id -> make_path empty_dirpath id
| _ -> Globrevtab.find (TrueGlobal ref) !the_globrevtab
let id_of_global ref =
let (_,id) = repr_path (sp_of_global ref) in
id
let sp_of_syntactic_definition kn =
Globrevtab.find (SyntacticDef kn) !the_globrevtab
let dir_of_mp mp =
MPmap.find mp !the_modrevtab
(* Shortest qualid functions ********************************************)
let shortest_qualid_of_global ctx ref =
match ref with
| VarRef id -> make_qualid empty_dirpath id
| _ ->
let sp = Globrevtab.find (TrueGlobal ref) !the_globrevtab in
SpTab.shortest_qualid ctx sp !the_ccitab
let shortest_qualid_of_syndef ctx kn =
let sp = sp_of_syntactic_definition kn in
SpTab.shortest_qualid ctx sp !the_ccitab
let shortest_qualid_of_module mp =
let dir = MPmap.find mp !the_modrevtab in
DirTab.shortest_qualid Idset.empty dir !the_dirtab
let shortest_qualid_of_modtype kn =
let sp = MPmap.find kn !the_modtyperevtab in
SpTab.shortest_qualid Idset.empty sp !the_modtypetab
let shortest_qualid_of_tactic kn =
let sp = KNmap.find kn !the_tacticrevtab in
SpTab.shortest_qualid Idset.empty sp !the_tactictab
let pr_global_env env ref =
  (* It is important to keep the let-in: streams are evaluated lazily, so
     we must force the evaluation here in order to catch a possible
     exception (as happens in the debugger). *)
let s = string_of_qualid (shortest_qualid_of_global env ref) in
(str s)
let inductive_of_reference r =
match global r with
| IndRef ind -> ind
| ref ->
user_err_loc (loc_of_reference r,"global_inductive",
pr_reference r ++ spc () ++ str "is not an inductive type")
(********************************************************************)
(********************************************************************)
(* Registration of tables as a global table and rollback *)
type frozen = ccitab * dirtab * objtab * kntab * kntab
* globrevtab * mprevtab * knrevtab * knrevtab
let init () =
the_ccitab := SpTab.empty;
the_dirtab := DirTab.empty;
the_objtab := SpTab.empty;
the_modtypetab := SpTab.empty;
the_tactictab := SpTab.empty;
the_globrevtab := Globrevtab.empty;
the_modrevtab := MPmap.empty;
the_modtyperevtab := MPmap.empty;
the_tacticrevtab := KNmap.empty
let freeze () =
!the_ccitab,
!the_dirtab,
!the_objtab,
!the_modtypetab,
!the_tactictab,
!the_globrevtab,
!the_modrevtab,
!the_modtyperevtab,
!the_tacticrevtab
let unfreeze (ccit,dirt,objt,mtyt,tact,globr,modr,mtyr,tacr) =
the_ccitab := ccit;
the_dirtab := dirt;
the_objtab := objt;
the_modtypetab := mtyt;
the_tactictab := tact;
the_globrevtab := globr;
the_modrevtab := modr;
the_modtyperevtab := mtyr;
the_tacticrevtab := tacr
let _ =
Summary.declare_summary "names"
{ Summary.freeze_function = freeze;
Summary.unfreeze_function = unfreeze;
Summary.init_function = init;
Summary.survive_module = false;
Summary.survive_section = false }
| null | https://raw.githubusercontent.com/mzp/coq-ruby/99b9f87c4397f705d1210702416176b13f8769c1/library/nametab.ml | ocaml | **********************************************************************
// * This file is distributed under the terms of the
* GNU Lesser General Public License Version 2.1
**********************************************************************
The visibility can be registered either
- for all suffixes not shorter then a given int - when the object
is loaded inside a module
or
- for a precise suffix, when the module containing (the module
containing ...) the object is open (imported)
Data structure for nametabs ******************************************
This module type will be instantiated by [section_path] of [dir_path]
The [repr] function is assumed to return the reversed list of idents.
Dictionaries of short names
This is an absolute name, we must keep it
otherwise it may become unaccessible forever
This is an absolute name, we must keep it otherwise it may
become unaccessible forever
But ours is also absolute! This is an error!
This is an absolute name, we must keep it
otherwise it may become unaccessible forever
not right level
Global name tables ************************************************
If we have a (closed) module M having a submodule N, than N does not
have the entry in [the_dirtab].
Reversed name tables **************************************************
This table translates extended_global_references back to section paths
Push functions ********************************************************
This is for tactic definition names
--> Unused
This is to remember absolute Section/Module names and to avoid redundancy
Locate functions ******************************************************
This should be used when syntactic definitions are allowed
This should be used when no syntactic definitions is expected
Derived functions
Exists functions *******************************************************
Reverse locate functions **********************************************
******************************************************************
******************************************************************
Registration of tables as a global table and rollback | v * The Coq Proof Assistant / The Coq Development Team
< O _ _ _ , , * CNRS - Ecole Polytechnique - INRIA Futurs - Universite Paris Sud
\VV/ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
$ I d : nametab.ml 10664 2008 - 03 - 14 11:27:37Z soubiran $
open Util
open Pp
open Names
open Libnames
open Nameops
open Declarations
exception GlobalizationError of qualid
exception GlobalizationConstantError of qualid
let error_global_not_found_loc loc q =
Stdpp.raise_with_loc loc (GlobalizationError q)
let error_global_constant_not_found_loc loc q =
Stdpp.raise_with_loc loc (GlobalizationConstantError q)
let error_global_not_found q = raise (GlobalizationError q)
type visibility = Until of int | Exactly of int
module type UserName = sig
type t
val to_string : t -> string
val repr : t -> identifier * module_ident list
end
A [ ' a t ] is a map from [ user_name ] to [ ' a ] , with possible lookup by
partially qualified names of type [ qualid ] . The mapping of
partially qualified names to [ ' a ] is determined by the [ visibility ]
parameter of [ push ] .
The [ shortest_qualid ] function given a user_name Coq . A.B.x , tries
to find the shortest among x , B.x , A.B.x and Coq . A.B.x that denotes
the same object .
partially qualified names of type [qualid]. The mapping of
partially qualified names to ['a] is determined by the [visibility]
parameter of [push].
The [shortest_qualid] function given a user_name Coq.A.B.x, tries
to find the shortest among x, B.x, A.B.x and Coq.A.B.x that denotes
the same object.
*)
module type NAMETREE = sig
type 'a t
type user_name
val empty : 'a t
val push : visibility -> user_name -> 'a -> 'a t -> 'a t
val locate : qualid -> 'a t -> 'a
val find : user_name -> 'a t -> 'a
val exists : user_name -> 'a t -> bool
val user_name : qualid -> 'a t -> user_name
val shortest_qualid : Idset.t -> user_name -> 'a t -> qualid
val find_prefixes : qualid -> 'a t -> 'a list
end
module Make(U:UserName) : NAMETREE with type user_name = U.t
=
struct
type user_name = U.t
type 'a path_status =
Nothing
| Relative of user_name * 'a
| Absolute of user_name * 'a
type 'a nametree = ('a path_status * 'a nametree ModIdmap.t)
type 'a t = 'a nametree Idmap.t
let empty = Idmap.empty
[ push_until ] is used to register [ Until vis ] visibility and
[ push_exactly ] to [ Exactly vis ] and [ push_tree ] chooses the right one
[push_exactly] to [Exactly vis] and [push_tree] chooses the right one*)
let rec push_until uname o level (current,dirmap) = function
| modid :: path ->
let mc =
try ModIdmap.find modid dirmap
with Not_found -> (Nothing, ModIdmap.empty)
in
let this =
if level <= 0 then
match current with
| Absolute (n,_) ->
Flags.if_verbose
warning ("Trying to mask the absolute name \""
^ U.to_string n ^ "\"!");
current
| Nothing
| Relative _ -> Relative (uname,o)
else current
in
let ptab' = push_until uname o (level-1) mc path in
(this, ModIdmap.add modid ptab' dirmap)
| [] ->
match current with
| Absolute (uname',o') ->
if o'=o then begin
assert (uname=uname');
current, dirmap
we are putting the same thing for the second time :)
end
else
error ("Cannot mask the absolute name \""
^ U.to_string uname' ^ "\"!")
| Nothing
| Relative _ -> Absolute (uname,o), dirmap
let rec push_exactly uname o level (current,dirmap) = function
| modid :: path ->
let mc =
try ModIdmap.find modid dirmap
with Not_found -> (Nothing, ModIdmap.empty)
in
if level = 0 then
let this =
match current with
| Absolute (n,_) ->
Flags.if_verbose
warning ("Trying to mask the absolute name \""
^ U.to_string n ^ "\"!");
current
| Nothing
| Relative _ -> Relative (uname,o)
in
(this, dirmap)
let ptab' = push_exactly uname o (level-1) mc path in
(current, ModIdmap.add modid ptab' dirmap)
| [] ->
anomaly "Prefix longer than path! Impossible!"
let push visibility uname o tab =
let id,dir = U.repr uname in
let ptab =
try Idmap.find id tab
with Not_found -> (Nothing, ModIdmap.empty)
in
let ptab' = match visibility with
| Until i -> push_until uname o (i-1) ptab dir
| Exactly i -> push_exactly uname o (i-1) ptab dir
in
Idmap.add id ptab' tab
let rec search (current,modidtab) = function
| modid :: path -> search (ModIdmap.find modid modidtab) path
| [] -> current
let find_node qid tab =
let (dir,id) = repr_qualid qid in
search (Idmap.find id tab) (repr_dirpath dir)
let locate qid tab =
let o = match find_node qid tab with
| Absolute (uname,o) | Relative (uname,o) -> o
| Nothing -> raise Not_found
in
o
let user_name qid tab =
let uname = match find_node qid tab with
| Absolute (uname,o) | Relative (uname,o) -> uname
| Nothing -> raise Not_found
in
uname
let find uname tab =
let id,l = U.repr uname in
match search (Idmap.find id tab) l with
Absolute (_,o) -> o
| _ -> raise Not_found
let exists uname tab =
try
let _ = find uname tab in
true
with
Not_found -> false
let shortest_qualid ctx uname tab =
let id,dir = U.repr uname in
let hidden = Idset.mem id ctx in
let rec find_uname pos dir (path,tab) = match path with
| Absolute (u,_) | Relative (u,_)
when u=uname && not(pos=[] && hidden) -> List.rev pos
| _ ->
match dir with
[] -> raise Not_found
| id::dir -> find_uname (id::pos) dir (ModIdmap.find id tab)
in
let ptab = Idmap.find id tab in
let found_dir = find_uname [] dir ptab in
make_qualid (make_dirpath found_dir) id
let push_node node l =
match node with
| Absolute (_,o) | Relative (_,o) when not (List.mem o l) -> o::l
| _ -> l
let rec flatten_idmap tab l =
ModIdmap.fold (fun _ (current,idtab) l ->
flatten_idmap idtab (push_node current l)) tab l
let rec search_prefixes (current,modidtab) = function
| modid :: path -> search_prefixes (ModIdmap.find modid modidtab) path
| [] -> List.rev (flatten_idmap modidtab (push_node current []))
let find_prefixes qid tab =
try
let (dir,id) = repr_qualid qid in
search_prefixes (Idmap.find id tab) (repr_dirpath dir)
with Not_found -> []
end
module SpTab = Make (struct
type t = section_path
let to_string = string_of_path
let repr sp =
let dir,id = repr_path sp in
id, (repr_dirpath dir)
end)
type ccitab = extended_global_reference SpTab.t
let the_ccitab = ref (SpTab.empty : ccitab)
type kntab = kernel_name SpTab.t
let the_tactictab = ref (SpTab.empty : kntab)
type mptab = module_path SpTab.t
let the_modtypetab = ref (SpTab.empty : mptab)
type objtab = unit SpTab.t
let the_objtab = ref (SpTab.empty : objtab)
module DirTab = Make(struct
type t = dir_path
let to_string = string_of_dirpath
let repr dir = match repr_dirpath dir with
| [] -> anomaly "Empty dirpath"
| id::l -> (id,l)
end)
type dirtab = global_dir_reference DirTab.t
let the_dirtab = ref (DirTab.empty : dirtab)
module Globrevtab = Map.Make(struct
type t=extended_global_reference
let compare = compare
end)
type globrevtab = section_path Globrevtab.t
let the_globrevtab = ref (Globrevtab.empty : globrevtab)
type mprevtab = dir_path MPmap.t
let the_modrevtab = ref (MPmap.empty : mprevtab)
type mptrevtab = section_path MPmap.t
let the_modtyperevtab = ref (MPmap.empty : mptrevtab)
type knrevtab = section_path KNmap.t
let the_tacticrevtab = ref (KNmap.empty : knrevtab)
This is for permanent constructions ( never discharged -- but with
possibly limited visibility , i.e. Theorem , Lemma , Definition , Axiom ,
Parameter but also Remark and Fact )
possibly limited visibility, i.e. Theorem, Lemma, Definition, Axiom,
Parameter but also Remark and Fact) *)
let push_xref visibility sp xref =
the_ccitab := SpTab.push visibility sp xref !the_ccitab;
match visibility with
| Until _ ->
if Globrevtab.mem xref !the_globrevtab then
()
else
the_globrevtab := Globrevtab.add xref sp !the_globrevtab
| _ -> ()
let push_cci visibility sp ref =
push_xref visibility sp (TrueGlobal ref)
This is for Syntactic Definitions
let push_syntactic_definition visibility sp kn =
push_xref visibility sp (SyntacticDef kn)
let push = push_cci
let push_modtype vis sp kn =
the_modtypetab := SpTab.push vis sp kn !the_modtypetab;
the_modtyperevtab := MPmap.add kn sp !the_modtyperevtab
let push_tactic vis sp kn =
the_tactictab := SpTab.push vis sp kn !the_tactictab;
the_tacticrevtab := KNmap.add kn sp !the_tacticrevtab
This is for dischargeable non - cci objects ( removed at the end of the
section -- i.e. Hints , Grammar ... )
let push_object visibility sp =
the_objtab := SpTab.push visibility sp () !the_objtab
let push_dir vis dir dir_ref =
the_dirtab := DirTab.push vis dir dir_ref !the_dirtab;
match dir_ref with
DirModule (_,(mp,_)) -> the_modrevtab := MPmap.add mp dir !the_modrevtab
| _ -> ()
let extended_locate qid = SpTab.locate qid !the_ccitab
let locate qid = match extended_locate qid with
| TrueGlobal ref -> ref
| SyntacticDef _ -> raise Not_found
let full_name_cci qid = SpTab.user_name qid !the_ccitab
let locate_syntactic_definition qid = match extended_locate qid with
| TrueGlobal _ -> raise Not_found
| SyntacticDef kn -> kn
let locate_modtype qid = SpTab.locate qid !the_modtypetab
let full_name_modtype qid = SpTab.user_name qid !the_modtypetab
let locate_obj qid = SpTab.user_name qid !the_objtab
type ltac_constant = kernel_name
let locate_tactic qid = SpTab.locate qid !the_tactictab
let full_name_tactic qid = SpTab.user_name qid !the_tactictab
let locate_dir qid = DirTab.locate qid !the_dirtab
let locate_module qid =
match locate_dir qid with
| DirModule (_,(mp,_)) -> mp
| _ -> raise Not_found
let full_name_module qid =
match locate_dir qid with
| DirModule (dir,_) -> dir
| _ -> raise Not_found
let locate_section qid =
match locate_dir qid with
| DirOpenSection (dir, _)
| DirClosedSection dir -> dir
| _ -> raise Not_found
let locate_all qid =
List.fold_right (fun a l -> match a with TrueGlobal a -> a::l | _ -> l)
(SpTab.find_prefixes qid !the_ccitab) []
let extended_locate_all qid = SpTab.find_prefixes qid !the_ccitab
let locate_constant qid =
match extended_locate qid with
| TrueGlobal (ConstRef kn) -> kn
| _ -> raise Not_found
let locate_mind qid =
match extended_locate qid with
| TrueGlobal (IndRef (kn,0)) -> kn
| _ -> raise Not_found
let absolute_reference sp =
match SpTab.find sp !the_ccitab with
| TrueGlobal ref -> ref
| _ -> raise Not_found
let locate_in_absolute_module dir id =
absolute_reference (make_path dir id)
let global r =
let (loc,qid) = qualid_of_reference r in
try match extended_locate qid with
| TrueGlobal ref -> ref
| SyntacticDef _ ->
user_err_loc (loc,"global",
str "Unexpected reference to a notation: " ++
pr_qualid qid)
with Not_found ->
error_global_not_found_loc loc qid
let exists_cci sp = SpTab.exists sp !the_ccitab
let exists_dir dir = DirTab.exists dir !the_dirtab
let exists_section = exists_dir
let exists_module = exists_dir
let exists_modtype sp = SpTab.exists sp !the_modtypetab
let exists_tactic sp = SpTab.exists sp !the_tactictab
let sp_of_global ref =
match ref with
| VarRef id -> make_path empty_dirpath id
| _ -> Globrevtab.find (TrueGlobal ref) !the_globrevtab
let id_of_global ref =
let (_,id) = repr_path (sp_of_global ref) in
id
let sp_of_syntactic_definition kn =
Globrevtab.find (SyntacticDef kn) !the_globrevtab
let dir_of_mp mp =
MPmap.find mp !the_modrevtab
Shortest qualid functions * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
let shortest_qualid_of_global ctx ref =
match ref with
| VarRef id -> make_qualid empty_dirpath id
| _ ->
let sp = Globrevtab.find (TrueGlobal ref) !the_globrevtab in
SpTab.shortest_qualid ctx sp !the_ccitab
let shortest_qualid_of_syndef ctx kn =
let sp = sp_of_syntactic_definition kn in
SpTab.shortest_qualid ctx sp !the_ccitab
let shortest_qualid_of_module mp =
let dir = MPmap.find mp !the_modrevtab in
DirTab.shortest_qualid Idset.empty dir !the_dirtab
let shortest_qualid_of_modtype kn =
let sp = MPmap.find kn !the_modtyperevtab in
SpTab.shortest_qualid Idset.empty sp !the_modtypetab
let shortest_qualid_of_tactic kn =
let sp = KNmap.find kn !the_tacticrevtab in
SpTab.shortest_qualid Idset.empty sp !the_tactictab
let pr_global_env env ref =
  (* It is important to keep the let-in: streams are evaluated lazily, so
     we must force the evaluation here in order to catch a possible
     exception (as happens in the debugger). *)
let s = string_of_qualid (shortest_qualid_of_global env ref) in
(str s)
let inductive_of_reference r =
match global r with
| IndRef ind -> ind
| ref ->
user_err_loc (loc_of_reference r,"global_inductive",
pr_reference r ++ spc () ++ str "is not an inductive type")
type frozen = ccitab * dirtab * objtab * kntab * kntab
* globrevtab * mprevtab * knrevtab * knrevtab
let init () =
the_ccitab := SpTab.empty;
the_dirtab := DirTab.empty;
the_objtab := SpTab.empty;
the_modtypetab := SpTab.empty;
the_tactictab := SpTab.empty;
the_globrevtab := Globrevtab.empty;
the_modrevtab := MPmap.empty;
the_modtyperevtab := MPmap.empty;
the_tacticrevtab := KNmap.empty
let freeze () =
!the_ccitab,
!the_dirtab,
!the_objtab,
!the_modtypetab,
!the_tactictab,
!the_globrevtab,
!the_modrevtab,
!the_modtyperevtab,
!the_tacticrevtab
let unfreeze (ccit,dirt,objt,mtyt,tact,globr,modr,mtyr,tacr) =
the_ccitab := ccit;
the_dirtab := dirt;
the_objtab := objt;
the_modtypetab := mtyt;
the_tactictab := tact;
the_globrevtab := globr;
the_modrevtab := modr;
the_modtyperevtab := mtyr;
the_tacticrevtab := tacr
let _ =
Summary.declare_summary "names"
{ Summary.freeze_function = freeze;
Summary.unfreeze_function = unfreeze;
Summary.init_function = init;
Summary.survive_module = false;
Summary.survive_section = false }
|
a8dc8f98a0025bf095b06870edd9f7ace40a2988e390d247b58afb4c3029558d | jordwalke/rehp | bdd.ml | (***********************************************************************)
(* *)
(* Objective Caml *)
(* *)
(*            , projet Cristal, INRIA Rocquencourt                      *)
(*                                                                       *)
(*  Copyright 1996 Institut National de Recherche en Informatique et     *)
(*  en Automatique.  All rights reserved.  This file is distributed      *)
(*  under the terms of the Q Public License version 1.0.                 *)
(*                                                                       *)
(***********************************************************************)
(* $Id: bdd.ml 7017 2005-08-12 09:22:04Z xleroy $ *)
(* Translated to by *)
(* Original code written in SML by ... *)
type bdd =
| One
| Zero
| Node of bdd * int * int * bdd
let rec eval bdd vars =
match bdd with
| Zero -> false
| One -> true
| Node (l, v, _, h) -> if vars.(v) then eval h vars else eval l vars
let getId bdd = match bdd with Node (_, _, id, _) -> id | Zero -> 0 | One -> 1
let initSize_1 = (8 * 1024) - 1
let nodeC = ref 1
let sz_1 = ref initSize_1
let htab = ref (Array.make (!sz_1 + 1) [])
let n_items = ref 0
let hashVal x y v = (x lsl 1) + y + (v lsl 2)
let resize newSize =
let arr = !htab in
let newSz_1 = newSize - 1 in
let newArr = Array.make newSize [] in
let rec copyBucket bucket =
match bucket with
| [] -> ()
| n :: ns -> (
match n with
| Node (l, v, _, h) ->
let ind = hashVal (getId l) (getId h) v land newSz_1 in
newArr.(ind) <- n :: newArr.(ind);
copyBucket ns
| _ -> assert false )
in
for n = 0 to !sz_1 do
copyBucket arr.(n)
done;
htab := newArr;
sz_1 := newSz_1
let insert idl idh v ind bucket newNode =
if !n_items <= !sz_1
then (
!htab.(ind) <- newNode :: bucket;
incr n_items )
else (
resize (!sz_1 + !sz_1 + 2);
let ind = hashVal idl idh v land !sz_1 in
!htab.(ind) <- newNode :: !htab.(ind) )
let resetUnique () =
sz_1 := initSize_1;
htab := Array.make (!sz_1 + 1) [];
n_items := 0;
nodeC := 1
let mkNode low v high =
let idl = getId low in
let idh = getId high in
if idl = idh
then low
else
let ind = hashVal idl idh v land !sz_1 in
let bucket = !htab.(ind) in
let rec lookup b =
match b with
| [] ->
let n = Node (low, v, (incr nodeC; !nodeC), high) in
insert (getId low) (getId high) v ind bucket n;
n
| n :: ns -> (
match n with
| Node (l, v', _id, h) ->
if v = v' && idl = getId l && idh = getId h then n else lookup ns
| _ -> assert false )
in
lookup bucket
type ordering =
| LESS
| EQUAL
| GREATER
let cmpVar (x : int) (y : int) = if x < y then LESS else if x > y then GREATER else EQUAL
let zero = Zero
let one = One
let mkVar x = mkNode zero x one
let cacheSize = 1999
let andslot1 = Array.make cacheSize 0
let andslot2 = Array.make cacheSize 0
let andslot3 = Array.make cacheSize zero
let xorslot1 = Array.make cacheSize 0
let xorslot2 = Array.make cacheSize 0
let xorslot3 = Array.make cacheSize zero
let notslot1 = Array.make cacheSize 0
let notslot2 = Array.make cacheSize one
let hash x y = ((x lsl 1) + y) mod cacheSize
let rec not n =
match n with
| Zero -> One
| One -> Zero
| Node (l, v, id, r) ->
let h = id mod cacheSize in
if id = notslot1.(h)
then notslot2.(h)
else
let f = mkNode (not l) v (not r) in
notslot1.(h) <- id; notslot2.(h) <- f; f
let rec and2 n1 n2 =
match n1 with
| Node (l1, v1, i1, r1) -> (
match n2 with
| Node (l2, v2, i2, r2) ->
let h = hash i1 i2 in
if i1 = andslot1.(h) && i2 = andslot2.(h)
then andslot3.(h)
else
let f =
match cmpVar v1 v2 with
| EQUAL -> mkNode (and2 l1 l2) v1 (and2 r1 r2)
| LESS -> mkNode (and2 l1 n2) v1 (and2 r1 n2)
| GREATER -> mkNode (and2 n1 l2) v2 (and2 n1 r2)
in
andslot1.(h) <- i1; andslot2.(h) <- i2; andslot3.(h) <- f; f
| Zero -> Zero
| One -> n1 )
| Zero -> Zero
| One -> n2
let rec xor n1 n2 =
match n1 with
| Node (l1, v1, i1, r1) -> (
match n2 with
| Node (l2, v2, i2, r2) ->
let h = hash i1 i2 in
if i1 = andslot1.(h) && i2 = andslot2.(h)
then andslot3.(h)
else
let f =
match cmpVar v1 v2 with
| EQUAL -> mkNode (xor l1 l2) v1 (xor r1 r2)
| LESS -> mkNode (xor l1 n2) v1 (xor r1 n2)
| GREATER -> mkNode (xor n1 l2) v2 (xor n1 r2)
in
andslot1.(h) <- i1; andslot2.(h) <- i2; andslot3.(h) <- f; f
| Zero -> n1
| One -> not n1 )
| Zero -> n2
| One -> not n2
let hwb n =
let rec h i j =
if i = j
then mkVar i
else xor (and2 (not (mkVar j)) (h i (j - 1))) (and2 (mkVar j) (g i (j - 1)))
and g i j =
if i = j
then mkVar i
else xor (and2 (not (mkVar i)) (h (i + 1) j)) (and2 (mkVar i) (g (i + 1) j))
in
h 0 (n - 1)
(* Testing *)
let seed = ref 0
let random () =
seed := (!seed * 25173) + 17431;
!seed land 1 > 0
let random_vars n =
let vars = Array.make n false in
for i = 0 to n - 1 do
vars.(i) <- random ()
done;
vars
let test_hwb bdd vars =
  (* We should have
       eval bdd vars = vars.(n-1)   if n > 0
       eval bdd vars = false        if n = 0
     where n is the number of "true" elements in vars. *)
let ntrue = ref 0 in
for i = 0 to Array.length vars - 1 do
if vars.(i) then incr ntrue
done;
eval bdd vars = if !ntrue > 0 then vars.(!ntrue - 1) else false
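(* Editor's illustrative check -- not part of the original benchmark.
   A concrete instance of the property above: with four variables of which
   two are true, the hidden-weighted-bit BDD must evaluate to vars.(2 - 1),
   i.e. false for the assignment below. *)
let _test_hwb_example () =
  let bdd = hwb 4 in
  test_hwb bdd [| true; false; true; false |]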
let main () =
let n = if Array.length Sys.argv >= 2 then int_of_string Sys.argv.(1) else 22 in
let ntests = if Array.length Sys.argv >= 3 then int_of_string Sys.argv.(2) else 100 in
let bdd = hwb n in
let succeeded = ref true in
for _ = 1 to ntests do
succeeded := !succeeded && test_hwb bdd (random_vars n)
done;
assert !succeeded
  (*
  if !succeeded
  then print_string "OK\n"
  else print_string "FAILED\n";
  Format.eprintf "%d@." !nodeC;
  exit 0
  *)
let _ = main ()
| null | https://raw.githubusercontent.com/jordwalke/rehp/f122b94f0a3f06410ddba59e3c9c603b33aadabf/benchmarks/sources/ml/bdd.ml | ocaml | *********************************************************************
Objective Caml
*********************************************************************
Testing | , projet Cristal , INRIA Rocquencourt
Copyright 1996 Institut National de Recherche en Informatique et
en Automatique . All rights reserved . This file is distributed
under the terms of the Q Public License version 1.0 .
$ I d : bdd.ml 7017 2005 - 08 - 12 09:22:04Z xleroy $
Translated to by
Original code written in SML by ...
type bdd =
| One
| Zero
| Node of bdd * int * int * bdd
let rec eval bdd vars =
match bdd with
| Zero -> false
| One -> true
| Node (l, v, _, h) -> if vars.(v) then eval h vars else eval l vars
let getId bdd = match bdd with Node (_, _, id, _) -> id | Zero -> 0 | One -> 1
let initSize_1 = (8 * 1024) - 1
let nodeC = ref 1
let sz_1 = ref initSize_1
let htab = ref (Array.make (!sz_1 + 1) [])
let n_items = ref 0
let hashVal x y v = (x lsl 1) + y + (v lsl 2)
let resize newSize =
let arr = !htab in
let newSz_1 = newSize - 1 in
let newArr = Array.make newSize [] in
let rec copyBucket bucket =
match bucket with
| [] -> ()
| n :: ns -> (
match n with
| Node (l, v, _, h) ->
let ind = hashVal (getId l) (getId h) v land newSz_1 in
newArr.(ind) <- n :: newArr.(ind);
copyBucket ns
| _ -> assert false )
in
for n = 0 to !sz_1 do
copyBucket arr.(n)
done;
htab := newArr;
sz_1 := newSz_1
let insert idl idh v ind bucket newNode =
if !n_items <= !sz_1
then (
!htab.(ind) <- newNode :: bucket;
incr n_items )
else (
resize (!sz_1 + !sz_1 + 2);
let ind = hashVal idl idh v land !sz_1 in
!htab.(ind) <- newNode :: !htab.(ind) )
let resetUnique () =
sz_1 := initSize_1;
htab := Array.make (!sz_1 + 1) [];
n_items := 0;
nodeC := 1
let mkNode low v high =
let idl = getId low in
let idh = getId high in
if idl = idh
then low
else
let ind = hashVal idl idh v land !sz_1 in
let bucket = !htab.(ind) in
let rec lookup b =
match b with
| [] ->
let n = Node (low, v, (incr nodeC; !nodeC), high) in
insert (getId low) (getId high) v ind bucket n;
n
| n :: ns -> (
match n with
| Node (l, v', _id, h) ->
if v = v' && idl = getId l && idh = getId h then n else lookup ns
| _ -> assert false )
in
lookup bucket
type ordering =
| LESS
| EQUAL
| GREATER
let cmpVar (x : int) (y : int) = if x < y then LESS else if x > y then GREATER else EQUAL
let zero = Zero
let one = One
let mkVar x = mkNode zero x one
let cacheSize = 1999
let andslot1 = Array.make cacheSize 0
let andslot2 = Array.make cacheSize 0
let andslot3 = Array.make cacheSize zero
let xorslot1 = Array.make cacheSize 0
let xorslot2 = Array.make cacheSize 0
let xorslot3 = Array.make cacheSize zero
let notslot1 = Array.make cacheSize 0
let notslot2 = Array.make cacheSize one
let hash x y = ((x lsl 1) + y) mod cacheSize
let rec not n =
match n with
| Zero -> One
| One -> Zero
| Node (l, v, id, r) ->
let h = id mod cacheSize in
if id = notslot1.(h)
then notslot2.(h)
else
let f = mkNode (not l) v (not r) in
notslot1.(h) <- id; notslot2.(h) <- f; f
let rec and2 n1 n2 =
match n1 with
| Node (l1, v1, i1, r1) -> (
match n2 with
| Node (l2, v2, i2, r2) ->
let h = hash i1 i2 in
if i1 = andslot1.(h) && i2 = andslot2.(h)
then andslot3.(h)
else
let f =
match cmpVar v1 v2 with
| EQUAL -> mkNode (and2 l1 l2) v1 (and2 r1 r2)
| LESS -> mkNode (and2 l1 n2) v1 (and2 r1 n2)
| GREATER -> mkNode (and2 n1 l2) v2 (and2 n1 r2)
in
andslot1.(h) <- i1; andslot2.(h) <- i2; andslot3.(h) <- f; f
| Zero -> Zero
| One -> n1 )
| Zero -> Zero
| One -> n2
let rec xor n1 n2 =
match n1 with
| Node (l1, v1, i1, r1) -> (
match n2 with
| Node (l2, v2, i2, r2) ->
let h = hash i1 i2 in
if i1 = andslot1.(h) && i2 = andslot2.(h)
then andslot3.(h)
else
let f =
match cmpVar v1 v2 with
| EQUAL -> mkNode (xor l1 l2) v1 (xor r1 r2)
| LESS -> mkNode (xor l1 n2) v1 (xor r1 n2)
| GREATER -> mkNode (xor n1 l2) v2 (xor n1 r2)
in
andslot1.(h) <- i1; andslot2.(h) <- i2; andslot3.(h) <- f; f
| Zero -> n1
| One -> not n1 )
| Zero -> n2
| One -> not n2
let hwb n =
let rec h i j =
if i = j
then mkVar i
else xor (and2 (not (mkVar j)) (h i (j - 1))) (and2 (mkVar j) (g i (j - 1)))
and g i j =
if i = j
then mkVar i
else xor (and2 (not (mkVar i)) (h (i + 1) j)) (and2 (mkVar i) (g (i + 1) j))
in
h 0 (n - 1)
let seed = ref 0
let random () =
seed := (!seed * 25173) + 17431;
!seed land 1 > 0
let random_vars n =
let vars = Array.make n false in
for i = 0 to n - 1 do
vars.(i) <- random ()
done;
vars
let test_hwb bdd vars =
We should have
eval bdd vars = vars.(n-1 ) if n > 0
eval bdd vars = false if n = 0
where n is the number of " true " elements in vars .
eval bdd vars = vars.(n-1) if n > 0
eval bdd vars = false if n = 0
where n is the number of "true" elements in vars. *)
let ntrue = ref 0 in
for i = 0 to Array.length vars - 1 do
if vars.(i) then incr ntrue
done;
eval bdd vars = if !ntrue > 0 then vars.(!ntrue - 1) else false
let main () =
let n = if Array.length Sys.argv >= 2 then int_of_string Sys.argv.(1) else 22 in
let ntests = if Array.length Sys.argv >= 3 then int_of_string Sys.argv.(2) else 100 in
let bdd = hwb n in
let succeeded = ref true in
for _ = 1 to ntests do
succeeded := !succeeded && test_hwb bdd (random_vars n)
done;
assert !succeeded
if ! succeeded
then print_string " OK\n "
else print_string " " ;
Format.eprintf " % d@. " ! nodeC ;
exit 0
if !succeeded
then print_string "OK\n"
else print_string "FAILED\n";
Format.eprintf "%d@." !nodeC;
exit 0
*)
let _ = main ()
|
7762d02a194be377efea84deabd38d70dc01e5810a6aa1263f9a7a74d2c6217f | gfngfn/SATySFi | optionState.mli |
open MyUtil
type input_kind =
| SATySFi
| Markdown of string
val set_input_kind : input_kind -> unit
val get_input_kind : unit -> input_kind
val set_input_file : abs_path -> unit
val input_file : unit -> abs_path option
val job_directory : unit -> string
val set_output_file : abs_path -> unit
val output_file : unit -> abs_path option
val set_type_check_only : unit -> unit
val type_check_only : unit -> bool
val set_bytecomp_mode : unit -> unit
val bytecomp_mode : unit -> bool
val set_show_full_path : unit -> unit
val show_full_path : unit -> bool
val set_show_fonts : unit -> unit
val show_fonts : unit -> bool
val set_debug_show_bbox : unit -> unit
val debug_show_bbox : unit -> bool
val set_debug_show_space : unit -> unit
val debug_show_space : unit -> bool
val set_debug_show_block_bbox : unit -> unit
val debug_show_block_bbox : unit -> bool
val set_debug_show_block_space : unit -> unit
val debug_show_block_space : unit -> bool
val set_debug_show_overfull : unit -> unit
val debug_show_overfull : unit -> bool
val set_text_mode : string list -> unit
val get_mode : unit -> (string list) option
val is_text_mode : unit -> bool
val set_extra_config_paths : string list -> unit
val get_extra_config_paths : unit -> string list option
val set_no_default_config_paths : unit -> unit
val get_no_default_config_paths : unit -> bool
val set_page_number_limit : int -> unit
val get_page_number_limit : unit -> int
| null | https://raw.githubusercontent.com/gfngfn/SATySFi/9dbd61df0ab05943b3394830c371e927df45251a/src/backend/optionState.mli | ocaml |
open MyUtil
type input_kind =
| SATySFi
| Markdown of string
val set_input_kind : input_kind -> unit
val get_input_kind : unit -> input_kind
val set_input_file : abs_path -> unit
val input_file : unit -> abs_path option
val job_directory : unit -> string
val set_output_file : abs_path -> unit
val output_file : unit -> abs_path option
val set_type_check_only : unit -> unit
val type_check_only : unit -> bool
val set_bytecomp_mode : unit -> unit
val bytecomp_mode : unit -> bool
val set_show_full_path : unit -> unit
val show_full_path : unit -> bool
val set_show_fonts : unit -> unit
val show_fonts : unit -> bool
val set_debug_show_bbox : unit -> unit
val debug_show_bbox : unit -> bool
val set_debug_show_space : unit -> unit
val debug_show_space : unit -> bool
val set_debug_show_block_bbox : unit -> unit
val debug_show_block_bbox : unit -> bool
val set_debug_show_block_space : unit -> unit
val debug_show_block_space : unit -> bool
val set_debug_show_overfull : unit -> unit
val debug_show_overfull : unit -> bool
val set_text_mode : string list -> unit
val get_mode : unit -> (string list) option
val is_text_mode : unit -> bool
val set_extra_config_paths : string list -> unit
val get_extra_config_paths : unit -> string list option
val set_no_default_config_paths : unit -> unit
val get_no_default_config_paths : unit -> bool
val set_page_number_limit : int -> unit
val get_page_number_limit : unit -> int
|
|
f35c51d07602af766e22653a30fd8bd2d474bda49d41f5e9067b7b1751bd156d | eerohele/enne | core.cljc | (ns enne.core
"Generate random Finnish names from data supplied by the Finnish Population Register Centre.
The data is extracted from Excel spreadsheets available at avoindata.fi. If new Excel spreadsheets are made available,
update the URLs in resource/source.edn and run (enne.retriever/retrieve!)"
(:require [clojure.string :as string]
[enne.data :as data]))
(def ^:private middle-name-odds
"The odds for having 0-2 middle names."
{0.05 0
0.55 2
1.00 1})
(defn- number-of-middle-names
"Conjure a random number (between 0 and 2) of middle names to generate."
[]
(let [n (rand)]
(second (first (drop-while #(< (key %) n) middle-name-odds)))))
(defn last-name
[]
(rand-nth (:last data/names)))
(defn female-first-name
[]
(rand-nth (:female/first data/names)))
(defn female-middle-name
[]
(rand-nth (:female/middle data/names)))
(defn male-first-name
[]
(rand-nth (:male/first data/names)))
(defn male-middle-name
[]
(rand-nth (:male/middle data/names)))
(defn middle-names
[middle]
(take (number-of-middle-names) (repeatedly (partial rand-nth middle))))
(defn generate
"Generate an infinite sequence of names."
[last first middle]
(repeatedly #(list* (rand-nth last) (rand-nth first) (middle-names middle))))
(defn female-names
"Generate an infinite sequence of (or `n` if given) female names.
Returns a list where the first item is the last name and the rest are first names.
Example:
(female-names 1)
=> ((Brännare Anais Cassandra Therese))"
([]
(generate (:last data/names) (:female/first data/names) (:female/middle data/names)))
([n]
(take n (female-names))))
(defn male-names
"Generate an infinite sequence of (or `n` if given) male names.
Returns a list where the first item is the last name and the rest are first names.
Example:
(male-names 1)
=> ((Höylä Jared Roobert))"
([]
(generate (:last data/names) (:male/first data/names) (:male/middle data/names)))
([n]
(take n (male-names))))
(defn male-name
[]
(-> 1 male-names first))
(defn male-first-names
[]
(list* (rand-nth (:male/first data/names)) (middle-names (:male/middle data/names))))
(defn female-first-names
[]
(list* (rand-nth (:female/first data/names)) (middle-names (:female/middle data/names))))
(defn female-name
[]
(-> 1 female-names first))
(defn rand-name
([]
((rand-nth [male-name female-name])))
([sex]
(case sex
:sex/female (female-name)
:sex/male (male-name)
(rand-name))))
(defn as-string
"Turn a name into a string."
[[last-name & first-names]]
(str last-name ", " (string/join " " first-names)))
(defn as-strings
"Turn a list of names into strings.
Example:
(strings (male-names 3))
=> (\"Aartola, Süleyman Altti Joonathan\" \"Sarajärvi, Mio Samu Erno\" \"Yömaa, Romeo Phuc Aabel\")"
[names]
(map as-string names))
(defn municipality
[]
(rand-nth data/municipalities))
| null | https://raw.githubusercontent.com/eerohele/enne/07e9112261203631b55bfd742ae3f4ac4c3cbb71/src/enne/core.cljc | clojure | (ns enne.core
"Generate random Finnish names from data supplied by the Finnish Population Register Centre.
The data is extracted from Excel spreadsheets available at avoindata.fi. If new Excel spreadsheets are made available,
update the URLs in resource/source.edn and run (enne.retriever/retrieve!)"
(:require [clojure.string :as string]
[enne.data :as data]))
(def ^:private middle-name-odds
"The odds for having 0-2 middle names."
{0.05 0
0.55 2
1.00 1})
(defn- number-of-middle-names
"Conjure a random number (between 0 and 2) of middle names to generate."
[]
(let [n (rand)]
(second (first (drop-while #(< (key %) n) middle-name-odds)))))
(defn last-name
[]
(rand-nth (:last data/names)))
(defn female-first-name
[]
(rand-nth (:female/first data/names)))
(defn female-middle-name
[]
(rand-nth (:female/middle data/names)))
(defn male-first-name
[]
(rand-nth (:male/first data/names)))
(defn male-middle-name
[]
(rand-nth (:male/middle data/names)))
(defn middle-names
[middle]
(take (number-of-middle-names) (repeatedly (partial rand-nth middle))))
(defn generate
"Generate an infinite sequence of names."
[last first middle]
(repeatedly #(list* (rand-nth last) (rand-nth first) (middle-names middle))))
(defn female-names
"Generate an infinite sequence of (or `n` if given) female names.
Returns a list where the first item is the last name and the rest are first names.
Example:
(female-names 1)
=> ((Brännare Anais Cassandra Therese))"
([]
(generate (:last data/names) (:female/first data/names) (:female/middle data/names)))
([n]
(take n (female-names))))
(defn male-names
"Generate an infinite sequence of (or `n` if given) male names.
Returns a list where the first item is the last name and the rest are first names.
Example:
(male-names 1)
=> ((Höylä Jared Roobert))"
([]
(generate (:last data/names) (:male/first data/names) (:male/middle data/names)))
([n]
(take n (male-names))))
(defn male-name
[]
(-> 1 male-names first))
(defn male-first-names
[]
(list* (rand-nth (:male/first data/names)) (middle-names (:male/middle data/names))))
(defn female-first-names
[]
(list* (rand-nth (:female/first data/names)) (middle-names (:female/middle data/names))))
(defn female-name
[]
(-> 1 female-names first))
(defn rand-name
([]
((rand-nth [male-name female-name])))
([sex]
(case sex
:sex/female (female-name)
:sex/male (male-name)
(rand-name))))
(defn as-string
"Turn a name into a string."
[[last-name & first-names]]
(str last-name ", " (string/join " " first-names)))
(defn as-strings
"Turn a list of names into strings.
Example:
(strings (male-names 3))
=> (\"Aartola, Süleyman Altti Joonathan\" \"Sarajärvi, Mio Samu Erno\" \"Yömaa, Romeo Phuc Aabel\")"
[names]
(map as-string names))
(defn municipality
[]
(rand-nth data/municipalities))
|
|
0402c9e74112096cac57b8604f0bc7e3480f2db32a1cb913659fe36c35a542dc | ngorogiannis/cyclist | int.ml | module IntType : Utilsigs.BasicType with type t = int = struct
type t = int
let compare (i : t) (j : t) = if i < j then -1 else if i > j then 1 else 0
let equal (i : t) (j : t) = i = j
let hash (i : t) = Hashtbl.hash i
let to_string = string_of_int
let pp = Format.pp_print_int
end
include IntType
include Containers.Make (IntType)
let min (i : int) (j : int) = Stdlib.min i j
let max (i : int) (j : int) = Stdlib.max i j
let ( < ) (i : int) (j : int) = Stdlib.( < ) i j
let ( <= ) (i : int) (j : int) = Stdlib.( <= ) i j
let ( > ) (i : int) (j : int) = Stdlib.( > ) i j
let ( >= ) (i : int) (j : int) = Stdlib.( >= ) i j
let ( <> ) (i : int) (j : int) = Stdlib.( <> ) i j
let ( = ) i j = equal i j
| null | https://raw.githubusercontent.com/ngorogiannis/cyclist/c93a168d586b308ab2a2c730cd1b2375ab263167/src/lib/int.ml | ocaml | module IntType : Utilsigs.BasicType with type t = int = struct
type t = int
let compare (i : t) (j : t) = if i < j then -1 else if i > j then 1 else 0
let equal (i : t) (j : t) = i = j
let hash (i : t) = Hashtbl.hash i
let to_string = string_of_int
let pp = Format.pp_print_int
end
include IntType
include Containers.Make (IntType)
let min (i : int) (j : int) = Stdlib.min i j
let max (i : int) (j : int) = Stdlib.max i j
let ( < ) (i : int) (j : int) = Stdlib.( < ) i j
let ( <= ) (i : int) (j : int) = Stdlib.( <= ) i j
let ( > ) (i : int) (j : int) = Stdlib.( > ) i j
let ( >= ) (i : int) (j : int) = Stdlib.( >= ) i j
let ( <> ) (i : int) (j : int) = Stdlib.( <> ) i j
let ( = ) i j = equal i j
|
|
5591bc83da1e2a0d0e8b3f7c60816cfcd6c13d0dd922199742b40f0775e00dad | haskell/statistics | Runs.hs | {-# LANGUAGE BangPatterns #-}
{-# LANGUAGE FlexibleContexts #-}
-- | Goodness of fit runs tests.
module Statistics.Test.Runs (
runsTest
, module Statistics.Test.Types
) where
import qualified Data.Vector.Generic as G
import qualified Data.Vector.Unboxed as U
import Numeric.SpecFunctions (choose)
import Prelude hiding (sum)
import Statistics.Sample.Internal (sum)
import Statistics.Test.Types
import Statistics.Types
-- | Goodness of fit test for binned data. It uses only sign of
-- deviations of observations from their expectations. Null
-- hypothesis is that all possible patterns of sign occurrences are
-- equiprobable.
--
-- It's asymptotically independent from chi-square test. So their
-- results could be directly combined
runsTest :: (G.Vector v Bool) => v Bool -> Test ()
{-# INLINE runsTest #-}
runsTest v
= Test { testSignificance = mkPValue $ cumulativeProb n m r
, testStatistics = fromIntegral r
, testDistribution = ()
}
where
(n,m,r) = computeRuns v
-- Compute number of positive elements, negative elements and runs
computeRuns :: (G.Vector v Bool) => v Bool -> (Int,Int,Int)
{-# INLINE computeRuns #-}
computeRuns v
= fini $ G.foldl' step (0,0,0,Nothing) v
where
step (!nP,!nM,!nR,!old) f =
( if f then nP+1 else nP
, if f then nM else nM+1
, if old == Just f then nR else nR+1
, Just f
)
fini (nP,nM,nR,_) = (nP,nM,nR)
-- Compute denormalized probability of getting R runs given N positive
-- and M negative elements
denormProbability :: Int -> Int -> Int -> Double
denormProbability n m r
| even r = 2 * ((m-1) `choose` (s-1)) * ((n-1) `choose` (s-1))
| otherwise = ((m-1) `choose` (s-1)) * ((n-1) `choose` (s-2))
+ ((m-1) `choose` (s-2)) * ((n-1) `choose` (s-1))
where
s = r `quot` 2
-- Probability of getting R<=R[observed]
cumulativeProb :: Int -> Int -> Int -> Double
cumulativeProb n m r
= min 1
$ sum (U.map (denormProbability n m) $ U.enumFromTo 1 r)
/ ((n+m) `choose` m)
| null | https://raw.githubusercontent.com/haskell/statistics/a2aa25181e50cd63db4a785c20c973a3c4dd5dac/Statistics/Test/Runs.hs | haskell | # LANGUAGE BangPatterns #
| Goodness of fit runs tests.
| Goodness of fit test for binned data. It uses only sign of
deviations of observations from their expectations. Null
hypothesis is that all possible patterns of sign occurrences are
equiprobable.
It's asymptotically independent from chi-square test. So their
results could be directly combined
Compute number of positive elements, negative elements and runs
Compute denormalized probability of getting R runs given N positive
and M positive elements
Probability of getting R<=R[observed] | # LANGUAGE FlexibleContexts #
module Statistics.Test.Runs (
runsTest
, module Statistics.Test.Types
) where
import qualified Data.Vector.Generic as G
import qualified Data.Vector.Unboxed as U
import Numeric.SpecFunctions (choose)
import Prelude hiding (sum)
import Statistics.Sample.Internal (sum)
import Statistics.Test.Types
import Statistics.Types
runsTest :: (G.Vector v Bool) => v Bool -> Test ()
# INLINE runsTest #
runsTest v
= Test { testSignificance = mkPValue $ cumulativeProb n m r
, testStatistics = fromIntegral r
, testDistribution = ()
}
where
(n,m,r) = computeRuns v
computeRuns :: (G.Vector v Bool) => v Bool -> (Int,Int,Int)
# INLINE computeRuns #
computeRuns v
= fini $ G.foldl' step (0,0,0,Nothing) v
where
step (!nP,!nM,!nR,!old) f =
( if f then nP+1 else nP
, if f then nM else nM+1
, if old == Just f then nR else nR+1
, Just f
)
fini (nP,nM,nR,_) = (nP,nM,nR)
denormProbability :: Int -> Int -> Int -> Double
denormProbability n m r
| even r = 2 * ((m-1) `choose` (s-1)) * ((n-1) `choose` (s-1))
| otherwise = ((m-1) `choose` (s-1)) * ((n-1) `choose` (s-2))
+ ((m-1) `choose` (s-2)) * ((n-1) `choose` (s-1))
where
s = r `quot` 2
cumulativeProb :: Int -> Int -> Int -> Double
cumulativeProb n m r
= min 1
$ sum (U.map (denormProbability n m) $ U.enumFromTo 1 r)
/ ((n+m) `choose` m)
|
df4136360fd50e18f2bf941531ca10bb33931f0b114c5f2fa137abf8f428fee3 | tov/shcaml | iVar.mli | * One - shot interprocess exceptions and variables .
(**
* {1 Interprocess Exceptions}
*)
(** Relay exceptions from a subprocess.

    [with_interprocess_protect kont] calls [kont] with one
    argument, [protect: (unit -> 'a) -> 'a].

    The function [kont] {b must} fork into an {i observer} process and an
    {i observed} process. The observed process {b must not} return from the
    call to [kont]; it must, however, call [protect thunk] exactly
    once with some thunk, which [protect] will call. When [kont]
    returns in the observer process, it blocks until the thunk
    returns. If the thunk returns normally (or execs, or exits), then
    [protect thunk] returns the result of the thunk in the observed
    process, and [with_interprocess_protect] returns the result of
    [kont] in the observer process. However, if the thunk raises an
    exception, then the observed process terminates with status 2 and
    the call to [with_interprocess_protect] in the {i observer}
    returns abnormally by re-raising the exception.

    In this example, if {!Proc.exec} raises an exception in the child process,
    then [with_interprocess_protect] will re-raise that exception in
    the parent process:

    {[
      with_interprocess_protect
        (fun protect ->
           match Proc.fork () with
           | None ->
               protect (fun () -> Proc.exec prog args);
               exit 3 (* can't happen *)
           | Some proc -> proc)
    ]}
*)
val with_interprocess_protect
: (((unit -> 'a) -> 'a) -> 'b) -> 'b
(** Relay exceptions from another process.

    [with_interprocess_raise_and_okay kont] calls [kont] with two
    arguments, [{
      oops : exn -> unit
      okay : unit -> unit
    }]

    The function [kont] {b must} fork into an {i observed} process and an
    {i observer} process. When the call to [kont] returns in
    the {i observer} process, [with_interprocess_raise_and_okay] then
    waits for either [oops] or [okay] to be called in the {i observed}
    process. If [okay ()] is called, then it returns the result of
    [kont]; if [oops e] is called, then it instead raises the exception
    [e] in the {i observer} process. If the observed process fails to call
    either [oops] or [okay], then the observer process will block
    indefinitely. *)
val with_interprocess_raise_and_okay
: ((exn -> unit) -> (unit -> unit) -> 'b) -> 'b
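(* Illustrative sketch (added for clarity; not part of the original
   interface): one way [with_interprocess_raise_and_okay] can be used.
   [Proc.fork] is the same primitive used in the example above;
   [run_child_work] is a hypothetical thunk standing for whatever the
   observed process has to do.
   {[
     let proc =
       with_interprocess_raise_and_okay
         (fun oops okay ->
           match Proc.fork () with
           | None ->
               (match run_child_work () with
                | () -> okay ()
                | exception e -> oops e);
               exit 0
           | Some proc -> proc)
   ]}
   If [run_child_work] raises, the exception is re-raised in the observer;
   otherwise the observer gets [proc] back. *)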
(**
* {1 Interprocess Variables}
*)
type 'a read_end
(** The read-end of an interprocess variable. *)
type 'a write_end
(** The write-end of an interprocess variable. *)
(** Raised on attempts to re-use an {!IVar}.
IVars allow (require, in fact) exactly one read and one write.
*)
exception Dead
(** Create a channel pair [(r, w)]. The protocol is then as
follows. One process must execute:
- [read r]
This operation will block, until another process does one of:
- [write w v] : read returns [Some v]
- [close w] : read returns [None]
- [exec ...] : read returns [None]
- [exit ...] : read returns [None]
The {!write} call may or may not block, depending on the underlying
implementation. In any case, it is {b imperative} that read
happens in a separate process from the write/close/exec/exit, or
the program may block indefinitely. *)
val create : unit -> 'a read_end * 'a write_end
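(* Illustrative sketch (added for clarity; not part of the original
   interface): the intended split of [read] and [write] across two
   processes, reusing [Proc.fork] from the examples above.
   {[
     let r, w = create () in
     match Proc.fork () with
     | None -> write w 42; exit 0 (* observed side: exactly one write *)
     | Some _ -> read r           (* observer side: [Some 42], or [None]
                                     if the child exited before writing *)
   ]}
   Keeping [read] and [write]/[close] in separate processes is exactly what
   the warning above is about. *)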
(** Read an ['a option] from an {!IVar}.
Blocks until the associated {!write_end} is written or closed.
If [x] is written on the other end, returns [Some x]; if
the other end is closed (including by exit or exec), returns
[None].
*)
val read : 'a read_end -> 'a option
(** Write to an {!IVar}. *)
val write : 'a write_end -> 'a -> unit
(** Close an {!IVar} without writing. *)
val close : 'a write_end -> unit
| null | https://raw.githubusercontent.com/tov/shcaml/43ae852a00e3a11520f90f2451baa71863409774/lib/iVar.mli | ocaml | *
* {1 Interprocess Exceptions}
can't happen
*
* {1 Interprocess Variables}
* The read-end of an interprocess variable.
* The write-end of an interprocess variable.
* Write to an {!IVar}. | * One - shot interprocess exceptions and variables .
* Relay exceptions from a subprocess .
[ with_interprocess_protect kont ] calls [ kont ] with one
argument , [ protect : ( unit - > ' a ) - > ' a ] .
The function [ kont ] { b must } fork into an { i observer } process and an
{ i observed } process . The observed process { b must not } return from the
call to [ kont ] ; it must , however , call [ protect thunk ] exactly
once with some thunk , which [ protect ] will call . When [ kont ]
returns in the observer process , it blocks until the thunk
returns . If the thunk returns normally ( or execs , or exits ) , then
[ protect thunk ] returns the result of the thunk in the observed
process , and [ with_interprocess_protect ] returns the result of
[ kont ] in the observer process . However , if the thunk raises an
exception , then the observed process terminates with status 2 and
the call to [ with_interprocess_protect ] in the { i observer }
returns abnormally by re - raising the exception .
In this example , if { ! Proc.exec } raises an exception in the child process ,
then [ with_interprocess_protect ] will re - raise that exception in
the parent process :
{ [
with_interprocess_protect
( fun protect - >
match Proc.fork ( ) with
| None - >
protect ( fun ( ) - > Proc.exec prog args ) ;
exit 3 ( * ca n't happen
[with_interprocess_protect kont] calls [kont] with one
argument, [protect: (unit -> 'a) -> 'a].
The function [kont] {b must} fork into an {i observer} process and an
{i observed} process. The observed process {b must not} return from the
call to [kont]; it must, however, call [protect thunk] exactly
once with some thunk, which [protect] will call. When [kont]
returns in the observer process, it blocks until the thunk
returns. If the thunk returns normally (or execs, or exits), then
[protect thunk] returns the result of the thunk in the observed
process, and [with_interprocess_protect] returns the result of
[kont] in the observer process. However, if the thunk raises an
exception, then the observed process terminates with status 2 and
the call to [with_interprocess_protect] in the {i observer}
returns abnormally by re-raising the exception.
In this example, if {!Proc.exec} raises an exception in the child process,
then [with_interprocess_protect] will re-raise that exception in
the parent process:
{[
with_interprocess_protect
(fun protect ->
match Proc.fork () with
| None ->
protect (fun () -> Proc.exec prog args);
| Some proc -> proc)
]}
*)
val with_interprocess_protect
: (((unit -> 'a) -> 'a) -> 'b) -> 'b
* Relay exceptions from another process .
[ with_interprocess_raise_and_okay kont ] calls [ kont ] with two
arguments , [ {
oops : exn - > unit
okay : unit - > unit
} ]
The function [ kont ] { b must } fork into an { i observed } process and an
{ i observer } process . When the call to [ kont ] returns in
the { i observer } process , [ with_interprocess_raise_and_okay ] then
waits for either [ oops ] or [ okay ] to be called in the { i observed }
process . If [ okay ( ) ] is called , then it returns the result of
[ kont ] ; if [ oops e ] is called , then it instead raises the exception
[ e ] in the { i observer } process . If the observed process fails to call
either [ oops ] or [ okay ] , then the observer process will block
indefinitely .
[with_interprocess_raise_and_okay kont] calls [kont] with two
arguments, [{
oops : exn -> unit
okay : unit -> unit
}]
The function [kont] {b must} fork into an {i observed} process and an
{i observer} process. When the call to [kont] returns in
the {i observer} process, [with_interprocess_raise_and_okay] then
waits for either [oops] or [okay] to be called in the {i observed}
process. If [okay ()] is called, then it returns the result of
[kont]; if [oops e] is called, then it instead raises the exception
[e] in the {i observer} process. If the observed process fails to call
either [oops] or [okay], then the observer process will block
indefinitely. *)
val with_interprocess_raise_and_okay
: ((exn -> unit) -> (unit -> unit) -> 'b) -> 'b
type 'a read_end
type 'a write_end
* Raised on attempts to re - use an { ! IVar } .
IVars allow ( require , in fact ) exactly one read and one write .
IVars allow (require, in fact) exactly one read and one write.
*)
exception Dead
* Create a channel pair [ ( r , w ) ] . The protocol is then as
follows . One process must execute :
- [ read r ]
This operation will block , until another process does one of :
- [ write w v ] : read returns [ Some v ]
- [ close w ] : read returns [ None ]
- [ exec ... ] : read returns [ None ]
- [ exit ... ] : read returns [ None ]
The { ! write } call may or may not block , depending on the underlying
implementation . In any case , it is { b imperative } that read
happens in a separate process from the write / close / exec / exit , or
the program may block indefinitely .
follows. One process must execute:
- [read r]
This operation will block, until another process does one of:
- [write w v] : read returns [Some v]
- [close w] : read returns [None]
- [exec ...] : read returns [None]
- [exit ...] : read returns [None ]
The {!write} call may or may not block, depending on the underlying
implementation. In any case, it is {b imperative} that read
happens in a separate process from the write/close/exec/exit, or
the program may block indefinitely. *)
val create : unit -> 'a read_end * 'a write_end
* Read an [ ' a option ] from an { ! IVar } .
Blocks until the associated { ! write_end } is written or closed .
If [ x ] is written on the other end , returns [ Some x ] ; if
the other end is closed ( including by exit or exec ) , returns
[ None ] .
Blocks until the associated {!write_end} is written or closed.
If [x] is written on the other end, returns [Some x]; if
the other end is closed (including by exit or exec), returns
[None].
*)
val read : 'a read_end -> 'a option
val write : 'a write_end -> 'a -> unit
* Close an { ! IVar } without writing .
val close : 'a write_end -> unit
|
5619049806e26479b4a69d83bb3c626c4204f730f3ffa5734f9fdf67e987905b | simongray/datalinguist | util.clj | (ns dk.simongray.datalinguist.util
"Various utility functions used from the other namespaces, along with
collections of more or less static data."
(:require [clojure.string :as str])
(:import [java.util Properties]
[edu.stanford.nlp.ling CoreLabel
CoreLabel$OutputFormat
CoreAnnotations$TrueCaseTextAnnotation
CoreAnnotations$PartOfSpeechAnnotation]))
;; TODO: document annotator support for the official language models
;; -languages.html
;; TODO: implement a spellcheck for annotators during pipeline creation
;;
(def annotators
"The list of annotators included with CoreNLP."
#{"cdc"
"cleanxml"
"coref"
"coref.mention"
"dcoref"
"depparse"
"docdate"
"entitylink"
"entitymentions"
"gender"
"kbp"
"lemma"
"mwt"
"natlog"
"ner"
"openie"
"parse"
"pos"
"quote"
"quote.attribution"
"regexner"
"relation"
"sentiment"
"ssplit"
"tokenize"
"tokensregex"
"truecase"
"udfeats"})
(def corelabel-formats
"Ways to format CoreLabels. Per the convention of CoreNLP, word = value."
{:all CoreLabel$OutputFormat/ALL
:lemma-index CoreLabel$OutputFormat/LEMMA_INDEX
:map CoreLabel$OutputFormat/MAP
:value CoreLabel$OutputFormat/VALUE
:value-index CoreLabel$OutputFormat/VALUE_INDEX
:value-index-map CoreLabel$OutputFormat/VALUE_INDEX_MAP
:value-map CoreLabel$OutputFormat/VALUE_MAP
:value-tag CoreLabel$OutputFormat/VALUE_TAG
:value-tag-index CoreLabel$OutputFormat/VALUE_TAG_INDEX
:value-tag-ner CoreLabel$OutputFormat/VALUE_TAG_NER
:word CoreLabel$OutputFormat/WORD
:word-index CoreLabel$OutputFormat/WORD_INDEX})
(def punctuation-tags
"Part-of-speech tags for punctuation, copied from getPunctuationTags() in the
`edu.stanford.nlp.parser.nndep.ParsingSystem` abstract class."
#{"''"
","
"."
":"
"``"
"-LRB-"
"-RRB-"})
(def configs
"Example pipeline configurations for various languages or special setups."
;; Adapted from configuration previously found on this page (now missing):
;; -languages.html#chinese
{:chinese {:annotators "tokenize,ssplit,pos,parse,depparse",
:parse {:model "edu/stanford/nlp/models/srparser/chineseSR.ser.gz"}
:depparse {:model "edu/stanford/nlp/models/parser/nndep/UD_Chinese.gz"},
:ndepparse {:language "chinese"},
:tokenize {:language "zh"},
:segment {:model "edu/stanford/nlp/models/segmenter/chinese/ctb.gz",
:sighanCorporaDict "edu/stanford/nlp/models/segmenter/chinese",
:serDictionary "edu/stanford/nlp/models/segmenter/chinese/dict-chris6.ser.gz",
:sighanPostProcessing "true"},
:ssplit {:boundaryTokenRegex "[.。]|[!?!?]+"},
:pos {:model "edu/stanford/nlp/models/pos-tagger/chinese-distsim.tagger"}}})
(defn- keys-in
"Get the nested keys in map `m`."
[m]
(let [f (fn [[k v]]
(let [nested-ks (filter (comp not empty?) (keys-in v))
append-ks (fn [path] (into [k] path))
kscoll (map append-ks nested-ks)]
(if (seq kscoll)
kscoll
[[k]])))]
(if (map? m)
(vec (mapcat f m))
[])))
(defn- ks->str
"Convert `ks` (e.g. from keys-in) to a flattened CoreNLP key."
[ks]
(str/join "." (map name ks)))
(defn- flatten-map
"Flatten a map `m` of nested keys."
[m]
(let [kscoll (keys-in m)
flat-k+v (fn [ks] [(ks->str ks) (get-in m ks)])]
(into {} (map flat-k+v kscoll))))
(defn properties
"Make a Properties object based on a map `m`."
[m]
(doto (Properties.)
(.putAll (flatten-map m))))
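;; Illustrative example (added for clarity; not from the original source):
;; nested config maps are flattened into dotted CoreNLP property names
;; before being copied into the java.util.Properties object.
(comment
  (properties {:pos {:model "edu/stanford/nlp/models/pos-tagger/chinese-distsim.tagger"}})
  ;; => Properties containing {"pos.model" "edu/stanford/nlp/models/pos-tagger/chinese-distsim.tagger"}
  )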
(defn tokens->string
"Get a normalised string representation of the given `tokens`."
[tokens]
(let [token->text (fn [^CoreLabel token]
(str
(or (.before token) " ")
(or (.get token CoreAnnotations$TrueCaseTextAnnotation)
(.word token))))]
(str/triml (str/join (map token->text tokens)))))
(defn tokens->keyword
"Get a normalised keyword representation of the given `tokens`."
[tokens]
(let [non-word? (fn [^CoreLabel token]
(let [tag (.get token CoreAnnotations$PartOfSpeechAnnotation)]
(or (get punctuation-tags tag)
(re-matches #"\W" (.word token)))))]
(->> (remove non-word? tokens)
(map (comp str/lower-case #(.word ^CoreLabel %)))
(str/join "-")
keyword)))
| null | https://raw.githubusercontent.com/simongray/datalinguist/a496d4bd9204c502d0e869a7098975f35d492657/src/dk/simongray/datalinguist/util.clj | clojure | TODO: document annotator support for the official language models
-languages.html
TODO: implement a spellcheck for annotators during pipeline creation
Adapted from configuration previously found on this page (now missing):
-languages.html#chinese | (ns dk.simongray.datalinguist.util
"Various utility functions used from the other namespaces, along with
collections of more or less static data."
(:require [clojure.string :as str])
(:import [java.util Properties]
[edu.stanford.nlp.ling CoreLabel
CoreLabel$OutputFormat
CoreAnnotations$TrueCaseTextAnnotation
CoreAnnotations$PartOfSpeechAnnotation]))
(def annotators
"The list of annotators included with CoreNLP."
#{"cdc"
"cleanxml"
"coref"
"coref.mention"
"dcoref"
"depparse"
"docdate"
"entitylink"
"entitymentions"
"gender"
"kbp"
"lemma"
"mwt"
"natlog"
"ner"
"openie"
"parse"
"pos"
"quote"
"quote.attribution"
"regexner"
"relation"
"sentiment"
"ssplit"
"tokenize"
"tokensregex"
"truecase"
"udfeats"})
(def corelabel-formats
"Ways to format CoreLabels. Per the convention of CoreNLP, word = value."
{:all CoreLabel$OutputFormat/ALL
:lemma-index CoreLabel$OutputFormat/LEMMA_INDEX
:map CoreLabel$OutputFormat/MAP
:value CoreLabel$OutputFormat/VALUE
:value-index CoreLabel$OutputFormat/VALUE_INDEX
:value-index-map CoreLabel$OutputFormat/VALUE_INDEX_MAP
:value-map CoreLabel$OutputFormat/VALUE_MAP
:value-tag CoreLabel$OutputFormat/VALUE_TAG
:value-tag-index CoreLabel$OutputFormat/VALUE_TAG_INDEX
:value-tag-ner CoreLabel$OutputFormat/VALUE_TAG_NER
:word CoreLabel$OutputFormat/WORD
:word-index CoreLabel$OutputFormat/WORD_INDEX})
(def punctuation-tags
"Part-of-speech tags for punctuation, copied from getPunctuationTags() in the
`edu.stanford.nlp.parser.nndep.ParsingSystem` abstract class."
#{"''"
","
"."
":"
"``"
"-LRB-"
"-RRB-"})
(def configs
"Example pipeline configurations for various languages or special setups."
{:chinese {:annotators "tokenize,ssplit,pos,parse,depparse",
:parse {:model "edu/stanford/nlp/models/srparser/chineseSR.ser.gz"}
:depparse {:model "edu/stanford/nlp/models/parser/nndep/UD_Chinese.gz"},
:ndepparse {:language "chinese"},
:tokenize {:language "zh"},
:segment {:model "edu/stanford/nlp/models/segmenter/chinese/ctb.gz",
:sighanCorporaDict "edu/stanford/nlp/models/segmenter/chinese",
:serDictionary "edu/stanford/nlp/models/segmenter/chinese/dict-chris6.ser.gz",
:sighanPostProcessing "true"},
:ssplit {:boundaryTokenRegex "[.。]|[!?!?]+"},
:pos {:model "edu/stanford/nlp/models/pos-tagger/chinese-distsim.tagger"}}})
(defn- keys-in
"Get the nested keys in map `m`."
[m]
(let [f (fn [[k v]]
(let [nested-ks (filter (comp not empty?) (keys-in v))
append-ks (fn [path] (into [k] path))
kscoll (map append-ks nested-ks)]
(if (seq kscoll)
kscoll
[[k]])))]
(if (map? m)
(vec (mapcat f m))
[])))
(defn- ks->str
"Convert `ks` (e.g. from keys-in) to a flattened CoreNLP key."
[ks]
(str/join "." (map name ks)))
(defn- flatten-map
"Flatten a map `m` of nested keys."
[m]
(let [kscoll (keys-in m)
flat-k+v (fn [ks] [(ks->str ks) (get-in m ks)])]
(into {} (map flat-k+v kscoll))))
(defn properties
"Make a Properties object based on a map `m`."
[m]
(doto (Properties.)
(.putAll (flatten-map m))))
(defn tokens->string
"Get a normalised string representation of the given `tokens`."
[tokens]
(let [token->text (fn [^CoreLabel token]
(str
(or (.before token) " ")
(or (.get token CoreAnnotations$TrueCaseTextAnnotation)
(.word token))))]
(str/triml (str/join (map token->text tokens)))))
(defn tokens->keyword
"Get a normalised keyword representation of the given `tokens`."
[tokens]
(let [non-word? (fn [^CoreLabel token]
(let [tag (.get token CoreAnnotations$PartOfSpeechAnnotation)]
(or (get punctuation-tags tag)
(re-matches #"\W" (.word token)))))]
(->> (remove non-word? tokens)
(map (comp str/lower-case #(.word ^CoreLabel %)))
(str/join "-")
keyword)))
|
00c1be3513899f7b684558dd8d23eb1083df2a7d02c22c08664e9c10ed8ff4df | static-analysis-engineering/codehawk | bCHMetricsHandler.mli | = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = =
CodeHawk Binary Analyzer
Author : and
------------------------------------------------------------------------------
The MIT License ( MIT )
Copyright ( c ) 2005 - 2019 Kestrel Technology LLC
Copyright ( c ) 2020 - 2021 ) 2022 - 2023 Aarno Labs LLC
Permission is hereby granted , free of charge , to any person obtaining a copy
of this software and associated documentation files ( the " Software " ) , to deal
in the Software without restriction , including without limitation the rights
to use , copy , modify , merge , publish , distribute , sublicense , and/or sell
copies of the Software , and to permit persons to whom the Software is
furnished to do so , subject to the following conditions :
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software .
THE SOFTWARE IS PROVIDED " AS IS " , WITHOUT WARRANTY OF ANY KIND , EXPRESS OR
, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY ,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT . IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM , DAMAGES OR OTHER
LIABILITY , WHETHER IN AN ACTION OF CONTRACT , TORT OR OTHERWISE , ARISING FROM ,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE .
= = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = =
CodeHawk Binary Analyzer
Author: Henny Sipma and Andrew McGraw
------------------------------------------------------------------------------
The MIT License (MIT)
Copyright (c) 2005-2019 Kestrel Technology LLC
Copyright (c) 2020-2021 Henny Sipma
Copyright (c) 2022-2023 Aarno Labs LLC
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
============================================================================= *)
chlib
open CHPretty
(* chutil *)
open CHXmlDocument
(* bchlib *)
open BCHLibTypes
val exports_metrics_handler: exports_metrics_t metrics_handler_int
val disassembly_metrics_handler: disassembly_metrics_t metrics_handler_int
val memacc_metrics_handler: memacc_metrics_t metrics_handler_int
val mk_prec_metrics_handler:
memacc_metrics_t -> prec_metrics_t metrics_handler_int
val cfg_metrics_handler: cfg_metrics_t metrics_handler_int
val vars_metrics_handler: vars_metrics_t metrics_handler_int
val calls_metrics_handler: calls_metrics_t metrics_handler_int
val jumps_metrics_handler: jumps_metrics_t metrics_handler_int
val cc_metrics_handler: cc_metrics_t metrics_handler_int
val invs_metrics_handler: invs_metrics_t metrics_handler_int
val tinvs_metrics_handler: tinvs_metrics_t metrics_handler_int
val result_metrics_handler: result_metrics_t metrics_handler_int
val function_run_handler: function_run_t metrics_handler_int
val mk_function_results_handler: string -> function_results_t metrics_handler_int
val file_run_handler: file_run_t metrics_handler_int
val aggregate_metrics_handler: aggregate_metrics_t metrics_handler_int
val userdata_metrics_handler: userdata_metrics_t metrics_handler_int
val ida_data_handler: ida_data_t metrics_handler_int
val file_results_handler: file_results_t metrics_handler_int
| null | https://raw.githubusercontent.com/static-analysis-engineering/codehawk/dd2c3b9f84b4b5f3c88898505ee912e1e461e809/CodeHawk/CHB/bchlib/bCHMetricsHandler.mli | ocaml | chutil
bchlib | = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = =
CodeHawk Binary Analyzer
Author : and
------------------------------------------------------------------------------
The MIT License ( MIT )
Copyright ( c ) 2005 - 2019 Kestrel Technology LLC
Copyright ( c ) 2020 - 2021 ) 2022 - 2023 Aarno Labs LLC
Permission is hereby granted , free of charge , to any person obtaining a copy
of this software and associated documentation files ( the " Software " ) , to deal
in the Software without restriction , including without limitation the rights
to use , copy , modify , merge , publish , distribute , sublicense , and/or sell
copies of the Software , and to permit persons to whom the Software is
furnished to do so , subject to the following conditions :
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software .
THE SOFTWARE IS PROVIDED " AS IS " , WITHOUT WARRANTY OF ANY KIND , EXPRESS OR
, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY ,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT . IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM , DAMAGES OR OTHER
LIABILITY , WHETHER IN AN ACTION OF CONTRACT , TORT OR OTHERWISE , ARISING FROM ,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE .
= = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = =
CodeHawk Binary Analyzer
Author: Henny Sipma and Andrew McGraw
------------------------------------------------------------------------------
The MIT License (MIT)
Copyright (c) 2005-2019 Kestrel Technology LLC
Copyright (c) 2020-2021 Henny Sipma
Copyright (c) 2022-2023 Aarno Labs LLC
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
============================================================================= *)
chlib
open CHPretty
open CHXmlDocument
open BCHLibTypes
val exports_metrics_handler: exports_metrics_t metrics_handler_int
val disassembly_metrics_handler: disassembly_metrics_t metrics_handler_int
val memacc_metrics_handler: memacc_metrics_t metrics_handler_int
val mk_prec_metrics_handler:
memacc_metrics_t -> prec_metrics_t metrics_handler_int
val cfg_metrics_handler: cfg_metrics_t metrics_handler_int
val vars_metrics_handler: vars_metrics_t metrics_handler_int
val calls_metrics_handler: calls_metrics_t metrics_handler_int
val jumps_metrics_handler: jumps_metrics_t metrics_handler_int
val cc_metrics_handler: cc_metrics_t metrics_handler_int
val invs_metrics_handler: invs_metrics_t metrics_handler_int
val tinvs_metrics_handler: tinvs_metrics_t metrics_handler_int
val result_metrics_handler: result_metrics_t metrics_handler_int
val function_run_handler: function_run_t metrics_handler_int
val mk_function_results_handler: string -> function_results_t metrics_handler_int
val file_run_handler: file_run_t metrics_handler_int
val aggregate_metrics_handler: aggregate_metrics_t metrics_handler_int
val userdata_metrics_handler: userdata_metrics_t metrics_handler_int
val ida_data_handler: ida_data_t metrics_handler_int
val file_results_handler: file_results_t metrics_handler_int
|
49dc5837fb8c6ae3c4b7577f68555571ede738e342558eb7d474056a121ab484 | phadej/cabal-refact | SetVersion.hs | {-# LANGUAGE OverloadedStrings #-}
module Distribution.Refact.Refactoring.SetVersion (
setVersionRefactoring,
) where
import Prelude ()
import Distribution.Refact.Internal.Prelude
import Distribution.Refact.Types.Pos
import Distribution.Refact.Types.Refactoring
import Distribution.Refact.Types.Structure
import Distribution.Refact.Types.Version
setVersionRefactoring :: Version -> Refactoring
setVersionRefactoring = updateVersionRefactoring . const
updateVersionRefactoring :: (Maybe Version -> Version) -> Refactoring
updateVersionRefactoring upd fs = case fs ^? topLevelField fieldName of
-- no revision, use zero
Nothing -> (review _InRField <$> insertAfterName as) <> bs
-- otherwise, update
Just _ -> fs & topLevelField fieldName . _FieldVersion . _2 %~ upd . Just
where
fieldName :: Text
fieldName = "version"
-- fields and rest
(as, bs) = spanMaybe (preview _InRField) fs
ver :: Version
ver = upd Nothing
ver' :: FieldValue D
ver' = FieldVersion (D 0 $ length (fieldName ^. unpacked) + 3) ver
insertAfterName [] = [(Name (D 1 0) fieldName, mempty, ver')]
insertAfterName (f@(Name d n, d', fls) : rest)
| n == "name"
= f
: (Name (d & dLine %~ max 1) fieldName, d', mimic d $ firstOf folded fls)
: rest
| otherwise = f : insertAfterName rest
mimic d Nothing = FieldVersion (d <> D 0 2) ver
mimic _ (Just d) = FieldVersion d ver
| null | https://raw.githubusercontent.com/phadej/cabal-refact/9442736429e498f95dc24866c97be587113206ab/src/Distribution/Refact/Refactoring/SetVersion.hs | haskell | # LANGUAGE OverloadedStrings #
otherwise, update
fields and rest | module Distribution.Refact.Refactoring.SetVersion (
setVersionRefactoring,
) where
import Prelude ()
import Distribution.Refact.Internal.Prelude
import Distribution.Refact.Types.Pos
import Distribution.Refact.Types.Refactoring
import Distribution.Refact.Types.Structure
import Distribution.Refact.Types.Version
setVersionRefactoring :: Version -> Refactoring
setVersionRefactoring = updateVersionRefactoring . const
updateVersionRefactoring :: (Maybe Version -> Version) -> Refactoring
updateVersionRefactoring upd fs = case fs ^? topLevelField fieldName of
no revision , use zero
Nothing -> (review _InRField <$> insertAfterName as) <> bs
Just _ -> fs & topLevelField fieldName . _FieldVersion . _2 %~ upd . Just
where
fieldName :: Text
fieldName = "version"
(as, bs) = spanMaybe (preview _InRField) fs
ver :: Version
ver = upd Nothing
ver' :: FieldValue D
ver' = FieldVersion (D 0 $ length (fieldName ^. unpacked) + 3) ver
insertAfterName [] = [(Name (D 1 0) fieldName, mempty, ver')]
insertAfterName (f@(Name d n, d', fls) : rest)
| n == "name"
= f
: (Name (d & dLine %~ max 1) fieldName, d', mimic d $ firstOf folded fls)
: rest
| otherwise = f : insertAfterName rest
mimic d Nothing = FieldVersion (d <> D 0 2) ver
mimic _ (Just d) = FieldVersion d ver
|
9353330b8399ada1d4910780b9ba69f05e756dd9bad53cb71ff5dbf99724f013 | sealchain-project/sealchain | Model.hs | -- | Distributed Hash Table for peer discovery.
module Pos.Infra.DHT.Model
( module Pos.Infra.DHT.Model.Types
) where
import Pos.Infra.DHT.Model.Types
| null | https://raw.githubusercontent.com/sealchain-project/sealchain/e97b4bac865fb147979cb14723a12c716a62e51e/infra/src/Pos/Infra/DHT/Model.hs | haskell | | Distributed Hash Table for peer discovery. |
module Pos.Infra.DHT.Model
( module Pos.Infra.DHT.Model.Types
) where
import Pos.Infra.DHT.Model.Types
|
c94b312e21c1af03a26b42f207eadf978d1f88beb5dae0dbed8b88f7cdbbec58 | madvas/catlantis | config.cljs | (ns catlantis.config)
(def thecatapi-key "ODk2MzY")
(def app-name "Catlantis")
(def default-catapi-params
{:format "xml"
:size "med"
:api-key thecatapi-key})
| null | https://raw.githubusercontent.com/madvas/catlantis/b8880ec2cab27ecfcb3c0ab30e2bbc7767db0d1c/src/catlantis/config.cljs | clojure | (ns catlantis.config)
(def thecatapi-key "ODk2MzY")
(def app-name "Catlantis")
(def default-catapi-params
{:format "xml"
:size "med"
:api-key thecatapi-key})
|
|
74453b740738a0051b696f612af18a0d2078373618b7453d6fff41ee54b3d2c5 | ocsigen/ojwidgets | ojw_completion_sigs.mli |
module type T = sig
(** A completion widget to complete on string value. *)
module D : Ojw_dom_sigs.T
module Dropdown : Ojw_dropdown_sigs.T
module Tr : Ojw_traversable_sigs.T
(** A [completion] widget is a [dropdown] widget. The list of the
possible values is displayed using a [dropdown]. *)
class type completion = object
inherit Dropdown.dropdown
(** You can retrieve the value of the [completion] widget or even
change it (you need to [refresh] explicitly the widget). *)
method value : Js.js_string Js.t Js.prop
(** Clear the list of the possible values. The content will be
automatically refresh during the next action. *)
method clear : unit Js.meth
(** Explicitly confirm with the current value of the input. *)
method confirm : unit Lwt.t Js.meth
(** Explicitly refresh the content of the widget (using the given
function [refresh] on the construction of the widget). *)
method refresh : unit Lwt.t Js.meth
end
(** Provides behaviours of a completion widget.
The main purpose of this widget is to complete on string value.
[completion] uses [dropdown] to display matched values. Each item of the
[dropdown] {b MUST HAVE} an attribute {b data-value}. The value of this
attribute will be used during comparaison with the input value.
[refresh limit pattern] must return the list of the different values.
The [pattern] correspond to the current input value, and [limit] is the
number of items which will be displayed by the widgets.
If you don't want to do the comparaison with the value by yourself, you
can use [auto_match] which will filter the list of elements returned by
[refresh] function. Element which doesn't match the input value, will be
ignored and won't be displayed with the [dropdown].
[accents] indicates if the widget has to take care of accents in the
{b data-value} attribute and input value. [sensitive] indicates the case
has to be insensitive or not.
If you want to begin the completion from the start of input value, you
can set [from_start] to [true]. Otherwise, it will try to match the value
anywhere in the {data-value} string.
[force_refresh] will automatically force the call to the [refresh]
function on each actions of the widget. If this option is enabled, the
rendering could blink.
[clear_input_on_confirm] will clear the input when method [confirm] is
called.
Because [completion] is a [dropdown], and a [dropdown] is composed by
[traversable] widget, you can navigate through matched values using
arrow keys. You can also iterate through them using tab key, if the
option [move_with_tab] is set to [true]
If [adaptive] is enabled, so the input value will be automatically set
to the {b data-value} of current active matched element (when navigating
using arrow keys).
The function [on_confirm] is called each time the input value is
confirmed (using [confirm] method or using enter key).
The widget need an {b input} element as first parameter. The second
parameter is the container on which the matched values will be
automatically inserted, it must be a {b ul} element.
*)
val completion__ :
refresh : (int -> string -> Dropdown.Traversable.Content.element Dropdown.Traversable.Content.elt list Lwt.t)
-> ?limit : int
-> ?accents : bool
-> ?from_start : bool
-> ?force_refresh : bool
-> ?sensitive : bool
-> ?adaptive : bool
-> ?auto_match : bool
-> ?clear_input_on_confirm : bool
-> ?move_with_tab : bool
-> ?on_confirm : (string -> unit Lwt.t)
-> D.element D.elt
-> Dropdown.Traversable.D.element Dropdown.Traversable.D.elt
-> (Dropdown.D.element Dropdown.D.elt *
Dropdown.Traversable.D.element Dropdown.Traversable.D.elt)
end
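(* Illustrative sketch (added for clarity; not part of the original
   signature): a typical call to [completion__] through some module [M]
   implementing this signature. [input_elt], [ul_elt], [fetch_values] and
   [use_value] are hypothetical; [fetch_values limit pattern] must return
   at most [limit] items carrying a [data-value] attribute, as described
   above.
   {[
     let dropdown, container =
       M.completion__
         ~refresh:fetch_values
         ~limit:10
         ~auto_match:true
         ~on_confirm:(fun v -> use_value v)
         input_elt
         ul_elt
   ]}
 *)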
| null | https://raw.githubusercontent.com/ocsigen/ojwidgets/4be2233980bdd1cae187c749bd27ddbfff389880/src/internals/ojw_completion_sigs.mli | ocaml | * A completion widget to complete on string value.
* A [completion] widget is [dropdown] widget. The list of the
possible values are displayed using a [dropdown].
* You can retrieve the value of the [completion] widget or even
change it (you need to [refresh] explicitly the widget).
* Clear the list of the possible values. The content will be
automatically refresh during the next action.
* Explicitly confirm with the current value of the input.
* Explicitly refresh the content of the widget (using the given
function [refresh] on the construction of the widget). |
module type T = sig
module D : Ojw_dom_sigs.T
module Dropdown : Ojw_dropdown_sigs.T
module Tr : Ojw_traversable_sigs.T
class type completion = object
inherit Dropdown.dropdown
method value : Js.js_string Js.t Js.prop
method clear : unit Js.meth
method confirm : unit Lwt.t Js.meth
method refresh : unit Lwt.t Js.meth
end
* Provides behaviours of a completion widget .
The main purpose of this widget is to complete on string value .
[ completion ] uses [ dropdown ] to display matched values . Each item of the
[ dropdown ] { b MUST HAVE } an attribute { b data - value } . The value of this
attribute will be used during comparaison with the input value .
[ refresh limit pattern ] must return the list of the different values .
The [ pattern ] correspond to the current input value , and [ limit ] is the
number of items which will be displayed by the widgets .
If you do n't want to do the comparaison with the value by yourself , you
can use [ auto_match ] which will filter the list of elements returned by
[ refresh ] function . Element which does n't match the input value , will be
ignored and wo n't be displayed with the [ dropdown ] .
[ accents ] indicates if the widget has to take care of accents in the
{ b data - value } attribute and input value . [ sensitive ] indicates the case
has to be insensitive or not .
If you want to begin the completion from the start of input value , you
can set [ from_start ] to [ true ] . Otherwise , it will try to match the value
anywhere in the { data - value } string .
[ force_refresh ] will automatically force the call to the [ refresh ]
function on each actions of the widget . If this option is enabled , the
rendering could blink .
[ clear_input_on_confirm ] will clear the input when method [ confirm ] is
called .
Because [ completion ] is a [ dropdown ] , and a [ dropdown ] is composed by
[ traversable ] widget , you can navigate through matched values using
arrow keys . You can also iterate through them using tab key , if the
option [ move_with_tab ] is set to [ true ]
If [ adaptive ] is enabled , so the input value will be automatically set
to the { b data - value } of current active matched element ( when navigating
using arrow keys ) .
The function [ on_confirm ] is called each time the input value is
confirmed ( using [ confirm ] method or using enter key ) .
The widget need an { b input } element as first parameter . The second
parameter is the container on which the matched values will be
automatically inserted , it must be a { b ul } element .
The main purpose of this widget is to complete on string value.
[completion] uses [dropdown] to display matched values. Each item of the
[dropdown] {b MUST HAVE} an attribute {b data-value}. The value of this
attribute will be used during comparaison with the input value.
[refresh limit pattern] must return the list of the different values.
The [pattern] correspond to the current input value, and [limit] is the
number of items which will be displayed by the widgets.
If you don't want to do the comparaison with the value by yourself, you
can use [auto_match] which will filter the list of elements returned by
[refresh] function. Element which doesn't match the input value, will be
ignored and won't be displayed with the [dropdown].
[accents] indicates if the widget has to take care of accents in the
{b data-value} attribute and input value. [sensitive] indicates the case
has to be insensitive or not.
If you want to begin the completion from the start of input value, you
can set [from_start] to [true]. Otherwise, it will try to match the value
anywhere in the {data-value} string.
[force_refresh] will automatically force the call to the [refresh]
function on each actions of the widget. If this option is enabled, the
rendering could blink.
[clear_input_on_confirm] will clear the input when method [confirm] is
called.
Because [completion] is a [dropdown], and a [dropdown] is composed by
[traversable] widget, you can navigate through matched values using
arrow keys. You can also iterate through them using tab key, if the
option [move_with_tab] is set to [true]
If [adaptive] is enabled, so the input value will be automatically set
to the {b data-value} of current active matched element (when navigating
using arrow keys).
The function [on_confirm] is called each time the input value is
confirmed (using [confirm] method or using enter key).
The widget need an {b input} element as first parameter. The second
parameter is the container on which the matched values will be
automatically inserted, it must be a {b ul} element.
*)
val completion__ :
refresh : (int -> string -> Dropdown.Traversable.Content.element Dropdown.Traversable.Content.elt list Lwt.t)
-> ?limit : int
-> ?accents : bool
-> ?from_start : bool
-> ?force_refresh : bool
-> ?sensitive : bool
-> ?adaptive : bool
-> ?auto_match : bool
-> ?clear_input_on_confirm : bool
-> ?move_with_tab : bool
-> ?on_confirm : (string -> unit Lwt.t)
-> D.element D.elt
-> Dropdown.Traversable.D.element Dropdown.Traversable.D.elt
-> (Dropdown.D.element Dropdown.D.elt *
Dropdown.Traversable.D.element Dropdown.Traversable.D.elt)
end
|
dea82581b2348975a97b695022a63799eada0249db8b63cf7cc16f75c9336edd | Paczesiowa/hsenv | ArgDescr.hs | module Util.Args.ArgDescr ( DefaultValue(..)
, ArgDescr(..)
, KnownArgs
) where
-- default value for cli option
data DefaultValue = ConstValue String -- explicit default value
| DynValue String -- human readable description of a process
-- that will provide default value
-- cli option description
data ArgDescr =
-- switch
SwitchDescr { argName :: String -- switch name (e.g. 'verbose' for --verbose)
, helpMsg :: String -- human readable description of this switch
, shortOpt :: Maybe Char -- optional short version for this switch
-- (e.g. 'v' for '-v'
-- as a shortcut for '--verbose')
}
-- option with a value
| ValArg { argName :: String -- option name (e.g. 'key' for '--key=value')
, valTemplate :: String -- help template for value (e.g. 'PATH' for --binary=PATH)
, defaultValue :: DefaultValue -- default value
, helpMsg :: String -- human readable description of this switch
}
type KnownArgs = [ArgDescr]
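-- Illustrative example (added for clarity; not part of the original
-- module): one switch and one value option built from the descriptors
-- above. The names "verbose" and "sandbox" are made up for illustration.
--
-- > exampleArgs :: KnownArgs
-- > exampleArgs =
-- >   [ SwitchDescr "verbose" "print more output" (Just 'v')
-- >   , ValArg "sandbox" "PATH" (DynValue "directory derived at runtime")
-- >            "where to create the environment"
-- >   ]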
| null | https://raw.githubusercontent.com/Paczesiowa/hsenv/b904436e10bde707a7838661a5ec30de430ca2b9/src/Util/Args/ArgDescr.hs | haskell | default value for cli option
explicit default value
human readable description of a process
that will provide default value
cli option description
switch
switch name (e.g. 'verbose' for --verbose)
human readable description of this switch
optional short version for this switch
(e.g. 'v' for '-v'
as a shortcut for '--verbose')
option with a value
option name (e.g. 'key' for '--key=value')
help template for value (e.g. 'PATH' for --binary=PATH)
default value
human readable description of this switch | module Util.Args.ArgDescr ( DefaultValue(..)
, ArgDescr(..)
, KnownArgs
) where
data ArgDescr =
}
}
type KnownArgs = [ArgDescr]
|
1f9a8aa8b30675f7be5a1bb9107d5f3af85dae535eba2c2e6994b8335b1fbe66 | tlaplus/tlapm | tlapm.mli |
(*
* tlapm.mli --- driver (interface)
*
*
* Copyright (C) 2008-2010 INRIA and Microsoft Corporation
*)
val main: string list -> unit
| null | https://raw.githubusercontent.com/tlaplus/tlapm/158386319f5b6cd299f95385a216ade2b85c9f72/src/tlapm.mli | ocaml |
* tlapm.mli --- driver ( interface )
*
*
* Copyright ( C ) 2008 - 2010 INRIA and Microsoft Corporation
* tlapm.mli --- driver (interface)
*
*
* Copyright (C) 2008-2010 INRIA and Microsoft Corporation
*)
val main: string list -> unit
|
|
292ae3ea7d0780b9060ab3e9c0c4268d03ce87542bce8df2bce3df45f10b22e0 | protosens/monorepo.cljc | extra.clj | (ns extra)
| null | https://raw.githubusercontent.com/protosens/monorepo.cljc/1c7cc00cbfb7c7484521146bf998438d2867552f/module/deps.edn/resrc/test/src/extra/extra.clj | clojure | (ns extra)
|
|
41a67a27936d12cb18c82bf8be5b4a2497bafd87b1c4ed447a20a6fddbf7cd34 | hstreamdb/hstream | WriterSpec.hs | {-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE ScopedTypeVariables #-}
module HStream.Store.WriterSpec where
import qualified Data.ByteString as BS
import Test.Hspec
import Test.Hspec.QuickCheck
import Test.QuickCheck
import Test.QuickCheck.Instances.ByteString ()
import qualified HStream.Store as S
import HStream.Store.SpecUtils
spec :: Spec
spec = describe "Stream Writer & Reader" $ do
let logid = 1
it "append and read" $ do
_ <- S.append client logid "hello" Nothing
readPayload logid Nothing `shouldReturn` "hello"
it "appendBS and read" $ do
_ <- S.appendBS client logid "hello" Nothing
readPayload logid Nothing `shouldReturn` "hello"
it "appendBatch" $ do
_ <- S.appendBatch client logid ["hello", "world"] S.CompressionLZ4 Nothing
readPayload' logid Nothing `shouldReturn` ["hello", "world"]
prop "appendCompressedBS" $ do
let maxPayload = 1024 * 1024
let gen = scale (*1024) arbitrary `suchThat` (\bs -> BS.length bs < maxPayload)
forAllShow gen show $ \payload -> do
S.AppendCompletion{..} <- S.appendCompressedBS client logid payload S.CompressionLZ4 Nothing
readLSN logid (Just appendCompLSN) `shouldReturn` [payload]
| null | https://raw.githubusercontent.com/hstreamdb/hstream/623ccb8577b992383cfacd7d6577616d245b8583/hstream-store/test/HStream/Store/WriterSpec.hs | haskell | # LANGUAGE OverloadedStrings # | # LANGUAGE ScopedTypeVariables #
module HStream.Store.WriterSpec where
import qualified Data.ByteString as BS
import Test.Hspec
import Test.Hspec.QuickCheck
import Test.QuickCheck
import Test.QuickCheck.Instances.ByteString ()
import qualified HStream.Store as S
import HStream.Store.SpecUtils
spec :: Spec
spec = describe "Stream Writer & Reader" $ do
let logid = 1
it "append and read" $ do
_ <- S.append client logid "hello" Nothing
readPayload logid Nothing `shouldReturn` "hello"
it "appendBS and read" $ do
_ <- S.appendBS client logid "hello" Nothing
readPayload logid Nothing `shouldReturn` "hello"
it "appendBatch" $ do
_ <- S.appendBatch client logid ["hello", "world"] S.CompressionLZ4 Nothing
readPayload' logid Nothing `shouldReturn` ["hello", "world"]
prop "appendCompressedBS" $ do
let maxPayload = 1024 * 1024
let gen = scale (*1024) arbitrary `suchThat` (\bs -> BS.length bs < maxPayload)
forAllShow gen show $ \payload -> do
S.AppendCompletion{..} <- S.appendCompressedBS client logid payload S.CompressionLZ4 Nothing
readLSN logid (Just appendCompLSN) `shouldReturn` [payload]
|
bb1852c11b2e6d1fd80e88c508ef30936480b8641caaeabfc2c4d6ece966e99e | avatar29A/hs-aitubots-api | UserInfo.hs | # LANGUAGE DuplicateRecordFields #
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE OverloadedStrings #-}
module Aitu.Bot.Forms.Content.UserInfo
( UserInfo(..)
)
where
import Data.Aeson hiding ( Options )
import Data.Text
import qualified Aitu.Bot.Forms.Content.Content
as Content
type UserId = Text
data UserInfo = UserInfo {
contentId :: Content.ContentID
, userId :: UserId
} deriving (Show)
instance ToJSON UserInfo where
toJSON UserInfo {..} = object
["id" .= contentId, "type" .= Content.UserInfo, "user_id" .= userId]
| null | https://raw.githubusercontent.com/avatar29A/hs-aitubots-api/9cc3fd1e4e9e81491628741a6bbb68afbb85704e/src/Aitu/Bot/Forms/Content/UserInfo.hs | haskell | # LANGUAGE OverloadedStrings # | # LANGUAGE DuplicateRecordFields #
# LANGUAGE RecordWildCards #
module Aitu.Bot.Forms.Content.UserInfo
( UserInfo(..)
)
where
import Data.Aeson hiding ( Options )
import Data.Text
import qualified Aitu.Bot.Forms.Content.Content
as Content
type UserId = Text
data UserInfo = UserInfo {
contentId :: Content.ContentID
, userId :: UserId
} deriving (Show)
instance ToJSON UserInfo where
toJSON UserInfo {..} = object
["id" .= contentId, "type" .= Content.UserInfo, "user_id" .= userId]
|
41ad92bac9000a1fd1a3ca9b44e3d1ab319de3057c6b2ddf7a7de9bcbf3d0588 | dparis/gen-phzr | canvas_mask_manager.cljs | (ns phzr.pixi.canvas-mask-manager
(:require [phzr.impl.utils.core :refer [clj->phaser phaser->clj]]
[phzr.impl.extend :as ex]
[cljsjs.phaser]))
(defn ->CanvasMaskManager
"A set of functions used to handle masking."
([]
(js/PIXI.CanvasMaskManager.)))
(defn pop-mask
"Restores the current drawing context to the state it was before the mask was applied.
Parameters:
* canvas-mask-manager (PIXI.CanvasMaskManager) - Targeted instance for method
* render-session (Object) - The renderSession whose context will be used for this mask manager."
([canvas-mask-manager render-session]
(phaser->clj
(.popMask canvas-mask-manager
(clj->phaser render-session)))))
(defn push-mask
"This method adds it to the current stack of masks.
Parameters:
* canvas-mask-manager (PIXI.CanvasMaskManager) - Targeted instance for method
* mask-data (Object) - the maskData that will be pushed
* render-session (Object) - The renderSession whose context will be used for this mask manager."
([canvas-mask-manager mask-data render-session]
(phaser->clj
(.pushMask canvas-mask-manager
(clj->phaser mask-data)
(clj->phaser render-session))))) | null | https://raw.githubusercontent.com/dparis/gen-phzr/e4c7b272e225ac343718dc15fc84f5f0dce68023/out/pixi/canvas_mask_manager.cljs | clojure | (ns phzr.pixi.canvas-mask-manager
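;; Illustrative usage sketch (added for clarity; not from the original
;; source). `mask-data` and `render-session` stand for whatever Phaser
;; objects the caller already holds.
(comment
  (let [manager (->CanvasMaskManager)]
    (push-mask manager mask-data render-session)
    ;; ... render the masked display objects here ...
    (pop-mask manager render-session)))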
(:require [phzr.impl.utils.core :refer [clj->phaser phaser->clj]]
[phzr.impl.extend :as ex]
[cljsjs.phaser]))
(defn ->CanvasMaskManager
"A set of functions used to handle masking."
([]
(js/PIXI.CanvasMaskManager.)))
(defn pop-mask
"Restores the current drawing context to the state it was before the mask was applied.
Parameters:
* canvas-mask-manager (PIXI.CanvasMaskManager) - Targeted instance for method
* render-session (Object) - The renderSession whose context will be used for this mask manager."
([canvas-mask-manager render-session]
(phaser->clj
(.popMask canvas-mask-manager
(clj->phaser render-session)))))
(defn push-mask
"This method adds it to the current stack of masks.
Parameters:
* canvas-mask-manager (PIXI.CanvasMaskManager) - Targeted instance for method
* mask-data (Object) - the maskData that will be pushed
* render-session (Object) - The renderSession whose context will be used for this mask manager."
([canvas-mask-manager mask-data render-session]
(phaser->clj
(.pushMask canvas-mask-manager
(clj->phaser mask-data)
(clj->phaser render-session))))) |
|
791fdffd3ff3c4bdf42a42853b59e3dca92164f800389669836b88761bda7dca | nachivpn/mt | g3.erl | -module(g3).
ceq(X,Y) -> X == Y.
cne(X,Y) -> X /= Y.
clte(X,Y) -> X =< Y.
clt(X,Y) -> X < Y.
cgte(X,Y) -> X >= Y.
cgt (X,Y) -> X > Y.
cee (X,Y) -> X =:= Y.
cene (X,Y) -> X =/= Y.
foo1() -> "hello" == "world".
foo2() -> "hello" =:= 1.
foo3() -> 1.0 =/= 1.
foo5() ->
X = 1 div 5,
X >= 5.0.
| null | https://raw.githubusercontent.com/nachivpn/mt/fcffbcb770cfe0b19c189e6c8ce2ccab61062195/test/good/g3.erl | erlang | -module(g3).
ceq(X,Y) -> X == Y.
cne(X,Y) -> X /= Y.
clte(X,Y) -> X =< Y.
clt(X,Y) -> X < Y.
cgte(X,Y) -> X >= Y.
cgt (X,Y) -> X > Y.
cee (X,Y) -> X =:= Y.
cene (X,Y) -> X =/= Y.
foo1() -> "hello" == "world".
foo2() -> "hello" =:= 1.
foo3() -> 1.0 =/= 1.
foo5() ->
X = 1 div 5,
X >= 5.0.
|
|
89bdd473a239c72ff47e92d9ca1b272525d11c9e2fdf9490b099b0825f307352 | felipeZ/Haskell-abinitio | BasisOrthogonalization.hs |
|
Module : Science . QuantumChemistry . HartreeFock . DIIS
Description : DIIS acceleration Convergence
Copyright : tape
@2012,2015 Felipe Zapata core SCF machinery
Module: Science.QuantumChemistry.HartreeFock.DIIS
Description: DIIS acceleration Convergence
Copyright: @2012,2013 Angel Alvarez Adhesive tape
@2012,2015 Felipe Zapata core SCF machinery
@2016 Felipe Zapata
-}
module Science.QuantumChemistry.HartreeFock.BasisOrthogonalization where
import Data.Array.Repa as R
import Data.Array.Repa.Unsafe as R
import Data.Array.Repa.Algorithms.Matrix (mmultP,transpose2P)
import qualified Data.Vector.Unboxed as U
-- internal modules
import Science.QuantumChemistry.GlobalTypes
import qualified Science.QuantumChemistry.NumericalTools.LinearAlgebra as LA
import Science.QuantumChemistry.NumericalTools.EigenValues (eigenSolve)
-- ========================> ORTHOGONALIZATION OF THE BASIS <==================================
-- | Diagonalize the overlap matrix to obtain the transformation matrix S^-1/2.
symmOrtho :: Monad m => Array U DIM2 Double -> m (Array U DIM2 Double)
symmOrtho arr = do
let (eigVal,eigVecs) = eigenSolve arr
(Z:.dim:._) = extent arr
invSqrt = U.map (recip . sqrt) eigVal -- for building the S^-1/2 matrix
diag = LA.vec2Diagonal invSqrt
eigVecTrans <- transpose2P eigVecs
mtx1 <- mmultP eigVecs diag
mmultP mtx1 eigVecTrans
-- LA.mmultP eigenvecs <=< LA.mmultP eigenvecs $ diag
canortho :: Monad m => Array U DIM2 Double -> m (Array U DIM2 Double)
canortho arr = do
let (eigVal,eigVecs) = eigenSolve arr
invSqrt = U.map (recip . sqrt) eigVal -- for building the S^-1/2 matrix
R.computeUnboxedP $ R.traverse eigVecs id (\f sh@(Z:._:. k) -> (invSqrt U.! k) * f sh)
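-- Added for clarity (not in the original source): with the eigendecomposition
-- S = V * diag(l) * V^T of the overlap matrix, the two routines above compute
--
--   symmOrtho:  S^(-1/2) = V * diag(1 / sqrt l) * V^T   (symmetric orthogonalization)
--   canortho:   X        = V * diag(1 / sqrt l)         (canonical orthogonalization)
--
-- i.e. canortho simply rescales every eigenvector column k by 1 / sqrt l_k.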
| null | https://raw.githubusercontent.com/felipeZ/Haskell-abinitio/c019bc37c8de78affddf97eb858c1ef18af76d83/Science/QuantumChemistry/HartreeFock/BasisOrthogonalization.hs | haskell | internal modules
========================> ORTHOGONALIZATION OF THE BASIS <================================== |
|
Module : Science . QuantumChemistry . HartreeFock . DIIS
Description : DIIS acceleration Convergence
Copyright : tape
@2012,2015 Felipe Zapata core SCF machinery
Module: Science.QuantumChemistry.HartreeFock.DIIS
Description: DIIS acceleration Convergence
Copyright: @2012,2013 Angel Alvarez Adhesive tape
@2012,2015 Felipe Zapata core SCF machinery
@2016 Felipe Zapata
-}
module Science.QuantumChemistry.HartreeFock.BasisOrthogonalization where
import Data.Array.Repa as R
import Data.Array.Repa.Unsafe as R
import Data.Array.Repa.Algorithms.Matrix (mmultP,transpose2P)
import qualified Data.Vector.Unboxed as U
import Science.QuantumChemistry.GlobalTypes
import qualified Science.QuantumChemistry.NumericalTools.LinearAlgebra as LA
import Science.QuantumChemistry.NumericalTools.EigenValues (eigenSolve)
| Here is diagonalized the Overlap matrix and it is obtained a transformation matrix S^-1/2
symmOrtho :: Monad m => Array U DIM2 Double -> m (Array U DIM2 Double)
symmOrtho arr = do
let (eigVal,eigVecs) = eigenSolve arr
(Z:.dim:._) = extent arr
For building the S^-1/2 matrix
diag = LA.vec2Diagonal invSqrt
eigVecTrans <- transpose2P eigVecs
mtx1 <- mmultP eigVecs diag
mmultP mtx1 eigVecTrans
LA.mmultP eigenvecs < = < LA.mmultP eigenvecs $ diag
canortho :: Monad m => Array U DIM2 Double -> m (Array U DIM2 Double)
canortho arr = do
let (eigVal,eigVecs) = eigenSolve arr
For building the S^-1/2 matrix
R.computeUnboxedP $ R.traverse eigVecs id (\f sh@(Z:._:. k) -> (invSqrt U.! k) * f sh)
|
ecff5f348433f4535eddaf2f5a3e018b1688ddaf5564b6dc123594281141b80b | toyokumo/tarayo | data_source_test.clj | (ns tarayo.mail.mime.multipart.data-source-test
(:require
[clojure.java.io :as io]
[clojure.test :as t]
[tarayo.mail.mime.multipart.data-source :as sut])
(:import
jakarta.activation.DataSource
java.io.ByteArrayOutputStream))
(t/deftest byte-array-data-source-test
(t/testing "default content-type"
(let [bs (.getBytes "hello")
ds (sut/byte-array-data-source bs)]
(t/is (instance? DataSource ds))
(with-open [in (.getInputStream ds)
out (ByteArrayOutputStream.)]
(io/copy in out)
(t/is (= (seq bs) (seq (.toByteArray out)))))
(t/is (= "application/octet-stream" (.getContentType ds)))))
(t/testing "custom content-type"
(let [bs (.getBytes "hello")
ds (sut/byte-array-data-source bs "text/plain")]
(t/is (instance? DataSource ds))
(t/is (= "text/plain" (.getContentType ds))))))
| null | https://raw.githubusercontent.com/toyokumo/tarayo/f9b10b85b7bc1a188d808c3955e258916cd0b38a/test/tarayo/mail/mime/multipart/data_source_test.clj | clojure | (ns tarayo.mail.mime.multipart.data-source-test
(:require
[clojure.java.io :as io]
[clojure.test :as t]
[tarayo.mail.mime.multipart.data-source :as sut])
(:import
jakarta.activation.DataSource
java.io.ByteArrayOutputStream))
(t/deftest byte-array-data-source-test
(t/testing "default content-type"
(let [bs (.getBytes "hello")
ds (sut/byte-array-data-source bs)]
(t/is (instance? DataSource ds))
(with-open [in (.getInputStream ds)
out (ByteArrayOutputStream.)]
(io/copy in out)
(t/is (= (seq bs) (seq (.toByteArray out)))))
(t/is (= "application/octet-stream" (.getContentType ds)))))
(t/testing "custom content-type"
(let [bs (.getBytes "hello")
ds (sut/byte-array-data-source bs "text/plain")]
(t/is (instance? DataSource ds))
(t/is (= "text/plain" (.getContentType ds))))))
|
|
98502c4a79ed0dd30e1bafd4cb0ee877aecaa03d5208e9717655b3b34db6db17 | tweag/ormolu | tuples-out.hs | foo = (1, 2, 3)
bar =
( 1,
2,
3
)
handleStuff =
( let foo = foo
in foo,
let bar = bar
in bar
)
| null | https://raw.githubusercontent.com/tweag/ormolu/34bdf62429768f24b70d0f8ba7730fc4d8ae73ba/data/examples/declaration/value/function/tuples-out.hs | haskell | foo = (1, 2, 3)
bar =
( 1,
2,
3
)
handleStuff =
( let foo = foo
in foo,
let bar = bar
in bar
)
|
|
d9c1d48ddcfe227801d1ec3db224802bcf0abdd39c1c550fee45d565a00d1740 | coq/coq | univGen.ml | (************************************************************************)
(*         *   The Coq Proof Assistant / The Coq Development Team       *)
(*  v      *   Copyright INRIA, CNRS and contributors                   *)
(* <O___,, *       (see version control and CREDITS file for authors & dates) *)
(*   \VV/  **************************************************************)
(*    //   *    This file is distributed under the terms of the         *)
(*         *     GNU Lesser General Public License Version 2.1          *)
(*         *     (see LICENSE file for the text of the license)         *)
(************************************************************************)
open Sorts
open Names
open Constr
open Univ
type univ_length_mismatch = {
actual : int ;
expect : int ;
}
(* Due to /ocaml#10027 inlining this record will cause
   compilation with -rectypes to crash. *)
exception UniverseLengthMismatch of univ_length_mismatch
let () = CErrors.register_handler (function
| UniverseLengthMismatch { actual; expect } ->
Some Pp.(str "Universe instance length is " ++ int actual
++ str " but should be " ++ int expect ++ str ".")
| _ -> None)
(* Generator of levels *)
let new_univ_id =
let cnt = ref 0 in
fun () -> incr cnt; !cnt
let new_univ_global () =
let s = if Flags.async_proofs_is_worker() then !Flags.async_proofs_worker_id else "" in
Univ.UGlobal.make (Global.current_dirpath ()) s (new_univ_id ())
let fresh_level () =
Univ.Level.make (new_univ_global ())
let new_sort_id =
let cnt = ref 0 in
fun () -> incr cnt; !cnt
let new_sort_global () =
Sorts.QVar.make (new_sort_id ())
let fresh_instance auctx =
let inst = Array.init (AbstractContext.size auctx) (fun _ -> fresh_level()) in
let ctx = Array.fold_right Level.Set.add inst Level.Set.empty in
let inst = Instance.of_array inst in
inst, (ctx, AbstractContext.instantiate inst auctx)
let existing_instance ?loc auctx inst =
let () =
let actual = Array.length (Instance.to_array inst)
and expect = AbstractContext.size auctx in
if not (Int.equal actual expect) then
Loc.raise ?loc (UniverseLengthMismatch { actual; expect })
else ()
in
inst, (Level.Set.empty, AbstractContext.instantiate inst auctx)
let fresh_instance_from ?loc ctx = function
| Some inst -> existing_instance ?loc ctx inst
| None -> fresh_instance ctx
(** Fresh universe polymorphic construction *)
let fresh_global_instance ?loc ?names env gr =
let auctx = Environ.universes_of_global env gr in
let u, ctx = fresh_instance_from ?loc auctx names in
u, ctx
let fresh_constant_instance env c =
let u, ctx = fresh_global_instance env (GlobRef.ConstRef c) in
(c, u), ctx
let fresh_inductive_instance env ind =
let u, ctx = fresh_global_instance env (GlobRef.IndRef ind) in
(ind, u), ctx
let fresh_constructor_instance env c =
let u, ctx = fresh_global_instance env (GlobRef.ConstructRef c) in
(c, u), ctx
let fresh_array_instance env =
let auctx = CPrimitives.typ_univs CPrimitives.PT_array in
let u, ctx = fresh_instance_from auctx None in
u, ctx
let fresh_global_instance ?loc ?names env gr =
let u, ctx = fresh_global_instance ?loc ?names env gr in
mkRef (gr, u), ctx
let constr_of_monomorphic_global env gr =
if not (Environ.is_polymorphic env gr) then
fst (fresh_global_instance env gr)
else CErrors.user_err
Pp.(str "globalization of polymorphic reference " ++ Nametab.pr_global_env Id.Set.empty gr ++
str " would forget universes.")
let fresh_sort_in_family = function
| InSProp -> Sorts.sprop, ContextSet.empty
| InProp -> Sorts.prop, ContextSet.empty
| InSet -> Sorts.set, ContextSet.empty
| InType | InQSort (* Treat as Type *) ->
let u = fresh_level () in
sort_of_univ (Univ.Universe.make u), ContextSet.singleton u
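(* Illustrative behaviour of [fresh_sort_in_family] (not from the original
   source): [InSet] yields [(Sorts.set, ContextSet.empty)] with no fresh
   universes, while [InType] (and [InQSort]) allocate a fresh level [u] and
   return [(Type u, ContextSet.singleton u)], which the caller must declare. *)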
let new_global_univ () =
let u = fresh_level () in
(Univ.Universe.make u, ContextSet.singleton u)
let fresh_universe_context_set_instance ctx =
if ContextSet.is_empty ctx then Level.Map.empty, ctx
else
let (univs, cst) = ContextSet.levels ctx, ContextSet.constraints ctx in
let univs',subst = Level.Set.fold
(fun u (univs',subst) ->
let u' = fresh_level () in
(Level.Set.add u' univs', Level.Map.add u u' subst))
univs (Level.Set.empty, Level.Map.empty)
in
let cst' = subst_univs_level_constraints subst cst in
subst, (univs', cst')
| null | https://raw.githubusercontent.com/coq/coq/1bacca2d42c3bab2c6826bd8cb50dd1805b310ea/engine/univGen.ml | ocaml | **********************************************************************
* The Coq Proof Assistant / The Coq Development Team
// * This file is distributed under the terms of the
* (see LICENSE file for the text of the license)
**********************************************************************
Generator of levels
* Fresh universe polymorphic construction
Treat as Type | v * Copyright INRIA , CNRS and contributors
< O _ _ _ , , * ( see version control and CREDITS file for authors & dates )
\VV/ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
* GNU Lesser General Public License Version 2.1
open Sorts
open Names
open Constr
open Univ
type univ_length_mismatch = {
actual : int ;
expect : int ;
}
Due to / ocaml#10027 inlining this record will cause
compliation with -rectypes to crash .
compliation with -rectypes to crash. *)
exception UniverseLengthMismatch of univ_length_mismatch
let () = CErrors.register_handler (function
| UniverseLengthMismatch { actual; expect } ->
Some Pp.(str "Universe instance length is " ++ int actual
++ str " but should be " ++ int expect ++ str ".")
| _ -> None)
let new_univ_id =
let cnt = ref 0 in
fun () -> incr cnt; !cnt
let new_univ_global () =
let s = if Flags.async_proofs_is_worker() then !Flags.async_proofs_worker_id else "" in
Univ.UGlobal.make (Global.current_dirpath ()) s (new_univ_id ())
let fresh_level () =
Univ.Level.make (new_univ_global ())
let new_sort_id =
let cnt = ref 0 in
fun () -> incr cnt; !cnt
let new_sort_global () =
Sorts.QVar.make (new_sort_id ())
let fresh_instance auctx =
let inst = Array.init (AbstractContext.size auctx) (fun _ -> fresh_level()) in
let ctx = Array.fold_right Level.Set.add inst Level.Set.empty in
let inst = Instance.of_array inst in
inst, (ctx, AbstractContext.instantiate inst auctx)
let existing_instance ?loc auctx inst =
let () =
let actual = Array.length (Instance.to_array inst)
and expect = AbstractContext.size auctx in
if not (Int.equal actual expect) then
Loc.raise ?loc (UniverseLengthMismatch { actual; expect })
else ()
in
inst, (Level.Set.empty, AbstractContext.instantiate inst auctx)
let fresh_instance_from ?loc ctx = function
| Some inst -> existing_instance ?loc ctx inst
| None -> fresh_instance ctx
let fresh_global_instance ?loc ?names env gr =
let auctx = Environ.universes_of_global env gr in
let u, ctx = fresh_instance_from ?loc auctx names in
u, ctx
let fresh_constant_instance env c =
let u, ctx = fresh_global_instance env (GlobRef.ConstRef c) in
(c, u), ctx
let fresh_inductive_instance env ind =
let u, ctx = fresh_global_instance env (GlobRef.IndRef ind) in
(ind, u), ctx
let fresh_constructor_instance env c =
let u, ctx = fresh_global_instance env (GlobRef.ConstructRef c) in
(c, u), ctx
let fresh_array_instance env =
let auctx = CPrimitives.typ_univs CPrimitives.PT_array in
let u, ctx = fresh_instance_from auctx None in
u, ctx
let fresh_global_instance ?loc ?names env gr =
let u, ctx = fresh_global_instance ?loc ?names env gr in
mkRef (gr, u), ctx
let constr_of_monomorphic_global env gr =
if not (Environ.is_polymorphic env gr) then
fst (fresh_global_instance env gr)
else CErrors.user_err
Pp.(str "globalization of polymorphic reference " ++ Nametab.pr_global_env Id.Set.empty gr ++
str " would forget universes.")
let fresh_sort_in_family = function
| InSProp -> Sorts.sprop, ContextSet.empty
| InProp -> Sorts.prop, ContextSet.empty
| InSet -> Sorts.set, ContextSet.empty
let u = fresh_level () in
sort_of_univ (Univ.Universe.make u), ContextSet.singleton u
let new_global_univ () =
let u = fresh_level () in
(Univ.Universe.make u, ContextSet.singleton u)
let fresh_universe_context_set_instance ctx =
if ContextSet.is_empty ctx then Level.Map.empty, ctx
else
let (univs, cst) = ContextSet.levels ctx, ContextSet.constraints ctx in
let univs',subst = Level.Set.fold
(fun u (univs',subst) ->
let u' = fresh_level () in
(Level.Set.add u' univs', Level.Map.add u u' subst))
univs (Level.Set.empty, Level.Map.empty)
in
let cst' = subst_univs_level_constraints subst cst in
subst, (univs', cst')
|
27b298953870699ebf614c1065003a216e0be702879d7be8d0432fbabed5ebca | hanshuebner/bos | packages.lisp | (in-package :cl-user)
(defpackage :bos.web
(:nicknames :web :worldpay-test)
(:use :cl
:date-calc
:cl-user
:cl-interpol
:cl-ppcre
:alexandria
:xhtml-generator
:cxml
:puri
:bknr.web
:bknr.web.frontend
:bknr.datastore
:bknr.indices
:bknr.utils
:bknr.user
:bknr.images
:bknr.cron
:bknr.rss
:bos.m2
:bos.m2.config)
(:shadowing-import-from :cl-interpol #:quote-meta-chars)
(:shadowing-import-from :alexandria #:array-index)
(:export))
(defpackage :simple-sat-map
(:use :cl
:bknr.indices
:bknr.datastore
:alexandria)
(:shadowing-import-from :alexandria #:array-index)
(:nicknames :ssm)
(:export #:simple-map-handler
#:import)) | null | https://raw.githubusercontent.com/hanshuebner/bos/ab5944cc46f4a5ff5a08fd8aa4d228c0f9cfc771/web/packages.lisp | lisp | (in-package :cl-user)
(defpackage :bos.web
(:nicknames :web :worldpay-test)
(:use :cl
:date-calc
:cl-user
:cl-interpol
:cl-ppcre
:alexandria
:xhtml-generator
:cxml
:puri
:bknr.web
:bknr.web.frontend
:bknr.datastore
:bknr.indices
:bknr.utils
:bknr.user
:bknr.images
:bknr.cron
:bknr.rss
:bos.m2
:bos.m2.config)
(:shadowing-import-from :cl-interpol #:quote-meta-chars)
(:shadowing-import-from :alexandria #:array-index)
(:export))
(defpackage :simple-sat-map
(:use :cl
:bknr.indices
:bknr.datastore
:alexandria)
(:shadowing-import-from :alexandria #:array-index)
(:nicknames :ssm)
(:export #:simple-map-handler
#:import)) |
|
f14a6303a1692686f469bae83a674286733a09c4b74c0672257d44ae6ce18c45 | tdammers/sprinkles | Main.hs | # LANGUAGE NoImplicitPrelude #
{-#LANGUAGE OverloadedStrings #-}
# LANGUAGE TemplateHaskell #
# LANGUAGE ScopedTypeVariables #
# LANGUAGE LambdaCase #
# LANGUAGE CPP #
-- | CLI program that drives a Sprinkles instance.
module Main where
import Web.Sprinkles.Prelude hiding ( (<|>), try )
import Web.Sprinkles
import Web.Sprinkles.Project (Project)
import Web.Sprinkles.Exceptions
import Text.Read (read, readMaybe)
import Data.Default (def)
import Text.Parsec
import Data.EmbedVersion
import qualified Data.Text as Text
import Control.Concurrent.Async
data CliOptions =
ServeProject ServerConfig |
BakeProject FilePath ServerConfig [FilePath] |
DumpVersion
deriving (Show)
parseArgs :: [Text] -> IO [CliOptions]
parseArgs argv = do
let result = runParser argsP () "command line arguments" argv
either
(fail . show)
return
result
type ArgsP = Parsec [Text] ()
data ArgSpec a = Flag Text a
| Optional Text (Maybe Text -> Maybe a)
| Required Text (Text -> Maybe a)
| Bare (Text -> Maybe a)
argsP :: ArgsP [CliOptions]
argsP = some (versionP <|> bakeArgsP <|> serveArgsP) <* eof
versionP :: ArgsP CliOptions
versionP =
(try (tExactly "-version") <|>
try (tExactly "--version") <|>
try (tExactly "-v")) >> return DumpVersion
serveArgsP :: ArgsP CliOptions
serveArgsP = do
try $ tExactly "-serve"
pipeline <- Text.Parsec.many $ choice (map (Text.Parsec.try . serveArgP) serveArgSpecs)
return . ServeProject $ foldr ($) def pipeline
bakeArgsP :: ArgsP CliOptions
bakeArgsP = do
try $ tExactly "-bake"
  -- dirname <- try $ serveArgP (Optional "o" $ Just . Text.unpack . fromMaybe "./baked")
let dirname = "./baked"
extraEntryPoints <- fmap Text.unpack <$> Text.Parsec.many bareArgP
return $ BakeProject dirname def extraEntryPoints
tSatisfy :: (Show t, Stream s m t) => (t -> Bool) -> ParsecT s u m t
tSatisfy cond = do
actual <- anyToken
if cond actual
then return actual
else fail $ "unexpected " ++ show actual
tExactly :: (Show t, Stream s m t, Eq t) => t -> ParsecT s u m t
tExactly expected = tSatisfy (== expected)
isFlag :: Text -> Bool
isFlag = ("-" `isPrefixOf`)
isNotFlag :: Text -> Bool
isNotFlag = not . isFlag
serveArgP :: ArgSpec a -> ArgsP a
serveArgP (Flag str x) = do
tExactly ("-" <> str)
return x
serveArgP (Optional str f) = do
tExactly ("-" <> str)
paramMay <- optionMaybe bareArgP
maybe
(fail "invalid parameter")
return
(f paramMay)
serveArgP (Required str f) = do
tExactly ("-" <> str)
param <- bareArgP
maybe
(fail "invalid parameter")
return
(f param)
serveArgP (Bare f) =
(f <$> bareArgP) >>= maybe (fail "invalid bare argument") return
bareArgP :: ArgsP Text
bareArgP = tSatisfy isNotFlag
serveArgSpecs :: [ArgSpec (ServerConfig -> ServerConfig)]
serveArgSpecs =
[ Optional
"warp"
(maybe
(Just $ \config -> config { scDriver = WarpDriver Nothing })
(\str -> do
port <- readMaybe . unpack $ str
return $ \config -> config { scDriver = WarpDriver (Just port) })
)
, Required
"dir"
(\str -> Just (\config -> config { scRootDir = unpack str }))
, Bare (\str -> do
port <- readMaybe . unpack $ str
return $ \config -> config { scDriver = WarpDriver (Just port) })
, Flag "cgi" (\config -> config { scDriver = CGIDriver })
, Flag "scgi" (\config -> config { scDriver = SCGIDriver })
, Flag "fcgi" (\config -> config { scDriver = FastCGIDriver })
]
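-- A quick sketch of how these specs combine (illustrative, not part of the
-- original module): @-serve 8080 -dir ./site@ parses to a 'ServerConfig'
-- with @scDriver = WarpDriver (Just 8080)@ and @scRootDir = "./site"@,
-- while @-serve -cgi@ selects 'CGIDriver'.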
sprinklesVersion :: Text
sprinklesVersion = $(embedPackageVersionStr "sprinkles.cabal")
sprinklesFeatures :: Text
sprinklesFeatures = Text.unwords $
[]
#if FEATURE_MYSQL
++ ["MYSQL"]
#endif
#if FEATURE_POSTGRES
++ ["POSTGRES"]
#endif
#if FEATURE_SQLITE
++ ["SQLITE"]
#endif
#if FEATURE_CURL
++ ["CURL"]
#endif
main :: IO ()
main = runMain `catch` handleUncaughtExceptions
prepareProject :: ServerConfig -> IO (ServerConfig, Project)
prepareProject sconfigA = do
sconfigF <- loadServerConfig $ scRootDir sconfigA
let sconfig = sconfigF `mappend` sconfigA
project <- loadProject sconfig
return (sconfig, project)
runMain :: IO ()
runMain = do
args <- getArgs
opts <- parseArgs args
forConcurrently_ opts $ \opt -> do
case opt of
ServeProject sconfigA -> do
prepareProject sconfigA >>= \(sconfig, project) ->
serveProject sconfig project
BakeProject path sconfigA extraEntryPoints -> do
prepareProject sconfigA >>= \(sconfig, project) ->
bakeProject path project extraEntryPoints
DumpVersion -> do
putStrLn sprinklesVersion
putStrLn $ "Features: " <> sprinklesFeatures
| null | https://raw.githubusercontent.com/tdammers/sprinkles/a9161e4506427a3cf5f686654edc7ed9aa3ea82b/app/Main.hs | haskell | #LANGUAGE OverloadedStrings #
| CLI program that drives a Sprinkles instance. | # LANGUAGE NoImplicitPrelude #
# LANGUAGE TemplateHaskell #
# LANGUAGE ScopedTypeVariables #
# LANGUAGE LambdaCase #
# LANGUAGE CPP #
module Main where
import Web.Sprinkles.Prelude hiding ( (<|>), try )
import Web.Sprinkles
import Web.Sprinkles.Project (Project)
import Web.Sprinkles.Exceptions
import Text.Read (read, readMaybe)
import Data.Default (def)
import Text.Parsec
import Data.EmbedVersion
import qualified Data.Text as Text
import Control.Concurrent.Async
data CliOptions =
ServeProject ServerConfig |
BakeProject FilePath ServerConfig [FilePath] |
DumpVersion
deriving (Show)
parseArgs :: [Text] -> IO [CliOptions]
parseArgs argv = do
let result = runParser argsP () "command line arguments" argv
either
(fail . show)
return
result
type ArgsP = Parsec [Text] ()
data ArgSpec a = Flag Text a
| Optional Text (Maybe Text -> Maybe a)
| Required Text (Text -> Maybe a)
| Bare (Text -> Maybe a)
argsP :: ArgsP [CliOptions]
argsP = some (versionP <|> bakeArgsP <|> serveArgsP) <* eof
versionP :: ArgsP CliOptions
versionP =
(try (tExactly "-version") <|>
try (tExactly "--version") <|>
try (tExactly "-v")) >> return DumpVersion
serveArgsP :: ArgsP CliOptions
serveArgsP = do
try $ tExactly "-serve"
pipeline <- Text.Parsec.many $ choice (map (Text.Parsec.try . serveArgP) serveArgSpecs)
return . ServeProject $ foldr ($) def pipeline
bakeArgsP :: ArgsP CliOptions
bakeArgsP = do
try $ tExactly "-bake"
dirname < - try $ serveArgP ( Optional " o " $ Just . Text.unpack . fromMaybe " ./baked " )
let dirname = "./baked"
extraEntryPoints <- fmap Text.unpack <$> Text.Parsec.many bareArgP
return $ BakeProject dirname def extraEntryPoints
tSatisfy :: (Show t, Stream s m t) => (t -> Bool) -> ParsecT s u m t
tSatisfy cond = do
actual <- anyToken
if cond actual
then return actual
else fail $ "unexpected " ++ show actual
tExactly :: (Show t, Stream s m t, Eq t) => t -> ParsecT s u m t
tExactly expected = tSatisfy (== expected)
isFlag :: Text -> Bool
isFlag = ("-" `isPrefixOf`)
isNotFlag :: Text -> Bool
isNotFlag = not . isFlag
serveArgP :: ArgSpec a -> ArgsP a
serveArgP (Flag str x) = do
tExactly ("-" <> str)
return x
serveArgP (Optional str f) = do
tExactly ("-" <> str)
paramMay <- optionMaybe bareArgP
maybe
(fail "invalid parameter")
return
(f paramMay)
serveArgP (Required str f) = do
tExactly ("-" <> str)
param <- bareArgP
maybe
(fail "invalid parameter")
return
(f param)
serveArgP (Bare f) =
(f <$> bareArgP) >>= maybe (fail "invalid bare argument") return
bareArgP :: ArgsP Text
bareArgP = tSatisfy isNotFlag
serveArgSpecs :: [ArgSpec (ServerConfig -> ServerConfig)]
serveArgSpecs =
[ Optional
"warp"
(maybe
(Just $ \config -> config { scDriver = WarpDriver Nothing })
(\str -> do
port <- readMaybe . unpack $ str
return $ \config -> config { scDriver = WarpDriver (Just port) })
)
, Required
"dir"
(\str -> Just (\config -> config { scRootDir = unpack str }))
, Bare (\str -> do
port <- readMaybe . unpack $ str
return $ \config -> config { scDriver = WarpDriver (Just port) })
, Flag "cgi" (\config -> config { scDriver = CGIDriver })
, Flag "scgi" (\config -> config { scDriver = SCGIDriver })
, Flag "fcgi" (\config -> config { scDriver = FastCGIDriver })
]
sprinklesVersion :: Text
sprinklesVersion = $(embedPackageVersionStr "sprinkles.cabal")
sprinklesFeatures :: Text
sprinklesFeatures = Text.unwords $
[]
#if FEATURE_MYSQL
++ ["MYSQL"]
#endif
#if FEATURE_POSTGRES
++ ["POSTGRES"]
#endif
#if FEATURE_SQLITE
++ ["SQLITE"]
#endif
#if FEATURE_CURL
++ ["CURL"]
#endif
main :: IO ()
main = runMain `catch` handleUncaughtExceptions
prepareProject :: ServerConfig -> IO (ServerConfig, Project)
prepareProject sconfigA = do
sconfigF <- loadServerConfig $ scRootDir sconfigA
let sconfig = sconfigF `mappend` sconfigA
project <- loadProject sconfig
return (sconfig, project)
runMain :: IO ()
runMain = do
args <- getArgs
opts <- parseArgs args
forConcurrently_ opts $ \opt -> do
case opt of
ServeProject sconfigA -> do
prepareProject sconfigA >>= \(sconfig, project) ->
serveProject sconfig project
BakeProject path sconfigA extraEntryPoints -> do
prepareProject sconfigA >>= \(sconfig, project) ->
bakeProject path project extraEntryPoints
DumpVersion -> do
putStrLn sprinklesVersion
putStrLn $ "Features: " <> sprinklesFeatures
|
647e45606b030ffd728204a854342c66a7b1a6dfbcdd857c9f254f9848eb9e0c | kupl/LearnML | original.ml | type lambda = V of var | P of (var * lambda) | C of (lambda * lambda)
and var = string
let rec envmt (e : lambda) : string list =
let lst : string list = [] in
match e with
| V v -> lst
| P (v, lambdar) -> (v :: envmt lambdar) @ lst
| C (lambda1, lambda2) -> envmt lambda1 @ envmt lambda2 @ lst
let rec exist ((e : lambda), (lst : string list)) : int =
match e with
| V v -> (
match lst with
| [] -> 0
| hd :: tl -> if hd = v then 1 + exist (e, tl) else exist (e, tl) )
| P (v, lambda1) -> exist (lambda1, lst)
| C (lambda1, lambda2) -> exist (lambda1, lst) + exist (lambda2, lst)
let rec check (e : lambda) : bool =
let env : string list = envmt e in
if exist (e, env) = 0 then false
else if List.length env > exist (e, env) then false
else true
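(* Illustrative evaluations (not part of the original submission):
   - [check (P ("x", V "x"))] is [true]  (envmt = ["x"], exist = 1);
   - [check (V "x")]          is [false] (no binder, so exist = 0). *)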
| null | https://raw.githubusercontent.com/kupl/LearnML/c98ef2b95ef67e657b8158a2c504330e9cfb7700/result/cafe2/lambda/sub54/original.ml | ocaml | type lambda = V of var | P of (var * lambda) | C of (lambda * lambda)
and var = string
let rec envmt (e : lambda) : string list =
let lst : string list = [] in
match e with
| V v -> lst
| P (v, lambdar) -> (v :: envmt lambdar) @ lst
| C (lambda1, lambda2) -> envmt lambda1 @ envmt lambda2 @ lst
let rec exist ((e : lambda), (lst : string list)) : int =
match e with
| V v -> (
match lst with
| [] -> 0
| hd :: tl -> if hd = v then 1 + exist (e, tl) else exist (e, tl) )
| P (v, lambda1) -> exist (lambda1, lst)
| C (lambda1, lambda2) -> exist (lambda1, lst) + exist (lambda2, lst)
let rec check (e : lambda) : bool =
let env : string list = envmt e in
if exist (e, env) = 0 then false
else if List.length env > exist (e, env) then false
else true
|
|
ede27384af7c3187572d7557220adc07a88a3a33530e3527eb101d66b3088c7b | thelema/ocaml-community | jg_completion.ml | (*************************************************************************)
(* *)
(* OCaml LablTk library *)
(* *)
(*            Jacques Garrigue, Kyoto University RIMS                   *)
(* *)
(*   Copyright 1999 Institut National de Recherche en Informatique et   *)
(*   en Automatique and Kyoto University.  All rights reserved.         *)
(*   This file is distributed under the terms of the GNU Library        *)
(* General Public License, with the special exception on linking *)
(* described in file ../../../LICENSE. *)
(* *)
(*************************************************************************)
(* $Id$ *)
let lt_string ?(nocase=false) s1 s2 =
if nocase then String.lowercase s1 < String.lowercase s2 else s1 < s2
class completion ?nocase texts = object
val mutable texts = texts
val nocase = nocase
val mutable prefix = ""
val mutable current = 0
method add c =
prefix <- prefix ^ c;
while current < List.length texts - 1 &&
lt_string (List.nth texts current) prefix ?nocase
do
current <- current + 1
done;
current
method current = current
method get_current = List.nth texts current
method reset =
prefix <- "";
current <- 0
end
class timed ?nocase ?wait texts = object (self)
inherit completion texts ?nocase as super
val wait = match wait with None -> 500 | Some n -> n
val mutable timer = None
method! add c =
begin match timer with
None -> self#reset
| Some t -> Timer.remove t
end;
timer <- Some (Timer.add ~ms:wait ~callback:(fun () -> self#reset));
super#add c
method! reset =
timer <- None; super#reset
end
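(* A small usage sketch (illustrative only):
   let c = new completion ~nocase:false ["bar"; "baz"; "foo"] in
   let _ = c#add "b" in       (* prefix = "b",  current stays at 0       *)
   let _ = c#add "a" in       (* prefix = "ba", c#get_current = "bar"    *)
   c#reset                    (* start a fresh prefix                    *)
*)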
| null | https://raw.githubusercontent.com/thelema/ocaml-community/ed0a2424bbf13d1b33292725e089f0d7ba94b540/otherlibs/labltk/browser/jg_completion.ml | ocaml | ***********************************************************************
OCaml LablTk library
General Public License, with the special exception on linking
described in file ../../../LICENSE.
*********************************************************************** | , Kyoto University RIMS
Copyright 1999 Institut National de Recherche en Informatique et
en Automatique and Kyoto University . All rights reserved .
This file is distributed under the terms of the GNU Library
$ Id$
let lt_string ?(nocase=false) s1 s2 =
if nocase then String.lowercase s1 < String.lowercase s2 else s1 < s2
class completion ?nocase texts = object
val mutable texts = texts
val nocase = nocase
val mutable prefix = ""
val mutable current = 0
method add c =
prefix <- prefix ^ c;
while current < List.length texts - 1 &&
lt_string (List.nth texts current) prefix ?nocase
do
current <- current + 1
done;
current
method current = current
method get_current = List.nth texts current
method reset =
prefix <- "";
current <- 0
end
class timed ?nocase ?wait texts = object (self)
inherit completion texts ?nocase as super
val wait = match wait with None -> 500 | Some n -> n
val mutable timer = None
method! add c =
begin match timer with
None -> self#reset
| Some t -> Timer.remove t
end;
timer <- Some (Timer.add ~ms:wait ~callback:(fun () -> self#reset));
super#add c
method! reset =
timer <- None; super#reset
end
|
24bc178cfdb97ec28be7aef78336a4196dac0bec136fc5c127bc11869bfaa3b6 | art-w/ocaml-webdriver | test_any.ml | module Make (Webdriver : Webdriver.S) = struct
let url_of_file filename = "file://" ^ Sys.getcwd () ^ "/" ^ filename
let url_a = url_of_file "test/a.html"
let url_b = url_of_file "test/b.html"
let test name cmds = (name, cmds)
open Webdriver.Infix
let navigation = test "navigation"
begin
let open Webdriver in
let* () = goto url_a in
let* ta = title in
assert (ta = "Page A") ;
let* u = current_url in
assert (u = url_a) ;
let* () = goto url_b in
let* tb = title in
assert (tb = "Page B") ;
let* ub = current_url in
assert (ub = url_b) ;
let* () = back in
let* u = current_url in
assert (u = url_a) ;
let* () = forward in
let* u = current_url in
assert (u = url_b) ;
let* () = refresh in
let* u = current_url in
assert (u = url_b) ;
return ()
end
let multiple_windows = test "multiple windows"
begin
let open Webdriver in
let* w0 = Window.current in
let* ws = Window.all in
assert (ws = [w0]) ;
let* w1, _ = Window.make `window in
let* w0' = Window.current in
assert (w0 = w0') ;
let* () = Window.switch_to w1 in
let* w1' = Window.current in
assert (w1 = w1') ;
let sort ws = List.sort Stdlib.compare ws in
let* ws = Window.all in
assert (sort ws = sort [ w0 ; w1 ]) ;
let* () = Window.switch_to w0 in
let* w0' = Window.current in
assert (w0 = w0') ;
let* remaining_windows = Window.close in
assert (remaining_windows = [w1]) ;
return ()
end
let window_rect = test "window resizing"
begin
let open Webdriver in
let* r1 = Window.get_rect in
assert (r1.x >= 0) ;
assert (r1.y >= 0) ;
assert (r1.width > 0) ;
assert (r1.height > 0) ;
let* r2 = Window.maximize in
assert (r2.x >= 0) ;
assert (r2.y >= 0) ;
assert (r2.width >= r1.width) ;
assert (r2.height >= r1.height) ;
let* r2' = Window.get_rect in
assert (r2 = r2') ;
let* r3 = Window.fullscreen in
assert (r3.x >= 0) ;
assert (r3.y >= 0) ;
assert (r3.width >= r2.width) ;
assert (r3.height >= r2.height) ;
let* r3' = Window.get_rect in
assert (r3 = r3') ;
let* r4 = Window.minimize in
assert (r4.x >= 0) ;
assert (r4.y >= 0) ;
assert (r4.width <= r3.width) ;
assert (r4.height <= r3.height) ;
let* r4' = Window.get_rect in
assert (r4 = r4') ;
let my_rect = { Window.x = 42 ; y = 420 ; width = 500 ; height = 600 } in
let* my_rect' = Window.set_rect my_rect in
assert (my_rect = my_rect') ;
let* my_rect'' = Window.get_rect in
assert (my_rect = my_rect'') ;
return ()
end
let source = test "source"
begin
let open Webdriver in
let* () = goto url_a in
let* html = source in
let is_html = "<html><head>" in
let prefix = String.sub html 0 (String.length is_html) in
assert (prefix = is_html) ;
return ()
end
let screenshot = test "screenshot"
begin
let open Webdriver in
let* () = goto url_a in
let* png = screenshot () in
assert ("PNG" = String.sub png 1 3) ;
let* elt = find_first `css "h1" in
let* png_elt = screenshot ~elt () in
assert ("PNG" = String.sub png_elt 1 3) ;
assert (String.length png > String.length png_elt) ;
return ()
end
let exec_js = test "execute javascript"
begin
let open Webdriver in
let* () = goto url_a in
let* json = execute "return 42" in
assert (json = `Int 42) ;
let t0 = Unix.gettimeofday () in
let* json =
execute_async
{| var k = arguments[0];
setTimeout(function() { k(666) }, 1000);
|}
in
let t1 = Unix.gettimeofday () in
assert (json = `Int 666) ;
assert (t1 -. t0 > 1.0) ;
return ()
end
let find = test "find"
begin
let open Webdriver in
let* () = goto url_a in
let* first_h1 = find_first `tag_name "h1" in
let* all_h1 = find_all `tag_name "h1" in
assert (List.length all_h1 = 2) ;
assert (List.hd all_h1 = first_h1) ;
let* all_links = find_all `css "a" in
let* fst_link = find_first `partial_link_text "link to B" in
let* snd_link = find_first `partial_link_text "another link" in
assert (all_links = [ fst_link ; snd_link ]) ;
let* fst_link' = find_first `link_text "first link to B" in
assert (fst_link = fst_link') ;
let* fst_link'' = find_first `css "p a" in
assert (fst_link = fst_link'') ;
let* snd_link' = find_first `link_text "another link to B" in
assert (snd_link = snd_link') ;
let* snd_link'' = find_first `xpath "//a[contains(text(), 'another')]" in
assert (snd_link = snd_link'') ;
return ()
end
let of_option = function
| None -> assert false
| Some v -> v
let inspect = test "inspect"
begin
let open Webdriver in
let* () = goto url_a in
let* input = find_first `css "form input[type='string']" in
let* name = attribute input "name" in
assert (name = "foo") ;
let* init_value = attribute input "value" in
assert (init_value = "default value") ;
let* () = send_keys input "hello" in
let* new_value = attribute input "value" in
assert (new_value = "default value") ;
let* real_value = property input "value" in
let real_value = of_option real_value in
assert (real_value = "default valuehello") ;
let* color = css input "background-color" in
assert (List.mem color ["rgb(0, 128, 0)"; "rgba(0, 128, 0, 1)"]) ;
return ()
end
let form_interact = test "form interaction"
begin
let open Webdriver in
let* () = goto url_a in
let* input = find_first `css "input[name='foo']" in
let* () = send_keys input (Key.backspace ^ "able") in
let* btn = find_first `css "input[type='submit']" in
let* () = click btn in
let* url = current_url in
assert (url = url_b ^ "?foo=default+valuable") ;
let* () = back in
let* input = find_first `css "input[name='foo']" in
let* () = clear input in
let* () = send_keys input "again" in
let* btn = find_first `css "input[type='submit']" in
let* () = click btn in
let* url = current_url in
assert (url = url_b ^ "?foo=again") ;
return ()
end
let perform = test "perform"
begin
let open Webdriver in
let* () = goto url_a in
let* input = find_first `css "input[name='foo']" in
let* default = of_option |<< property input "value" in
assert (default = "default value") ;
let* () = click input in
let* focus = active in
assert (focus = input) ;
let expected = [ `down "a" ; `up "a" ; `down "b" ; `up "b" ] in
assert (typing "ab" = expected) ;
let* () = perform [ keyboard (typing "ab") ] in
let* str = of_option |<< property input "value" in
assert (str = "default valueab") ;
let erase_all_expected =
[ `down Key.control
; `down "a"
; `up "a"
; `up Key.control
; `down Key.backspace
; `up Key.backspace
] in
let erase_all = typing (Key.control ^ "a" ^ Key.backspace) in
assert (erase_all = erase_all_expected) ;
let* () = perform [ keyboard erase_all ] in
let* str = of_option |<< property input "value" in
assert (str = "") ;
let* () = send_keys input "test" in
let* str = of_option |<< property input "value" in
assert (str = "test") ;
let* () = perform [ keyboard (typing Key.enter) ] in
let* () =
Wait.until
(let+ url = current_url in
url = url_b ^ "?foo=test")
in
let* () = back in
let* url = current_url in
assert (url = url_a) ;
let* input = find_first `css "input[name='foo']" in
let* str = of_option |<< property input "value" in
assert (str = "test") ;
let* btn = find_first `css "input[type='submit']" in
let* rect = rect btn in
let* _ = Window.maximize in
let btn_top_left = (1 + int_of_float rect.x, 1 + int_of_float rect.y) in
let move = absolute ~duration:50 btn_top_left in
let* () = click input in
let* active = active in
assert (active = input) ;
let do_click = [ `down button_left ; `pause 50 ; `up button_left ] in
let* () =
perform
[ mouse (`move move :: `noop :: do_click)
; keyboard [`down "z" ; `up "z"]
]
in
let* () =
Wait.until
(let+ url = current_url in
url = url_b ^ "?foo=testz")
in
let* () = back in
let* input = find_first `css "input[name='foo']" in
let* () = click input in
let* btn = find_first `css "input[type='submit']" in
let reset = absolute ~duration:50 (0, 0) in
let move = center ~duration:50 btn in
let* () =
perform
[ mouse (`move reset :: `move move :: do_click)
; keyboard [`down "y" ; `up "y"]
]
in
let* url = current_url in
assert (url = url_b ^ "?foo=testzy") ;
return ()
end
let alert_js = test "alert from js"
begin
let open Webdriver in
let* () = goto url_b in
let* btn = find_first `css "#alert" in
let* txt = text btn in
assert (txt = "Alert on double click") ;
let* () = double_click btn in
let* msg = Alert.get_text in
assert (msg = Some "Hi from javascript") ;
let* () = Alert.dismiss in
let* txt = text btn in
assert (txt = "Clicked!") ;
return ()
end
let all =
[ window_rect
; navigation
; multiple_windows
; source
; screenshot
; exec_js
; find
; inspect
; form_interact
; perform
; alert_js
]
end
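(* Each test above is a [(name, commands)] pair built by [test], so a backend
   only has to instantiate this functor with its own [Webdriver.S]
   implementation and hand [all] to whatever runner it provides.
   Hypothetical sketch (module and runner names assumed, not real APIs):
     module T = Test_any.Make (My_webdriver_backend)
     let () = List.iter (fun (name, cmd) -> run_one name cmd) T.all
*)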
| null | https://raw.githubusercontent.com/art-w/ocaml-webdriver/f7afeef4dc7db56055f70d0695623c08a10faccc/test/test_any.ml | ocaml | module Make (Webdriver : Webdriver.S) = struct
let url_of_file filename = "file://" ^ Sys.getcwd () ^ "/" ^ filename
let url_a = url_of_file "test/a.html"
let url_b = url_of_file "test/b.html"
let test name cmds = (name, cmds)
open Webdriver.Infix
let navigation = test "navigation"
begin
let open Webdriver in
let* () = goto url_a in
let* ta = title in
assert (ta = "Page A") ;
let* u = current_url in
assert (u = url_a) ;
let* () = goto url_b in
let* tb = title in
assert (tb = "Page B") ;
let* ub = current_url in
assert (ub = url_b) ;
let* () = back in
let* u = current_url in
assert (u = url_a) ;
let* () = forward in
let* u = current_url in
assert (u = url_b) ;
let* () = refresh in
let* u = current_url in
assert (u = url_b) ;
return ()
end
let multiple_windows = test "multiple windows"
begin
let open Webdriver in
let* w0 = Window.current in
let* ws = Window.all in
assert (ws = [w0]) ;
let* w1, _ = Window.make `window in
let* w0' = Window.current in
assert (w0 = w0') ;
let* () = Window.switch_to w1 in
let* w1' = Window.current in
assert (w1 = w1') ;
let sort ws = List.sort Stdlib.compare ws in
let* ws = Window.all in
assert (sort ws = sort [ w0 ; w1 ]) ;
let* () = Window.switch_to w0 in
let* w0' = Window.current in
assert (w0 = w0') ;
let* remaining_windows = Window.close in
assert (remaining_windows = [w1]) ;
return ()
end
let window_rect = test "window resizing"
begin
let open Webdriver in
let* r1 = Window.get_rect in
assert (r1.x >= 0) ;
assert (r1.y >= 0) ;
assert (r1.width > 0) ;
assert (r1.height > 0) ;
let* r2 = Window.maximize in
assert (r2.x >= 0) ;
assert (r2.y >= 0) ;
assert (r2.width >= r1.width) ;
assert (r2.height >= r1.height) ;
let* r2' = Window.get_rect in
assert (r2 = r2') ;
let* r3 = Window.fullscreen in
assert (r3.x >= 0) ;
assert (r3.y >= 0) ;
assert (r3.width >= r2.width) ;
assert (r3.height >= r2.height) ;
let* r3' = Window.get_rect in
assert (r3 = r3') ;
let* r4 = Window.minimize in
assert (r4.x >= 0) ;
assert (r4.y >= 0) ;
assert (r4.width <= r3.width) ;
assert (r4.height <= r3.height) ;
let* r4' = Window.get_rect in
assert (r4 = r4') ;
let my_rect = { Window.x = 42 ; y = 420 ; width = 500 ; height = 600 } in
let* my_rect' = Window.set_rect my_rect in
assert (my_rect = my_rect') ;
let* my_rect'' = Window.get_rect in
assert (my_rect = my_rect'') ;
return ()
end
let source = test "source"
begin
let open Webdriver in
let* () = goto url_a in
let* html = source in
let is_html = "<html><head>" in
let prefix = String.sub html 0 (String.length is_html) in
assert (prefix = is_html) ;
return ()
end
let screenshot = test "screenshot"
begin
let open Webdriver in
let* () = goto url_a in
let* png = screenshot () in
assert ("PNG" = String.sub png 1 3) ;
let* elt = find_first `css "h1" in
let* png_elt = screenshot ~elt () in
assert ("PNG" = String.sub png_elt 1 3) ;
assert (String.length png > String.length png_elt) ;
return ()
end
let exec_js = test "execute javascript"
begin
let open Webdriver in
let* () = goto url_a in
let* json = execute "return 42" in
assert (json = `Int 42) ;
let t0 = Unix.gettimeofday () in
let* json =
execute_async
{| var k = arguments[0];
setTimeout(function() { k(666) }, 1000);
|}
in
let t1 = Unix.gettimeofday () in
assert (json = `Int 666) ;
assert (t1 -. t0 > 1.0) ;
return ()
end
let find = test "find"
begin
let open Webdriver in
let* () = goto url_a in
let* first_h1 = find_first `tag_name "h1" in
let* all_h1 = find_all `tag_name "h1" in
assert (List.length all_h1 = 2) ;
assert (List.hd all_h1 = first_h1) ;
let* all_links = find_all `css "a" in
let* fst_link = find_first `partial_link_text "link to B" in
let* snd_link = find_first `partial_link_text "another link" in
assert (all_links = [ fst_link ; snd_link ]) ;
let* fst_link' = find_first `link_text "first link to B" in
assert (fst_link = fst_link') ;
let* fst_link'' = find_first `css "p a" in
assert (fst_link = fst_link'') ;
let* snd_link' = find_first `link_text "another link to B" in
assert (snd_link = snd_link') ;
let* snd_link'' = find_first `xpath "//a[contains(text(), 'another')]" in
assert (snd_link = snd_link'') ;
return ()
end
let of_option = function
| None -> assert false
| Some v -> v
let inspect = test "inspect"
begin
let open Webdriver in
let* () = goto url_a in
let* input = find_first `css "form input[type='string']" in
let* name = attribute input "name" in
assert (name = "foo") ;
let* init_value = attribute input "value" in
assert (init_value = "default value") ;
let* () = send_keys input "hello" in
let* new_value = attribute input "value" in
assert (new_value = "default value") ;
let* real_value = property input "value" in
let real_value = of_option real_value in
assert (real_value = "default valuehello") ;
let* color = css input "background-color" in
assert (List.mem color ["rgb(0, 128, 0)"; "rgba(0, 128, 0, 1)"]) ;
return ()
end
let form_interact = test "form interaction"
begin
let open Webdriver in
let* () = goto url_a in
let* input = find_first `css "input[name='foo']" in
let* () = send_keys input (Key.backspace ^ "able") in
let* btn = find_first `css "input[type='submit']" in
let* () = click btn in
let* url = current_url in
assert (url = url_b ^ "?foo=default+valuable") ;
let* () = back in
let* input = find_first `css "input[name='foo']" in
let* () = clear input in
let* () = send_keys input "again" in
let* btn = find_first `css "input[type='submit']" in
let* () = click btn in
let* url = current_url in
assert (url = url_b ^ "?foo=again") ;
return ()
end
let perform = test "perform"
begin
let open Webdriver in
let* () = goto url_a in
let* input = find_first `css "input[name='foo']" in
let* default = of_option |<< property input "value" in
assert (default = "default value") ;
let* () = click input in
let* focus = active in
assert (focus = input) ;
let expected = [ `down "a" ; `up "a" ; `down "b" ; `up "b" ] in
assert (typing "ab" = expected) ;
let* () = perform [ keyboard (typing "ab") ] in
let* str = of_option |<< property input "value" in
assert (str = "default valueab") ;
let erase_all_expected =
[ `down Key.control
; `down "a"
; `up "a"
; `up Key.control
; `down Key.backspace
; `up Key.backspace
] in
let erase_all = typing (Key.control ^ "a" ^ Key.backspace) in
assert (erase_all = erase_all_expected) ;
let* () = perform [ keyboard erase_all ] in
let* str = of_option |<< property input "value" in
assert (str = "") ;
let* () = send_keys input "test" in
let* str = of_option |<< property input "value" in
assert (str = "test") ;
let* () = perform [ keyboard (typing Key.enter) ] in
let* () =
Wait.until
(let+ url = current_url in
url = url_b ^ "?foo=test")
in
let* () = back in
let* url = current_url in
assert (url = url_a) ;
let* input = find_first `css "input[name='foo']" in
let* str = of_option |<< property input "value" in
assert (str = "test") ;
let* btn = find_first `css "input[type='submit']" in
let* rect = rect btn in
let* _ = Window.maximize in
let btn_top_left = (1 + int_of_float rect.x, 1 + int_of_float rect.y) in
let move = absolute ~duration:50 btn_top_left in
let* () = click input in
let* active = active in
assert (active = input) ;
let do_click = [ `down button_left ; `pause 50 ; `up button_left ] in
let* () =
perform
[ mouse (`move move :: `noop :: do_click)
; keyboard [`down "z" ; `up "z"]
]
in
let* () =
Wait.until
(let+ url = current_url in
url = url_b ^ "?foo=testz")
in
let* () = back in
let* input = find_first `css "input[name='foo']" in
let* () = click input in
let* btn = find_first `css "input[type='submit']" in
let reset = absolute ~duration:50 (0, 0) in
let move = center ~duration:50 btn in
let* () =
perform
[ mouse (`move reset :: `move move :: do_click)
; keyboard [`down "y" ; `up "y"]
]
in
let* url = current_url in
assert (url = url_b ^ "?foo=testzy") ;
return ()
end
let alert_js = test "alert from js"
begin
let open Webdriver in
let* () = goto url_b in
let* btn = find_first `css "#alert" in
let* txt = text btn in
assert (txt = "Alert on double click") ;
let* () = double_click btn in
let* msg = Alert.get_text in
assert (msg = Some "Hi from javascript") ;
let* () = Alert.dismiss in
let* txt = text btn in
assert (txt = "Clicked!") ;
return ()
end
let all =
[ window_rect
; navigation
; multiple_windows
; source
; screenshot
; exec_js
; find
; inspect
; form_interact
; perform
; alert_js
]
end
|
|
ad4076df4e3b1c01c0f1e11c26269122df0d78bb4b89ef05c1210e2145abbe3d | lingnand/VIMonad | Man.hs | -----------------------------------------------------------------------------
-- |
-- Module : XMonad.Prompt.Man
-- Copyright : (c) 2007
-- License : BSD3-style (see LICENSE)
--
-- Maintainer : < >
-- Portability : non-portable (uses "manpath" and "bash")
--
-- A manual page prompt for XMonad window manager.
--
-- TODO
--
-- * narrow completions by section number, if the one is specified
-- (like @\/etc\/bash_completion@ does)
-----------------------------------------------------------------------------
module XMonad.Prompt.Man (
-- * Usage
-- $usage
manPrompt
, getCommandOutput
, Man
) where
import XMonad
import XMonad.Prompt
import XMonad.Util.Run
import XMonad.Prompt.Shell (split)
import System.Directory
import System.Process
import System.IO
import qualified Control.Exception.Extensible as E
import Control.Monad
import Data.List
import Data.Maybe
-- $usage
-- 1. In your @~\/.xmonad\/xmonad.hs@:
--
-- > import XMonad.Prompt
-- > import XMonad.Prompt.Man
--
-- 2. In your keybindings add something like:
--
-- > , ((modm, xK_F1), )
--
-- For detailed instruction on editing the key binding see
-- "XMonad.Doc.Extending#Editing_key_bindings".
data Man = Man
instance XPrompt Man where
showXPrompt Man = "Manual page: "
-- | Query for manual page to be displayed.
manPrompt :: XPConfig -> X ()
manPrompt c = do
mans <- io getMans
mkXPrompt Man c (manCompl mans) $ runInTerm "" . (++) "man "
getMans :: IO [String]
getMans = do
paths <- do
let getout cmd = getCommandOutput cmd `E.catch` \E.SomeException{} -> return ""
-- one of these combinations should give some output
p1 <- getout "manpath -g 2>/dev/null"
p2 <- getout "manpath 2>/dev/null"
return $ intercalate ":" $ lines $ p1 ++ p2
let sects = ["man" ++ show n | n <- [1..9 :: Int]]
dirs = [d ++ "/" ++ s | d <- split ':' paths, s <- sects]
mans <- forM (nub dirs) $ \d -> do
exists <- doesDirectoryExist d
if exists
then map (stripExt . stripSuffixes [".gz", ".bz2"]) `fmap`
getDirectoryContents d
else return []
return $ uniqSort $ concat mans
manCompl :: [String] -> String -> IO [String]
manCompl mans s | s == "" || last s == ' ' = return []
| otherwise = do
  -- XXX readline instead of bash's?
f <- lines `fmap` getCommandOutput ("bash -c 'compgen -A file " ++ s ++ "'")
mkComplFunFromList (f ++ mans) s
-- | Run a command using shell and return its output.
--
-- XXX Merge into 'XMonad.Util.Run'?
--
-- (Ask \"gurus\" whether @evaluate (length ...)@ approach is
-- better\/more idiomatic.)
getCommandOutput :: String -> IO String
getCommandOutput s = do
  -- we can ignore the process handle because we ignore SIGCHLD
(pin, pout, perr, _) <- runInteractiveCommand s
hClose pin
output <- hGetContents pout
E.evaluate (length output)
hClose perr
return output
stripExt :: String -> String
stripExt = reverse . drop 1 . dropWhile (/= '.') . reverse
stripSuffixes :: Eq a => [[a]] -> [a] -> [a]
stripSuffixes sufs fn =
head . catMaybes $ map (flip rstrip fn) sufs ++ [Just fn]
rstrip :: Eq a => [a] -> [a] -> Maybe [a]
rstrip suf lst
| suf `isSuffixOf` lst = Just $ take (length lst - length suf) lst
| otherwise = Nothing
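-- Small illustrations of the helpers above (not from the original source):
--
-- > stripExt "ls.1"                         == "ls"
-- > stripSuffixes [".gz", ".bz2"] "ls.1.gz" == "ls.1"
-- > rstrip ".gz" "ls.1"                     == Nothing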
| null | https://raw.githubusercontent.com/lingnand/VIMonad/048e419fc4ef57a5235dbaeef8890faf6956b574/XMonadContrib/XMonad/Prompt/Man.hs | haskell | ---------------------------------------------------------------------------
|
Module : XMonad.Prompt.Man
License : BSD3-style (see LICENSE)
Portability : non-portable (uses "manpath" and "bash")
A manual page prompt for XMonad window manager.
* narrow completions by section number, if the one is specified
(like @\/etc\/bash_completion@ does)
---------------------------------------------------------------------------
* Usage
$usage
$usage
For detailed instruction on editing the key binding see
| Query for manual page to be displayed.
one of these combinations should give some output
| Run a command using shell and return its output.
better\/more idiomatic.)
we can ignore the process handle because we ignor SIGCHLD | Copyright : ( c ) 2007
Maintainer : < >
TODO
module XMonad.Prompt.Man (
manPrompt
, getCommandOutput
, Man
) where
import XMonad
import XMonad.Prompt
import XMonad.Util.Run
import XMonad.Prompt.Shell (split)
import System.Directory
import System.Process
import System.IO
import qualified Control.Exception.Extensible as E
import Control.Monad
import Data.List
import Data.Maybe
1 . In your @~\/.xmonad\/xmonad.hs@ :
> import XMonad . Prompt
> import XMonad . Prompt . Man
2 . In your keybindings add something like :
> , ( ( modm , xK_F1 ) , )
" XMonad . Doc . Extending#Editing_key_bindings " .
data Man = Man
instance XPrompt Man where
showXPrompt Man = "Manual page: "
manPrompt :: XPConfig -> X ()
manPrompt c = do
mans <- io getMans
mkXPrompt Man c (manCompl mans) $ runInTerm "" . (++) "man "
getMans :: IO [String]
getMans = do
paths <- do
let getout cmd = getCommandOutput cmd `E.catch` \E.SomeException{} -> return ""
p1 <- getout "manpath -g 2>/dev/null"
p2 <- getout "manpath 2>/dev/null"
return $ intercalate ":" $ lines $ p1 ++ p2
let sects = ["man" ++ show n | n <- [1..9 :: Int]]
dirs = [d ++ "/" ++ s | d <- split ':' paths, s <- sects]
mans <- forM (nub dirs) $ \d -> do
exists <- doesDirectoryExist d
if exists
then map (stripExt . stripSuffixes [".gz", ".bz2"]) `fmap`
getDirectoryContents d
else return []
return $ uniqSort $ concat mans
manCompl :: [String] -> String -> IO [String]
manCompl mans s | s == "" || last s == ' ' = return []
| otherwise = do
XXX readline instead of bash 's ?
f <- lines `fmap` getCommandOutput ("bash -c 'compgen -A file " ++ s ++ "'")
mkComplFunFromList (f ++ mans) s
XXX Merge into ' XMonad . Util . Run ' ?
( Ask \"gurus\ " whether @evaluate ( length ... ) @ approach is
getCommandOutput :: String -> IO String
getCommandOutput s = do
(pin, pout, perr, _) <- runInteractiveCommand s
hClose pin
output <- hGetContents pout
E.evaluate (length output)
hClose perr
return output
stripExt :: String -> String
stripExt = reverse . drop 1 . dropWhile (/= '.') . reverse
stripSuffixes :: Eq a => [[a]] -> [a] -> [a]
stripSuffixes sufs fn =
head . catMaybes $ map (flip rstrip fn) sufs ++ [Just fn]
rstrip :: Eq a => [a] -> [a] -> Maybe [a]
rstrip suf lst
| suf `isSuffixOf` lst = Just $ take (length lst - length suf) lst
| otherwise = Nothing
|
8acee4da22e11ca62459abfd71f22cc013c598d6fb98d5db8c2f4de8ff69def0 | dmitryvk/sbcl-win32-threads | mipsstrops.lisp | ;;;; string hacking functions that are stubs for things that might
;;;; be microcoded someday
This software is part of the SBCL system . See the README file for
;;;; more information.
;;;;
This software is derived from the CMU CL system , which was
written at Carnegie Mellon University and released into the
;;;; public domain. The software is in the public domain and is
;;;; provided with absolutely no warranty. See the COPYING and CREDITS
;;;; files for more information.
(in-package "SB!IMPL")
;;; Compare the substrings specified by STRING1 and STRING2 and return
NIL if the strings are STRING= , or the lowest index of STRING1 in
which the two differ . If one string is longer than the other and
;;; the shorter is a prefix of the longer, the length of the shorter +
START1 is returned . The arguments must be simple strings .
;;;
This would be done on the Vax with CMPC3 .
(defun %sp-string-compare (string1 start1 end1 string2 start2 end2)
(declare (simple-string string1 string2))
(declare (fixnum start1 end1 start2 end2))
(let ((len1 (- end1 start1))
(len2 (- end2 start2)))
(declare (fixnum len1 len2))
(cond
((= len1 len2)
(do ((index1 start1 (1+ index1))
(index2 start2 (1+ index2)))
((= index1 end1) nil)
(declare (fixnum index1 index2))
(if (char/= (schar string1 index1) (schar string2 index2))
(return index1))))
((> len1 len2)
(do ((index1 start1 (1+ index1))
(index2 start2 (1+ index2)))
((= index2 end2) index1)
(declare (fixnum index1 index2))
(if (char/= (schar string1 index1) (schar string2 index2))
(return index1))))
(t
(do ((index1 start1 (1+ index1))
(index2 start2 (1+ index2)))
((= index1 end1) index1)
(declare (fixnum index1 index2))
(if (char/= (schar string1 index1) (schar string2 index2))
(return index1)))))))
;;; like %SP-STRING-COMPARE, only backwards
(defun %sp-reverse-string-compare (string1 start1 end1 string2 start2 end2)
(declare (simple-string string1 string2))
(declare (fixnum start1 end1 start2 end2))
(let ((len1 (- end1 start1))
(len2 (- end2 start2)))
(declare (fixnum len1 len2))
(cond
((= len1 len2)
(do ((index1 (1- end1) (1- index1))
(index2 (1- end2) (1- index2)))
((< index1 start1) nil)
(declare (fixnum index1 index2))
(if (char/= (schar string1 index1) (schar string2 index2))
(return index1))))
((> len1 len2)
(do ((index1 (1- end1) (1- index1))
(index2 (1- end2) (1- index2)))
((< index2 start2) index1)
(declare (fixnum index1 index2))
(if (char/= (schar string1 index1) (schar string2 index2))
(return index1))))
(t
(do ((index1 (1- end1) (1- index1))
(index2 (1- end2) (1- index2)))
((< index1 start1) index1)
(declare (fixnum index1 index2))
(if (char/= (schar string1 index1) (schar string2 index2))
(return index1)))))))
| null | https://raw.githubusercontent.com/dmitryvk/sbcl-win32-threads/5abfd64b00a0937ba2df2919f177697d1d91bde4/src/code/mipsstrops.lisp | lisp | string hacking functions that are stubs for things that might
be microcoded someday
more information.
public domain. The software is in the public domain and is
provided with absolutely no warranty. See the COPYING and CREDITS
files for more information.
Compare the substrings specified by STRING1 and STRING2 and return
the shorter is a prefix of the longer, the length of the shorter +
like %SP-STRING-COMPARE, only backwards |
This software is part of the SBCL system . See the README file for
This software is derived from the CMU CL system , which was
written at Carnegie Mellon University and released into the
(in-package "SB!IMPL")
NIL if the strings are STRING= , or the lowest index of STRING1 in
which the two differ . If one string is longer than the other and
START1 is returned . The arguments must be simple strings .
This would be done on the Vax with CMPC3 .
(defun %sp-string-compare (string1 start1 end1 string2 start2 end2)
(declare (simple-string string1 string2))
(declare (fixnum start1 end1 start2 end2))
(let ((len1 (- end1 start1))
(len2 (- end2 start2)))
(declare (fixnum len1 len2))
(cond
((= len1 len2)
(do ((index1 start1 (1+ index1))
(index2 start2 (1+ index2)))
((= index1 end1) nil)
(declare (fixnum index1 index2))
(if (char/= (schar string1 index1) (schar string2 index2))
(return index1))))
((> len1 len2)
(do ((index1 start1 (1+ index1))
(index2 start2 (1+ index2)))
((= index2 end2) index1)
(declare (fixnum index1 index2))
(if (char/= (schar string1 index1) (schar string2 index2))
(return index1))))
(t
(do ((index1 start1 (1+ index1))
(index2 start2 (1+ index2)))
((= index1 end1) index1)
(declare (fixnum index1 index2))
(if (char/= (schar string1 index1) (schar string2 index2))
(return index1)))))))
(defun %sp-reverse-string-compare (string1 start1 end1 string2 start2 end2)
(declare (simple-string string1 string2))
(declare (fixnum start1 end1 start2 end2))
(let ((len1 (- end1 start1))
(len2 (- end2 start2)))
(declare (fixnum len1 len2))
(cond
((= len1 len2)
(do ((index1 (1- end1) (1- index1))
(index2 (1- end2) (1- index2)))
((< index1 start1) nil)
(declare (fixnum index1 index2))
(if (char/= (schar string1 index1) (schar string2 index2))
(return index1))))
((> len1 len2)
(do ((index1 (1- end1) (1- index1))
(index2 (1- end2) (1- index2)))
((< index2 start2) index1)
(declare (fixnum index1 index2))
(if (char/= (schar string1 index1) (schar string2 index2))
(return index1))))
(t
(do ((index1 (1- end1) (1- index1))
(index2 (1- end2) (1- index2)))
((< index1 start1) index1)
(declare (fixnum index1 index2))
(if (char/= (schar string1 index1) (schar string2 index2))
(return index1)))))))
|
b0ddd2c14cc5170541a72268a0800595885913bc8455326d681047bebd63fd0b | melange-re/melange | res_compmisc.ml | Copyright ( C ) 2015 - 2020 , Authors of ReScript
*
* This program is free software : you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published by
* the Free Software Foundation , either version 3 of the License , or
* ( at your option ) any later version .
*
* In addition to the permissions granted to you by the LGPL , you may combine
* or link a " work that uses the Library " with a publicly distributed version
* of this file to produce a combined library or application , then distribute
* that combined work under the terms of your choosing , with no requirement
* to comply with the obligations normally placed on you by section 4 of the
* LGPL version 3 ( or the corresponding section of a later version of the LGPL
* should you choose to use a later version ) .
*
* This program is distributed in the hope that it will be useful ,
* but WITHOUT ANY WARRANTY ; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE . See the
* GNU Lesser General Public License for more details .
*
* You should have received a copy of the GNU Lesser General Public License
* along with this program ; if not , write to the Free Software
* Foundation , Inc. , 59 Temple Place - Suite 330 , Boston , MA 02111 - 1307 , USA .
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* In addition to the permissions granted to you by the LGPL, you may combine
* or link a "work that uses the Library" with a publicly distributed version
* of this file to produce a combined library or application, then distribute
* that combined work under the terms of your choosing, with no requirement
* to comply with the obligations normally placed on you by section 4 of the
* LGPL version 3 (or the corresponding section of a later version of the LGPL
* should you choose to use a later version).
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with this program; if not, write to the Free Software
* Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. *)
let init_path () =
let dirs = !Clflags.include_dirs in
let exp_dirs =
List.map (Misc.expand_directory Config.standard_library) dirs
in
Load_path.reset ();
let exp_dirs = List.rev_append exp_dirs (Js_config.std_include_dirs ()) in
List.iter Load_path.add_dir exp_dirs;
Ext_log.dwarn ~__POS__ "Compiler include dirs: %s@."
(String.concat "; " (Load_path.get_paths ()));
Env.reset_cache ()
(* Return the initial environment in which compilation proceeds. *)
(* Note: do not do init_path() in initial_env, this breaks
toplevel initialization (PR#1775) *)
let[@ocaml.warning "-3"] open_implicit_module m env =
let lid =
{ Asttypes.loc = Location.in_file "command line"; txt = Longident.parse m }
in
snd
(!Typeclass.type_open_descr env
{
popen_expr = lid;
popen_override = Override;
popen_loc = lid.loc;
popen_attributes = [];
})
let initial_env () =
Ident.reinit ();
let initial = Env.initial_safe_string in
let env =
if !Clflags.nopervasives then initial
else open_implicit_module "Stdlib" initial
in
List.fold_left
(fun env m -> open_implicit_module m env)
env
(List.rev !Clflags.open_modules)
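(* Illustrative effect (not from the original source): with
   [-open Foo -open Bar] on the command line, [initial_env ()] opens [Stdlib]
   first (unless [-nopervasives]) and then [Foo] and [Bar] in command-line
   order, which is why the accumulated [Clflags.open_modules] is reversed. *)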
| null | https://raw.githubusercontent.com/melange-re/melange/4929daafb7e8993337b0e1982d32c00065cbf806/jscomp/core/res_compmisc.ml | ocaml | Return the initial environment in which compilation proceeds.
Note: do not do init_path() in initial_env, this breaks
toplevel initialization (PR#1775) | Copyright ( C ) 2015 - 2020 , Authors of ReScript
*
* This program is free software : you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published by
* the Free Software Foundation , either version 3 of the License , or
* ( at your option ) any later version .
*
* In addition to the permissions granted to you by the LGPL , you may combine
* or link a " work that uses the Library " with a publicly distributed version
* of this file to produce a combined library or application , then distribute
* that combined work under the terms of your choosing , with no requirement
* to comply with the obligations normally placed on you by section 4 of the
* LGPL version 3 ( or the corresponding section of a later version of the LGPL
* should you choose to use a later version ) .
*
* This program is distributed in the hope that it will be useful ,
* but WITHOUT ANY WARRANTY ; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE . See the
* GNU Lesser General Public License for more details .
*
* You should have received a copy of the GNU Lesser General Public License
* along with this program ; if not , write to the Free Software
* Foundation , Inc. , 59 Temple Place - Suite 330 , Boston , MA 02111 - 1307 , USA .
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* In addition to the permissions granted to you by the LGPL, you may combine
* or link a "work that uses the Library" with a publicly distributed version
* of this file to produce a combined library or application, then distribute
* that combined work under the terms of your choosing, with no requirement
* to comply with the obligations normally placed on you by section 4 of the
* LGPL version 3 (or the corresponding section of a later version of the LGPL
* should you choose to use a later version).
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with this program; if not, write to the Free Software
* Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. *)
let init_path () =
let dirs = !Clflags.include_dirs in
let exp_dirs =
List.map (Misc.expand_directory Config.standard_library) dirs
in
Load_path.reset ();
let exp_dirs = List.rev_append exp_dirs (Js_config.std_include_dirs ()) in
List.iter Load_path.add_dir exp_dirs;
Ext_log.dwarn ~__POS__ "Compiler include dirs: %s@."
(String.concat "; " (Load_path.get_paths ()));
Env.reset_cache ()
let[@ocaml.warning "-3"] open_implicit_module m env =
let lid =
{ Asttypes.loc = Location.in_file "command line"; txt = Longident.parse m }
in
snd
(!Typeclass.type_open_descr env
{
popen_expr = lid;
popen_override = Override;
popen_loc = lid.loc;
popen_attributes = [];
})
let initial_env () =
Ident.reinit ();
let initial = Env.initial_safe_string in
let env =
if !Clflags.nopervasives then initial
else open_implicit_module "Stdlib" initial
in
List.fold_left
(fun env m -> open_implicit_module m env)
env
(List.rev !Clflags.open_modules)
|
64c9e1d9c182f309ddd70f334848c4e651ffbe7deb9378e0f7ed178ec18669fc | returntocorp/ocaml-tree-sitter-core | test.ml | (*
All the unit tests for this library.
*)
let test_suites : unit Alcotest.test list = [
Matcher.test;
Src_file.test;
Util_string.test;
]
| null | https://raw.githubusercontent.com/returntocorp/ocaml-tree-sitter-core/b2404ed27e053f23745f7b2335405134dcb087ab/src/run/test/test.ml | ocaml |
All the unit tests for this library.
|
let test_suites : unit Alcotest.test list = [
Matcher.test;
Src_file.test;
Util_string.test;
]
|
3da474fccdb0d1b79857ff2e57941eb43935471ce8e12f21f3418d432d9fcf2f | sysbio-bioinf/avatar | import.clj | Copyright ( c ) . All rights reserved .
; The use and distribution terms for this software are covered by the
Eclipse Public License 2.0 ( -v20.html )
; which can be found in the file LICENSE at the root of this distribution.
; By using this software in any fashion, you are agreeing to be bound by
; the terms of this license.
; You must not remove this notice, or any other, from this software.
(ns avatar.data.import
(:require
[clojure.java.io :as io]
[clojure.string :as str]
[clojure.set :as set]
[clojure.data.int-map :as im]
[avatar.util :as u]
[avatar.data.csv :as csv]
[avatar.algorithms.common :as c]
[avatar.ui.tools :as t]
[avatar.ui.dialogs.progress-view :as progress])
(:import
(org.apache.poi.hssf.usermodel HSSFWorkbook)
(org.apache.poi.xssf.usermodel XSSFWorkbook)
(org.apache.poi.ss.usermodel Workbook Sheet Row Cell)
(java.io Closeable)
(com.monitorjbl.xlsx StreamingReader)))
(defn workbook
^Workbook [file]
(let [ext (str/lower-case (u/file-extension file))]
(case ext
"xls" (HSSFWorkbook. (io/input-stream file))
"xlsx" (-> (StreamingReader/builder)
(.rowCacheSize 10)
(.bufferSize 4096)
(.open (io/input-stream file))))))
(defn sheets
[^Workbook workbook]
(iterator-seq (.sheetIterator workbook)))
(defn sheet-name
[^Sheet sheet]
(.getSheetName sheet))
(defn rows
[^Sheet sheet]
(iterator-seq (.rowIterator sheet)))
(defn first-cell-index
[^Row row]
(.getFirstCellNum row))
(defn last-cell-index
[^Row row]
(unchecked-dec (.getLastCellNum row)))
(defn cells
[^Row row]
(iterator-seq (.cellIterator row)))
(defn cell-at
^Cell [^Row row, ^long column-index]
(.getCell row, column-index))
(defn column-index
^long [^Cell cell]
(.getColumnIndex cell))
(defn row-index
^long [^Cell cell]
(.getRowIndex cell))
(defn cell-value
[^Cell cell]
(u/case+ (.getCellType cell)
Cell/CELL_TYPE_STRING (.getStringCellValue cell)
Cell/CELL_TYPE_BOOLEAN (.getBooleanCellValue cell)
Cell/CELL_TYPE_NUMERIC (.getNumericCellValue cell)
nil))
(defn row-sizes
[^Sheet sheet]
(mapv
(comp count cells)
(rows sheet)))
(defn almost-integer?
[^double value]
(< (Math/abs (- value (Math/round value))) 1.0E-9))
(defn value->str
[value]
(cond
(string? value) (when-not (str/blank? value) value)
; only true floats (not almost integers) are converted to floating point string representations
(float? value) (str (cond-> (double value) (almost-integer? value) (-> Math/round long)))
(integer? value) (str value)
:else value))
(defn empty-columns
[^Sheet sheet, column-count]
(let [all-columns (c/populated-dense-int-set column-count),
empty-cells-per-row (mapv
(fn [^Row row]
(u/reduce-indexed
(fn [empty-cells, cell-index, cell]
(cond-> empty-cells
(cell-value cell)
(disj cell-index)))
all-columns
(cells row)))
(rows sheet))]
(reduce im/intersection (c/populated-dense-int-set column-count) empty-cells-per-row)))
(defn empty-row?
[row]
(every?
(fn empty-cell? [cell]
(let [value (cell-value cell)]
(or
(nil? value)
(and (string? value) (str/blank? value)))))
row))
(defn empty-rows
[^Sheet sheet]
(u/reduce-indexed
(fn [empty-rows, row-index, row]
(cond-> empty-rows
(empty-row? row)
(conj row-index)))
(im/dense-int-set)
(rows sheet)))
(defn sheet-info
[^Sheet sheet]
(let [row-count (count (rows sheet)),
column-count (reduce
(fn [max-column-index, row]
(max max-column-index, (last-cell-index row)))
0
(rows sheet))
empty-columns (empty-columns sheet, column-count)
empty-rows (empty-rows sheet)]
{:row-count (- row-count (count empty-rows)),
:column-count (- column-count (count empty-columns)),
:empty-columns empty-columns,
:empty-rows empty-rows}))
(defn process-row-old
"Extract values from the cells of the given row into a vector of length given by the column count."
[{:keys [empty-columns, column-count]}, ^Row row]
(persistent!
(reduce
(fn [cells-vec, column-index]
(if (contains? empty-columns column-index)
cells-vec
(let [value (some-> (cell-at row, column-index)
cell-value
value->str)]
(conj! cells-vec value)))
)
(transient [])
(range column-count))))
(defn process-row
[^Row row]
(let [cell-it (.cellIterator row)]
(loop [column->value-map (transient {})]
(if (.hasNext cell-it)
(let [cell (.next cell-it)
column-index (column-index cell)
value (some-> cell cell-value value->str)]
(recur
(cond-> column->value-map
(some? value)
(assoc! column-index value))))
(let [column->value-map (persistent! column->value-map)
non-empty-cells (into (im/dense-int-set) (keys column->value-map))
column-count (if (seq non-empty-cells)
(reduce max non-empty-cells)
0)]
(when (seq non-empty-cells)
{:row-values (mapv #(get column->value-map %) (range (inc column-count)))
:column-count column-count
:non-empty-cells non-empty-cells}))))))
(defn remove-empty-columns
[non-empty-columns, row]
(let [n (count row)]
(persistent!
(reduce
(fn [new-row, old-index]
(conj! new-row
; when old-index exceeds row size append nil
(when (< old-index n)
(nth row old-index))))
(transient [])
non-empty-columns))))
(defn process-sheet
[^Sheet sheet]
(let [sheet-name (sheet-name sheet)
row-it (.rowIterator sheet)]
(loop [rows (transient []), non-empty-columns (im/dense-int-set)]
(if (.hasNext row-it)
(let [row (.next row-it)]
(if-let [{:keys [row-values, column-count, non-empty-cells]} (process-row row)]
(recur
(conj! rows row-values)
(im/union non-empty-columns non-empty-cells))
(recur
rows
non-empty-columns)))
; done: create sheet map
(let [rows (persistent! rows)
row-count (count rows)
column-count (count non-empty-columns)]
(when (pos? row-count)
{:sheet-name sheet-name
:row-count row-count
:column-count column-count
:rows (mapv (partial remove-empty-columns non-empty-columns) rows)}))))))
(defn import-excel-file
[file]
(progress/with-shown-indeterminate-progress (format "Reading Excel file \"%s\"" (u/file-name file))
(with-open [wb (workbook file)]
(let [sheets (->> wb
sheets
(u/keepv process-sheet))]
(when (seq sheets)
(hash-map :type :excel, :file (u/file-absolute-path file), :sheets sheets))))))
(defn check-input
[matrix?, data]
(cond
(== (count data) 0)
:error/no-row
(== (count data) 1)
:error/only-one-row
matrix? (let [row-sizes (->> data (mapv count) distinct)]
(if (some #(< % 2) row-sizes)
:error/only-one-column
data))
:else (let [row-sizes (->> data (mapv count) distinct)]
table should have more than one column
(if (some #(> % 1) row-sizes)
data
:error/only-one-column))))
(defn guess-separator
[file]
(let [comma (int \,),
semicolon (int \;),
tab (int \tab)]
(with-open [rdr (io/reader file)]
(loop [comma-count 0, semicolon-count 0, tab-count 0]
(let [x (.read rdr)]
(if (>= x 0)
(recur
(cond-> comma-count
(== comma x) unchecked-inc),
(cond-> semicolon-count
(== semicolon x) unchecked-inc)
(cond-> tab-count
(== tab x) unchecked-inc))
semicolon - count ] , [ \tab tab - count ] ]
(remove (comp zero? second))
(sort-by second >))))))))
(defn read-csv-file
"Read the given file and check whether the read data is a valid data matrix.
Otherwise, a keyword identifying the error reason is returned.
When no separator is given, the function tries the separators (\\, \\; \\tab) found in the file in the order of their occurence count until no error occurs.
In case no separator produces a valid data matrix, a map of separator to error reason is returned."
([file, matrix?]
; guessing separator from comma, semicolon and tab
(reduce
(fn [error-map, sep]
(let [data (read-csv-file file, matrix?, sep)]
(if (keyword? data)
(assoc error-map sep data)
(reduced data))))
{}
(mapv first (guess-separator file))))
([file, matrix?, separator]
(with-open [rdr (io/reader file)]
(->> (csv/read-csv rdr, :separator separator)
vec
(check-input matrix?)))))
(defn process-csv-row
[column-count, row]
(let [n (count row)
row (mapv value->str row)]
(if (< n column-count)
(into row (repeat (- column-count n) ""))
row)))
(defn import-csv-file
[file, matrix?]
(let [rows (read-csv-file file, matrix?)]
(when-not (map? rows)
(let [column-count (reduce max (mapv count rows))]
(->> rows
(mapv (partial process-csv-row column-count))
(hash-map :type :csv, :file (u/file-absolute-path file), :column-count column-count, :row-count (count rows), :rows))))))
(defn row->int-set
[set-bit?, alterations]
(let [alterations (cond-> alterations (not (vector? alterations)) vec),
n (count alterations)]
(loop [sample 0, s (im/dense-int-set)]
(if (< sample n)
(let [v (nth alterations sample)]
(recur
(inc sample),
(cond-> s (set-bit? v) (conj sample))))
s))))
(def group+gene (juxt :group :gene))
(defn merge-alterations
[gene-1, gene-2]
(let [alteration-types (reduce
(fn [type-set, gene]
(into type-set (-> gene :alteration-data keys)))
[gene-1, gene-2])]
(reduce
(fn [result-gene, atype]
(let [alteration-map-1 (get-in gene-1 [:alteration-data, atype])
alteration-map-2 (get-in gene-2 [:alteration-data, atype])]
(assoc-in result-gene [:alteration-data, atype]
{:alterations
(into (or (:alterations alteration-map-1) (im/dense-int-set))
(:alterations alteration-map-2)),
:missing-values
(into (or (:missing-values alteration-map-1) (im/dense-int-set))
(:missing-values alteration-map-2))})))
gene-1
alteration-types)))
(defn merge-duplicates
[gene-list]
(let [order (->> gene-list (map group+gene) distinct vec)
merged-genes-map (persistent!
(reduce
(fn [merged-genes-map, gene]
(u/update-in! merged-genes-map [(group+gene gene)]
(fn [prev-gene]
(if prev-gene
(merge-alterations prev-gene, gene)
gene))))
(transient {})
gene-list))]
(mapv
#(get merged-genes-map %)
order)))
(defn matrix->gene-sample-data
"Converts a data matrix (e.g. read from a csv file) to sample alteration data."
[study-alias, alteration-type, column-count, gene-names-index, gene-groups-index, sample-ids-index, sample-groups-index, rows]
(assert (t/alteration-type? alteration-type) "must be an alteration type")
(let [sample-count (cond-> column-count
gene-names-index dec
gene-groups-index dec)
; function to extract alteration data from rows (or sample group names)
extract-sample-data (if (or gene-names-index gene-groups-index)
(let [columns-to-delete (cond-> #{}
gene-names-index (conj gene-names-index)
gene-groups-index (conj gene-groups-index))]
(fn [row]
(u/delete columns-to-delete, row)))
identity)
gene-list (mapv
(fn row->gene-sample-data [i, row]
(let [sample-data (extract-sample-data row)]
(-> {:gene (some-> (nth row gene-names-index) str/trim)
:order-fixed? false,
:group (when gene-groups-index
(let [group (nth row gene-groups-index)]
(u/trim-nil group)))}
(assoc-in [:alteration-data, alteration-type]
{:alterations (row->int-set #(= % 1) sample-data),
:missing-values (row->int-set #(= % :missing) sample-data)}))))
(range)
(if (or sample-ids-index sample-groups-index)
(u/delete
(cond-> #{}
sample-ids-index (conj sample-ids-index)
sample-groups-index (conj sample-groups-index))
rows)
rows))
sample-group-map (if sample-groups-index
(let [sample-groups (extract-sample-data (nth rows sample-groups-index))]
(zipmap (range) (mapv u/trim-nil sample-groups)))
(zipmap (range sample-count) (repeat study-alias)))]
{:gene-list (merge-duplicates gene-list),
:alteration-type-set #{alteration-type},
:clinical-attributes (cond-> #{"STUDY"}
sample-group-map
(conj "SAMPLE_GROUP")),
:clinical-data-map (persistent!
(reduce-kv
(fn [clinical-data-map, sample-index, sample-group]
(assoc! clinical-data-map, sample-index
(cond-> {"STUDY" study-alias}
sample-group-map
(assoc "SAMPLE_GROUP" sample-group))))
(transient {})
sample-group-map)),
:sample-count sample-count,
:sample-id-vec (if sample-ids-index
(let [sample-ids (extract-sample-data (nth rows sample-ids-index))]
(mapv u/trim-nil sample-ids))
; default sample ids
(mapv (comp str inc) (range sample-count))),
:sample-group-map sample-group-map,
:sample-permutation {:sample->column (vec (range sample-count)),
:column->sample (vec (range sample-count))}}))
(defn attribute-set
[attribute, alteration-list]
(persistent!
(reduce
(fn [result-set, alteration-data]
(conj! result-set (get alteration-data attribute)))
(transient #{})
alteration-list)))
(defn gene-patient-map->gene-sample-data
[alteration-type, study-name, sample-id-vec, gene->mutated-samples-map]
(let [gene-set (set (keys gene->mutated-samples-map)),
sorted-gene-vec (->> gene-set set sort vec),
sample-count (count sample-id-vec),
sample-group-map (zipmap (range sample-count) (repeat study-name)),
gene-list (mapv
(fn [gene-name]
(-> {:gene gene-name,
:group nil,
:order-fixed? false}
(assoc-in [:alteration-data, alteration-type]
{:alterations (get gene->mutated-samples-map gene-name),
; no information about missing values because of alteration list
:missing-values (im/dense-int-set)})))
sorted-gene-vec)]
{:gene-list gene-list,
:alteration-type-set #{alteration-type},
:clinical-attributes #{"STUDY"},
:clinical-data-map (zipmap (range sample-count) (repeat {"STUDY" study-name})),
:sample-count sample-count,
:sample-id-vec sample-id-vec,
:sample-group-map sample-group-map,
:sample-permutation {:sample->column (vec (range sample-count)),
:column->sample (vec (range sample-count))}}))
(defn alteration-list->gene-sample-data
[alteration-type, study-name, alteration-list]
(let [samples (attribute-set :sample, alteration-list),
sample-id-vec (mapv u/trim-nil samples),
sample->index (zipmap sample-id-vec (range)),
gene->mutated-samples-map (persistent!
(reduce
(fn [result-map, {:keys [sample, gene]}]
(u/update-in! result-map [gene]
(fnil conj (im/dense-int-set))
(sample->index sample)))
(transient {})
alteration-list))]
(gene-patient-map->gene-sample-data alteration-type, study-name, sample-id-vec, gene->mutated-samples-map)))
(defn merge-clinical-data
[sample-id-vec, clinical-samples-map, clinical-patients-map]
(if (seq clinical-patients-map)
(let [sample-id-set (set sample-id-vec)]
(persistent!
(reduce-kv
(fn [result-map, sample-id, sample-data-map]
(let [patient-id (get sample-data-map "PATIENT_ID")
patient-data-map (get clinical-patients-map patient-id)]
(cond-> result-map
(contains? sample-id-set sample-id)
(assoc!
sample-id
; merge patient data onto sample data
(merge sample-data-map patient-data-map)))))
(transient {})
clinical-samples-map)))
; no additional patient data
(persistent!
(reduce-kv
(fn [result-map, sample-id, sample-data-map]
(assoc! result-map sample-id sample-data-map))
(transient {})
clinical-samples-map))))
(defn rename-sample-ids->index
[sample-id->index, clinical-data-map]
(persistent!
(reduce-kv
(fn [result-map, clinical-sample-id, clinical-sample-data]
(if-let [index (get sample-id->index clinical-sample-id)]
(assoc! result-map index clinical-sample-data)
; we have clinical data for a sample which is not present in the alteration data or expression data
(do
(u/log-debug "No index found for sample with id \"%s\"!" clinical-sample-id)
result-map)))
(transient {})
clinical-data-map)))
(defn sample-id-renaming
"Returns a map that contains renaming rules for sample ids with additional suffix that contain the clinical sample id as prefix."
[clinical-samples, sample-id-set]
(let [clinical-sample-ids (->> clinical-samples keys (map str/trim) set)]
(if (or (empty? sample-id-set) (= sample-id-set clinical-sample-ids))
; identity mapping
(zipmap clinical-sample-ids clinical-sample-ids)
; create mapping
(persistent!
(reduce
(fn [renaming-map, sample-id]
(if (contains? clinical-sample-ids sample-id)
(assoc! renaming-map sample-id sample-id)
(let [matching-sample-ids (filterv #(.startsWith ^String sample-id ^String %) clinical-sample-ids),
n (count matching-sample-ids)]
(cond
exactly one match , add renaming rule
(== n 1) (assoc! renaming-map sample-id (first matching-sample-ids))
; no match, keep the sample-id unchanged
(== n 0) (assoc! renaming-map sample-id sample-id)
:else (u/runtime-exception "Multiple clinical sample ids found for sample id \"%s\" from alteration data." sample-id)))))
(transient {})
sample-id-set)))))
(defn add-study-name
[study-name, clinical-data-map]
(if (str/blank? study-name)
clinical-data-map
(persistent!
(reduce-kv
(fn [result-map, sample-id, sample-data]
(assoc! result-map sample-id (assoc sample-data "STUDY" study-name)))
(transient {})
clinical-data-map))))
(defn clinical-attributes
[clinical-data-map]
(->> clinical-data-map
(reduce-kv
(fn [results-set, _, sample-data-map]
(u/into! results-set (keys sample-data-map)))
(transient #{}))
persistent!))
(defn add-clinical-data
[study-name, {:keys [clinical-samples, clinical-patients] :as study-data-map}, {:keys [sample-id-vec] :as gene-sample-data}]
(let [sample-id->index (zipmap sample-id-vec (range))]
(if (and (seq sample-id-vec) (seq clinical-samples))
(let [clinical-data-map (add-study-name study-name,
(rename-sample-ids->index sample-id->index,
(merge-clinical-data sample-id-vec, clinical-samples, clinical-patients)))]
(assoc gene-sample-data
:clinical-attributes (clinical-attributes clinical-data-map),
:clinical-data-map clinical-data-map))
; no clinical data or no sample/patient ids
gene-sample-data)))
(defn alteration-list->gene-alterations-per-sample
[sample-id->index-map, alteration-data-list]
(->> alteration-data-list
(reduce
(fn [result-map, {:keys [sample, gene]}]
(u/update-in! result-map [gene, :alterations]
(fnil conj (im/dense-int-set))
(get sample-id->index-map sample)))
(transient {}))
persistent!
; add empty :missing-values sets
(reduce-kv
(fn [result-map, gene, gene-data]
(assoc! result-map gene (assoc gene-data :missing-values (im/dense-int-set))))
(transient {}))
persistent!))
(defn value-array->alteration-set
[altered?-fn, sample-id->index-map, sample-ids, ^doubles value-array]
(let [n (count sample-id->index-map)
sample-id->array-index (zipmap sample-ids (range))]
(reduce-kv
(fn [gene-data, sample-id, sample-index]
(if-let [array-index (get sample-id->array-index sample-id)]
; if altered, add to alterations
(let [value (aget value-array array-index)]
(cond-> gene-data
(and (not (Double/isNaN value)) (altered?-fn value))
(update-in [:alterations] conj sample-index)
(Double/isNaN value)
; add to missing values
(update-in [:missing-values] conj sample-index)))
; add to missing values
(update-in gene-data [:missing-values] conj sample-index)))
{:alterations (im/dense-int-set),
:missing-values (im/dense-int-set)}
sample-id->index-map)))
(defn alteration-matrix-gene-alterations-per-sample
[altered?-fn, sample-id->index-map, {:keys [values-per-gene, sample-ids], :as alteration-matrix-data}]
(persistent!
(reduce
(fn [result-map, {:keys [hugo-symbol, entry-gene-id, values]}]
(assoc! result-map
(or hugo-symbol entry-gene-id)
(value-array->alteration-set altered?-fn, sample-id->index-map, sample-ids, values)))
(transient {})
values-per-gene)))
| null | https://raw.githubusercontent.com/sysbio-bioinf/avatar/cbf9968485f96fb61725aaa7381dba53624d6189/src/clojure/avatar/data/import.clj | clojure | The use and distribution terms for this software are covered by the
which can be found in the file LICENSE at the root of this distribution.
By using this software in any fashion, you are agreeing to be bound by
the terms of this license.
You must not remove this notice, or any other, from this software.
only true floats (not almost integers) are converted to floating point string representations
when old-index exceeds row size append nil
done: create sheet map
),
\\tab) found in the file in the order of their occurence count until no error occurs.
guessing separator from comma, semicolon and tab
function to extract alteration data from rows (or sample group names)
default sample ids
no information about missing values because of alteration list
merge patient data onto sample data
no additional patient data
we have clinical data for a sample which is not present in the alteration data or expression data
identity mapping
create mapping
no match, keep the sample-id unchanged
no clinical data or no sample/patient ids
add empty :missing-values sets
if altered, add to alterations
add to missing values
add to missing values | Copyright ( c ) . All rights reserved .
Eclipse Public License 2.0 ( -v20.html )
(ns avatar.data.import
(:require
[clojure.java.io :as io]
[clojure.string :as str]
[clojure.set :as set]
[clojure.data.int-map :as im]
[avatar.util :as u]
[avatar.data.csv :as csv]
[avatar.algorithms.common :as c]
[avatar.ui.tools :as t]
[avatar.ui.dialogs.progress-view :as progress])
(:import
(org.apache.poi.hssf.usermodel HSSFWorkbook)
(org.apache.poi.xssf.usermodel XSSFWorkbook)
(org.apache.poi.ss.usermodel Workbook Sheet Row Cell)
(java.io Closeable)
(com.monitorjbl.xlsx StreamingReader)))
(defn workbook
^Workbook [file]
(let [ext (str/lower-case (u/file-extension file))]
(case ext
"xls" (HSSFWorkbook. (io/input-stream file))
"xlsx" (-> (StreamingReader/builder)
(.rowCacheSize 10)
(.bufferSize 4096)
(.open (io/input-stream file))))))
(defn sheets
[^Workbook workbook]
(iterator-seq (.sheetIterator workbook)))
(defn sheet-name
[^Sheet sheet]
(.getSheetName sheet))
(defn rows
[^Sheet sheet]
(iterator-seq (.rowIterator sheet)))
(defn first-cell-index
[^Row row]
(.getFirstCellNum row))
(defn last-cell-index
[^Row row]
(unchecked-dec (.getLastCellNum row)))
(defn cells
[^Row row]
(iterator-seq (.cellIterator row)))
(defn cell-at
^Cell [^Row row, ^long column-index]
(.getCell row, column-index))
(defn column-index
^long [^Cell cell]
(.getColumnIndex cell))
(defn row-index
^long [^Cell cell]
(.getRowIndex cell))
(defn cell-value
[^Cell cell]
(u/case+ (.getCellType cell)
Cell/CELL_TYPE_STRING (.getStringCellValue cell)
Cell/CELL_TYPE_BOOLEAN (.getBooleanCellValue cell)
Cell/CELL_TYPE_NUMERIC (.getNumericCellValue cell)
nil))
(defn row-sizes
[^Sheet sheet]
(mapv
(comp count cells)
(rows sheet)))
(defn almost-integer?
[^double value]
(< (Math/abs (- value (Math/round value))) 1.0E-9))
(defn value->str
[value]
(cond
(string? value) (when-not (str/blank? value) value)
(float? value) (str (cond-> (double value) (almost-integer? value) (-> Math/round long)))
(integer? value) (str value)
:else value))
(defn empty-columns
[^Sheet sheet, column-count]
(let [all-columns (c/populated-dense-int-set column-count),
empty-cells-per-row (mapv
(fn [^Row row]
(u/reduce-indexed
(fn [empty-cells, cell-index, cell]
(cond-> empty-cells
(cell-value cell)
(disj cell-index)))
all-columns
(cells row)))
(rows sheet))]
(reduce im/intersection (c/populated-dense-int-set column-count) empty-cells-per-row)))
(defn empty-row?
[row]
(every?
(fn empty-cell? [cell]
(let [value (cell-value cell)]
(or
(nil? value)
(and (string? value) (str/blank? value)))))
row))
(defn empty-rows
[^Sheet sheet]
(u/reduce-indexed
(fn [empty-rows, row-index, row]
(cond-> empty-rows
(empty-row? row)
(conj row-index)))
(im/dense-int-set)
(rows sheet)))
(defn sheet-info
[^Sheet sheet]
(let [row-count (count (rows sheet)),
column-count (reduce
(fn [max-column-index, row]
(max max-column-index, (last-cell-index row)))
0
(rows sheet))
empty-columns (empty-columns sheet, column-count)
empty-rows (empty-rows sheet)]
{:row-count (- row-count (count empty-rows)),
:column-count (- column-count (count empty-columns)),
:empty-columns empty-columns,
:empty-rows empty-rows}))
(defn process-row-old
"Extract values from the cells of the given row into a vector of length given by the column count."
[{:keys [empty-columns, column-count]}, ^Row row]
(persistent!
(reduce
(fn [cells-vec, column-index]
(if (contains? empty-columns column-index)
cells-vec
(let [value (some-> (cell-at row, column-index)
cell-value
value->str)]
(conj! cells-vec value)))
)
(transient [])
(range column-count))))
(defn process-row
[^Row row]
(let [cell-it (.cellIterator row)]
(loop [column->value-map (transient {})]
(if (.hasNext cell-it)
(let [cell (.next cell-it)
column-index (column-index cell)
value (some-> cell cell-value value->str)]
(recur
(cond-> column->value-map
(some? value)
(assoc! column-index value))))
(let [column->value-map (persistent! column->value-map)
non-empty-cells (into (im/dense-int-set) (keys column->value-map))
column-count (if (seq non-empty-cells)
(reduce max non-empty-cells)
0)]
(when (seq non-empty-cells)
{:row-values (mapv #(get column->value-map %) (range (inc column-count)))
:column-count column-count
:non-empty-cells non-empty-cells}))))))
(defn remove-empty-columns
[non-empty-columns, row]
(let [n (count row)]
(persistent!
(reduce
(fn [new-row, old-index]
(conj! new-row
(when (< old-index n)
(nth row old-index))))
(transient [])
non-empty-columns))))
(defn process-sheet
[^Sheet sheet]
(let [sheet-name (sheet-name sheet)
row-it (.rowIterator sheet)]
(loop [rows (transient []), non-empty-columns (im/dense-int-set)]
(if (.hasNext row-it)
(let [row (.next row-it)]
(if-let [{:keys [row-values, column-count, non-empty-cells]} (process-row row)]
(recur
(conj! rows row-values)
(im/union non-empty-columns non-empty-cells))
(recur
rows
non-empty-columns)))
(let [rows (persistent! rows)
row-count (count rows)
column-count (count non-empty-columns)]
(when (pos? row-count)
{:sheet-name sheet-name
:row-count row-count
:column-count column-count
:rows (mapv (partial remove-empty-columns non-empty-columns) rows)}))))))
(defn import-excel-file
[file]
(progress/with-shown-indeterminate-progress (format "Reading Excel file \"%s\"" (u/file-name file))
(with-open [wb (workbook file)]
(let [sheets (->> wb
sheets
(u/keepv process-sheet))]
(when (seq sheets)
(hash-map :type :excel, :file (u/file-absolute-path file), :sheets sheets))))))
(defn check-input
[matrix?, data]
(cond
(== (count data) 0)
:error/no-row
(== (count data) 1)
:error/only-one-row
matrix? (let [row-sizes (->> data (mapv count) distinct)]
(if (some #(< % 2) row-sizes)
:error/only-one-column
data))
:else (let [row-sizes (->> data (mapv count) distinct)]
table should have more than one column
(if (some #(> % 1) row-sizes)
data
:error/only-one-column))))
(defn guess-separator
[file]
(let [comma (int \,),
tab (int \tab)]
(with-open [rdr (io/reader file)]
(loop [comma-count 0, semicolon-count 0, tab-count 0]
(let [x (.read rdr)]
(if (>= x 0)
(recur
(cond-> comma-count
(== comma x) unchecked-inc),
(cond-> semicolon-count
(== semicolon x) unchecked-inc)
(cond-> tab-count
(== tab x) unchecked-inc))
semicolon - count ] , [ \tab tab - count ] ]
(remove (comp zero? second))
(sort-by second >))))))))
(defn read-csv-file
"Read the given file and check whether the read data is a valid data matrix.
Otherwise, a keyword identifying the error reason is returned.
In case no separator produces a valid data matrix, a map of separator to error reason is returned."
([file, matrix?]
(reduce
(fn [error-map, sep]
(let [data (read-csv-file file, matrix?, sep)]
(if (keyword? data)
(assoc error-map sep data)
(reduced data))))
{}
(mapv first (guess-separator file))))
([file, matrix?, separator]
(with-open [rdr (io/reader file)]
(->> (csv/read-csv rdr, :separator separator)
vec
(check-input matrix?)))))
(defn process-csv-row
[column-count, row]
(let [n (count row)
row (mapv value->str row)]
(if (< n column-count)
(into row (repeat (- column-count n) ""))
row)))
(defn import-csv-file
[file, matrix?]
(let [rows (read-csv-file file, matrix?)]
(when-not (map? rows)
(let [column-count (reduce max (mapv count rows))]
(->> rows
(mapv (partial process-csv-row column-count))
(hash-map :type :csv, :file (u/file-absolute-path file), :column-count column-count, :row-count (count rows), :rows))))))
(defn row->int-set
[set-bit?, alterations]
(let [alterations (cond-> alterations (not (vector? alterations)) vec),
n (count alterations)]
(loop [sample 0, s (im/dense-int-set)]
(if (< sample n)
(let [v (nth alterations sample)]
(recur
(inc sample),
(cond-> s (set-bit? v) (conj sample))))
s))))
(def group+gene (juxt :group :gene))
(defn merge-alterations
[gene-1, gene-2]
(let [alteration-types (reduce
(fn [type-set, gene]
(into type-set (-> gene :alteration-data keys)))
[gene-1, gene-2])]
(reduce
(fn [result-gene, atype]
(let [alteration-map-1 (get-in gene-1 [:alteration-data, atype])
alteration-map-2 (get-in gene-2 [:alteration-data, atype])]
(assoc-in result-gene [:alteration-data, atype]
{:alterations
(into (or (:alterations alteration-map-1) (im/dense-int-set))
(:alterations alteration-map-2)),
:missing-values
(into (or (:missing-values alteration-map-1) (im/dense-int-set))
(:missing-values alteration-map-2))})))
gene-1
alteration-types)))
(defn merge-duplicates
[gene-list]
(let [order (->> gene-list (map group+gene) distinct vec)
merged-genes-map (persistent!
(reduce
(fn [merged-genes-map, gene]
(u/update-in! merged-genes-map [(group+gene gene)]
(fn [prev-gene]
(if prev-gene
(merge-alterations prev-gene, gene)
gene))))
(transient {})
gene-list))]
(mapv
#(get merged-genes-map %)
order)))
(defn matrix->gene-sample-data
"Converts a data matrix (e.g. read from a csv file) to sample alteration data."
[study-alias, alteration-type, column-count, gene-names-index, gene-groups-index, sample-ids-index, sample-groups-index, rows]
(assert (t/alteration-type? alteration-type) "must be an alteration type")
(let [sample-count (cond-> column-count
gene-names-index dec
gene-groups-index dec)
extract-sample-data (if (or gene-names-index gene-groups-index)
(let [columns-to-delete (cond-> #{}
gene-names-index (conj gene-names-index)
gene-groups-index (conj gene-groups-index))]
(fn [row]
(u/delete columns-to-delete, row)))
identity)
gene-list (mapv
(fn row->gene-sample-data [i, row]
(let [sample-data (extract-sample-data row)]
(-> {:gene (some-> (nth row gene-names-index) str/trim)
:order-fixed? false,
:group (when gene-groups-index
(let [group (nth row gene-groups-index)]
(u/trim-nil group)))}
(assoc-in [:alteration-data, alteration-type]
{:alterations (row->int-set #(= % 1) sample-data),
:missing-values (row->int-set #(= % :missing) sample-data)}))))
(range)
(if (or sample-ids-index sample-groups-index)
(u/delete
(cond-> #{}
sample-ids-index (conj sample-ids-index)
sample-groups-index (conj sample-groups-index))
rows)
rows))
sample-group-map (if sample-groups-index
(let [sample-groups (extract-sample-data (nth rows sample-groups-index))]
(zipmap (range) (mapv u/trim-nil sample-groups)))
(zipmap (range sample-count) (repeat study-alias)))]
{:gene-list (merge-duplicates gene-list),
:alteration-type-set #{alteration-type},
:clinical-attributes (cond-> #{"STUDY"}
sample-group-map
(conj "SAMPLE_GROUP")),
:clinical-data-map (persistent!
(reduce-kv
(fn [clinical-data-map, sample-index, sample-group]
(assoc! clinical-data-map, sample-index
(cond-> {"STUDY" study-alias}
sample-group-map
(assoc "SAMPLE_GROUP" sample-group))))
(transient {})
sample-group-map)),
:sample-count sample-count,
:sample-id-vec (if sample-ids-index
(let [sample-ids (extract-sample-data (nth rows sample-ids-index))]
(mapv u/trim-nil sample-ids))
(mapv (comp str inc) (range sample-count))),
:sample-group-map sample-group-map,
:sample-permutation {:sample->column (vec (range sample-count)),
:column->sample (vec (range sample-count))}}))
(defn attribute-set
[attribute, alteration-list]
(persistent!
(reduce
(fn [result-set, alteration-data]
(conj! result-set (get alteration-data attribute)))
(transient #{})
alteration-list)))
(defn gene-patient-map->gene-sample-data
[alteration-type, study-name, sample-id-vec, gene->mutated-samples-map]
(let [gene-set (set (keys gene->mutated-samples-map)),
sorted-gene-vec (->> gene-set set sort vec),
sample-count (count sample-id-vec),
sample-group-map (zipmap (range sample-count) (repeat study-name)),
gene-list (mapv
(fn [gene-name]
(-> {:gene gene-name,
:group nil,
:order-fixed? false}
(assoc-in [:alteration-data, alteration-type]
{:alterations (get gene->mutated-samples-map gene-name),
:missing-values (im/dense-int-set)})))
sorted-gene-vec)]
{:gene-list gene-list,
:alteration-type-set #{alteration-type},
:clinical-attributes #{"STUDY"},
:clinical-data-map (zipmap (range sample-count) (repeat {"STUDY" study-name})),
:sample-count sample-count,
:sample-id-vec sample-id-vec,
:sample-group-map sample-group-map,
:sample-permutation {:sample->column (vec (range sample-count)),
:column->sample (vec (range sample-count))}}))
(defn alteration-list->gene-sample-data
[alteration-type, study-name, alteration-list]
(let [samples (attribute-set :sample, alteration-list),
sample-id-vec (mapv u/trim-nil samples),
sample->index (zipmap sample-id-vec (range)),
gene->mutated-samples-map (persistent!
(reduce
(fn [result-map, {:keys [sample, gene]}]
(u/update-in! result-map [gene]
(fnil conj (im/dense-int-set))
(sample->index sample)))
(transient {})
alteration-list))]
(gene-patient-map->gene-sample-data alteration-type, study-name, sample-id-vec, gene->mutated-samples-map)))
(defn merge-clinical-data
[sample-id-vec, clinical-samples-map, clinical-patients-map]
(if (seq clinical-patients-map)
(let [sample-id-set (set sample-id-vec)]
(persistent!
(reduce-kv
(fn [result-map, sample-id, sample-data-map]
(let [patient-id (get sample-data-map "PATIENT_ID")
patient-data-map (get clinical-patients-map patient-id)]
(cond-> result-map
(contains? sample-id-set sample-id)
(assoc!
sample-id
(merge sample-data-map patient-data-map)))))
(transient {})
clinical-samples-map)))
(persistent!
(reduce-kv
(fn [result-map, sample-id, sample-data-map]
(assoc! result-map sample-id sample-data-map))
(transient {})
clinical-samples-map))))
(defn rename-sample-ids->index
[sample-id->index, clinical-data-map]
(persistent!
(reduce-kv
(fn [result-map, clinical-sample-id, clinical-sample-data]
(if-let [index (get sample-id->index clinical-sample-id)]
(assoc! result-map index clinical-sample-data)
(do
(u/log-debug "No index found for sample with id \"%s\"!" clinical-sample-id)
result-map)))
(transient {})
clinical-data-map)))
(defn sample-id-renaming
"Returns a map that contains renaming rules for sample ids with additional suffix that contain the clinical sample id as prefix."
[clinical-samples, sample-id-set]
(let [clinical-sample-ids (->> clinical-samples keys (map str/trim) set)]
(if (or (empty? sample-id-set) (= sample-id-set clinical-sample-ids))
(zipmap clinical-sample-ids clinical-sample-ids)
(persistent!
(reduce
(fn [renaming-map, sample-id]
(if (contains? clinical-sample-ids sample-id)
(assoc! renaming-map sample-id sample-id)
(let [matching-sample-ids (filterv #(.startsWith ^String sample-id ^String %) clinical-sample-ids),
n (count matching-sample-ids)]
(cond
exactly one match , add renaming rule
(== n 1) (assoc! renaming-map sample-id (first matching-sample-ids))
(== n 0) (assoc! renaming-map sample-id sample-id)
:else (u/runtime-exception "Multiple clinical sample ids found for sample id \"%s\" from alteration data." sample-id)))))
(transient {})
sample-id-set)))))
(defn add-study-name
[study-name, clinical-data-map]
(if (str/blank? study-name)
clinical-data-map
(persistent!
(reduce-kv
(fn [result-map, sample-id, sample-data]
(assoc! result-map sample-id (assoc sample-data "STUDY" study-name)))
(transient {})
clinical-data-map))))
(defn clinical-attributes
[clinical-data-map]
(->> clinical-data-map
(reduce-kv
(fn [results-set, _, sample-data-map]
(u/into! results-set (keys sample-data-map)))
(transient #{}))
persistent!))
(defn add-clinical-data
[study-name, {:keys [clinical-samples, clinical-patients] :as study-data-map}, {:keys [sample-id-vec] :as gene-sample-data}]
(let [sample-id->index (zipmap sample-id-vec (range))]
(if (and (seq sample-id-vec) (seq clinical-samples))
(let [clinical-data-map (add-study-name study-name,
(rename-sample-ids->index sample-id->index,
(merge-clinical-data sample-id-vec, clinical-samples, clinical-patients)))]
(assoc gene-sample-data
:clinical-attributes (clinical-attributes clinical-data-map),
:clinical-data-map clinical-data-map))
gene-sample-data)))
(defn alteration-list->gene-alterations-per-sample
[sample-id->index-map, alteration-data-list]
(->> alteration-data-list
(reduce
(fn [result-map, {:keys [sample, gene]}]
(u/update-in! result-map [gene, :alterations]
(fnil conj (im/dense-int-set))
(get sample-id->index-map sample)))
(transient {}))
persistent!
(reduce-kv
(fn [result-map, gene, gene-data]
(assoc! result-map gene (assoc gene-data :missing-values (im/dense-int-set))))
(transient {}))
persistent!))
(defn value-array->alteration-set
[altered?-fn, sample-id->index-map, sample-ids, ^doubles value-array]
(let [n (count sample-id->index-map)
sample-id->array-index (zipmap sample-ids (range))]
(reduce-kv
(fn [gene-data, sample-id, sample-index]
(if-let [array-index (get sample-id->array-index sample-id)]
(let [value (aget value-array array-index)]
(cond-> gene-data
(and (not (Double/isNaN value)) (altered?-fn value))
(update-in [:alterations] conj sample-index)
(Double/isNaN value)
(update-in [:missing-values] conj sample-index)))
(update-in gene-data [:missing-values] conj sample-index)))
{:alterations (im/dense-int-set),
:missing-values (im/dense-int-set)}
sample-id->index-map)))
(defn alteration-matrix-gene-alterations-per-sample
[altered?-fn, sample-id->index-map, {:keys [values-per-gene, sample-ids], :as alteration-matrix-data}]
(persistent!
(reduce
(fn [result-map, {:keys [hugo-symbol, entry-gene-id, values]}]
(assoc! result-map
(or hugo-symbol entry-gene-id)
(value-array->alteration-set altered?-fn, sample-id->index-map, sample-ids, values)))
(transient {})
values-per-gene)))
|
146df120d6789553d650ffeb7989549dbfc4da6cc8ae25046f15552319200bad | drlivingston/kabob | id_mapping.clj | (ns edu.ucdenver.ccp.kabob.build.id-mapping
(use edu.ucdenver.ccp.kr.variable
edu.ucdenver.ccp.kr.unify
edu.ucdenver.ccp.kr.kb
edu.ucdenver.ccp.kr.rdf
edu.ucdenver.ccp.kr.sparql
edu.ucdenver.ccp.kr.rule))
;; TODO - should log a warning if none of the conditions are met,
;; e.g. if cls1 is a gene and cls2 is a protein
(defn determine-bio-relation [cls1 cls2]
(if (= cls1 cls2) 'owl/sameAs
(if (and (= cls1 'chebi/CHEBI_36080)
(= cls2 'so/SO_0000704)) 'kiao/hasIndirectTemplate
(if (and (= cls1 'so/SO_0000234)
(= cls2 'so/SO_0000704)) 'kiao/hasDirectTemplate
(if (and (= cls1 'chebi/CHEBI_36080)
(= cls2 'so/SO_0000234)) 'kiao/hasDirectTemplate
(if (and (= cls1 'chebi/CHEBI_36080)
(= cls2 'so/SO_0000356)) 'kiao/hasDirectTemplate
(if (and (= cls1 'so/SO_0000356)
(= cls2 'so/SO_0000704)) 'kiao/hasDirectTemplate nil)))))))
(defn determine-pro-bio-relation [related-cls]
(prn "Getting pro-bio relation for: " (name related-cls))
(cond
(.startsWith (name related-cls) "MGI") 'kiao/hasIndirectTemplate
(.startsWith (name related-cls) "HGNC") 'kiao/hasIndirectTemplate
(.startsWith (name related-cls) "UNIPROT") 'owl/sameAs
(.startsWith (name related-cls) "REACTOME") nil
(.startsWith (name related-cls) "ECOCYC") nil
:else ((prn "unhandled id mapped to a pro concept: " (name related-cls)) nil )
)
)
(defn get-de-ice-ns [ice]
(prn "Getting de-ice ns for: " (name ice))
(cond
(.startsWith (name ice) "MGI") "mgi"
(.startsWith (name ice) "HGNC") "hgnc"
(.startsWith (name ice) "UNIPROT") "uniprot"
(.startsWith (name ice) "REACTOME") "reactome"
(.startsWith (name ice) "ECOCYC") "ecocyc"
:else ((prn "unhandled id type mapped to a pro concept: " (name ice)) nil)
)
)
generic rule definition for linking two bio world instances by a relation .
(defn id-mapping-post-processing-rule [rule-name record-template field1 field2]
{:name rule-name
:query `((_/record kiao/hasTemplate ~record-template)
(_/fv1 ro/part_of _/record)
(_/fv1 kiao/hasTemplate ~field1)
(_/fv1 iao/IAO_0000219 _/fv1ice)
(_/fv1ice iao/IAO_0000219 ?bio1)
(?bio1 rdfs/subClassOf ?bio1ParentCls)
(_/fv2 ro/part_of _/record)
(_/fv2 kiao/hasTemplate ~field2)
(_/fv2 iao/IAO_0000219 _/fv2ice)
(_/fv2ice iao/IAO_0000219 ?bio2)
(?bio2 rdfs/subClassOf ?bio2ParentCls))
:post-process (fn [bindings]
(let [bio1 (get bindings '?bio1)
bio2 (get bindings '?bio2)
bio1SuperCls (get bindings '?bio1ParentCls)
bio2SuperCls (get bindings '?bio2ParentCls)
relation-1to2 (determine-bio-relation bio1SuperCls
bio2SuperCls)]
(and bio1 bio2 relation-1to2
`((~bio1 ~relation-1to2 ~bio2))))) })
(defn de-ice [ice new-base]
(let [id (second (re-find #"^(.*)_ICE$" (name ice)))]
(and id
(symbol new-base id))))
filters out the is_a relations - only includes those marked as " exact " -
;; creates "PRO relation entity" triples where the relation is owl/sameAs if
;; the entity is a protein and kiao/hasIndirectTemplate if the entity is a gene
( defn pro - exact - processing - rule [ rule related - supercls relation ]
;; (assoc rule :query
;; '((_/record kiao/hasTemplate ~record-template)
;; (_/fv1 ro/part_of _/record)
( _ /fv1 kiao / hasTemplate ~field1 )
;; (_/fv1 iao/IAO_0000219 ?fv1ice)
;; (_/fv2 ro/part_of _/record)
;; (_/fv2 kiao/hasTemplate ~field2)
;; (_/fv2 iao/IAO_0000219 _/fv2ice)
;; (_/fv2ice iao/IAO_0000219 ?bio2)
( ? rdfs / subClassOf ~related - supercls )
;; (_/fv3 ro/part_of _/record)
;; (_/fv3 kiao/hasTemplate 'pr/prmappingTypeDataField1)
;; (_/fv3 iao/IAO_0000219 "exact")))
;; (assoc rule :post-process (fn [bindings]
;; (let [pro (de-ice (get bindings '?fv1ice) "pro")
( get bindings ' ? ) ]
;; (and pro bio2
;; `((~pro ~relation ~bio2)))))))
;;!!!
;;record-template looks like it might always be null? not sure.
;;!!!
(defn pro-exact-processing-rule [rule-name record-template field1 field2]
{:name rule-name
:query `((_/record kiao/hasTemplate ~record-template)
(_/f1 ro/part_of _/record)
(_/f1 kiao/hasTemplate ~field1)
(_/f1 iao/IAO_0000219 ?f1ice)
(_/f2 ro/part_of _/record)
(_/f2 kiao/hasTemplate ~field2)
(_/f2 iao/IAO_0000219 ?f2ice)
(_/f3 ro/part_of _/record)
(_/f3 kiao/hasTemplate iaopr/prmappingTypeDataField1)
(_/f3 iao/IAO_0000219 "exact"))
:post-process (fn [bindings]
(let [pro (de-ice (get bindings '?f1ice) "pr")
related-ice (get bindings '?f2ice)
related-bio-concept (de-ice related-ice (get-de-ice-ns related-ice))
relation (determine-pro-bio-relation related-ice)]
(and pro related-bio-concept relation
`((~pro ~relation ~related-bio-concept))))) })
(defn filter-refseq-rule [rule]
(assoc rule
:query (concat (:query rule)
'((?fv3 ro/part_of _/record)
(?fv3 kiao/hasTemplate iaoeg/egstatusDataField1)
(:union ((?fv3 iao/IAO_0000219 "VALIDATED"))
((?fv3 iao/IAO_0000219 "REVIEWED")))
))))
;; due to the processing query above, if there is a protein field in the id
mapping , list it as the first field argument and put the gene field ( or
whatever the other field is ) second .
(def ^:dynamic *id-mapping-post-processing-rules*
(list
map PRO IDs to Uniprot IDs - sameAs
(pro-exact-processing-rule 'pro-to-protein-exact-id-mapping
'iaopr/prProMappingRecordSchema1
'iaopr/prproteinOntologyIdDataField1
'iaopr/prtargetRecordIdDataField1)
;; WORKS
map IDs to protein RefSeq IDs
from the Entrez Gene gene2accession file
;; (filter-refseq-rule (id-mapping-post-processing-rule 'eg-to-protein-refseq-id-mapping
' iaoeg / egEntrezGene2AccessionOrRefseqFileDataSchema1
' iaoeg / egprotein_accession_dot_versionDataField1
' iaoeg / eggeneIDDataField1 ) )
;; WORKS
map IDs to genomic RefSeq IDs
from the Entrez Gene gene2accession file
;; (filter-refseq-rule (id-mapping-post-processing-rule 'eg-to-genomic-refseq-id-mapping
' iaoeg / egEntrezGene2AccessionOrRefseqFileDataSchema1
' iaoeg / eggeneIDDataField1
' iaoeg / eggenomic_nucleotide_accession_dot_versionDataField1 ) )
;; WORKS
map IDs to RNA RefSeq IDs
from the Entrez Gene gene2accession file
;; (filter-refseq-rule (id-mapping-post-processing-rule 'eg-to-rna-refseq-id-mapping
' iaoeg / egEntrezGene2AccessionOrRefseqFileDataSchema1
' iaoeg / egRNA_nucleotide_accession_dot_versionDataField1
' iaoeg / eggeneIDDataField1 ) )
;; WORKS
map HPRD IDs to EntrezGene gene IDs from the HPRD data file
( id - mapping - post - processing - rule ' hprd - to - entrezgene - id - mapping
' iaohprd / hprdHprdIdMappingsTxtFileDataSchema1
' / hprdhprdIDDataField1
;; 'iaohprd/hprdentrezGeneIDDataField1)
KEGG gene ID to external references ( only EG at this point )
from the KEGG GeneIDList file
( id - mapping - post - processing - rule ' kegg - gene - id - mapping
;; 'iaokegg/keggKeggGeneIdListFileDataSchema1
;; 'iaokegg/kegginternalKeggGeneIDDataField1
;; 'iaokegg/keggexternalGeneIDDataField1)
;; WORKS
MGI to from MGIEntrezGene.rpt file
( id - mapping - post - processing - rule ' mgi - entrezgene - id - mapping
;; 'iaomgi/mgiMGIEntrezGeneFileDataSchema1
;; 'iaomgi/mgimgiAccessionIDDataField1
;; 'iaomgi/mgientrezGeneIDDataField1)
;; WORKS
MGI to RefSeq ID from MRKSequence.rpt file
( id - mapping - post - processing - rule ' mgi - refseq - id - mapping
;; 'iaomgi/mgiMRKSequenceFileDataSchema1
;; 'iaomgi/mgirefSeqAccessionIDsDataField1
;; 'iaomgi/mgimgiAccessionIDDataField1)
;; WORKS
MGI to UniProt ID from MRKSwissprot.rpt file
( id - mapping - post - processing - rule ' mgi - uniprot - id - mapping
;; 'iaomgi/mgiMRKSwissProtFileDataSchema1
;; 'iaomgi/mgiswissProtAccessionIDsDataField1
;; 'iaomgi/mgimgiAccessionIDDataField1)
;; WORKS
;; PharmGKB to EntrezGene ID from PharmGKB genes file
;; (id-mapping-post-processing-rule 'pharmgkb-entrezgene-id-mapping
;; 'iaopharmgkb/pharmgkbPharmGkbGeneFileRecordSchema1
;; 'iaopharmgkb/pharmgkbaccessionIdDataField1
;; 'iaopharmgkb/pharmgkbentrezGeneIdDataField1)
;; WORKS
;; PharmGKB to UniProt ID from PharmGKB genes file
( id - mapping - post - processing - rule ' pharmgkb - uniprot - id - mapping
;; 'iaopharmgkb/pharmgkbPharmGkbGeneFileRecordSchema1
;; 'iaopharmgkb/pharmgkbuniprotIdDataField1
;; 'iaopharmgkb/pharmgkbaccessionIdDataField1)
;; WORKS
to EntrezGene ID from TRANSFAC gene.dat file
;; (id-mapping-post-processing-rule 'transfac-entrezgene-id-mapping
' iaotransfac / transfacTransfacGeneDatFileDataSchema1
' iaotransfac / transfactransfacGeneIDDataField1
' iaotransfac / transfacentrezGeneDatabaseReferenceIDDataField1 )
;; WORKS
to MGI ID from TRANSFAC gene.dat file
;; (id-mapping-post-processing-rule 'transfac-mgi-id-mapping
;; 'iaotransfac/transfacTransfacGeneDatFileDataSchema1
;; 'iaotransfac/transfactransfacGeneIDDataField1
;; 'iaotransfac/transfacmgiDatabaseReferenceIDDataField1)
;; WORKS
map DIP IDs to external references from the DIP data file
;; (id-mapping-post-processing-rule 'dip-id-mapping
;; 'iaodip/dipDipInteractorSchema1
;; 'iaodip/dipinteractorIDDataField1
;; 'iaodip/dipdbXReferenceIDsDataField1)
;; WORKS
;; map HGNC Symbols (which are unique) to EntrezGene IDs
;; from the HGNC download data file
;; (id-mapping-post-processing-rule 'hgnc-symbol-mapping
;; 'iaohgnc/hgncHgncDownloadFileDataSchema1
;; 'iaohgnc/hgnchgncGeneSymbolDataField1
;; 'iaohgnc/hgncentrezGeneIDDataField1)
;; WORKS
map HPRD IDs to RefSeq gene IDs from the HPRD data file
;; (id-mapping-post-processing-rule 'hprd-to-refseq-gene-id-mapping
' iaohprd / hprdHprdIdMappingsTxtFileDataSchema1
' / hprdhprdIDDataField1
;; 'iaohprd/hprdnucleotideAccessionDataField1)
;; WORKS
map HPRD IDs to RefSeq protein IDs from the HPRD data file
;; (id-mapping-post-processing-rule 'hprd-to-refseq-protein-id-mapping
' iaohprd / hprdHprdIdMappingsTxtFileDataSchema1
' / hprdhprdIDDataField1
' iaohprd / hprdproteinAccessionDataField1 )
;; WORKS
map HPRD IDs to UniProt protein IDs from the HPRD data file
( id - mapping - post - processing - rule ' hprd - to - uniprot - id - mapping
' iaohprd / hprdHprdIdMappingsTxtFileDataSchema1
' / hprdhprdIDDataField1
' iaohprd / hprdswissProtIDsDataField1 )
;; FAILS
to EMBL ID from TRANSFAC gene.dat file
;; (id-mapping-post-processing-rule 'transfac-embl-id-mapping
' iaotransfac / transfacTransfacGeneDatFileDataSchema1
;; 'iaotransfac/transfactransfacGeneIDDataField1
;; 'iaotransfac/transfacemblDatabaseReferenceIDsDataField1)
;; FAILS - due to sequence version number for embl ids appended in the uniprot id mappings file. This will be removed during the next build, and then this rule should function properly
map UniProt IDs to EMBL Ids
;; from the UniProt idmappings_selected.tab file
;; (id-mapping-post-processing-rule 'uniprot-to-embl-id-mapping
;; 'iaouniprot/uniprotUniProtIDMappingFileDataSchema1
;; 'iaouniprot/uniprotuniProtAccessionIDDataField1
;; 'iaouniprot/uniprotemblIDsDataField1)
;; WORKS
;; map IRefWeb IDs for interactor A to external references
;; from the IRefWeb data file
;; (id-mapping-post-processing-rule 'irefweb-interactor-a-id-mapping
;; 'iaoirefweb/irefwebIRefWebMitab4_0FileDataSchema1
;; 'iaoirefweb/irefwebirefwebInteractorID_ADataField1
;; 'iaoirefweb/irefwebdatabaseReferences_ADataField1)
;; WORKS
;; map IRefWeb IDs for interactor B to external references
;; from the IRefWeb data file
;; (id-mapping-post-processing-rule 'irefweb-interactor-b-id-mapping
;; 'iaoirefweb/irefwebIRefWebMitab4_0FileDataSchema1
;; 'iaoirefweb/irefwebirefwebInteractorID_BDataField1
;; 'iaoirefweb/irefwebdatabaseReferences_BDataField1)
;; WORKS
map UniProt IDs to
;; from the UniProt idmappings_selected.tab file
;; (id-mapping-post-processing-rule 'uniprot-to-eg-id-mapping
;; 'iaouniprot/uniprotUniProtIDMappingFileDataSchema1
;; 'iaouniprot/uniprotuniProtAccessionIDDataField1
;; 'iaouniprot/uniprotentrezGeneIDsDataField1)
))
| null | https://raw.githubusercontent.com/drlivingston/kabob/7038076849744c959da9c8507e8a8ab7215410aa/kabob-build/src/main/clojure/edu/ucdenver/ccp/kabob/build/id_mapping.clj | clojure | TODO - should log a warning if none of the conditions are met,
e.g. if cls1 is a gene and cls2 is a protein
creates "PRO relation entity" triples where the relation is owl/sameAs if
the entity is a protein and kiao/hasIndirectTemplate if the entity is a gene
(assoc rule :query
'((_/record kiao/hasTemplate ~record-template)
(_/fv1 ro/part_of _/record)
(_/fv1 iao/IAO_0000219 ?fv1ice)
(_/fv2 ro/part_of _/record)
(_/fv2 kiao/hasTemplate ~field2)
(_/fv2 iao/IAO_0000219 _/fv2ice)
(_/fv2ice iao/IAO_0000219 ?bio2)
(_/fv3 ro/part_of _/record)
(_/fv3 kiao/hasTemplate 'pr/prmappingTypeDataField1)
(_/fv3 iao/IAO_0000219 "exact")))
(assoc rule :post-process (fn [bindings]
(let [pro (de-ice (get bindings '?fv1ice) "pro")
(and pro bio2
`((~pro ~relation ~bio2)))))))
!!!
record-template looks like it might always be null? not sure.
!!!
due to the processing query above, if there is a protein field in the id
WORKS
(filter-refseq-rule (id-mapping-post-processing-rule 'eg-to-protein-refseq-id-mapping
WORKS
(filter-refseq-rule (id-mapping-post-processing-rule 'eg-to-genomic-refseq-id-mapping
WORKS
(filter-refseq-rule (id-mapping-post-processing-rule 'eg-to-rna-refseq-id-mapping
WORKS
'iaohprd/hprdentrezGeneIDDataField1)
'iaokegg/keggKeggGeneIdListFileDataSchema1
'iaokegg/kegginternalKeggGeneIDDataField1
'iaokegg/keggexternalGeneIDDataField1)
WORKS
'iaomgi/mgiMGIEntrezGeneFileDataSchema1
'iaomgi/mgimgiAccessionIDDataField1
'iaomgi/mgientrezGeneIDDataField1)
WORKS
'iaomgi/mgiMRKSequenceFileDataSchema1
'iaomgi/mgirefSeqAccessionIDsDataField1
'iaomgi/mgimgiAccessionIDDataField1)
WORKS
'iaomgi/mgiMRKSwissProtFileDataSchema1
'iaomgi/mgiswissProtAccessionIDsDataField1
'iaomgi/mgimgiAccessionIDDataField1)
WORKS
PharmGKB to EntrezGene ID from PharmGKB genes file
(id-mapping-post-processing-rule 'pharmgkb-entrezgene-id-mapping
'iaopharmgkb/pharmgkbPharmGkbGeneFileRecordSchema1
'iaopharmgkb/pharmgkbaccessionIdDataField1
'iaopharmgkb/pharmgkbentrezGeneIdDataField1)
WORKS
PharmGKB to UniProt ID from PharmGKB genes file
'iaopharmgkb/pharmgkbPharmGkbGeneFileRecordSchema1
'iaopharmgkb/pharmgkbuniprotIdDataField1
'iaopharmgkb/pharmgkbaccessionIdDataField1)
WORKS
(id-mapping-post-processing-rule 'transfac-entrezgene-id-mapping
WORKS
(id-mapping-post-processing-rule 'transfac-mgi-id-mapping
'iaotransfac/transfacTransfacGeneDatFileDataSchema1
'iaotransfac/transfactransfacGeneIDDataField1
'iaotransfac/transfacmgiDatabaseReferenceIDDataField1)
WORKS
(id-mapping-post-processing-rule 'dip-id-mapping
'iaodip/dipDipInteractorSchema1
'iaodip/dipinteractorIDDataField1
'iaodip/dipdbXReferenceIDsDataField1)
WORKS
map HGNC Symbols (which are unique) to EntrezGene IDs
from the HGNC download data file
(id-mapping-post-processing-rule 'hgnc-symbol-mapping
'iaohgnc/hgncHgncDownloadFileDataSchema1
'iaohgnc/hgnchgncGeneSymbolDataField1
'iaohgnc/hgncentrezGeneIDDataField1)
WORKS
(id-mapping-post-processing-rule 'hprd-to-refseq-gene-id-mapping
'iaohprd/hprdnucleotideAccessionDataField1)
WORKS
(id-mapping-post-processing-rule 'hprd-to-refseq-protein-id-mapping
WORKS
FAILS
(id-mapping-post-processing-rule 'transfac-embl-id-mapping
'iaotransfac/transfactransfacGeneIDDataField1
'iaotransfac/transfacemblDatabaseReferenceIDsDataField1)
FAILS - due to sequence version number for embl ids appended in the uniprot id mappings file. This will be removed during the next build, and then this rule should function properly
from the UniProt idmappings_selected.tab file
(id-mapping-post-processing-rule 'uniprot-to-embl-id-mapping
'iaouniprot/uniprotUniProtIDMappingFileDataSchema1
'iaouniprot/uniprotuniProtAccessionIDDataField1
'iaouniprot/uniprotemblIDsDataField1)
WORKS
map IRefWeb IDs for interactor A to external references
from the IRefWeb data file
(id-mapping-post-processing-rule 'irefweb-interactor-a-id-mapping
'iaoirefweb/irefwebIRefWebMitab4_0FileDataSchema1
'iaoirefweb/irefwebirefwebInteractorID_ADataField1
'iaoirefweb/irefwebdatabaseReferences_ADataField1)
WORKS
map IRefWeb IDs for interactor B to external references
from the IRefWeb data file
(id-mapping-post-processing-rule 'irefweb-interactor-b-id-mapping
'iaoirefweb/irefwebIRefWebMitab4_0FileDataSchema1
'iaoirefweb/irefwebirefwebInteractorID_BDataField1
'iaoirefweb/irefwebdatabaseReferences_BDataField1)
WORKS
from the UniProt idmappings_selected.tab file
(id-mapping-post-processing-rule 'uniprot-to-eg-id-mapping
'iaouniprot/uniprotUniProtIDMappingFileDataSchema1
'iaouniprot/uniprotuniProtAccessionIDDataField1
'iaouniprot/uniprotentrezGeneIDsDataField1) | (ns edu.ucdenver.ccp.kabob.build.id-mapping
(use edu.ucdenver.ccp.kr.variable
edu.ucdenver.ccp.kr.unify
edu.ucdenver.ccp.kr.kb
edu.ucdenver.ccp.kr.rdf
edu.ucdenver.ccp.kr.sparql
edu.ucdenver.ccp.kr.rule))
(defn determine-bio-relation [cls1 cls2]
(if (= cls1 cls2) 'owl/sameAs
(if (and (= cls1 'chebi/CHEBI_36080)
(= cls2 'so/SO_0000704)) 'kiao/hasIndirectTemplate
(if (and (= cls1 'so/SO_0000234)
(= cls2 'so/SO_0000704)) 'kiao/hasDirectTemplate
(if (and (= cls1 'chebi/CHEBI_36080)
(= cls2 'so/SO_0000234)) 'kiao/hasDirectTemplate
(if (and (= cls1 'chebi/CHEBI_36080)
(= cls2 'so/SO_0000356)) 'kiao/hasDirectTemplate
(if (and (= cls1 'so/SO_0000356)
(= cls2 'so/SO_0000704)) 'kiao/hasDirectTemplate nil)))))))
(defn determine-pro-bio-relation [related-cls]
(prn "Getting pro-bio relation for: " (name related-cls))
(cond
(.startsWith (name related-cls) "MGI") 'kiao/hasIndirectTemplate
(.startsWith (name related-cls) "HGNC") 'kiao/hasIndirectTemplate
(.startsWith (name related-cls) "UNIPROT") 'owl/sameAs
(.startsWith (name related-cls) "REACTOME") nil
(.startsWith (name related-cls) "ECOCYC") nil
:else ((prn "unhandled id mapped to a pro concept: " (name related-cls)) nil )
)
)
(defn get-de-ice-ns [ice]
(prn "Getting de-ice ns for: " (name ice))
(cond
(.startsWith (name ice) "MGI") "mgi"
(.startsWith (name ice) "HGNC") "hgnc"
(.startsWith (name ice) "UNIPROT") "uniprot"
(.startsWith (name ice) "REACTOME") "reactome"
(.startsWith (name ice) "ECOCYC") "ecocyc"
:else ((prn "unhandled id type mapped to a pro concept: " (name ice)) nil)
)
)
generic rule definition for linking two bio world instances by a relation .
(defn id-mapping-post-processing-rule [rule-name record-template field1 field2]
{:name rule-name
:query `((_/record kiao/hasTemplate ~record-template)
(_/fv1 ro/part_of _/record)
(_/fv1 kiao/hasTemplate ~field1)
(_/fv1 iao/IAO_0000219 _/fv1ice)
(_/fv1ice iao/IAO_0000219 ?bio1)
(?bio1 rdfs/subClassOf ?bio1ParentCls)
(_/fv2 ro/part_of _/record)
(_/fv2 kiao/hasTemplate ~field2)
(_/fv2 iao/IAO_0000219 _/fv2ice)
(_/fv2ice iao/IAO_0000219 ?bio2)
(?bio2 rdfs/subClassOf ?bio2ParentCls))
:post-process (fn [bindings]
(let [bio1 (get bindings '?bio1)
bio2 (get bindings '?bio2)
bio1SuperCls (get bindings '?bio1ParentCls)
bio2SuperCls (get bindings '?bio2ParentCls)
relation-1to2 (determine-bio-relation bio1SuperCls
bio2SuperCls)]
(and bio1 bio2 relation-1to2
`((~bio1 ~relation-1to2 ~bio2))))) })
(defn de-ice [ice new-base]
(let [id (second (re-find #"^(.*)_ICE$" (name ice)))]
(and id
(symbol new-base id))))
filters out the is_a relations - only includes those marked as "exact" -
(defn pro-exact-processing-rule [rule related-supercls relation]
(_/fv1 kiao/hasTemplate ~field1)
(? rdfs/subClassOf ~related-supercls)
(get bindings '?)]
(defn pro-exact-processing-rule [rule-name record-template field1 field2]
{:name rule-name
:query `((_/record kiao/hasTemplate ~record-template)
(_/f1 ro/part_of _/record)
(_/f1 kiao/hasTemplate ~field1)
(_/f1 iao/IAO_0000219 ?f1ice)
(_/f2 ro/part_of _/record)
(_/f2 kiao/hasTemplate ~field2)
(_/f2 iao/IAO_0000219 ?f2ice)
(_/f3 ro/part_of _/record)
(_/f3 kiao/hasTemplate iaopr/prmappingTypeDataField1)
(_/f3 iao/IAO_0000219 "exact"))
:post-process (fn [bindings]
(let [pro (de-ice (get bindings '?f1ice) "pr")
related-ice (get bindings '?f2ice)
related-bio-concept (de-ice related-ice (get-de-ice-ns related-ice))
relation (determine-pro-bio-relation related-ice)]
(and pro related-bio-concept relation
`((~pro ~relation ~related-bio-concept))))) })
(defn filter-refseq-rule [rule]
(assoc rule
:query (concat (:query rule)
'((?fv3 ro/part_of _/record)
(?fv3 kiao/hasTemplate iaoeg/egstatusDataField1)
(:union ((?fv3 iao/IAO_0000219 "VALIDATED"))
((?fv3 iao/IAO_0000219 "REVIEWED")))
))))
mapping, list it as the first field argument and put the gene field (or
whatever the other field is) second.
(def ^:dynamic *id-mapping-post-processing-rules*
(list
map PRO IDs to Uniprot IDs - sameAs
(pro-exact-processing-rule 'pro-to-protein-exact-id-mapping
'iaopr/prProMappingRecordSchema1
'iaopr/prproteinOntologyIdDataField1
'iaopr/prtargetRecordIdDataField1)
map IDs to protein RefSeq IDs
from the Entrez Gene gene2accession file
'iaoeg/egEntrezGene2AccessionOrRefseqFileDataSchema1
'iaoeg/egprotein_accession_dot_versionDataField1
'iaoeg/eggeneIDDataField1))
map IDs to genomic RefSeq IDs
from the Entrez Gene gene2accession file
'iaoeg/egEntrezGene2AccessionOrRefseqFileDataSchema1
'iaoeg/eggeneIDDataField1
'iaoeg/eggenomic_nucleotide_accession_dot_versionDataField1))
map IDs to RNA RefSeq IDs
from the Entrez Gene gene2accession file
'iaoeg/egEntrezGene2AccessionOrRefseqFileDataSchema1
'iaoeg/egRNA_nucleotide_accession_dot_versionDataField1
'iaoeg/eggeneIDDataField1))
map HPRD IDs to EntrezGene gene IDs from the HPRD data file
(id-mapping-post-processing-rule 'hprd-to-entrezgene-id-mapping
'iaohprd/hprdHprdIdMappingsTxtFileDataSchema1
'iaohprd/hprdhprdIDDataField1
KEGG gene ID to external references (only EG at this point)
from the KEGG GeneIDList file
(id-mapping-post-processing-rule 'kegg-gene-id-mapping
MGI to EntrezGene ID from MGIEntrezGene.rpt file
(id-mapping-post-processing-rule 'mgi-entrezgene-id-mapping
MGI to RefSeq ID from MRKSequence.rpt file
(id-mapping-post-processing-rule 'mgi-refseq-id-mapping
MGI to UniProt ID from MRKSwissprot.rpt file
(id-mapping-post-processing-rule 'mgi-uniprot-id-mapping
(id-mapping-post-processing-rule 'pharmgkb-uniprot-id-mapping
to EntrezGene ID from TRANSFAC gene.dat file
'iaotransfac/transfacTransfacGeneDatFileDataSchema1
'iaotransfac/transfactransfacGeneIDDataField1
'iaotransfac/transfacentrezGeneDatabaseReferenceIDDataField1)
to MGI ID from TRANSFAC gene.dat file
map DIP IDs to external references from the DIP data file
map HPRD IDs to RefSeq gene IDs from the HPRD data file
'iaohprd/hprdHprdIdMappingsTxtFileDataSchema1
'iaohprd/hprdhprdIDDataField1
map HPRD IDs to RefSeq protein IDs from the HPRD data file
'iaohprd/hprdHprdIdMappingsTxtFileDataSchema1
'iaohprd/hprdhprdIDDataField1
'iaohprd/hprdproteinAccessionDataField1)
map HPRD IDs to UniProt protein IDs from the HPRD data file
(id-mapping-post-processing-rule 'hprd-to-uniprot-id-mapping
'iaohprd/hprdHprdIdMappingsTxtFileDataSchema1
'iaohprd/hprdhprdIDDataField1
'iaohprd/hprdswissProtIDsDataField1)
to EMBL ID from TRANSFAC gene.dat file
'iaotransfac/transfacTransfacGeneDatFileDataSchema1
map UniProt IDs to EMBL Ids
map UniProt IDs to EntrezGene IDs
))
|
b520fc74f84e87b47400b78c55f16bc3391a3c6f1f6a6ec019c240213edd3a42 | aengelberg/clocop | solver.clj | (ns clocop.solver
"A namespace with functions for customizing the way the solver works."
(:import (JaCoP.core Var)
(JaCoP.search SimpleSelect
InputOrderSelect
ComparatorVariable
LargestDomain
LargestMax
LargestMin
MaxRegret
MinDomainOverDegree
MostConstrainedDynamic
MostConstrainedStatic
SmallestDomain
SmallestMax
SmallestMin
WeightedDegree
Indomain
IndomainMin
IndomainMax
IndomainMiddle
IndomainList
IndomainRandom
IndomainSimpleRandom
)))
(defn ^SimpleSelect selector
[variables & {:as args}]
(let [{pick-var :pick-var
pick-val :pick-val} args]
(SimpleSelect. (into-array Var variables)
pick-var
pick-val)))
(defn ^InputOrderSelect input-order-selector
[store list-of-vars pick-val]
(InputOrderSelect. store (into-array Var list-of-vars) pick-val))
(defn ^ComparatorVariable pick-var
[pick-var-type]
(case pick-var-type
:largest-domain (LargestDomain.), :largest-max (LargestMax.), :largest-min (LargestMin.),
:max-regret (MaxRegret.),
:min-domain-over-degree (MinDomainOverDegree.),
:most-constrained-dynamic (MostConstrainedDynamic.)
:most-constrained-static (MostConstrainedStatic.)
:smallest-domain (SmallestDomain.), :smallest-max (SmallestMax.), :smallest-min (SmallestMin.),
:weighted-degree (WeightedDegree.)))
(defn ^Indomain pick-val
[indomain-type]
(case indomain-type
:min (IndomainMin.)
:max (IndomainMax.)
:middle (IndomainMiddle.)
:random (IndomainRandom.)
:simple-random (IndomainSimpleRandom.)
(let [[indomain-type & args] indomain-type]
(case indomain-type
:random (IndomainRandom. (first args)))))) | null | https://raw.githubusercontent.com/aengelberg/clocop/25c774c879fdc7d7565079880a940442d585877e/src/clocop/solver.clj | clojure | (ns clocop.solver
"A namespace with functions for customizing the way the solver works."
(:import (JaCoP.core Var)
(JaCoP.search SimpleSelect
InputOrderSelect
ComparatorVariable
LargestDomain
LargestMax
LargestMin
MaxRegret
MinDomainOverDegree
MostConstrainedDynamic
MostConstrainedStatic
SmallestDomain
SmallestMax
SmallestMin
WeightedDegree
Indomain
IndomainMin
IndomainMax
IndomainMiddle
IndomainList
IndomainRandom
IndomainSimpleRandom
)))
(defn ^SimpleSelect selector
[variables & {:as args}]
(let [{pick-var :pick-var
pick-val :pick-val} args]
(SimpleSelect. (into-array Var variables)
pick-var
pick-val)))
(defn ^InputOrderSelect input-order-selector
[store list-of-vars pick-val]
(InputOrderSelect. store (into-array Var list-of-vars) pick-val))
(defn ^ComparatorVariable pick-var
[pick-var-type]
(case pick-var-type
:largest-domain (LargestDomain.), :largest-max (LargestMax.), :largest-min (LargestMin.),
:max-regret (MaxRegret.),
:min-domain-over-degree (MinDomainOverDegree.),
:most-constrained-dynamic (MostConstrainedDynamic.)
:most-constrained-static (MostConstrainedStatic.)
:smallest-domain (SmallestDomain.), :smallest-max (SmallestMax.), :smallest-min (SmallestMin.),
:weighted-degree (WeightedDegree.)))
(defn ^Indomain pick-val
[indomain-type]
(case indomain-type
:min (IndomainMin.)
:max (IndomainMax.)
:middle (IndomainMiddle.)
:random (IndomainRandom.)
:simple-random (IndomainSimpleRandom.)
(let [[indomain-type & args] indomain-type]
(case indomain-type
:random (IndomainRandom. (first args)))))) |
|
a2f7c144ef7eb4aabec2d5068c23e0338fa8b10cc85b71b09f02f77ccaf0747a | nponeccop/HNC | Parser.hs | -----------------------------------------------------------------------------------------
{-| Module : Main
Copyright :
License : All Rights Reserved
Maintainer :
Stability :
Portability :
-}
-----------------------------------------------------------------------------------------
{-# LANGUAGE NoMonomorphismRestriction, FlexibleContexts #-}
{-# OPTIONS_GHC -fno-warn-unused-do-bind #-}
module Parser.Parser (program, identifier) where
import Control.Monad
import Text.Parsec.Prim
import Text.Parsec.Combinator
import Text.Parsec.Char
import Parser.AST
identifier
= liftM2 (:) xletter $ many $ xletter <|> digit where
xletter = letter <|> char '_'
literal = between q q $ many $ noneOf "\"" where q = char '"' --"
nl = optional cr >> lf where
cr = char '\r'
lf = char '\n'
constant = Constant <$> (
(ConstInt . read <$> many1 digit)
<|>
(ConstString <$> literal))
atom = Atom <$> identifier
simpleExpression x = do
xx <-
constant
<|>
(parens >>= app)
<|>
do
a <- atom
app a <|> return a
return $ makeLet xx x
app a = Application a <$> (many1 . try $ char ' ' >> (function <|> constant))
parens = between (char '(') (char ')') (function >>= app)
function = atom <|> parens
compoundExpression = between nlIndent2 nlDedent2 $ do
x <- many1 . try $ do
a <- definition
nlIndent
return a
simpleExpression x
indent = many $ char '\t'
_assignment = do
indent
i <- identifier
string " := "
Assign i <$> simpleExpression []
definition = do
indent
fn <- try identifier
args <- many . try $ char ' ' >> identifier
string " = "
Definition fn args <$> (compoundExpression <|> simpleExpression [])
nlIndent = nl >> indent
nlIndent2 = void $ do
char '{'
string " " <|> nlIndent
nlDedent2 = void $ do
string " " <|> nlIndent
char '}'
program = sepBy definition nl
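-- A minimal usage sketch (an assumed helper, not part of the original module):
-- `parse` from Text.Parsec.Prim runs the `program` parser over an input string;
-- the source name and the sample input below are purely illustrative.
parseProgram = parse program "<sketch>"
-- e.g. parseProgram "f x = g x 1" yields Right [..] built from Parser.AST's Definition.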
| null | https://raw.githubusercontent.com/nponeccop/HNC/d8447009a04c56ae2cba4c7c179e39384085ea00/Parser/Parser.hs | haskell | ---------------------------------------------------------------------------------------
| Module : Main
Copyright :
License : All Rights Reserved
Maintainer :
Stability :
Portability :
---------------------------------------------------------------------------------------
# LANGUAGE NoMonomorphismRestriction, FlexibleContexts #
" | # OPTIONS_GHC -fno - warn - unused - do - bind #
module Parser.Parser (program, identifier) where
import Control.Monad
import Text.Parsec.Prim
import Text.Parsec.Combinator
import Text.Parsec.Char
import Parser.AST
identifier
= liftM2 (:) xletter $ many $ xletter <|> digit where
xletter = letter <|> char '_'
nl = optional cr >> lf where
cr = char '\r'
lf = char '\n'
constant = Constant <$> (
(ConstInt . read <$> many1 digit)
<|>
(ConstString <$> literal))
atom = Atom <$> identifier
simpleExpression x = do
xx <-
constant
<|>
(parens >>= app)
<|>
do
a <- atom
app a <|> return a
return $ makeLet xx x
app a = Application a <$> (many1 . try $ char ' ' >> (function <|> constant))
parens = between (char '(') (char ')') (function >>= app)
function = atom <|> parens
compoundExpression = between nlIndent2 nlDedent2 $ do
x <- many1 . try $ do
a <- definition
nlIndent
return a
simpleExpression x
indent = many $ char '\t'
_assignment = do
indent
i <- identifier
string " := "
Assign i <$> simpleExpression []
definition = do
indent
fn <- try identifier
args <- many . try $ char ' ' >> identifier
string " = "
Definition fn args <$> (compoundExpression <|> simpleExpression [])
nlIndent = nl >> indent
nlIndent2 = void $ do
char '{'
string " " <|> nlIndent
nlDedent2 = void $ do
string " " <|> nlIndent
char '}'
program = sepBy definition nl
|
a54425dc61103daae91426d52d2770dab9bd3c75f32f79a0c2fce5ab68d0c142 | comtihon/social_network_example | sn_contacts_logic.erl | %%%-------------------------------------------------------------------
@author tihon
( C ) 2017 , < COMPANY >
%%% @doc
This module is responsible for searching provided msisdn in user 's
%%% address book. If userA contacts contain userB and userB contacts
contain userA - they become friends . If only one of them contains
%%% other's msisdn - invite will be sent.
%%% @end
%%%-------------------------------------------------------------------
-module(sn_contacts_logic).
-author("tihon").
%% API
-export([scan_contacts_get_friends/2]).
-spec scan_contacts_get_friends(list(), map()) -> {{true, map()}, map()}.
scan_contacts_get_friends(_Contacts, _UserState) ->
erlang:error(not_implemented). | null | https://raw.githubusercontent.com/comtihon/social_network_example/eed32f43947fc8d88c41e38fc2d31d2371be0c2b/src/business/friends/invite/sn_contacts_logic.erl | erlang | -------------------------------------------------------------------
@doc
address book. If userA contacts contain userB and userB contacts
other's msisdn - invite will be sent.
@end
-------------------------------------------------------------------
API | @author tihon
( C ) 2017 , < COMPANY >
This module is responsible for searching provided msisdn in user 's
contain userA - they become friends . If only one of them contains
-module(sn_contacts_logic).
-author("tihon").
-export([scan_contacts_get_friends/2]).
-spec scan_contacts_get_friends(list(), map()) -> {{true, map()}, map()}.
scan_contacts_get_friends(_Contacts, _UserState) ->
erlang:error(not_implemented). |
6e8adab7a775d0e8f86b771418337e8cd8baa981f35f1ab817e3c0a002453bd3 | ohua-dev/ohua-core | ALang.hs | |
Module : $Header$
Description : Implementation for basic tail recursion support.
Copyright : (c) Sebastian Ertel, Justus Adam 2017. All Rights Reserved.
License : EPL-1.0
Maintainer : ,
Stability : experimental
Portability : portable
This source code is licensed under the terms described in the associated LICENSE.TXT file
== Design:
The tail recursion implementation for the ohua-core compiler encompasses the following phases:
=== Phase 1: (Performed directly on the initial ALang form.)
Turn recursions into HOFs:
@
let f' = \x1 ... xn -> ...
let y1 = ....
...
let yn = ...
...
if c then
result
else
recur y1 ... yn
let f = Y f'
@
Lambda-inlining will then just inline f' while still performing all other transformations
on it. A nice benefit: lowering for tail recursion is just an implementation of
a HigherOrderFunction lowering. As such though, we can not access the lambda, i.e., f.
So we need to do the lambda modifications on ALang before (which is nicer anyways).
We transform this into true tail recursion and hence nothing is performed on the branches.
As a result, we remove the whole if statement and just send the input to the conditional,
the output of the cycle (recursion) and the final output to the recurFun. It will use the `cond` to
understand which arcs to pull from!
Benefits: no array, left and right functions needed.
The lambda expression from phase 2 is lifted into a control context.
Normally there are two operators (ifFun/select or smapFun/collect). this time the operator
recurFun is both!
The resulting code for a call is then:
@
let result =
let (recurCtrl,finalResult,recursionVars) = recurFun a1 ... an in
let ctxt = ctrl recurCtrl b c d in
let b0 = nth 0 ctxt in
...
let x1 = nth 0 recursionVars in
...
let y1 = ...
...
if c then
result
else
recurFun y1 ... yn
@
The `recurFun` calls now encapsulate the recursion. The `recurFun` call only returns when the recursion finished.
And then:
@
let result =
let (recurCtrl,finalResult,recursionVars) = recurFun () () a1 ... an in
let ctxt = ctrl recurCtrl b c d in
let b0 = nth 0 ctxt in
...
let x1 = nth 0 recursionVars in
...
let y1 = ...
...
recurFun c result y1 ... yn
@
Here, recurFun and recurFun are actually the same operator.
DFLowering must make that association.
Maybe it should rather be:
@
let result =
let (recurCtrl,finalResult,recursionVars) = recurFun a1 ... an in
let ctxt = ctrl recurCtrl b c d in
let b0 = nth 0 ctxt in
...
let x1 = nth 0 recursionVars in
...
let y1 = ...
...
if c then
fix result
else
recurFun y1 ... yn
@
Using the `Y` combinator, this would be:
@
let g = \f x1 x2 x3 -> ...
let y1 = ...
...
if c then
fix result
else
f y1 y2 y3 in
let result = Y g a b c in
result
@
The above is an inlining of the function `g`, which is inarguably impossible
without additional semantics. That is, without knowing that the `recurFun` calls
are actually one. However, at this point, it is not lambda calculus anymore.
=== Phase 3: (Performed on the expression in ALang-normalized form (ANF)!)
RewriteAll the code (for a call) such that:
@
let g = recur (\args -> let (x1 ... xn) = args
...
let y1 = ....
...
let yn = ...
...
let e = if c then
let z = right result
in z
else
let ys = array y1 ... yn
let z = left ys
in z
in e
)
array a1 ... an
@
Note: y became recur
Either might be also implemented as:
@
right a = (false, a)
left a = (true, a)
isRight = not . fst
isLeft = fst
@
Phase 4: Handling free variables in the Lambda expression
The ALang phase handles free variables via lambda lifting:
@
let g = recur (\b ->
let (args, freeArgs) = b
let (x1 ... xn) = args
let (a1 ... am) = freeArgs
...
let y1 = ....
...
let yn = ...
...
let e = if c then
let z = right result
in z
else
let ys = array y1 ... yn
let z = left ys
in z
in e
)
array a1 ... an
array arg1 ... argm
@
We again pack them in an array to make the recur operator easier to implement for a backend.
This way, it does not need to support variadic argument lists.
=== Phase 5: (DF Lowering for y.)
Adds the following operator as a context op:
@
recur [a1 ... an] Either [y1 ... yn] result -> Either [y1 ... yn] result
@
The operator has two incoming arcs and two outgoing arcs:
[1. incoming arc] @[a1 ... an]@
[2. incoming arc] @Either [y1 ... yn] result@ <-- feedback edge: @e@
[1. outgoing arc] @Either [a1 ... an] [y1 ... yn]@
[2. outgoing arc] @result@
-}
{-# LANGUAGE CPP #-}
module Ohua.Feature.TailRec.Passes.ALang where
import Ohua.Prelude
import qualified Data.Text.Prettyprint.Doc as PP
import Control.Monad.Writer
import Data.Functor.Foldable
import qualified Data.HashSet as HS
import qualified Data.List.NonEmpty as NE
import qualified Data.Text as T
import Ohua.ALang.Lang
import Ohua.ALang.PPrint (quickRender)
import Ohua.ALang.Passes.Control (liftIntoCtrlCtxt)
import qualified Ohua.ALang.Refs as ALangRefs
import Ohua.ALang.Util
( fromApplyToList
, fromListToApply
, lambdaArgsAndBody
, mkDestructured
)
import qualified Ohua.DFLang.Refs as DFRefs
import Ohua.Compile.Configuration
import Ohua.Unit
-- ==== Implementation starts here
-- Currently not exposed by the frontend; it exists only as part of recursion
-- support at the backend.
recur = ALangRefs.recur -- allows me to use it in binding position
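-- A toy, self-contained illustration (ordinary Haskell, not compiler code) of
-- the Phase-1 idea documented above: the recursive call becomes an extra
-- parameter and the knot is tied once by a Y/fix-style combinator, which is the
-- role "ohua.lang/Y" plays for the rewritten algo. Names here are assumptions.
factViaY :: Integer -> Integer
factViaY = yComb (\f x -> if x == 0 then 1 else x * f (x - 1))
  where yComb g = g (yComb g)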
-- This is a compiler-internal higher-order function.
recur_hof :: QualifiedBinding
recur_hof = "ohua.lang/recur_hof"
recur_sf :: Expression
recur_sf = PureFunction recur Nothing
recur_hof_sf :: Expression
recur_hof_sf = PureFunction recur_hof Nothing
recurStartMarker :: QualifiedBinding
recurStartMarker = "ohua.lang.marker/recur_start"
recurEndMarker :: QualifiedBinding
recurEndMarker = "ohua.lang.marker/recur_end"
The Y combinator from Haskell Curry
y :: QualifiedBinding
y = "ohua.lang/Y"
y_sf :: Expression
y_sf = PureFunction y Nothing
recurFun :: QualifiedBinding
recurFun = DFRefs.recurFunBnd
recurFunPureFunction :: Expression
recurFunPureFunction = PureFunction recurFun Nothing
idPureFunction = PureFunction "ohua.lang/id" Nothing
-- Phase 1:
findTailRecs ::
(Monad m, MonadGenBnd m, MonadError Error m)
=> Bool
-> Expression
-> m Expression
findTailRecs enabled e =
snd <$> (flip runReaderT enabled . flip findRecCall HS.empty) e
findRecCall ::
(Monad m, MonadGenBnd m, MonadError Error m)
=> Expression
-> HS.HashSet Binding
-> ReaderT Bool m (HS.HashSet Binding, Expression)
findRecCall (Let a expr inExpr) algosInScope
-- for the assigment expr I add the reference and check the expression for references to the identifier
= do
(found, e) <- findRecCall expr $ HS.insert a algosInScope
-- proceed normally into the next expression
(iFound, iExpr) <- findRecCall inExpr algosInScope
-- did I detect a reference to this binding in the assignment expr?
if HS.member a found
-- hoferize right away:
then do
a' <- generateBindingWith a
return (iFound, Let a' e $ Let a (Apply y_sf (Var a')) iExpr)
else return (HS.union found iFound, Let a e iExpr)
findRecCall (Let a expr inExpr) algosInScope = do
(iFound, iExpr) <- findRecCall inExpr algosInScope
return (iFound, Let a expr iExpr)
findRecCall (Apply (Var binding) a) algosInScope
| HS.member binding algosInScope
-- no recursion here because if the expression is correct then these can be only nested APPLY statements
= do
enabledTR <- ask
unlessM ask $
throwErrorDebugS
"Detected recursion although tail recursion support is not enabled!"
return (HS.insert binding HS.empty, Apply recur_sf a)
-- else error $ "Detected recursion (" ++ (show binding) ++ ") although tail recursion support is not enabled!"
findRecCall (Apply a b) algosInScope = do
(aFound, aExpr) <- findRecCall a algosInScope
(bFound, bExpr) <- findRecCall b algosInScope
return (HS.union aFound bFound, Apply aExpr bExpr)
findRecCall (Lambda a e) algosInScope = do
(eFound, eExpr) <- findRecCall e algosInScope
return (eFound, Lambda a eExpr)
findRecCall other _ = return (HS.empty, other)
-- performed after normalization
verifyTailRecursion ::
(Monad m, MonadGenBnd m, MonadError Error m)
=> Expression
-> m Expression
verifyTailRecursion e
| isCall y e = (performChecks $ snd $ fromApplyToList e) >> return e
where
performChecks ((Lambda a e):_) = traverseToLastCall checkIf e
performChecks (e:_) =
throwErrorDebugS $ "Recursion is not inside a lambda but: " <> show e
traverseToLastCall check (Let v e ie)
| isLastStmt ie = check e
traverseToLastCall check (Let v e ie) =
failOnRecur e >> traverseToLastCall check ie
traverseToLastCall _ e =
throwErrorDebugS $ "Invariant broken! Found expression: " <> quickRender e
-- failOnRecur (Let _ e ie) | isCall recur e || isCall recur ie = error "Recursion is not tail recursive!"
failOnRecur (Let _ e ie) = failOnRecur e >> failOnRecur ie
failOnRecur (Lambda v e) = failOnRecur e -- TODO maybe throw a better error message when this happens
failOnRecur (Apply (PureFunction recur _) _) =
error "Recursion is not tail recursive!"
failOnRecur (Apply a b) = return ()
failOnRecur e = error $ "Invariant broken! Found pattern: " <> show e
checkIf e
| isCall "ohua.lang/if" e
-- assumes well-structured if
= do
let (_:tBranch:fBranch:_) = snd $ fromApplyToList e
let (Lambda v et) = tBranch
let (Lambda v ef) = fBranch
let lastFnOnBranch =
traverseToLastCall
(return .
(\(FunRef f _) -> f :: QualifiedBinding) .
fst . fromApplyToList)
tFn <- lastFnOnBranch et
fFn <- lastFnOnBranch ef
when (tFn == recur) $ do
when (fFn == recur) $
throwErrorDebugS
"Endless loop detected: Tail recursion does not have a non-recursive branch!"
unless (fFn == recur) $
throwErrorDebugS $
"We currently do not support recursive calls that are located on" <>
"nested conditional branches (#conditional branches > 1) or in" <>
"Lambdas to other higher-order functions! Found: " <>
show fFn <>
" : " <>
show tFn
checkIf e =
throwErrorDebugS $
"Recursion is not tail recursive! Last stmt: " <> show (quickRender e)
isLastStmt (Var _) = True
isLastStmt _ = False
verifyTailRecursion e@(Let v expr inExpr) =
verifyTailRecursion expr >> verifyTailRecursion inExpr >> return e
verifyTailRecursion e@(Var _) = return e
verifyTailRecursion e =
throwErrorDebugS $ "Invariant broken! Found stmt: " <> show e
Phase 3 :
-- This is a reimplementation using `rewriteM` (ergo `plated`). The theory is
-- that this should do the recursion properly and be future proof.
--
-- Its important here that we pattern match on `Let` because `rewriteM` is a
-- bottom up traversal an hence `isCall` would match on partial applications on
-- the `Y` combinator. By pattern matching on `Let` here that can be avoided.
rewriteAll :: (MonadGenBnd m, MonadError Error m, MonadReadEnvironment m) => Expression -> m Expression
rewriteAll = rewriteM $ \case
Let b e r | isCall y e -> (\e' -> Just $ Let b e' r) <$> rewriteCallExpr e
_ -> pure Nothing
isCall f (Apply (PureFunction f' _) _)
| f == f' = True
isCall f (Apply e@(Apply _ _) _) = isCall f e
isCall _ _ = False
rewriteCallExpr ::
(MonadGenBnd m, MonadError Error m, MonadReadEnvironment m) => Expression -> m Expression
rewriteCallExpr e = do
let (lam@(Lambda _ _):callArgs) = snd $ fromApplyToList e
let (recurVars, expr) = lambdaArgsAndBody lam
recurCtrl <- generateBindingWith "ctrl"
l' <- liftIntoCtrlCtxt recurCtrl expr
let l'' = rewriteLastCond l'
-- [ohualang|
-- let (recurCtrl, b1 , ..., bn) = recurFun () () a1 ... an in
let = ctrl recurCtrl b c d in
let b0 = nth 0 in
-- ...
-- let x1 = nth 0 recursionVars in
-- ...
-- let y1 = ...
-- ...
-- let r = recurFun c result y1 ... yn in
-- r
-- this breaks haddock |]
ctrls <- generateBindingWith "ctrls"
return $
Let ctrls (fromListToApply (FunRef recurStartMarker Nothing) callArgs) $
mkDestructured (recurCtrl : recurVars) ctrls l''
where
rewriteLastCond :: Expression -> Expression
rewriteLastCond (Let v e o@(Var b))
| v == b = Let v (rewriteCond e) o
| otherwise = error "Value returned from recursive function was not last value bound, this is not tail recursive!"
rewriteLastCond (Let v e ie) = Let v e $ rewriteLastCond ie
-- This whole rewriteCond and rewriteBranch algorithm is not correct. That
-- is to say it only works in the specific case where a single `if` is the
-- last expression in a recursive function. While the reason for this
-- assumption is obvious we must consider that also nested `if`'s
-- technically are valid tail recursive functions so long as there is at
least one branch that recurses and one branch that does not . I feel
-- implementing this correctly however is going to require some effort, thus
-- I think we should do so later.
rewriteCond :: Expression -> Expression
rewriteCond fullExpr@(Apply (Apply (Apply (PureFunction f0 _) cond) (Lambda a trueB)) (Lambda b falseB)) | f0 == ALangRefs.ifThenElse =
let trueB' = rewriteBranch trueB
falseB' = rewriteBranch falseB
(fixRef, recurVars) =
case (trueB', falseB') of
(Left f, Right bnds) -> errorD $ flexText "I am sorry, but for now the recursion is required to be on the first (`then`) branch of the final condition. This is a bug of the implementation and will be fixed in the future. (Issue #36)\n\nYour code violating this invariant was\n" <> (PP.indent 4 $ PP.pretty fullExpr) -- (f, bnds)
(Right bnds, Left f) -> (f, bnds)
_ -> error "invariant broken"
in fromListToApply (FunRef recurEndMarker Nothing) $
cond : fixRef : recurVars
rewriteCond _ =
error
"invariant broken: recursive function does not have the proper structure."
rewriteBranch :: Expression -> Either Expression [Expression]
-- normally this is "fix" instead of `id`
rewriteBranch (Let v (Apply (PureFunction "ohua.lang/id" _) result) _) = Left result
rewriteBranch (Let v e _)
| isCall recur e = (Right . snd . fromApplyToList) e
rewriteBranch e = error $ "invariant broken: " <> quickRender e
| null | https://raw.githubusercontent.com/ohua-dev/ohua-core/8fe0ee90f4a1aea0c5bfabe922b290fed668a7da/core/src/Ohua/Feature/TailRec/Passes/ALang.hs | haskell | feedback edge : @e@
feedback edge: @e@
==== Implementation starts here
Currently not exposed by the frontend but only as the only part of recursion
at the backend.
allows me to use it in binding position
This is a compiler-internal higher-order function.
Phase 1:
for the assigment expr I add the reference and check the expression for references to the identifier
proceed normally into the next expression
did I detect a reference to this binding in the assignment expr?
hoferize right away:
no recursion here because if the expression is correct then these can be only nested APPLY statements
else error $ "Detected recursion (" ++ (show binding) ++ ") although tail recursion support is not enabled!"
performed after normalization
failOnRecur (Let _ e ie) | isCall recur e || isCall recur ie = error "Recursion is not tail recursive!"
TODO maybe throw a better error message when this happens
assumes well-structured if
This is a reimplementation using `rewriteM` (ergo `plated`). The theory is
that this should do the recursion properly and be future proof.
Its important here that we pattern match on `Let` because `rewriteM` is a
bottom up traversal an hence `isCall` would match on partial applications on
the `Y` combinator. By pattern matching on `Let` here that can be avoided.
[ohualang|
let (recurCtrl, b1 , ..., bn) = recurFun () () a1 ... an in
...
let x1 = nth 0 recursionVars in
...
let y1 = ...
...
let r = recurFun c result y1 ... yn in
r
this breaks haddock |]
This whole rewriteCond and rewriteBranch algorithm is not correct. That
is to say it only works in the specific case where a single `if` is the
last expression in a recursive function. While the reason for this
assumption is obvious we must consider that also nested `if`'s
technically are valid tail recursive functions so long as there is at
implementing this correctly however is going to require some effort, thus
I think we should do so later.
(f, bnds)
normally this is "fix" instead of `id` | |
Module : $ Header$
Description : Implementation for basic tail recrusion support .
Copyright : ( c ) , 2017 . All Rights Reserved .
License : EPL-1.0
Maintainer : ,
Stability : experimental
Portability : portable
This source code is licensed under the terms described in the associated LICENSE.TXT file
= = Design :
The tail recursion implementation for the ohua - core compiler encompasses the following phases :
= = = Phase 1 : ( Performed directly on the initial ALang form . )
Turn recursions into :
@
let f ' = \x1 ... xn - > ...
let y1 = ....
...
let yn = ...
...
if c then
result
else
recur y1 ... yn
let f = Y f '
@
Lambda - inlinling will then just inline f ' while still performing all other transformations
on it . A nice benefit : lowering for tail recursion is just an implementation of
a HigherOrderFunction lowering . As such though , we can not access the lambda , i.e. , So we need to do the lambda modifications on ALang before ( which is nicer anyways ) .
We transform this into true tail recursion and hence nothing is performed on the branches .
As a result , we remove the whole if statement and just send the input to the conditional ,
the output of the cycle ( recursion ) and the final output to the recurFun . It will use the ` cond ` to
understand which arcs to pull from !
Benefits : no array , left and right functions needed .
The lambda expression from phase 2 is lifted into a control context .
Normally there are two operators ( ifFun / select or smapFun / collect ) . this time the operator
recurFun is both !
The resulting code for a call is then :
@
let result =
let ( recurCtrl , finalResult , recursionVars ) = recurFun a1 ... an in
let = ctrl recurCtrl b c d in
let b0 = nth 0 in
...
let x1 = nth 0 recursionVars in
...
let y1 = ...
...
if c then
result
else
recurFun y1 ... yn
@
The ` recurFun ` calls now encapsulate the recursion . The ` recurFun ` call only returns when the recursion finished .
And then :
@
let result =
let ( recurCtrl , finalResult , recursionVars ) = recurFun ( ) ( ) a1 ... an in
let = ctrl recurCtrl b c d in
let b0 = nth 0 in
...
let x1 = nth 0 recursionVars in
...
let y1 = ...
...
recurFun c result y1 ... yn
@
Here , recurFun and recurFun are actually the same operator .
must make that association .
Maybe it should rather be :
@
let result =
let ( recurCtrl , finalResult , recursionVars ) = recurFun a1 ... an in
let = ctrl recurCtrl b c d in
let b0 = nth 0 in
...
let x1 = nth 0 recursionVars in
...
let y1 = ...
...
if c then
fix result
else
recurFun y1 ... yn
@
Using the ` Y ` combinator , this would be :
@
let = \f x1 x2 x3 - > ...
let y1 = ...
...
if c then
fix result
else
f y1 y2 y3 in
let result = Y g a b c in
result
@
The above is an inlining of the function ` g ` , which is inarguably impossible
without additional semantics . That is , without knowing that the ` recurFun ` calls
are actually one . However , at this point , it is not lambda calculus anymore .
= = = Phase 3 : ( Performed on the expression in ALang - normalized form ( ANF ) ! )
RewriteAll the code ( for a call ) such that :
@
let g = recur ( \args - > let ( x1 ... xn ) = args
...
let y1 = ....
...
let yn = ...
...
let e = if c then
let z = right result
in z
else
let ys = array y1 ... yn
let z = left ys
in z
in e
)
array a1 ... an
@
Note : y became recur
Either might be also implemented as :
@
right a = ( false , a )
left a = ( true , a )
isRight = not . fst
isLeft = fst
@
Phase 4 : Handling free variables in the Lambda expression
The ALang phase handles free variables via lambda lifting :
@
let g = recur ( \b - >
let ( args , freeArgs ) = b
let ( x1 ... xn ) = args
let ( a1 ... am ) = freeArgs
...
let y1 = ....
...
let yn = ...
...
let e = if c then
let z = right result
in z
else
let ys = array y1 ... yn
let z = left ys
in z
in e
)
array a1 ... an
array arg1 ... argm
@
We again pack them in an array to make the recur operator easier to implement for a backend .
This way , it does not need to support variadic argument lists .
= = = Phase 5 : ( DF Lowering for y. )
Adds the following operator as a context op :
@
recur [ a1 ... an ] Either [ y1 ... yn ] result - > Either [ y1 ... yn ] result
@
The operator has two incoming arcs and two outgoing arcs :
[ 1 . incoming arc ] @[a1 ...
[ 1 . outgoing arc ] [ a1 ... an ] [ y1 ... yn]@
[ 2 . outgoing arc ] @result@
Module : $Header$
Description : Implementation for basic tail recrusion support.
Copyright : (c) Sebastian Ertel, Justus Adam 2017. All Rights Reserved.
License : EPL-1.0
Maintainer : ,
Stability : experimental
Portability : portable
This source code is licensed under the terms described in the associated LICENSE.TXT file
== Design:
The tail recursion implementation for the ohua-core compiler encompasses the following phases:
=== Phase 1: (Performed directly on the initial ALang form.)
Turn recursions into HOFs:
@
let f' = \x1 ... xn -> ...
let y1 = ....
...
let yn = ...
...
if c then
result
else
recur y1 ... yn
let f = Y f'
@
Lambda-inlinling will then just inline f' while still performing all other transformations
on it. A nice benefit: lowering for tail recursion is just an implementation of
a HigherOrderFunction lowering. As such though, we can not access the lambda, i.e., f.
So we need to do the lambda modifications on ALang before (which is nicer anyways).
We transform this into true tail recursion and hence nothing is performed on the branches.
As a result, we remove the whole if statement and just send the input to the conditional,
the output of the cycle (recursion) and the final output to the recurFun. It will use the `cond` to
understand which arcs to pull from!
Benefits: no array, left and right functions needed.
The lambda expression from phase 2 is lifted into a control context.
Normally there are two operators (ifFun/select or smapFun/collect). this time the operator
recurFun is both!
The resulting code for a call is then:
@
let result =
let (recurCtrl,finalResult,recursionVars) = recurFun a1 ... an in
let ctxt = ctrl recurCtrl b c d in
let b0 = nth 0 ctxt in
...
let x1 = nth 0 recursionVars in
...
let y1 = ...
...
if c then
result
else
recurFun y1 ... yn
@
The `recurFun` calls now encapsulate the recursion. The `recurFun` call only returns when the recursion finished.
And then:
@
let result =
let (recurCtrl,finalResult,recursionVars) = recurFun () () a1 ... an in
let ctxt = ctrl recurCtrl b c d in
let b0 = nth 0 ctxt in
...
let x1 = nth 0 recursionVars in
...
let y1 = ...
...
recurFun c result y1 ... yn
@
Here, recurFun and recurFun are actually the same operator.
DFLowering must make that association.
Maybe it should rather be:
@
let result =
let (recurCtrl,finalResult,recursionVars) = recurFun a1 ... an in
let ctxt = ctrl recurCtrl b c d in
let b0 = nth 0 ctxt in
...
let x1 = nth 0 recursionVars in
...
let y1 = ...
...
if c then
fix result
else
recurFun y1 ... yn
@
Using the `Y` combinator, this would be:
@
let g = \f x1 x2 x3 -> ...
let y1 = ...
...
if c then
fix result
else
f y1 y2 y3 in
let result = Y g a b c in
result
@
The above is an inlining of the function `g`, which is inarguably impossible
without additional semantics. That is, without knowing that the `recurFun` calls
are actually one. However, at this point, it is not lambda calculus anymore.
=== Phase 3: (Performed on the expression in ALang-normalized form (ANF)!)
RewriteAll the code (for a call) such that:
@
let g = recur (\args -> let (x1 ... xn) = args
...
let y1 = ....
...
let yn = ...
...
let e = if c then
let z = right result
in z
else
let ys = array y1 ... yn
let z = left ys
in z
in e
)
array a1 ... an
@
Note: y became recur
Either might be also implemented as:
@
right a = (false, a)
left a = (true, a)
isRight = not . fst
isLeft = fst
@
Phase 4: Handling free variables in the Lambda expression
The ALang phase handles free variables via lambda lifting:
@
let g = recur (\b ->
let (args, freeArgs) = b
let (x1 ... xn) = args
let (a1 ... am) = freeArgs
...
let y1 = ....
...
let yn = ...
...
let e = if c then
let z = right result
in z
else
let ys = array y1 ... yn
let z = left ys
in z
in e
)
array a1 ... an
array arg1 ... argm
@
We again pack them in an array to make the recur operator easier to implement for a backend.
This way, it does not need to support variadic argument lists.
=== Phase 5: (DF Lowering for y.)
Adds the following operator as a context op:
@
recur [a1 ... an] Either [y1 ... yn] result -> Either [y1 ... yn] result
@
The operator has two incoming arcs and two outgoing arcs:
[1. incoming arc] @[a1 ... an]@
[1. outgoing arc] @Either [a1 ... an] [y1 ... yn]@
[2. outgoing arc] @result@
-}
# LANGUAGE CPP #
module Ohua.Feature.TailRec.Passes.ALang where
import Ohua.Prelude
import qualified Data.Text.Prettyprint.Doc as PP
import Control.Monad.Writer
import Data.Functor.Foldable
import qualified Data.HashSet as HS
import qualified Data.List.NonEmpty as NE
import qualified Data.Text as T
import Ohua.ALang.Lang
import Ohua.ALang.PPrint (quickRender)
import Ohua.ALang.Passes.Control (liftIntoCtrlCtxt)
import qualified Ohua.ALang.Refs as ALangRefs
import Ohua.ALang.Util
( fromApplyToList
, fromListToApply
, lambdaArgsAndBody
, mkDestructured
)
import qualified Ohua.DFLang.Refs as DFRefs
import Ohua.Compile.Configuration
import Ohua.Unit
recur_hof :: QualifiedBinding
recur_hof = "ohua.lang/recur_hof"
recur_sf :: Expression
recur_sf = PureFunction recur Nothing
recur_hof_sf :: Expression
recur_hof_sf = PureFunction recur_hof Nothing
recurStartMarker :: QualifiedBinding
recurStartMarker = "ohua.lang.marker/recur_start"
recurEndMarker :: QualifiedBinding
recurEndMarker = "ohua.lang.marker/recur_end"
The Y combinator from Haskell Curry
y :: QualifiedBinding
y = "ohua.lang/Y"
y_sf :: Expression
y_sf = PureFunction y Nothing
recurFun :: QualifiedBinding
recurFun = DFRefs.recurFunBnd
recurFunPureFunction :: Expression
recurFunPureFunction = PureFunction recurFun Nothing
idPureFunction = PureFunction "ohua.lang/id" Nothing
findTailRecs ::
(Monad m, MonadGenBnd m, MonadError Error m)
=> Bool
-> Expression
-> m Expression
findTailRecs enabled e =
snd <$> (flip runReaderT enabled . flip findRecCall HS.empty) e
findRecCall ::
(Monad m, MonadGenBnd m, MonadError Error m)
=> Expression
-> HS.HashSet Binding
-> ReaderT Bool m (HS.HashSet Binding, Expression)
findRecCall (Let a expr inExpr) algosInScope
= do
(found, e) <- findRecCall expr $ HS.insert a algosInScope
(iFound, iExpr) <- findRecCall inExpr algosInScope
if HS.member a found
then do
a' <- generateBindingWith a
return (iFound, Let a' e $ Let a (Apply y_sf (Var a')) iExpr)
else return (HS.union found iFound, Let a e iExpr)
findRecCall (Let a expr inExpr) algosInScope = do
(iFound, iExpr) <- findRecCall inExpr algosInScope
return (iFound, Let a expr iExpr)
findRecCall (Apply (Var binding) a) algosInScope
| HS.member binding algosInScope
= do
enabledTR <- ask
unlessM ask $
throwErrorDebugS
"Detected recursion although tail recursion support is not enabled!"
return (HS.insert binding HS.empty, Apply recur_sf a)
findRecCall (Apply a b) algosInScope = do
(aFound, aExpr) <- findRecCall a algosInScope
(bFound, bExpr) <- findRecCall b algosInScope
return (HS.union aFound bFound, Apply aExpr bExpr)
findRecCall (Lambda a e) algosInScope = do
(eFound, eExpr) <- findRecCall e algosInScope
return (eFound, Lambda a eExpr)
findRecCall other _ = return (HS.empty, other)
verifyTailRecursion ::
(Monad m, MonadGenBnd m, MonadError Error m)
=> Expression
-> m Expression
verifyTailRecursion e
| isCall y e = (performChecks $ snd $ fromApplyToList e) >> return e
where
performChecks ((Lambda a e):_) = traverseToLastCall checkIf e
performChecks (e:_) =
throwErrorDebugS $ "Recursion is not inside a lambda but: " <> show e
traverseToLastCall check (Let v e ie)
| isLastStmt ie = check e
traverseToLastCall check (Let v e ie) =
failOnRecur e >> traverseToLastCall check ie
traverseToLastCall _ e =
throwErrorDebugS $ "Invariant broken! Found expression: " <> quickRender e
failOnRecur (Let _ e ie) = failOnRecur e >> failOnRecur ie
failOnRecur (Apply (PureFunction recur _) _) =
error "Recursion is not tail recursive!"
failOnRecur (Apply a b) = return ()
failOnRecur e = error $ "Invariant broken! Found pattern: " <> show e
checkIf e
| isCall "ohua.lang/if" e
= do
let (_:tBranch:fBranch:_) = snd $ fromApplyToList e
let (Lambda v et) = tBranch
let (Lambda v ef) = fBranch
let lastFnOnBranch =
traverseToLastCall
(return .
(\(FunRef f _) -> f :: QualifiedBinding) .
fst . fromApplyToList)
tFn <- lastFnOnBranch et
fFn <- lastFnOnBranch ef
when (tFn == recur) $ do
when (fFn == recur) $
throwErrorDebugS
"Endless loop detected: Tail recursion does not have a non-recursive branch!"
unless (fFn == recur) $
throwErrorDebugS $
"We currently do not support recursive calls that are located on" <>
"nested conditional branches (#conditional branches > 1) or in" <>
"Lambdas to other higher-order functions! Found: " <>
show fFn <>
" : " <>
show tFn
checkIf e =
throwErrorDebugS $
"Recursion is not tail recursive! Last stmt: " <> show (quickRender e)
isLastStmt (Var _) = True
isLastStmt _ = False
verifyTailRecursion e@(Let v expr inExpr) =
verifyTailRecursion expr >> verifyTailRecursion inExpr >> return e
verifyTailRecursion e@(Var _) = return e
verifyTailRecursion e =
throwErrorDebugS $ "Invariant broken! Found stmt: " <> show e
Phase 3 :
rewriteAll :: (MonadGenBnd m, MonadError Error m, MonadReadEnvironment m) => Expression -> m Expression
rewriteAll = rewriteM $ \case
Let b e r | isCall y e -> (\e' -> Just $ Let b e' r) <$> rewriteCallExpr e
_ -> pure Nothing
isCall f (Apply (PureFunction f' _) _)
| f == f' = True
isCall f (Apply e@(Apply _ _) _) = isCall f e
isCall _ _ = False
rewriteCallExpr ::
(MonadGenBnd m, MonadError Error m, MonadReadEnvironment m) => Expression -> m Expression
rewriteCallExpr e = do
let (lam@(Lambda _ _):callArgs) = snd $ fromApplyToList e
let (recurVars, expr) = lambdaArgsAndBody lam
recurCtrl <- generateBindingWith "ctrl"
l' <- liftIntoCtrlCtxt recurCtrl expr
let l'' = rewriteLastCond l'
let = ctrl recurCtrl b c d in
let b0 = nth 0 in
ctrls <- generateBindingWith "ctrls"
return $
Let ctrls (fromListToApply (FunRef recurStartMarker Nothing) callArgs) $
mkDestructured (recurCtrl : recurVars) ctrls l''
where
rewriteLastCond :: Expression -> Expression
rewriteLastCond (Let v e o@(Var b))
| v == b = Let v (rewriteCond e) o
| otherwise = error "Value returned from recursive function was not last value bound, this is not tail recursive!"
rewriteLastCond (Let v e ie) = Let v e $ rewriteLastCond ie
least one branch that recurses and one branch that does not . I feel
rewriteCond :: Expression -> Expression
rewriteCond fullExpr@(Apply (Apply (Apply (PureFunction f0 _) cond) (Lambda a trueB)) (Lambda b falseB)) | f0 == ALangRefs.ifThenElse =
let trueB' = rewriteBranch trueB
falseB' = rewriteBranch falseB
(fixRef, recurVars) =
case (trueB', falseB') of
(Right bnds, Left f) -> (f, bnds)
_ -> error "invariant broken"
in fromListToApply (FunRef recurEndMarker Nothing) $
cond : fixRef : recurVars
rewriteCond _ =
error
"invariant broken: recursive function does not have the proper structure."
rewriteBranch :: Expression -> Either Expression [Expression]
rewriteBranch (Let v (Apply (PureFunction "ohua.lang/id" _) result) _) = Left result
rewriteBranch (Let v e _)
| isCall recur e = (Right . snd . fromApplyToList) e
rewriteBranch e = error $ "invariant broken: " <> quickRender e
|
410ad19d0f04b6885e3b9e66b8c11a7313f3da4d08af5d21f6be47c4e6192e70 | seliopou/typo | Prelude.hs | module Language.Typo.Prelude
( prelude
) where
prelude :: String
prelude = "\
\-------------------------------------------------------------------------------\n\
\-- BEGIN PRELUDE --------------------------------------------------------------\n\
\-------------------------------------------------------------------------------\n\
\\n\
\{-# LANGUAGE NoImplicitPrelude #-}\n\
\{-# LANGUAGE MultiParamTypeClasses, FunctionalDependencies #-}\n\
\{-# LANGUAGE FlexibleInstances, UndecidableInstances #-}\n\
\{-# LANGUAGE OverlappingInstances #-}\n\
\\n\
\undefined = undefined\n\
\\n\
\data True\n\
\data False\n\
\\n\
\true = undefined :: True\n\
\false = undefined :: False\n\
\\n\
\class And a b c | a b -> c where\n\
\ and :: a -> b -> c\n\
\ and = undefined\n\
\\n\
\class Or a b c | a b -> c where\n\
\ or :: a -> b -> c\n\
\ or = undefined\n\
\\n\
\class Imp a b c | a b -> c where\n\
\ imp :: a -> b -> c\n\
\ imp = undefined\n\
\\n\
\class Cond c t f r | c t f -> r where\n\
\ cond :: c -> t -> f -> r\n\
\ cond = undefined\n\
\\n\
\instance And True True True\n\
\instance And True False False\n\
\instance And False True False\n\
\instance And False False False\n\
\\n\
\instance Or True True True\n\
\instance Or True False True\n\
\instance Or False True True\n\
\instance Or False False False\n\
\\n\
\instance Imp True True True\n\
\instance Imp True False False\n\
\instance Imp False True True\n\
\instance Imp False False True\n\
\\n\
\instance Cond True t f t\n\
\instance Cond False t f f\n\
\\n\
\data Z\n\
\data S n\n\
\\n\
\data Neg n\n\
\\n\
\data LT\n\
\data EQ\n\
\data GT\n\
\\n\
\class Compare n m c | n m -> c where\n\
\ compare :: n -> m -> c\n\
\ compare = undefined\n\
\\n\
\instance Compare Z Z EQ\n\
\instance Compare Z (S m) LT\n\
\instance Compare (S n) Z GT\n\
\instance Compare (Neg n) Z LT\n\
\instance Compare Z (Neg m) GT\n\
\instance Compare (Neg n) (S m) LT\n\
\instance Compare (S n) (Neg m) GT\n\
\instance Compare m n c => Compare (Neg n) (Neg m) c\n\
\instance Compare n m c => Compare (S n) (S m) c\n\
\\n\
\class Eq n m b | n m -> b where\n\
\ eq :: n -> m -> b\n\
\ eq = undefined\n\
\\n\
\class Eq' n m c b | n m c -> b where\n\
\ eq' :: n -> m -> c -> b\n\
\ eq' = undefined\n\
\\n\
\class Lt n m b | n m -> b where\n\
\ lt :: n -> m -> b\n\
\ lt = undefined\n\
\\n\
\class Lt' n m c b | n m c -> b where\n\
\ lt' :: n -> m -> c -> b\n\
\ lt' = undefined\n\
\\n\
\instance Eq' n m LT False\n\
\instance Eq' n m EQ True\n\
\instance Eq' n m GT False\n\
\\n\
\instance (Compare n m c, Eq' n m c b) => Eq n m b\n\
\\n\
\instance Lt' n m LT True\n\
\instance Lt' n m EQ False\n\
\instance Lt' n m GT False\n\
\\n\
\instance (Compare n m c, Lt' n m c b) => Lt n m b\n\
\\n\
\class Add n m k | n m -> k where\n\
\ add :: n -> m -> k\n\
\ add = undefined\n\
\\n\
\instance Add Z Z Z\n\
\instance Add n Z n\n\
\instance Add Z m m\n\
\\n\
\instance Add n m r => Add (Neg n) (Neg m) (Neg r)\n\
\instance Sub m n r => Add (Neg n) m r\n\
\instance Sub n m r => Add n (Neg m) r\n\
\instance Add n m r => Add (S n) (S m) (S (S r))\n\
\\n\
\class Sub n m k | n m -> k where\n\
\ sub :: n -> m -> k\n\
\ sub = undefined\n\
\\n\
\instance Sub Z Z Z\n\
\instance Sub n Z n\n\
\instance Sub Z (Neg m) m\n\
\instance Sub Z (S m) (Neg (S m))\n\
\\n\
\instance Sub m n r => Sub (Neg n) (Neg m) r\n\
\instance Add n m r => Sub (Neg n) (S m) (Neg (S r))\n\
\instance Add n m r => Sub (S n) (Neg m) (S r)\n\
\instance Sub n m r => Sub (S n) (S m) r\n\
\\n\
\class Mul n m k | n m -> k where\n\
\ mul :: n -> m -> k\n\
\ mul = undefined\n\
\\n\
\instance Mul Z Z Z\n\
\instance Mul n Z Z\n\
\instance Mul Z m Z\n\
\\n\
\instance Mul n m r => Mul (Neg n) (Neg m) r\n\
\instance Mul (S n) m r => Mul (S n) (Neg m) (Neg r)\n\
\instance Mul n (S m) r => Mul (Neg n) (S m) (Neg r)\n\
\instance (Mul n m k, Add n k k', Add m k' r) => Mul (S n) (S m) (S r)\n\
\\n\
\class DivRem n m q r | n m -> q r where\n\
\ divRem :: n -> m -> (q, r)\n\
\ divRem = undefined\n\
\\n\
\class DivRemBranch0 b n m q r | b n m -> q r where\n\
\ divRemBranch0 :: b -> n -> m -> (q, r)\n\
\ divRemBranch0 = undefined\n\
\\n\
\instance DivRemBranch0 LT n m Z n\n\
\instance DivRemBranch0 EQ n m (S Z) Z\n\
\instance (Sub n m n', DivRem n' m q r) => DivRemBranch0 GT n m (S q) r\n\
\\n\
\instance (Compare n m c, DivRemBranch0 c n m q r) => DivRem n m q r\n\
\\n\
\class Div n m k | n m -> k where\n\
\ div :: n -> m -> k\n\
\ div = undefined\n\
\\n\
\class Rem n m r | n m -> r where\n\
\ rem :: n -> m -> r\n\
\ rem = undefined\n\
\\n\
\instance DivRem n m q r => Div n m q\n\
\instance DivRem n m q r => Rem n m r\n\
\\n\
\-------------------------------------------------------------------------------\n\
\-- END PRELUDE ----------------------------------------------------------------\n\
\-------------------------------------------------------------------------------\n\
\\n\
\"
| null | https://raw.githubusercontent.com/seliopou/typo/cc64ec38603a50c6543b3834d47b4ff0431c2b3e/Language/Typo/Prelude.hs | haskell | -----------------------------------------------------------------------------\n\
BEGIN PRELUDE --------------------------------------------------------------\n\
-----------------------------------------------------------------------------\n\
-----------------------------------------------------------------------------\n\
END PRELUDE ----------------------------------------------------------------\n\
-----------------------------------------------------------------------------\n\ | module Language.Typo.Prelude
( prelude
) where
prelude :: String
prelude = "\
\\n\
\{-# LANGUAGE NoImplicitPrelude #-}\n\
\{-# LANGUAGE MultiParamTypeClasses, FunctionalDependencies #-}\n\
\{-# LANGUAGE FlexibleInstances, UndecidableInstances #-}\n\
\{-# LANGUAGE OverlappingInstances #-}\n\
\\n\
\undefined = undefined\n\
\\n\
\data True\n\
\data False\n\
\\n\
\true = undefined :: True\n\
\false = undefined :: False\n\
\\n\
\class And a b c | a b -> c where\n\
\ and :: a -> b -> c\n\
\ and = undefined\n\
\\n\
\class Or a b c | a b -> c where\n\
\ or :: a -> b -> c\n\
\ or = undefined\n\
\\n\
\class Imp a b c | a b -> c where\n\
\ imp :: a -> b -> c\n\
\ imp = undefined\n\
\\n\
\class Cond c t f r | c t f -> r where\n\
\ cond :: c -> t -> f -> r\n\
\ cond = undefined\n\
\\n\
\instance And True True True\n\
\instance And True False False\n\
\instance And False True False\n\
\instance And False False False\n\
\\n\
\instance Or True True True\n\
\instance Or True False True\n\
\instance Or False True True\n\
\instance Or False False False\n\
\\n\
\instance Imp True True True\n\
\instance Imp True False False\n\
\instance Imp False True True\n\
\instance Imp False False True\n\
\\n\
\instance Cond True t f t\n\
\instance Cond False t f f\n\
\\n\
\data Z\n\
\data S n\n\
\\n\
\data Neg n\n\
\\n\
\data LT\n\
\data EQ\n\
\data GT\n\
\\n\
\class Compare n m c | n m -> c where\n\
\ compare :: n -> m -> c\n\
\ compare = undefined\n\
\\n\
\instance Compare Z Z EQ\n\
\instance Compare Z (S m) LT\n\
\instance Compare (S n) Z GT\n\
\instance Compare (Neg n) Z LT\n\
\instance Compare Z (Neg m) GT\n\
\instance Compare (Neg n) (S m) LT\n\
\instance Compare (S n) (Neg m) GT\n\
\instance Compare m n c => Compare (Neg n) (Neg m) c\n\
\instance Compare n m c => Compare (S n) (S m) c\n\
\\n\
\class Eq n m b | n m -> b where\n\
\ eq :: n -> m -> b\n\
\ eq = undefined\n\
\\n\
\class Eq' n m c b | n m c -> b where\n\
\ eq' :: n -> m -> c -> b\n\
\ eq' = undefined\n\
\\n\
\class Lt n m b | n m -> b where\n\
\ lt :: n -> m -> b\n\
\ lt = undefined\n\
\\n\
\class Lt' n m c b | n m c -> b where\n\
\ lt' :: n -> m -> c -> b\n\
\ lt' = undefined\n\
\\n\
\instance Eq' n m LT False\n\
\instance Eq' n m EQ True\n\
\instance Eq' n m GT False\n\
\\n\
\instance (Compare n m c, Eq' n m c b) => Eq n m b\n\
\\n\
\instance Lt' n m LT True\n\
\instance Lt' n m EQ False\n\
\instance Lt' n m GT False\n\
\\n\
\instance (Compare n m c, Lt' n m c b) => Lt n m b\n\
\\n\
\class Add n m k | n m -> k where\n\
\ add :: n -> m -> k\n\
\ add = undefined\n\
\\n\
\instance Add Z Z Z\n\
\instance Add n Z n\n\
\instance Add Z m m\n\
\\n\
\instance Add n m r => Add (Neg n) (Neg m) (Neg r)\n\
\instance Sub m n r => Add (Neg n) m r\n\
\instance Sub n m r => Add n (Neg m) r\n\
\instance Add n m r => Add (S n) (S m) (S (S r))\n\
\\n\
\class Sub n m k | n m -> k where\n\
\ sub :: n -> m -> k\n\
\ sub = undefined\n\
\\n\
\instance Sub Z Z Z\n\
\instance Sub n Z n\n\
\instance Sub Z (Neg m) m\n\
\instance Sub Z (S m) (Neg (S m))\n\
\\n\
\instance Sub m n r => Sub (Neg n) (Neg m) r\n\
\instance Add n m r => Sub (Neg n) (S m) (Neg (S r))\n\
\instance Add n m r => Sub (S n) (Neg m) (S r)\n\
\instance Sub n m r => Sub (S n) (S m) r\n\
\\n\
\class Mul n m k | n m -> k where\n\
\ mul :: n -> m -> k\n\
\ mul = undefined\n\
\\n\
\instance Mul Z Z Z\n\
\instance Mul n Z Z\n\
\instance Mul Z m Z\n\
\\n\
\instance Mul n m r => Mul (Neg n) (Neg m) r\n\
\instance Mul (S n) m r => Mul (S n) (Neg m) (Neg r)\n\
\instance Mul n (S m) r => Mul (Neg n) (S m) (Neg r)\n\
\instance (Mul n m k, Add n k k', Add m k' r) => Mul (S n) (S m) (S r)\n\
\\n\
\class DivRem n m q r | n m -> q r where\n\
\ divRem :: n -> m -> (q, r)\n\
\ divRem = undefined\n\
\\n\
\class DivRemBranch0 b n m q r | b n m -> q r where\n\
\ divRemBranch0 :: b -> n -> m -> (q, r)\n\
\ divRemBranch0 = undefined\n\
\\n\
\instance DivRemBranch0 LT n m Z n\n\
\instance DivRemBranch0 EQ n m (S Z) Z\n\
\instance (Sub n m n', DivRem n' m q r) => DivRemBranch0 GT n m (S q) r\n\
\\n\
\instance (Compare n m c, DivRemBranch0 c n m q r) => DivRem n m q r\n\
\\n\
\class Div n m k | n m -> k where\n\
\ div :: n -> m -> k\n\
\ div = undefined\n\
\\n\
\class Rem n m r | n m -> r where\n\
\ rem :: n -> m -> r\n\
\ rem = undefined\n\
\\n\
\instance DivRem n m q r => Div n m q\n\
\instance DivRem n m q r => Rem n m r\n\
\\n\
\\n\
\"
|
c26667607aed2edfa289ed39bc4c54c0c7bfdedc2f9d3c1086120b991f5bb1b1 | cstar/ejabberd-old | mod_last_sdb.erl | %%%----------------------------------------------------------------------
%%% File : mod_last_sdb.erl
%%% Author : < >
%%% Purpose : jabber:iq:last support (JEP-0012)
%%% Created : 26/02/09 by < >
%%%
%%% ejabberd, Copyright (C) 2002-2010 ProcessOne
%%%
%%% This program is free software; you can redistribute it and/or
%%% modify it under the terms of the GNU General Public License as
%%% published by the Free Software Foundation; either version 2 of the
%%% License, or (at your option) any later version.
%%%
%%% This program is distributed in the hope that it will be useful,
%%% but WITHOUT ANY WARRANTY; without even the implied warranty of
%%% MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
%%% General Public License for more details.
%%%
%%% You should have received a copy of the GNU General Public License
%%% along with this program; if not, write to the Free Software
%%% Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA
%%% 02111-1307 USA
%%%
%%%----------------------------------------------------------------------
-module(mod_last_sdb).
-author('').
-behaviour(gen_mod).
-export([start/2,
stop/1,
process_local_iq/3,
process_sm_iq/3,
on_presence_update/4,
store_last_info/4,
get_last_info/2,
remove_user/2]).
-include("ejabberd.hrl").
-include("jlib.hrl").
-include("mod_privacy.hrl").
-define(DOMAIN, "last_activity").
start(Host, Opts) ->
erlsdb:start(),
{ok, Domains, _Token} = erlsdb:list_domains(),
case lists:member(?DOMAIN, Domains) of
false ->
erlsdb:create_domain(?DOMAIN),
?INFO_MSG("SimpleDB domain ~s created", [?DOMAIN]);
true -> ok
end,
IQDisc = gen_mod:get_opt(iqdisc, Opts, one_queue),
gen_iq_handler:add_iq_handler(ejabberd_local, Host, ?NS_LAST,
?MODULE, process_local_iq, IQDisc),
gen_iq_handler:add_iq_handler(ejabberd_sm, Host, ?NS_LAST,
?MODULE, process_sm_iq, IQDisc),
ejabberd_hooks:add(remove_user, Host,
?MODULE, remove_user, 50),
ejabberd_hooks:add(unset_presence_hook, Host,
?MODULE, on_presence_update, 50).
stop(Host) ->
ejabberd_hooks:delete(remove_user, Host,
?MODULE, remove_user, 50),
ejabberd_hooks:delete(unset_presence_hook, Host,
?MODULE, on_presence_update, 50),
gen_iq_handler:remove_iq_handler(ejabberd_local, Host, ?NS_LAST),
gen_iq_handler:remove_iq_handler(ejabberd_sm, Host, ?NS_LAST).
%%%
%%% Uptime of ejabberd node
%%%
process_local_iq(_From, _To, #iq{type = Type, sub_el = SubEl} = IQ) ->
case Type of
set ->
IQ#iq{type = error, sub_el = [SubEl, ?ERR_NOT_ALLOWED]};
get ->
Sec = get_node_uptime(),
IQ#iq{type = result,
sub_el = [{xmlelement, "query",
[{"xmlns", ?NS_LAST},
{"seconds", integer_to_list(Sec)}],
[]}]}
end.
%% @spec () -> integer()
%% @doc Get the uptime of the ejabberd node, expressed in seconds.
%% When ejabberd is starting, ejabberd_config:start/0 stores the datetime.
get_node_uptime() ->
case ejabberd_config:get_local_option(node_start) of
{_, _, _} = StartNow ->
now_to_seconds(now()) - now_to_seconds(StartNow);
_undefined ->
trunc(element(1, erlang:statistics(wall_clock))/1000)
end.
now_to_seconds({MegaSecs, Secs, _MicroSecs}) ->
MegaSecs * 1000000 + Secs.
%%%
%%% Serve queries about user last online
%%%
process_sm_iq(From, To, #iq{type = Type, sub_el = SubEl} = IQ) ->
case Type of
set ->
IQ#iq{type = error, sub_el = [SubEl, ?ERR_NOT_ALLOWED]};
get ->
User = To#jid.luser,
Server = To#jid.lserver,
{Subscription, _Groups} =
ejabberd_hooks:run_fold(
roster_get_jid_info, Server,
{none, []}, [User, Server, From]),
if
(Subscription == both) or (Subscription == from) ->
UserListRecord = ejabberd_hooks:run_fold(
privacy_get_user_list, Server,
#userlist{},
[User, Server]),
case ejabberd_hooks:run_fold(
privacy_check_packet, Server,
allow,
[User, Server, UserListRecord,
{From, To,
{xmlelement, "presence", [], []}},
out]) of
allow ->
get_last(IQ, SubEl, User, Server);
deny ->
IQ#iq{type = error,
sub_el = [SubEl, ?ERR_NOT_ALLOWED]}
end;
true ->
IQ#iq{type = error,
sub_el = [SubEl, ?ERR_NOT_ALLOWED]}
end
end.
get_last(IQ, SubEl, LUser, LServer) ->
case get_last_info(LUser, LServer) of
not_found ->
IQ#iq{type = error, sub_el = [SubEl, ?ERR_SERVICE_UNAVAILABLE]};
{ok, TimeStamp, Status}->
TimeStamp2 = now_to_seconds(now()),
Sec = TimeStamp2 - TimeStamp,
IQ#iq{type = result,
sub_el = [{xmlelement, "query",
[{"xmlns", ?NS_LAST},
{"seconds", integer_to_list(Sec)}],
[{xmlcdata, Status}]}]}
end.
on_presence_update(User, Server, _Resource, Status) ->
TimeStamp = now_to_seconds(now()),
store_last_info(User, Server, TimeStamp, Status).
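%% Persist the last-activity timestamp (in seconds) and the status string for
%% "user@server" as attributes of the corresponding item in the ?DOMAIN
%% SimpleDB domain, replacing any previous values.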
store_last_info(User, Server, TimeStamp, Status) ->
LUser = jlib:nodeprep(User),
LServer = jlib:nameprep(Server),
JID = LUser ++ "@" ++ LServer,
catch erlsdb:replace_attributes(?DOMAIN, JID, [{"timestamp", TimeStamp}, {"status", Status}]).
%% @spec (LUser::string(), LServer::string()) ->
%% {ok, Timestamp::integer(), Status::string()} | not_found
get_last_info(LUser, LServer) ->
JID = LUser ++ "@" ++ LServer,
case catch erlsdb:get_attributes(?DOMAIN, JID) of
{'EXIT', _Reason} ->
not_found;
{ok, []} ->
not_found;
{ok,Activity} ->
{ok, proplists:get_value("timestamp", Activity), proplists:get_value("status", Activity)}
end.
remove_user(User, Server) ->
LUser = jlib:nodeprep(User),
LServer = jlib:nameprep(Server),
JID = LUser ++ "@" ++LServer,
catch erlsdb:delete_attributes(?DOMAIN, JID, ["timestamp", "status"]).
| null | https://raw.githubusercontent.com/cstar/ejabberd-old/559f8b6b0a935710fe93e9afacb4270d6d6ea00f/src/mod_last_sdb.erl | erlang | ----------------------------------------------------------------------
File : mod_last_sdb.erl
Purpose : jabber:iq:last support (JEP-0012)
This program is free software; you can redistribute it and/or
License, or (at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
General Public License for more details.
along with this program; if not, write to the Free Software
----------------------------------------------------------------------
Uptime of ejabberd node
@spec (LUser::string(), LServer::string()) ->
{ok, Timestamp::integer(), Status::string()} | not_found | Author : < >
Created : 26/02/09 by < >
ejabberd , Copyright ( C ) 2002 - 2010 ProcessOne
modify it under the terms of the GNU General Public License as
published by the Free Software Foundation ; either version 2 of the
You should have received a copy of the GNU General Public License
Foundation , Inc. , 59 Temple Place , Suite 330 , Boston , MA
02111 - 1307 USA
-module(mod_last_sdb).
-author('').
-behaviour(gen_mod).
-export([start/2,
stop/1,
process_local_iq/3,
process_sm_iq/3,
on_presence_update/4,
store_last_info/4,
get_last_info/2,
remove_user/2]).
-include("ejabberd.hrl").
-include("jlib.hrl").
-include("mod_privacy.hrl").
-define(DOMAIN, "last_activity").
start(Host, Opts) ->
erlsdb:start(),
{ok, Domains, _Token} = erlsdb:list_domains(),
case lists:member(?DOMAIN, Domains) of
false ->
erlsdb:create_domain(?DOMAIN),
?INFO_MSG("SimpleDB domain ~s created", [?DOMAIN]);
true -> ok
end,
IQDisc = gen_mod:get_opt(iqdisc, Opts, one_queue),
gen_iq_handler:add_iq_handler(ejabberd_local, Host, ?NS_LAST,
?MODULE, process_local_iq, IQDisc),
gen_iq_handler:add_iq_handler(ejabberd_sm, Host, ?NS_LAST,
?MODULE, process_sm_iq, IQDisc),
ejabberd_hooks:add(remove_user, Host,
?MODULE, remove_user, 50),
ejabberd_hooks:add(unset_presence_hook, Host,
?MODULE, on_presence_update, 50).
stop(Host) ->
ejabberd_hooks:delete(remove_user, Host,
?MODULE, remove_user, 50),
ejabberd_hooks:delete(unset_presence_hook, Host,
?MODULE, on_presence_update, 50),
gen_iq_handler:remove_iq_handler(ejabberd_local, Host, ?NS_LAST),
gen_iq_handler:remove_iq_handler(ejabberd_sm, Host, ?NS_LAST).
process_local_iq(_From, _To, #iq{type = Type, sub_el = SubEl} = IQ) ->
case Type of
set ->
IQ#iq{type = error, sub_el = [SubEl, ?ERR_NOT_ALLOWED]};
get ->
Sec = get_node_uptime(),
IQ#iq{type = result,
sub_el = [{xmlelement, "query",
[{"xmlns", ?NS_LAST},
{"seconds", integer_to_list(Sec)}],
[]}]}
end.
( ) - > integer ( )
@doc Get the uptime of the ejabberd node , expressed in seconds .
When ejabberd is starting , ejabberd_config : start/0 stores the datetime .
get_node_uptime() ->
case ejabberd_config:get_local_option(node_start) of
{_, _, _} = StartNow ->
now_to_seconds(now()) - now_to_seconds(StartNow);
_undefined ->
trunc(element(1, erlang:statistics(wall_clock))/1000)
end.
now_to_seconds({MegaSecs, Secs, _MicroSecs}) ->
MegaSecs * 1000000 + Secs.
Serve queries about user last online
process_sm_iq(From, To, #iq{type = Type, sub_el = SubEl} = IQ) ->
case Type of
set ->
IQ#iq{type = error, sub_el = [SubEl, ?ERR_NOT_ALLOWED]};
get ->
User = To#jid.luser,
Server = To#jid.lserver,
{Subscription, _Groups} =
ejabberd_hooks:run_fold(
roster_get_jid_info, Server,
{none, []}, [User, Server, From]),
if
(Subscription == both) or (Subscription == from) ->
UserListRecord = ejabberd_hooks:run_fold(
privacy_get_user_list, Server,
#userlist{},
[User, Server]),
case ejabberd_hooks:run_fold(
privacy_check_packet, Server,
allow,
[User, Server, UserListRecord,
{From, To,
{xmlelement, "presence", [], []}},
out]) of
allow ->
get_last(IQ, SubEl, User, Server);
deny ->
IQ#iq{type = error,
sub_el = [SubEl, ?ERR_NOT_ALLOWED]}
end;
true ->
IQ#iq{type = error,
sub_el = [SubEl, ?ERR_NOT_ALLOWED]}
end
end.
get_last(IQ, SubEl, LUser, LServer) ->
case get_last_info(LUser, LServer) of
not_found ->
IQ#iq{type = error, sub_el = [SubEl, ?ERR_SERVICE_UNAVAILABLE]};
{ok, TimeStamp, Status}->
TimeStamp2 = now_to_seconds(now()),
Sec = TimeStamp2 - TimeStamp,
IQ#iq{type = result,
sub_el = [{xmlelement, "query",
[{"xmlns", ?NS_LAST},
{"seconds", integer_to_list(Sec)}],
[{xmlcdata, Status}]}]}
end.
on_presence_update(User, Server, _Resource, Status) ->
TimeStamp = now_to_seconds(now()),
store_last_info(User, Server, TimeStamp, Status).
store_last_info(User, Server, TimeStamp, Status) ->
LUser = jlib:nodeprep(User),
LServer = jlib:nameprep(Server),
JID = LUser ++ "@" ++ LServer,
catch erlsdb:replace_attributes(?DOMAIN, JID, [{"timestamp", TimeStamp}, {"status", Status}]).
get_last_info(LUser, LServer) ->
JID = LUser ++ "@" ++ LServer,
case catch erlsdb:get_attributes(?DOMAIN, JID) of
{'EXIT', _Reason} ->
not_found;
{ok, []} ->
not_found;
{ok,Activity} ->
{ok, proplists:get_value("timestamp", Activity), proplists:get_value("status", Activity)}
end.
remove_user(User, Server) ->
LUser = jlib:nodeprep(User),
LServer = jlib:nameprep(Server),
JID = LUser ++ "@" ++LServer,
catch erlsdb:delete_attributes(?DOMAIN, JID, ["timestamp", "status"]).
|
a845b806354c8c7109b90cbf7f93bc670302545db17183ba5e7221cb22c3d273 | rmculpepper/crypto | sodium.rkt | Copyright 2018
;;
;; This library is free software: you can redistribute it and/or modify
;; it under the terms of the GNU Lesser General Public License as published
;; by the Free Software Foundation, either version 3 of the License, or
;; (at your option) any later version.
;;
;; This library is distributed in the hope that it will be useful,
;; but WITHOUT ANY WARRANTY; without even the implied warranty of
;; MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
;; GNU Lesser General Public License for more details.
;;
;; You should have received a copy of the GNU Lesser General Public License
;; along with this library. If not, see <http://www.gnu.org/licenses/>.
#lang racket/base
(require "private/sodium/factory.rkt")
(provide sodium-factory)
| null | https://raw.githubusercontent.com/rmculpepper/crypto/fec745e8af7e3f4d5eaf83407dde2817de4c2eb0/crypto-lib/sodium.rkt | racket |
This library is free software: you can redistribute it and/or modify
it under the terms of the GNU Lesser General Public License as published
(at your option) any later version.
This library is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
along with this library. If not, see </>. | Copyright 2018
by the Free Software Foundation , either version 3 of the License , or
GNU Lesser General Public License for more details .
You should have received a copy of the GNU Lesser General Public License
#lang racket/base
(require "private/sodium/factory.rkt")
(provide sodium-factory)
|
b417512ab3de4be4ed9e85e1011e75621c03d933f659fe6276c13fa2e16983a9 | mlcfp/zenacy-html | Filter.hs | {-# LANGUAGE BangPatterns #-}
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE MultiWayIf #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE ScopedTypeVariables #-}
-- | Filters and transforms for HTML trees.
module Zenacy.HTML.Internal.Filter
( htmlSpaceRemove
) where
import Zenacy.HTML.Internal.Core
import Zenacy.HTML.Internal.HTML
import Zenacy.HTML.Internal.Oper
import Data.Maybe
( mapMaybe
)
-- | Removes whitespace and comments from an HTML structure.
-- Document elements are not accepted, and only non-empty text nodes
and element nodes are kept . @pre@ , @code@ , @samp@ , and @kdb@ elements
-- are passed without modification, since whitespace is typically
-- significant in those elements.
htmlSpaceRemove :: HTMLNode -> Maybe HTMLNode
htmlSpaceRemove = go
where
go x = case x of
HTMLText {}
| htmlTextSpace x ->
Nothing
| otherwise ->
Just x
HTMLElement n s a c
| n == "pre" || n == "code" || n == "samp" || n == "kbd" ->
Just x
| otherwise ->
Just $ HTMLElement n s a $ mapMaybe go c
_otherwise ->
Nothing
| null | https://raw.githubusercontent.com/mlcfp/zenacy-html/b4af86fecc6fbbe1c7501e2fa75e6aa28206ebcb/src/Zenacy/HTML/Internal/Filter.hs | haskell | # LANGUAGE BangPatterns #
# LANGUAGE MultiWayIf #
# LANGUAGE OverloadedStrings #
| Filters and transforms for HTML trees.
| Removes whitespace and comments from an HTML structure.
Document elements are not accepted, and only non-empty text nodes
are passed without modification, since whitespace is typically
significant in those elements. | # LANGUAGE LambdaCase #
# LANGUAGE RecordWildCards #
# LANGUAGE ScopedTypeVariables #
module Zenacy.HTML.Internal.Filter
( htmlSpaceRemove
) where
import Zenacy.HTML.Internal.Core
import Zenacy.HTML.Internal.HTML
import Zenacy.HTML.Internal.Oper
import Data.Maybe
( mapMaybe
)
and element nodes are kept . @pre@ , @code@ , @samp@ , and @kdb@ elements
htmlSpaceRemove :: HTMLNode -> Maybe HTMLNode
htmlSpaceRemove = go
where
go x = case x of
HTMLText {}
| htmlTextSpace x ->
Nothing
| otherwise ->
Just x
HTMLElement n s a c
| n == "pre" || n == "code" || n == "samp" || n == "kbd" ->
Just x
| otherwise ->
Just $ HTMLElement n s a $ mapMaybe go c
_otherwise ->
Nothing
|
ee5f1363ca78d24eb0487173f1964d8a8ef1b9733c42f8a9e0a0414a4bcb5aa4 | ocaml-ppx/ocamlformat | exp_grouping.ml | let () =
Lwt_main.run
begin
let a = "a" in
let b = "b" in
let c = "c" in
Lwt.return "test"
end
let () =
Lwt_main.run
( let a = "a" in
let b = "b" in
let c = "c" in
Lwt.return "test" )
let () =
List.iter begin fun v ->
(* do a lot of things *)
let a = "a" in
let b = "b" in
let c = "c" in
()
end values
let () =
List.iter
(fun v ->
(* do a lot of things *)
let a = "a" in
let b = "b" in
let c = "c" in
())
values
let () =
foooooooo
begin
fooooooooooooo ;
foooooooo foooooooooooo ;
fooooooooooo foooooooooo ;
foooooooooooooooo
end
let () =
foooooooo
( fooooooooooooo ;
foooooooo foooooooooooo ;
fooooooooooo foooooooooo ;
foooooooooooooooo )
let () =
foooooooo
begin
if foooooooooooooooooooooooooooo then
if foooooooooooooooooooooooooooo then
foooooooooooooooooo
else foooooooooooooooooooooooooo
else
if foooooooooooooooooooooooooooooooo then
foooooooooooooooooo
begin
if foooooooooooooooooooooooooooo then
if foooooooooooooooooooooooooooo then foooooooooooooooooooooooo
else foooooooooooooooooooooooooo
else
if foooooooooooooooooooooooooooooooo then
fooooooooooooooooooooooooooooooooooo
else
if foooooooooooooooooo then foooooooooooooooooooooooooooooooooo
else fooooooooooooooooooooo
end
else
if foooooooooooooooooo then foooooooooooooooooooooooooooooooooo
else fooooooooooooooooooooo
end
let () =
foooooooo
( if foooooooooooooooooooooooooooo then
if foooooooooooooooooooooooooooo then foooooooooooooooooooooooo
else
foooooooooooooooooooooooooooo
( if foooooooooooooooooooooooooooo then
if foooooooooooooooooooooooooooo then foooooooooooooooooooooooo
else foooooooooooooooooooooooooo
else
if foooooooooooooooooooooooooooooooo then
fooooooooooooooooooooooooooooooooooo
else
if foooooooooooooooooo then foooooooooooooooooooooooooooooooooo
else fooooooooooooooooooooo )
else
if foooooooooooooooooooooooooooooooo then
fooooooooooooooooooooooooooooooooooo
else
if foooooooooooooooooo then foooooooooooooooooooooooooooooooooo
else fooooooooooooooooooooo )
let _ = a |> let a = b in c
let _ = ( let a = b in c ) |> d
let _ = a := let a = b in c
let _ = ( let a = b in c ) := d
let _ = a + let a = b in c
let _ = ( let a = b in c ) + d
let _ = f ( let a = b in c )
let _ = ( let a = b in c ) d
let _ = a#f ( let a = b in c )
let _ = ( let a = b in c ) #f
let _ = A ( let a = b in c )
let _ = `A ( let a = b in c )
let _ = { x= ( let a = b in c ) }
let _ = { ( let a = b in c ) with a= b }
let _ = {< x = let a = b in c >}
let _ = x <- ( let a = b in c )
let _ = ( let a = b in c ) .x
let _ = ( let a = b in c ).x <- d
let _ = ( ( let a = b in c ) , d )
let _ = ( let a = b in c :> t )
let _ = let a = b in c :: d
let _ = a :: ( let a = b in c )
let _ = [ ( let a = b in c ) ]
let _ = [| ( let a = b in c ) |]
[@@@ocamlformat "if-then-else=compact"]
let _ =
if x
then begin
foo.fooooo <- Fooo.foo fooo foo.fooooo;
Fooo fooo
end
else if y then begin f 0; f 2 end
else begin
foo.fooooo <- Fooo.foo fooo foo.fooooo;
Fooo fooo
end
[@@@ocamlformat "if-then-else=fit-or-vertical"]
let _ =
if x
then begin
foo.fooooo <- Fooo.foo fooo foo.fooooo;
Fooo fooo
end
else if y then begin f 0; f 2 end
else begin
foo.fooooo <- Fooo.foo fooo foo.fooooo;
Fooo fooo
end
[@@@ocamlformat "if-then-else=keyword-first"]
let _ =
if x
then begin
foo.fooooo <- Fooo.foo fooo foo.fooooo;
Fooo fooo
end
else if y then begin f 0; f 2 end
else begin
foo.fooooo <- Fooo.foo fooo foo.fooooo;
Fooo fooo
end
[@@@ocamlformat "if-then-else=k-r"]
let _ =
if x
then begin
foo.fooooo <- Fooo.foo fooo foo.fooooo;
Fooo fooo
end
else if y then begin f 0; f 2 end
else begin
foo.fooooo <- Fooo.foo fooo foo.fooooo;
Fooo fooo
end
let _ =
match x with
| A -> begin
match B with
| A -> fooooooooooooo
end
| A -> begin
match B with
| A -> fooooooooooooo
| B -> fooooooooooooo
end
| A -> begin
match B with
| A -> fooooooooooooo
| B -> fooooooooooooo
| C -> fooooooooooooo
| D -> fooooooooooooo
end
let () =
begin
add_test @@
let test_name = "Test 1" in
test_name >:: fun _ ->
assert_equal "a" "a"
end;
begin
add_test @@
let test_name = "Test 2" in
test_name >:: fun _ ->
assert_equal "b" "b"
end
let _ = begin end
let _ = begin (* foo *) end
let _ = begin%ext end
let _ = begin%ext (* foo *) end
let _ = begin x y end
let _ = begin (* foo *) x y end
let _ = begin%ext x y end
let _ = begin%ext (* foo *) x y end
let _ =
begin[@landmark "parse_constant_dividends"]
market_data_items := ()
end
| null | https://raw.githubusercontent.com/ocaml-ppx/ocamlformat/1bc12c4101dd57644f03881aa809a31e9addac0a/test/passing/tests/exp_grouping.ml | ocaml | do a lot of things
do a lot of things
foo
foo
foo
foo | let () =
Lwt_main.run
begin
let a = "a" in
let b = "b" in
let c = "c" in
Lwt.return "test"
end
let () =
Lwt_main.run
( let a = "a" in
let b = "b" in
let c = "c" in
Lwt.return "test" )
let () =
List.iter begin fun v ->
let a = "a" in
let b = "b" in
let c = "c" in
()
end values
let () =
List.iter
(fun v ->
let a = "a" in
let b = "b" in
let c = "c" in
())
values
let () =
foooooooo
begin
fooooooooooooo ;
foooooooo foooooooooooo ;
fooooooooooo foooooooooo ;
foooooooooooooooo
end
let () =
foooooooo
( fooooooooooooo ;
foooooooo foooooooooooo ;
fooooooooooo foooooooooo ;
foooooooooooooooo )
let () =
foooooooo
begin
if foooooooooooooooooooooooooooo then
if foooooooooooooooooooooooooooo then
foooooooooooooooooo
else foooooooooooooooooooooooooo
else
if foooooooooooooooooooooooooooooooo then
foooooooooooooooooo
begin
if foooooooooooooooooooooooooooo then
if foooooooooooooooooooooooooooo then foooooooooooooooooooooooo
else foooooooooooooooooooooooooo
else
if foooooooooooooooooooooooooooooooo then
fooooooooooooooooooooooooooooooooooo
else
if foooooooooooooooooo then foooooooooooooooooooooooooooooooooo
else fooooooooooooooooooooo
end
else
if foooooooooooooooooo then foooooooooooooooooooooooooooooooooo
else fooooooooooooooooooooo
end
let () =
foooooooo
( if foooooooooooooooooooooooooooo then
if foooooooooooooooooooooooooooo then foooooooooooooooooooooooo
else
foooooooooooooooooooooooooooo
( if foooooooooooooooooooooooooooo then
if foooooooooooooooooooooooooooo then foooooooooooooooooooooooo
else foooooooooooooooooooooooooo
else
if foooooooooooooooooooooooooooooooo then
fooooooooooooooooooooooooooooooooooo
else
if foooooooooooooooooo then foooooooooooooooooooooooooooooooooo
else fooooooooooooooooooooo )
else
if foooooooooooooooooooooooooooooooo then
fooooooooooooooooooooooooooooooooooo
else
if foooooooooooooooooo then foooooooooooooooooooooooooooooooooo
else fooooooooooooooooooooo )
let _ = a |> let a = b in c
let _ = ( let a = b in c ) |> d
let _ = a := let a = b in c
let _ = ( let a = b in c ) := d
let _ = a + let a = b in c
let _ = ( let a = b in c ) + d
let _ = f ( let a = b in c )
let _ = ( let a = b in c ) d
let _ = a#f ( let a = b in c )
let _ = ( let a = b in c ) #f
let _ = A ( let a = b in c )
let _ = `A ( let a = b in c )
let _ = { x= ( let a = b in c ) }
let _ = { ( let a = b in c ) with a= b }
let _ = {< x = let a = b in c >}
let _ = x <- ( let a = b in c )
let _ = ( let a = b in c ) .x
let _ = ( let a = b in c ).x <- d
let _ = ( ( let a = b in c ) , d )
let _ = ( let a = b in c :> t )
let _ = let a = b in c :: d
let _ = a :: ( let a = b in c )
let _ = [ ( let a = b in c ) ]
let _ = [| ( let a = b in c ) |]
[@@@ocamlformat "if-then-else=compact"]
let _ =
if x
then begin
foo.fooooo <- Fooo.foo fooo foo.fooooo;
Fooo fooo
end
else if y then begin f 0; f 2 end
else begin
foo.fooooo <- Fooo.foo fooo foo.fooooo;
Fooo fooo
end
[@@@ocamlformat "if-then-else=fit-or-vertical"]
let _ =
if x
then begin
foo.fooooo <- Fooo.foo fooo foo.fooooo;
Fooo fooo
end
else if y then begin f 0; f 2 end
else begin
foo.fooooo <- Fooo.foo fooo foo.fooooo;
Fooo fooo
end
[@@@ocamlformat "if-then-else=keyword-first"]
let _ =
if x
then begin
foo.fooooo <- Fooo.foo fooo foo.fooooo;
Fooo fooo
end
else if y then begin f 0; f 2 end
else begin
foo.fooooo <- Fooo.foo fooo foo.fooooo;
Fooo fooo
end
[@@@ocamlformat "if-then-else=k-r"]
let _ =
if x
then begin
foo.fooooo <- Fooo.foo fooo foo.fooooo;
Fooo fooo
end
else if y then begin f 0; f 2 end
else begin
foo.fooooo <- Fooo.foo fooo foo.fooooo;
Fooo fooo
end
let _ =
match x with
| A -> begin
match B with
| A -> fooooooooooooo
end
| A -> begin
match B with
| A -> fooooooooooooo
| B -> fooooooooooooo
end
| A -> begin
match B with
| A -> fooooooooooooo
| B -> fooooooooooooo
| C -> fooooooooooooo
| D -> fooooooooooooo
end
let () =
begin
add_test @@
let test_name = "Test 1" in
test_name >:: fun _ ->
assert_equal "a" "a"
end;
begin
add_test @@
let test_name = "Test 2" in
test_name >:: fun _ ->
assert_equal "b" "b"
end
let _ = begin end
let _ = begin%ext end
let _ = begin x y end
let _ = begin%ext x y end
let _ =
begin[@landmark "parse_constant_dividends"]
market_data_items := ()
end
|
9404e8ffa19ab375e6b76f34ab45478b46eca66ca7e5614cb558dba3ff9d979f | 2600hz/kazoo | hon_util.erl | %%%-----------------------------------------------------------------------------
%%% (C) 2012-2020, 2600Hz
%%% @doc
%%% @author
%%%
%%% This Source Code Form is subject to the terms of the Mozilla Public
%%% License, v. 2.0. If a copy of the MPL was not distributed with this
%%% file, You can obtain one at http://mozilla.org/MPL/2.0/.
%%%
%%% @end
%%%-----------------------------------------------------------------------------
-module(hon_util).
-export([candidate_rates/1, candidate_rates/2, candidate_rates/3
,matching_rates/2
,sort_rates/1
,sort_rates_by_cost/1, sort_rates_by_weight/1
,account_ratedeck/1, account_ratedeck/2
]).
-include("hotornot.hrl").
-define(MIN_PREFIX_LEN, 1). % how many chars to strip off the e164 DID
-type candidate_rates_return() :: {'ok', kzd_rates:docs()} |
{'error', 'did_too_short'} |
kz_datamgr:data_error().
-spec candidate_rates(kz_term:ne_binary()) ->
candidate_rates_return().
candidate_rates(ToDID) ->
candidate_rates(ToDID, 'undefined', 'undefined').
-spec candidate_rates(kz_term:ne_binary(), kz_term:api_ne_binary()) ->
candidate_rates_return().
candidate_rates(ToDID, AccountId) ->
candidate_rates(ToDID, AccountId, 'undefined').
-spec candidate_rates(kz_term:ne_binary(), kz_term:api_ne_binary(), kz_term:api_ne_binary()) ->
candidate_rates_return().
candidate_rates(ToDID, AccountId, RatedeckId) ->
E164 = knm_converters:normalize(ToDID),
find_candidate_rates(E164, AccountId, RatedeckId).
-spec find_candidate_rates(kz_term:ne_binary(), kz_term:api_ne_binary(), kz_term:api_ne_binary()) ->
candidate_rates_return().
find_candidate_rates(E164, AccountId, RatedeckId)
when byte_size(E164) > ?MIN_PREFIX_LEN ->
case hotornot_config:should_use_trie() of
'false' -> fetch_candidate_rates(E164, AccountId, RatedeckId);
'true' -> find_trie_rates(E164, AccountId, RatedeckId)
end;
find_candidate_rates(DID, _AccountId, _RatedeckId) ->
lager:debug("DID ~s is too short", [DID]),
{'error', 'did_too_short'}.
-spec find_trie_rates(kz_term:ne_binary(), kz_term:api_ne_binary(), kz_term:api_ne_binary()) ->
candidate_rates_return().
find_trie_rates(E164, AccountId, RatedeckId) ->
case hon_trie:match_did(kz_binary:remove_non_numeric(E164), AccountId, RatedeckId) of
{'ok', Result} -> {'ok', Result};
{'error', _E} ->
lager:warning("got error while searching did in trie, falling back to DB search"),
Candidates = fetch_candidate_rates(E164, AccountId, RatedeckId),
maybe_update_trie(RatedeckId, Candidates),
Candidates
end.
-spec maybe_update_trie(kz_term:ne_binary(), candidate_rates_return()) -> 'ok'.
maybe_update_trie(RatedeckId, Candidates) ->
maybe_update_trie(RatedeckId, Candidates, hotornot_config:trie_module()).
-spec maybe_update_trie(kz_term:ne_binary(), candidate_rates_return(), atom()) -> 'ok'.
maybe_update_trie(RatedeckId, {'ok', [_|_]=Rates}, 'hon_trie_lru') ->
hon_trie_lru:cache_rates(RatedeckId, Rates);
maybe_update_trie(_RatedeckId, _Candidates, _Module) ->
'ok'.
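%% Rates fetched from the database are only cached back into the trie when the
%% configured trie module is the LRU implementation (hon_trie_lru); any other
%% trie module is left untouched.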
-spec fetch_candidate_rates(kz_term:ne_binary(), kz_term:api_ne_binary(), kz_term:api_ne_binary()) ->
candidate_rates_return().
fetch_candidate_rates(E164, AccountId, RatedeckId) ->
fetch_candidate_rates(E164, AccountId, RatedeckId, kzdb_ratedeck:prefix_keys(E164)).
-spec fetch_candidate_rates(kz_term:ne_binary(), kz_term:api_ne_binary(), kz_term:api_ne_binary(), kz_term:ne_binaries()) ->
candidate_rates_return().
fetch_candidate_rates(_E164, _AccountId, _RatedeckId, []) ->
{'error', 'did_too_short'};
fetch_candidate_rates(E164, AccountId, RatedeckId, Keys) ->
lager:debug("searching for prefixes for ~s: ~p", [E164, Keys]),
RatedeckDb = account_ratedeck(AccountId, RatedeckId),
case fetch_rates_from_ratedeck(RatedeckDb, Keys) of
{'ok', []}=OK -> OK;
{'error', _}=E -> E;
{'ok', ViewRows} ->
{'ok'
,[kzd_rates:set_ratedeck_id(kz_json:get_json_value(<<"doc">>, ViewRow)
,kzd_ratedeck:format_ratedeck_id(RatedeckDb)
)
|| ViewRow <- ViewRows
]
}
end.
-spec fetch_rates_from_ratedeck(kz_term:ne_binary(), [integer()]) ->
kz_datamgr:get_results_return().
fetch_rates_from_ratedeck(RatedeckDb, Keys) ->
kz_datamgr:get_results(RatedeckDb
,<<"rates/lookup">>
,[{'keys', Keys}
,'include_docs'
]
).
-ifdef(TEST).
-spec account_ratedeck(kz_term:api_ne_binary()) -> kz_term:ne_binary().
account_ratedeck(_AccountId) -> ?KZ_RATES_DB.
-spec account_ratedeck(kz_term:api_ne_binary(), kz_term:api_ne_binary()) -> kz_term:ne_binary().
account_ratedeck(_AccountId, _RatedeckId) -> ?KZ_RATES_DB.
-else.
-spec account_ratedeck(kz_term:api_ne_binary()) -> kz_term:ne_binary().
account_ratedeck(AccountId) ->
account_ratedeck(AccountId, 'undefined').
-spec account_ratedeck(kz_term:api_ne_binary(), kz_term:api_ne_binary()) -> kz_term:ne_binary().
account_ratedeck('undefined', 'undefined') ->
lager:info("no account supplied, using default ratedeck"),
hotornot_config:default_ratedeck();
account_ratedeck('undefined', <<_/binary>> = RatedeckId) ->
lager:info("using supplied ratedeck ~s", [RatedeckId]),
kzd_ratedeck:format_ratedeck_db(RatedeckId);
account_ratedeck(AccountId, _RatedeckId) ->
case kz_services_ratedecks:id(AccountId) of
'undefined' ->
lager:debug("failed to find account ~s ratedeck, checking reseller", [AccountId]),
reseller_ratedeck(AccountId, kz_services_reseller:get_id(AccountId));
RatedeckId ->
lager:info("using account ratedeck ~s for account ~s", [RatedeckId, AccountId]),
kzd_ratedeck:format_ratedeck_db(RatedeckId)
end.
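%% Falls back from an account with no assigned ratedeck to its reseller's
%% ratedeck, and finally to the system default ratedeck.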
-spec reseller_ratedeck(kz_term:ne_binary(), kz_term:api_ne_binary()) -> kz_term:ne_binary().
reseller_ratedeck(_AccountId, 'undefined') ->
lager:debug("no reseller for ~s, using default ratedeck", [_AccountId]),
hotornot_config:default_ratedeck();
reseller_ratedeck(ResellerId, ResellerId) ->
lager:debug("account ~s is own reseller, using system setting", [ResellerId]),
hotornot_config:default_ratedeck();
reseller_ratedeck(_AccountId, ResellerId) ->
case kz_services_ratedecks:id(ResellerId) of
'undefined' ->
lager:debug("failed to find reseller ~s ratedeck, using default", [_AccountId]),
hotornot_config:default_ratedeck();
RatedeckId ->
lager:info("using reseller ~s ratedeck ~s for account ~s"
,[ResellerId, RatedeckId, _AccountId]
),
kzd_ratedeck:format_ratedeck_db(RatedeckId)
end.
-endif.
-spec matching_rates(kzd_rates:docs(), kapi_rate:req()) ->
kzd_rates:docs().
matching_rates(Rates, RateReq) ->
FilterList = hotornot_config:filter_list(),
lists:foldl(fun(Filter, Acc) ->
lists:filter(fun(Rate) -> matching_rate(Rate, Filter, RateReq) end, Acc)
end
,Rates
,FilterList
).
-spec sort_rates(kzd_rates:docs()) -> kzd_rates:docs().
sort_rates(Rates) ->
case hotornot_config:should_sort_by_weight() of
'true' -> sort_rates_by_weight(Rates);
'false' -> sort_rates_by_cost(Rates)
end.
-spec sort_rates_by_weight(kzd_rates:docs()) -> kzd_rates:docs().
sort_rates_by_weight(Rates) ->
lists:usort(fun sort_rate_by_weight/2, Rates).
-spec sort_rates_by_cost(kzd_rates:docs()) -> kzd_rates:docs().
sort_rates_by_cost(Rates) ->
lists:usort(fun sort_rate_by_cost/2, Rates).
%% Private helper functions
-spec matching_rate(kzd_rates:doc(), kz_term:ne_binary(), kapi_rate:req()) -> boolean().
matching_rate(Rate, <<"direction">>, RateReq) ->
case kapi_rate:direction(RateReq) of
'undefined' -> 'true';
Direction ->
lists:member(Direction, kzd_rates:direction(Rate))
end;
matching_rate(Rate, <<"route_options">>, RateReq) ->
RouteOptions = kapi_rate:options(RateReq),
RouteFlags = kapi_rate:outbound_flags(RateReq),
ResourceFlag = case kapi_rate:account_id(RateReq) of
'undefined' -> [];
AccountId -> maybe_add_resource_flag(RateReq, AccountId)
end,
options_match(kzd_rates:options(Rate), RouteOptions++RouteFlags++ResourceFlag);
matching_rate(Rate, <<"routes">>, RateReq) ->
ToDID = kapi_rate:to_did(RateReq),
E164 = knm_converters:normalize(ToDID),
lists:any(fun(Regex) -> re:run(E164, Regex) =/= 'nomatch' end
,kzd_rates:routes(Rate, [])
);
matching_rate(Rate, <<"caller_id_numbers">>, RateReq) ->
FromDID = kapi_rate:from_did(RateReq),
E164 = knm_converters:normalize(FromDID),
lists:any(fun(Regex) -> re:run(E164, Regex) =/= 'nomatch' end
,kzd_rates:caller_id_numbers(Rate, [<<".">>])
);
matching_rate(Rate, <<"ratedeck_id">>, RateReq) ->
AccountId = kapi_rate:account_id(RateReq),
AccountRatedeck = kz_services_ratedecks:name(AccountId),
RatedeckName = kzd_rates:ratedeck_id(Rate),
AccountRatedeck =:= RatedeckName;
matching_rate(Rate, <<"reseller">>, RateReq) ->
AccountId = kapi_rate:account_id(RateReq),
ResellerId = kz_services_reseller:get_id(AccountId),
RateAccountId = kzd_rates:account_id(Rate),
RateAccountId =:= ResellerId;
matching_rate(Rate, <<"version">>, _RateReq) ->
kzd_rates:rate_version(Rate) =:= hotornot_config:rate_version();
matching_rate(_Rate, _FilterType, _RateReq) -> 'false'.
%% Return true if RateA has lower weight than RateB
-spec sort_rate_by_weight(kzd_rates:doc(), kzd_rates:doc()) -> boolean().
sort_rate_by_weight(RateA, RateB) ->
PrefixA = byte_size(kz_term:to_binary(kzd_rates:prefix(RateA))),
PrefixB = byte_size(kz_term:to_binary(kzd_rates:prefix(RateB))),
case PrefixA =:= PrefixB of
'true' ->
kzd_rates:weight(RateA, 100) < kzd_rates:weight(RateB, 100);
'false' ->
PrefixA > PrefixB
end.
-spec sort_rate_by_cost(kzd_rates:doc(), kzd_rates:doc()) -> boolean().
sort_rate_by_cost(RateA, RateB) ->
PrefixA = byte_size(kz_term:to_binary(kzd_rates:prefix(RateA))),
PrefixB = byte_size(kz_term:to_binary(kzd_rates:prefix(RateB))),
case PrefixA =:= PrefixB of
'true' ->
kzd_rates:rate_cost(RateA, 0.0) > kzd_rates:rate_cost(RateB, 0.0);
'false' ->
PrefixA > PrefixB
end.
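%% Both comparators order rates with longer (more specific) prefixes first;
%% weight or cost is only compared between rates whose prefixes have the same
%% length.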
%% Route options come from the client device
%% Rate options come from the carrier providing the trunk
%% All Route options must exist in a carrier's options to keep the carrier
%% in the list of carriers capable of handling the call
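%% For example, a route requiring [<<"flat_rate">>] only keeps rates whose
%% options list has a "flat_rate" entry, while a rate that lists no options at
%% all is kept for any route (second clause below).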
-spec options_match(trunking_options(), trunking_options()) -> boolean().
options_match([], []) -> 'true';
options_match([], _) -> 'true';
options_match(RateOptions, RouteOptions) ->
lists:all(fun(RouteOption) ->
props:get_value(RouteOption, RateOptions, 'false') =/= 'false'
end
,RouteOptions
).
-spec maybe_add_resource_flag(kapi_rate:req(), kz_term:ne_binary()) -> kz_term:ne_binaries().
maybe_add_resource_flag(RateReq, AccountId) ->
case hotornot_config:should_account_filter_by_resource(AccountId) of
'true' ->
case kapi_rate:resource_id(RateReq) of
'undefined' -> [];
ResourceId -> [ResourceId]
end;
'false' -> []
end.
| null | https://raw.githubusercontent.com/2600hz/kazoo/24519b9af9792caa67f7c09bbb9d27e2418f7ad6/applications/hotornot/src/hon_util.erl | erlang | -----------------------------------------------------------------------------
@doc
@end
-----------------------------------------------------------------------------
how many chars to strip off the e164 DID
Private helper functions
Route options come from the client device
Rate options come from the carrier providing the trunk
in the list of carriers capable of handling the call | ( C ) 2012 - 2020 , 2600Hz
@author
This Source Code Form is subject to the terms of the Mozilla Public
License , v. 2.0 . If a copy of the MPL was not distributed with this
file , You can obtain one at /.
-module(hon_util).
-export([candidate_rates/1, candidate_rates/2, candidate_rates/3
,matching_rates/2
,sort_rates/1
,sort_rates_by_cost/1, sort_rates_by_weight/1
,account_ratedeck/1, account_ratedeck/2
]).
-include("hotornot.hrl").
-type candidate_rates_return() :: {'ok', kzd_rates:docs()} |
{'error', 'did_too_short'} |
kz_datamgr:data_error().
-spec candidate_rates(kz_term:ne_binary()) ->
candidate_rates_return().
candidate_rates(ToDID) ->
candidate_rates(ToDID, 'undefined', 'undefined').
-spec candidate_rates(kz_term:ne_binary(), kz_term:api_ne_binary()) ->
candidate_rates_return().
candidate_rates(ToDID, AccountId) ->
candidate_rates(ToDID, AccountId, 'undefined').
-spec candidate_rates(kz_term:ne_binary(), kz_term:api_ne_binary(), kz_term:api_ne_binary()) ->
candidate_rates_return().
candidate_rates(ToDID, AccountId, RatedeckId) ->
E164 = knm_converters:normalize(ToDID),
find_candidate_rates(E164, AccountId, RatedeckId).
-spec find_candidate_rates(kz_term:ne_binary(), kz_term:api_ne_binary(), kz_term:api_ne_binary()) ->
candidate_rates_return().
find_candidate_rates(E164, AccountId, RatedeckId)
when byte_size(E164) > ?MIN_PREFIX_LEN ->
case hotornot_config:should_use_trie() of
'false' -> fetch_candidate_rates(E164, AccountId, RatedeckId);
'true' -> find_trie_rates(E164, AccountId, RatedeckId)
end;
find_candidate_rates(DID, _AccountId, _RatedeckId) ->
lager:debug("DID ~s is too short", [DID]),
{'error', 'did_too_short'}.
-spec find_trie_rates(kz_term:ne_binary(), kz_term:api_ne_binary(), kz_term:api_ne_binary()) ->
candidate_rates_return().
find_trie_rates(E164, AccountId, RatedeckId) ->
case hon_trie:match_did(kz_binary:remove_non_numeric(E164), AccountId, RatedeckId) of
{'ok', Result} -> {'ok', Result};
{'error', _E} ->
lager:warning("got error while searching did in trie, falling back to DB search"),
Candidates = fetch_candidate_rates(E164, AccountId, RatedeckId),
maybe_update_trie(RatedeckId, Candidates),
Candidates
end.
-spec maybe_update_trie(kz_term:ne_binary(), candidate_rates_return()) -> 'ok'.
maybe_update_trie(RatedeckId, Candidates) ->
maybe_update_trie(RatedeckId, Candidates, hotornot_config:trie_module()).
-spec maybe_update_trie(kz_term:ne_binary(), candidate_rates_return(), atom()) -> 'ok'.
maybe_update_trie(RatedeckId, {'ok', [_|_]=Rates}, 'hon_trie_lru') ->
hon_trie_lru:cache_rates(RatedeckId, Rates);
maybe_update_trie(_RatedeckId, _Candidates, _Module) ->
'ok'.
-spec fetch_candidate_rates(kz_term:ne_binary(), kz_term:api_ne_binary(), kz_term:api_ne_binary()) ->
candidate_rates_return().
fetch_candidate_rates(E164, AccountId, RatedeckId) ->
fetch_candidate_rates(E164, AccountId, RatedeckId, kzdb_ratedeck:prefix_keys(E164)).
-spec fetch_candidate_rates(kz_term:ne_binary(), kz_term:api_ne_binary(), kz_term:api_ne_binary(), kz_term:ne_binaries()) ->
candidate_rates_return().
fetch_candidate_rates(_E164, _AccountId, _RatedeckId, []) ->
{'error', 'did_too_short'};
fetch_candidate_rates(E164, AccountId, RatedeckId, Keys) ->
lager:debug("searching for prefixes for ~s: ~p", [E164, Keys]),
RatedeckDb = account_ratedeck(AccountId, RatedeckId),
case fetch_rates_from_ratedeck(RatedeckDb, Keys) of
{'ok', []}=OK -> OK;
{'error', _}=E -> E;
{'ok', ViewRows} ->
{'ok'
,[kzd_rates:set_ratedeck_id(kz_json:get_json_value(<<"doc">>, ViewRow)
,kzd_ratedeck:format_ratedeck_id(RatedeckDb)
)
|| ViewRow <- ViewRows
]
}
end.
-spec fetch_rates_from_ratedeck(kz_term:ne_binary(), [integer()]) ->
kz_datamgr:get_results_return().
fetch_rates_from_ratedeck(RatedeckDb, Keys) ->
kz_datamgr:get_results(RatedeckDb
,<<"rates/lookup">>
,[{'keys', Keys}
,'include_docs'
]
).
-ifdef(TEST).
-spec account_ratedeck(kz_term:api_ne_binary()) -> kz_term:ne_binary().
account_ratedeck(_AccountId) -> ?KZ_RATES_DB.
-spec account_ratedeck(kz_term:api_ne_binary(), kz_term:api_ne_binary()) -> kz_term:ne_binary().
account_ratedeck(_AccountId, _RatedeckId) -> ?KZ_RATES_DB.
-else.
-spec account_ratedeck(kz_term:api_ne_binary()) -> kz_term:ne_binary().
account_ratedeck(AccountId) ->
account_ratedeck(AccountId, 'undefined').
-spec account_ratedeck(kz_term:api_ne_binary(), kz_term:api_ne_binary()) -> kz_term:ne_binary().
account_ratedeck('undefined', 'undefined') ->
lager:info("no account supplied, using default ratedeck"),
hotornot_config:default_ratedeck();
account_ratedeck('undefined', <<_/binary>> = RatedeckId) ->
lager:info("using supplied ratedeck ~s", [RatedeckId]),
kzd_ratedeck:format_ratedeck_db(RatedeckId);
account_ratedeck(AccountId, _RatedeckId) ->
case kz_services_ratedecks:id(AccountId) of
'undefined' ->
lager:debug("failed to find account ~s ratedeck, checking reseller", [AccountId]),
reseller_ratedeck(AccountId, kz_services_reseller:get_id(AccountId));
RatedeckId ->
lager:info("using account ratedeck ~s for account ~s", [RatedeckId, AccountId]),
kzd_ratedeck:format_ratedeck_db(RatedeckId)
end.
-spec reseller_ratedeck(kz_term:ne_binary(), kz_term:api_ne_binary()) -> kz_term:ne_binary().
reseller_ratedeck(_AccountId, 'undefined') ->
lager:debug("no reseller for ~s, using default ratedeck", [_AccountId]),
hotornot_config:default_ratedeck();
reseller_ratedeck(ResellerId, ResellerId) ->
lager:debug("account ~s is own reseller, using system setting", [ResellerId]),
hotornot_config:default_ratedeck();
reseller_ratedeck(_AccountId, ResellerId) ->
case kz_services_ratedecks:id(ResellerId) of
'undefined' ->
lager:debug("failed to find reseller ~s ratedeck, using default", [_AccountId]),
hotornot_config:default_ratedeck();
RatedeckId ->
lager:info("using reseller ~s ratedeck ~s for account ~s"
,[ResellerId, RatedeckId, _AccountId]
),
kzd_ratedeck:format_ratedeck_db(RatedeckId)
end.
-endif.
-spec matching_rates(kzd_rates:docs(), kapi_rate:req()) ->
kzd_rates:docs().
matching_rates(Rates, RateReq) ->
FilterList = hotornot_config:filter_list(),
lists:foldl(fun(Filter, Acc) ->
lists:filter(fun(Rate) -> matching_rate(Rate, Filter, RateReq) end, Acc)
end
,Rates
,FilterList
).
-spec sort_rates(kzd_rates:docs()) -> kzd_rates:docs().
sort_rates(Rates) ->
case hotornot_config:should_sort_by_weight() of
'true' -> sort_rates_by_weight(Rates);
'false' -> sort_rates_by_cost(Rates)
end.
-spec sort_rates_by_weight(kzd_rates:docs()) -> kzd_rates:docs().
sort_rates_by_weight(Rates) ->
lists:usort(fun sort_rate_by_weight/2, Rates).
-spec sort_rates_by_cost(kzd_rates:docs()) -> kzd_rates:docs().
sort_rates_by_cost(Rates) ->
lists:usort(fun sort_rate_by_cost/2, Rates).
-spec matching_rate(kzd_rates:doc(), kz_term:ne_binary(), kapi_rate:req()) -> boolean().
matching_rate(Rate, <<"direction">>, RateReq) ->
case kapi_rate:direction(RateReq) of
'undefined' -> 'true';
Direction ->
lists:member(Direction, kzd_rates:direction(Rate))
end;
matching_rate(Rate, <<"route_options">>, RateReq) ->
RouteOptions = kapi_rate:options(RateReq),
RouteFlags = kapi_rate:outbound_flags(RateReq),
ResourceFlag = case kapi_rate:account_id(RateReq) of
'undefined' -> [];
AccountId -> maybe_add_resource_flag(RateReq, AccountId)
end,
options_match(kzd_rates:options(Rate), RouteOptions++RouteFlags++ResourceFlag);
matching_rate(Rate, <<"routes">>, RateReq) ->
ToDID = kapi_rate:to_did(RateReq),
E164 = knm_converters:normalize(ToDID),
lists:any(fun(Regex) -> re:run(E164, Regex) =/= 'nomatch' end
,kzd_rates:routes(Rate, [])
);
matching_rate(Rate, <<"caller_id_numbers">>, RateReq) ->
FromDID = kapi_rate:from_did(RateReq),
E164 = knm_converters:normalize(FromDID),
lists:any(fun(Regex) -> re:run(E164, Regex) =/= 'nomatch' end
,kzd_rates:caller_id_numbers(Rate, [<<".">>])
);
matching_rate(Rate, <<"ratedeck_id">>, RateReq) ->
AccountId = kapi_rate:account_id(RateReq),
AccountRatedeck = kz_services_ratedecks:name(AccountId),
RatedeckName = kzd_rates:ratedeck_id(Rate),
AccountRatedeck =:= RatedeckName;
matching_rate(Rate, <<"reseller">>, RateReq) ->
AccountId = kapi_rate:account_id(RateReq),
ResellerId = kz_services_reseller:get_id(AccountId),
RateAccountId = kzd_rates:account_id(Rate),
RateAccountId =:= ResellerId;
matching_rate(Rate, <<"version">>, _RateReq) ->
kzd_rates:rate_version(Rate) =:= hotornot_config:rate_version();
matching_rate(_Rate, _FilterType, _RateReq) -> 'false'.
%% Return true if RateA has lower weight than RateB
-spec sort_rate_by_weight(kzd_rates:doc(), kzd_rates:doc()) -> boolean().
sort_rate_by_weight(RateA, RateB) ->
PrefixA = byte_size(kz_term:to_binary(kzd_rates:prefix(RateA))),
PrefixB = byte_size(kz_term:to_binary(kzd_rates:prefix(RateB))),
case PrefixA =:= PrefixB of
'true' ->
kzd_rates:weight(RateA, 100) < kzd_rates:weight(RateB, 100);
'false' ->
PrefixA > PrefixB
end.
-spec sort_rate_by_cost(kzd_rates:doc(), kzd_rates:doc()) -> boolean().
sort_rate_by_cost(RateA, RateB) ->
PrefixA = byte_size(kz_term:to_binary(kzd_rates:prefix(RateA))),
PrefixB = byte_size(kz_term:to_binary(kzd_rates:prefix(RateB))),
case PrefixA =:= PrefixB of
'true' ->
kzd_rates:rate_cost(RateA, 0.0) > kzd_rates:rate_cost(RateB, 0.0);
'false' ->
PrefixA > PrefixB
end.
%% All Route options must exist in a carrier's options to keep the carrier
-spec options_match(trunking_options(), trunking_options()) -> boolean().
options_match([], []) -> 'true';
options_match([], _) -> 'true';
options_match(RateOptions, RouteOptions) ->
lists:all(fun(RouteOption) ->
props:get_value(RouteOption, RateOptions, 'false') =/= 'false'
end
,RouteOptions
).
-spec maybe_add_resource_flag(kapi_rate:req(), kz_term:ne_binary()) -> kz_term:ne_binaries().
maybe_add_resource_flag(RateReq, AccountId) ->
case hotornot_config:should_account_filter_by_resource(AccountId) of
'true' ->
case kapi_rate:resource_id(RateReq) of
'undefined' -> [];
ResourceId -> [ResourceId]
end;
'false' -> []
end.
|
fcb733c5a5604233de948ce69ef4c669cd15039c4eb1183afe46a3795887ef69 | gsakkas/rite | 2577.ml |
let rec clone x n =
let rec helper a b acc = if b > 0 then helper a (b - 1) (a :: acc) else acc in
helper x n [];;
let padZero l1 l2 =
let l1_len = List.length l1 in
let l2_len = List.length l2 in
let l_diff = l1_len - l2_len in
if l_diff < 0
then (((clone 0 (l_diff * (-1))) @ l1), l2)
else (l1, ((clone 0 l_diff) @ l2));;
let rec removeZero l =
match l with | [] -> [] | h::t -> if h = 0 then removeZero t else h :: t;;
let bigAdd l1 l2 =
let add (l1,l2) =
let f a x = (x + x) :: a in
let base = [] in
let args = (l1, l2) in let (_,res) = List.fold_left f base args in res in
removeZero (add (padZero l1 l2));;
fix
let rec clone x n =
let rec helper a b acc = if b > 0 then helper a ( b - 1 ) ( a : : acc ) else acc in
helper x n [ ] ; ;
let =
let l1_len = l1 in
let l2_len = l2 in
let in
if < 0
then ( ( ( clone 0 ( * ( -1 ) ) ) @ l1 ) , l2 )
else ( l1 , ( ( clone 0 ) @ l2 ) ) ; ;
let rec removeZero l =
match l with | [ ] - > [ ] | h::t - > if h = 0 then removeZero t else h : : t ; ;
let bigAdd l1 l2 =
let add ( l1,l2 ) =
let f a x = ( [ x + 1 ] , [ x + 1 ] ) in
let base = ( [ ] , [ ] ) in
let args = l1 in let ( _ , res ) = List.fold_left f base args in res in
removeZero ( add ( ) ) ; ;
let rec clone x n =
let rec helper a b acc = if b > 0 then helper a (b - 1) (a :: acc) else acc in
helper x n [];;
let padZero l1 l2 =
let l1_len = List.length l1 in
let l2_len = List.length l2 in
let l_diff = l1_len - l2_len in
if l_diff < 0
then (((clone 0 (l_diff * (-1))) @ l1), l2)
else (l1, ((clone 0 l_diff) @ l2));;
let rec removeZero l =
match l with | [] -> [] | h::t -> if h = 0 then removeZero t else h :: t;;
let bigAdd l1 l2 =
let add (l1,l2) =
let f a x = ([x + 1], [x + 1]) in
let base = ([], []) in
let args = l1 in let (_,res) = List.fold_left f base args in res in
removeZero (add (padZero l1 l2));;
*)
changed spans
( 19,17)-(19,29 )
( [ x + 1 ] , [ x + 1 ] )
TupleG [ ListG [ EmptyG],ListG [ EmptyG ] ]
( 20,16)-(20,18 )
( [ ] , [ ] )
TupleG [ ListG [ ] , ListG [ ] ]
( 21,16)-(21,24 )
l1
VarG
(19,17)-(19,29)
([x + 1] , [x + 1])
TupleG [ListG [EmptyG],ListG [EmptyG]]
(20,16)-(20,18)
([] , [])
TupleG [ListG [],ListG []]
(21,16)-(21,24)
l1
VarG
*)
type error slice
( 21,5)-(21,75 )
( 21,16)-(21,24 )
( 21,42)-(21,56 )
( 21,42)-(21,68 )
( 21,64)-(21,68 )
(21,5)-(21,75)
(21,16)-(21,24)
(21,42)-(21,56)
(21,42)-(21,68)
(21,64)-(21,68)
*)
| null | https://raw.githubusercontent.com/gsakkas/rite/958a0ad2460e15734447bc07bd181f5d35956d3b/data/sp14_min/2577.ml | ocaml |
let rec clone x n =
let rec helper a b acc = if b > 0 then helper a (b - 1) (a :: acc) else acc in
helper x n [];;
let padZero l1 l2 =
let l1_len = List.length l1 in
let l2_len = List.length l2 in
let l_diff = l1_len - l2_len in
if l_diff < 0
then (((clone 0 (l_diff * (-1))) @ l1), l2)
else (l1, ((clone 0 l_diff) @ l2));;
let rec removeZero l =
match l with | [] -> [] | h::t -> if h = 0 then removeZero t else h :: t;;
let bigAdd l1 l2 =
let add (l1,l2) =
let f a x = (x + x) :: a in
let base = [] in
let args = (l1, l2) in let (_,res) = List.fold_left f base args in res in
removeZero (add (padZero l1 l2));;
fix
let rec clone x n =
let rec helper a b acc = if b > 0 then helper a ( b - 1 ) ( a : : acc ) else acc in
helper x n [ ] ; ;
let =
let l1_len = l1 in
let l2_len = l2 in
let in
if < 0
then ( ( ( clone 0 ( * ( -1 ) ) ) @ l1 ) , l2 )
else ( l1 , ( ( clone 0 ) @ l2 ) ) ; ;
let rec removeZero l =
match l with | [ ] - > [ ] | h::t - > if h = 0 then removeZero t else h : : t ; ;
let bigAdd l1 l2 =
let add ( l1,l2 ) =
let f a x = ( [ x + 1 ] , [ x + 1 ] ) in
let base = ( [ ] , [ ] ) in
let args = l1 in let ( _ , res ) = List.fold_left f base args in res in
removeZero ( add ( ) ) ; ;
let rec clone x n =
let rec helper a b acc = if b > 0 then helper a (b - 1) (a :: acc) else acc in
helper x n [];;
let padZero l1 l2 =
let l1_len = List.length l1 in
let l2_len = List.length l2 in
let l_diff = l1_len - l2_len in
if l_diff < 0
then (((clone 0 (l_diff * (-1))) @ l1), l2)
else (l1, ((clone 0 l_diff) @ l2));;
let rec removeZero l =
match l with | [] -> [] | h::t -> if h = 0 then removeZero t else h :: t;;
let bigAdd l1 l2 =
let add (l1,l2) =
let f a x = ([x + 1], [x + 1]) in
let base = ([], []) in
let args = l1 in let (_,res) = List.fold_left f base args in res in
removeZero (add (padZero l1 l2));;
*)
changed spans
( 19,17)-(19,29 )
( [ x + 1 ] , [ x + 1 ] )
TupleG [ ListG [ EmptyG],ListG [ EmptyG ] ]
( 20,16)-(20,18 )
( [ ] , [ ] )
TupleG [ ListG [ ] , ListG [ ] ]
( 21,16)-(21,24 )
l1
VarG
(19,17)-(19,29)
([x + 1] , [x + 1])
TupleG [ListG [EmptyG],ListG [EmptyG]]
(20,16)-(20,18)
([] , [])
TupleG [ListG [],ListG []]
(21,16)-(21,24)
l1
VarG
*)
type error slice
( 21,5)-(21,75 )
( 21,16)-(21,24 )
( 21,42)-(21,56 )
( 21,42)-(21,68 )
( 21,64)-(21,68 )
(21,5)-(21,75)
(21,16)-(21,24)
(21,42)-(21,56)
(21,42)-(21,68)
(21,64)-(21,68)
*)
|
|
b5e4fb1af2005b15b3148b41103388d808856c6b6ce50cfa6b215222fa66fb70 | madvas/emojillionaire | contact_page.cljs | (ns emojillionaire.components.contact-page
(:require
[cljs-react-material-ui.reagent :as ui]
[emojillionaire.components.emoji :refer [emoji]]
[emojillionaire.components.layout :refer [outer-paper row col headline]]
[emojillionaire.utils :as u]))
(defn contact-page []
[outer-paper
[headline "Contact" :love-letter] [:br]
[:h3 "If you found a bug, please try to open issue on "
[u/new-window-link "" "Github"]
", or you can contact me on Twitter " [u/new-window-link "" "@matuslestan"]]])
| null | https://raw.githubusercontent.com/madvas/emojillionaire/ee47e874db0c88b91985b6f9e72221f12d3010ff/src/cljs/emojillionaire/components/contact_page.cljs | clojure | (ns emojillionaire.components.contact-page
(:require
[cljs-react-material-ui.reagent :as ui]
[emojillionaire.components.emoji :refer [emoji]]
[emojillionaire.components.layout :refer [outer-paper row col headline]]
[emojillionaire.utils :as u]))
(defn contact-page []
[outer-paper
[headline "Contact" :love-letter] [:br]
[:h3 "If you found a bug, please try to open issue on "
[u/new-window-link "" "Github"]
", or you can contact me on Twitter " [u/new-window-link "" "@matuslestan"]]])
|
|
869fa72c3f311a3e0f361b4a1be03ab67af774ca8acde494756c7a9be495e95c | cyverse-archive/DiscoveryEnvironmentBackend | callbacks.clj | (ns metadactyl.routes.callbacks
(:use [common-swagger-api.schema]
[metadactyl.routes.domain.callback]
[metadactyl.routes.params]
[ring.util.http-response :only [ok]])
(:require [metadactyl.service.callbacks :as callbacks]))
(defroutes* callbacks
(POST* "/de-job" []
:body [body (describe DeJobStatusUpdate "The DE job status update.")]
:summary "Update the status of of a DE analysis."
:description "The jex-events service calls this endpoint when the status of a DE analysis
changes"
(ok (callbacks/update-de-job-status body)))
(POST* "/agave-job/:job-id" []
:path-params [job-id :- AnalysisIdPathParam]
:query [params AgaveJobStatusUpdate]
:summary "Update the status of an Agave analysis."
:description "The DE registers this endpoint as a callback when it submts jobs to Agave."
(ok (callbacks/update-agave-job-status job-id params))))
| null | https://raw.githubusercontent.com/cyverse-archive/DiscoveryEnvironmentBackend/7f6177078c1a1cb6d11e62f12cfe2e22d669635b/services/metadactyl-clj/src/metadactyl/routes/callbacks.clj | clojure | (ns metadactyl.routes.callbacks
(:use [common-swagger-api.schema]
[metadactyl.routes.domain.callback]
[metadactyl.routes.params]
[ring.util.http-response :only [ok]])
(:require [metadactyl.service.callbacks :as callbacks]))
(defroutes* callbacks
(POST* "/de-job" []
:body [body (describe DeJobStatusUpdate "The DE job status update.")]
:summary "Update the status of of a DE analysis."
:description "The jex-events service calls this endpoint when the status of a DE analysis
changes"
(ok (callbacks/update-de-job-status body)))
(POST* "/agave-job/:job-id" []
:path-params [job-id :- AnalysisIdPathParam]
:query [params AgaveJobStatusUpdate]
:summary "Update the status of an Agave analysis."
:description "The DE registers this endpoint as a callback when it submts jobs to Agave."
(ok (callbacks/update-agave-job-status job-id params))))
|
|
7c6d0c7d0e18bbcfd052614922ecac8f955e5779576058a92913df6a92a5dc84 | WhatsApp/erlt | erlt_enum.erl | Copyright Ericsson AB 1996 - 2020 . All Rights Reserved .
%% Copyright (c) 2020 Facebook, Inc. and its affiliates.
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
%% You may obtain a copy of the License at
%%
%%     http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing, software
%% distributed under the License is distributed on an "AS IS" BASIS,
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
-module(erlt_enum).
%% The skeleton for this module is erl_id_trans.
%% This module only traverses legal Erlang code. This is most noticeable
%% in guards where only a limited number of expressions are allowed.
%% N.B. if this module is to be used as a basis for transforms then
%% all the error cases must be handled otherwise this module just crashes!
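%%
%% Illustrative sketch of the rewrite (the module/enum/variant names below are
%% hypothetical, not taken from this project): following variant_info/3, a
%% variant Tag of enum Enum in module Mod is tagged at runtime with the atom
%% '$#Mod:Enum.Tag', so a constructor for variant circle of enum shape in a
%% module geo with a single field would be represented roughly as
%%   {'$#geo:shape.circle', Radius}
%% and build_type_union/2 declares the enum's type as a union of such tagged
%% tuple types.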
-export([module/2, local_rewriter/1]).
-record(context, {
module :: atom(),
enums = #{},
defs_db :: erlt_defs:def_db()
}).
module(Forms, DefDb) ->
Context = init_context(Forms, DefDb),
erlt_ast:prewalk(Forms, fun(Node, Ctx) -> rewrite(Node, Context, Ctx) end).
local_rewriter(Forms) ->
Context = init_context(Forms, erlt_defs:new()),
fun(Expr) ->
rewrite_local(Expr, Context)
end.
init_context(Forms, DefsDb) ->
[Module] = [M || {attribute, _, module, M} <- Forms],
Enums = [E || {attribute, _, enum, E} <- Forms],
#context{
module = Module,
enums = init_enums(Enums, Module),
defs_db = DefsDb
}.
init_enums(Defs, Module) ->
Map = [{Name, enum_info(Module, Type)} || {Name, Type, _Args} <- Defs],
maps:from_list(Map).
enum_info(Mod, {type, _, enum, {atom, _, Name}, Variants}) ->
VariantsMap = [variant_info(Mod, Name, Variant) || Variant <- Variants],
maps:from_list(VariantsMap).
variant_info(Mod, Enum, {variant, _, {atom, Line, Tag}, Fields}) ->
String = "$#" ++ atom_to_list(Mod) ++ ":" ++ atom_to_list(Enum) ++ "." ++ atom_to_list(Tag),
RuntimeTag = list_to_atom(String),
Anno = erl_anno:set_generated(true, Line),
{Tag, {{atom, Anno, RuntimeTag}, fields_map(Fields)}}.
fields_map(none) ->
none;
fields_map([{field_definition, _, {atom, _, Name}, Default, _Type} | Rest]) ->
[{Name, Default} | fields_map(Rest)];
fields_map([{field_definition, _, positional, undefined, _Type} | Rest]) ->
[positional | fields_map(Rest)];
fields_map([]) ->
[].
rewrite({attribute, Line, enum, {Name, Type, Vars}}, Context, form) ->
Union = build_type_union(Type, Context),
{attribute, Line, type, {Name, Union, Vars}};
rewrite({enum, Line, Name, Variant, Fields}, Context, pattern) ->
{RuntimeTag, Def} = get_definition(Name, Variant, Context),
Pattern = variant_pattern(Fields, Def),
{tuple, Line, [RuntimeTag | Pattern]};
rewrite({enum, Line, Name, Variant, Fields}, Context, _Ctx) ->
{RuntimeTag, Def} = get_definition(Name, Variant, Context),
Constructor = variant_init(Fields, Def),
{tuple, Line, [RuntimeTag | Constructor]};
rewrite(Other, _Context, _Ctx) ->
Other.
rewrite_local({enum, Line, {atom, _, Name}, {atom, _, Variant}, Fields}, Context) ->
{RuntimeTag, Def} = map_get(Variant, map_get(Name, Context#context.enums)),
Constructor = variant_init(Fields, Def),
{tuple, Line, [RuntimeTag | Constructor]}.
get_definition({atom, _, Name}, {atom, _, Variant}, Context) ->
map_get(Variant, map_get(Name, Context#context.enums));
get_definition({remote, _, {atom, _, Module}, {atom, _, Name}}, {atom, _, Variant}, Context) ->
map_get(Variant, get_remote_definition(Module, Name, Context)).
get_remote_definition(Module, Name, Context) ->
{ok, {attribute, _, _, {_, Type, _}}} =
erlt_defs:find_enum(Module, Name, Context#context.defs_db),
enum_info(Module, Type).
variant_init(none, none) ->
[];
variant_init(Fields, Defs) ->
Fun = fun
(positional, [{field, _, positional, Expr} | Rest]) ->
{Expr, Rest};
({Name, Default}, LabelledFields) ->
case find_field(Name, LabelledFields) of
{field, _, _, Value} -> {Value, LabelledFields};
error when Default =/= undefined -> {Default, LabelledFields}
end
end,
element(1, lists:mapfoldl(Fun, Fields, Defs)).
variant_pattern(none, none) ->
[];
variant_pattern(Fields, Defs) ->
Fun = fun
(positional, [{field, _, positional, Expr} | Rest]) ->
{Expr, Rest};
({Name, _Default}, LabelledFields) ->
case find_field(Name, LabelledFields) of
{field, _, _, Value} ->
{Value, LabelledFields};
error ->
Anno = erl_anno:set_generated(true, erl_anno:new(1)),
{{var, Anno, '_'}, LabelledFields}
end
end,
element(1, lists:mapfoldl(Fun, Fields, Defs)).
find_field(Name, [{field, _, {atom, _, Name}, _} = Field | _]) ->
Field;
find_field(Name, [_ | Rest]) ->
find_field(Name, Rest);
find_field(_Name, []) ->
error.
build_type_union({type, Line, enum, {atom, _, Name}, Variants}, Context) ->
Defs = map_get(Name, Context#context.enums),
{type, Line, union, [variant_type(Variant, Defs) || Variant <- Variants]}.
variant_type({variant, Line, {atom, _, Name}, none}, Defs) ->
{RuntimeTag, none} = map_get(Name, Defs),
{type, Line, tuple, [RuntimeTag]};
variant_type({variant, Line, {atom, _, Name}, Fields}, Defs) ->
{RuntimeTag, _} = map_get(Name, Defs),
FieldTypes = [Type || {field_definition, _, _Name, _Default, Type} <- Fields],
{type, Line, tuple, [RuntimeTag | FieldTypes]}.
| null | https://raw.githubusercontent.com/WhatsApp/erlt/616a4adc628ca8754112e659701e57f1cd7fecd1/erltc/src/erlt_enum.erl | erlang |
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
-2.0
Unless required by applicable law or agreed to in writing, software
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
The skeleton for this module is erl_id_trans.
in guards where only a limited number of expressions are allowed.
N.B. if this module is to be used as a basis for transforms then
all the error cases must be handled otherwise this module just crashes! | Copyright Ericsson AB 1996 - 2020 . All Rights Reserved .
Copyright ( c ) 2020 Facebook , Inc. and its affiliates .
Licensed under the Apache License , Version 2.0 ( the " License " ) ;
distributed under the License is distributed on an " AS IS " BASIS ,
-module(erlt_enum).
This module only traverses legal Erlang code . This is most noticeable
-export([module/2, local_rewriter/1]).
-record(context, {
module :: atom(),
enums = #{},
defs_db :: erlt_defs:def_db()
}).
module(Forms, DefDb) ->
Context = init_context(Forms, DefDb),
erlt_ast:prewalk(Forms, fun(Node, Ctx) -> rewrite(Node, Context, Ctx) end).
local_rewriter(Forms) ->
Context = init_context(Forms, erlt_defs:new()),
fun(Expr) ->
rewrite_local(Expr, Context)
end.
init_context(Forms, DefsDb) ->
[Module] = [M || {attribute, _, module, M} <- Forms],
Enums = [E || {attribute, _, enum, E} <- Forms],
#context{
module = Module,
enums = init_enums(Enums, Module),
defs_db = DefsDb
}.
init_enums(Defs, Module) ->
Map = [{Name, enum_info(Module, Type)} || {Name, Type, _Args} <- Defs],
maps:from_list(Map).
enum_info(Mod, {type, _, enum, {atom, _, Name}, Variants}) ->
VariantsMap = [variant_info(Mod, Name, Variant) || Variant <- Variants],
maps:from_list(VariantsMap).
variant_info(Mod, Enum, {variant, _, {atom, Line, Tag}, Fields}) ->
String = "$#" ++ atom_to_list(Mod) ++ ":" ++ atom_to_list(Enum) ++ "." ++ atom_to_list(Tag),
RuntimeTag = list_to_atom(String),
Anno = erl_anno:set_generated(true, Line),
{Tag, {{atom, Anno, RuntimeTag}, fields_map(Fields)}}.
fields_map(none) ->
none;
fields_map([{field_definition, _, {atom, _, Name}, Default, _Type} | Rest]) ->
[{Name, Default} | fields_map(Rest)];
fields_map([{field_definition, _, positional, undefined, _Type} | Rest]) ->
[positional | fields_map(Rest)];
fields_map([]) ->
[].
rewrite({attribute, Line, enum, {Name, Type, Vars}}, Context, form) ->
Union = build_type_union(Type, Context),
{attribute, Line, type, {Name, Union, Vars}};
rewrite({enum, Line, Name, Variant, Fields}, Context, pattern) ->
{RuntimeTag, Def} = get_definition(Name, Variant, Context),
Pattern = variant_pattern(Fields, Def),
{tuple, Line, [RuntimeTag | Pattern]};
rewrite({enum, Line, Name, Variant, Fields}, Context, _Ctx) ->
{RuntimeTag, Def} = get_definition(Name, Variant, Context),
Constructor = variant_init(Fields, Def),
{tuple, Line, [RuntimeTag | Constructor]};
rewrite(Other, _Context, _Ctx) ->
Other.
rewrite_local({enum, Line, {atom, _, Name}, {atom, _, Variant}, Fields}, Context) ->
{RuntimeTag, Def} = map_get(Variant, map_get(Name, Context#context.enums)),
Constructor = variant_init(Fields, Def),
{tuple, Line, [RuntimeTag | Constructor]}.
get_definition({atom, _, Name}, {atom, _, Variant}, Context) ->
map_get(Variant, map_get(Name, Context#context.enums));
get_definition({remote, _, {atom, _, Module}, {atom, _, Name}}, {atom, _, Variant}, Context) ->
map_get(Variant, get_remote_definition(Module, Name, Context)).
get_remote_definition(Module, Name, Context) ->
{ok, {attribute, _, _, {_, Type, _}}} =
erlt_defs:find_enum(Module, Name, Context#context.defs_db),
enum_info(Module, Type).
variant_init(none, none) ->
[];
variant_init(Fields, Defs) ->
Fun = fun
(positional, [{field, _, positional, Expr} | Rest]) ->
{Expr, Rest};
({Name, Default}, LabelledFields) ->
case find_field(Name, LabelledFields) of
{field, _, _, Value} -> {Value, LabelledFields};
error when Default =/= undefined -> {Default, LabelledFields}
end
end,
element(1, lists:mapfoldl(Fun, Fields, Defs)).
variant_pattern(none, none) ->
[];
variant_pattern(Fields, Defs) ->
Fun = fun
(positional, [{field, _, positional, Expr} | Rest]) ->
{Expr, Rest};
({Name, _Default}, LabelledFields) ->
case find_field(Name, LabelledFields) of
{field, _, _, Value} ->
{Value, LabelledFields};
error ->
Anno = erl_anno:set_generated(true, erl_anno:new(1)),
{{var, Anno, '_'}, LabelledFields}
end
end,
element(1, lists:mapfoldl(Fun, Fields, Defs)).
find_field(Name, [{field, _, {atom, _, Name}, _} = Field | _]) ->
Field;
find_field(Name, [_ | Rest]) ->
find_field(Name, Rest);
find_field(_Name, []) ->
error.
build_type_union({type, Line, enum, {atom, _, Name}, Variants}, Context) ->
Defs = map_get(Name, Context#context.enums),
{type, Line, union, [variant_type(Variant, Defs) || Variant <- Variants]}.
variant_type({variant, Line, {atom, _, Name}, none}, Defs) ->
{RuntimeTag, none} = map_get(Name, Defs),
{type, Line, tuple, [RuntimeTag]};
variant_type({variant, Line, {atom, _, Name}, Fields}, Defs) ->
{RuntimeTag, _} = map_get(Name, Defs),
FieldTypes = [Type || {field_definition, _, _Name, _Default, Type} <- Fields],
{type, Line, tuple, [RuntimeTag | FieldTypes]}.
|
adcbc8e7489109b4af8bef677054e654e23b69b4160fff70aa1f462f9c3f0227 | jaspervdj/fugacious | MailQueue.hs | {-# LANGUAGE OverloadedStrings #-}
module Fugacious.MailQueue
( DeliverMail
, Config (..)
, Handle (..)
, withHandle
, withHandles
) where
import qualified Data.Aeson as A
import qualified Fugacious.Logger as Logger
import qualified Fugacious.MailQueue.Amazon as Amazon
import Fugacious.MailQueue.Internal
import qualified Fugacious.MailQueue.Spool as Spool
data Config
= AmazonConfig Amazon.Config
| SpoolConfig Spool.Config
instance A.FromJSON Config where
parseJSON = A.withObject "FromJSON Fugacious.MailQueue" $ \o -> do
ty <- o A..: "type"
case ty of
"amazon" -> AmazonConfig <$> A.parseJSON (A.Object o)
"spool" -> SpoolConfig <$> A.parseJSON (A.Object o)
_ -> fail $ "Unknown MailQueue type: " ++ ty
withHandle :: Config -> Logger.Handle -> (Handle -> IO a) -> IO a
withHandle (AmazonConfig c) logger = Amazon.withHandle c logger
withHandle (SpoolConfig c) _logger = Spool.withHandle c
withHandles :: [Config] -> Logger.Handle -> ([Handle] -> IO a) -> IO a
withHandles [] _logger f = f []
withHandles (c : cs) logger f =
withHandle c logger $ \h ->
withHandles cs logger $ \hs ->
f (h : hs)
| null | https://raw.githubusercontent.com/jaspervdj/fugacious/4e9c2d48174c852616fbfbf28bd9cc90812a1c95/lib/Fugacious/MailQueue.hs | haskell | # LANGUAGE OverloadedStrings # | module Fugacious.MailQueue
( DeliverMail
, Config (..)
, Handle (..)
, withHandle
, withHandles
) where
import qualified Data.Aeson as A
import qualified Fugacious.Logger as Logger
import qualified Fugacious.MailQueue.Amazon as Amazon
import Fugacious.MailQueue.Internal
import qualified Fugacious.MailQueue.Spool as Spool
data Config
= AmazonConfig Amazon.Config
| SpoolConfig Spool.Config
instance A.FromJSON Config where
parseJSON = A.withObject "FromJSON Fugacious.MailQueue" $ \o -> do
ty <- o A..: "type"
case ty of
"amazon" -> AmazonConfig <$> A.parseJSON (A.Object o)
"spool" -> SpoolConfig <$> A.parseJSON (A.Object o)
_ -> fail $ "Unknown MailQueue type: " ++ ty
withHandle :: Config -> Logger.Handle -> (Handle -> IO a) -> IO a
withHandle (AmazonConfig c) logger = Amazon.withHandle c logger
withHandle (SpoolConfig c) _logger = Spool.withHandle c
withHandles :: [Config] -> Logger.Handle -> ([Handle] -> IO a) -> IO a
withHandles [] _logger f = f []
withHandles (c : cs) logger f =
withHandle c logger $ \h ->
withHandles cs logger $ \hs ->
f (h : hs)
|
98848aa689794403e5c2b93206768184dea1b6d6396fba0db563e2c6f92d0c07 | ocaml-ppx/ocamlformat | indent_empty.ml | module M = struct
let f =
end
let g =
fun x -> 3 + 4 *
| null | https://raw.githubusercontent.com/ocaml-ppx/ocamlformat/3d1c992240f7d30bcb8151285274f44619dae197/test/failing/tests/indent_empty.ml | ocaml | module M = struct
let f =
end
let g =
fun x -> 3 + 4 *
|
|
85b94c6f8840e44926a371a4432feb277bd42ad6264e63bbbc6c3c1bc0f7cc18 | MarchLiu/market | local_remote_test.clj | (ns liu.mars.market.local-remote-test
"run with local profile, place run `lein with-profile +local run` prepare the server in local"
(:require [clojure.test :refer :all])
(:require [clojure.java.jdbc :as j])
(:require [liu.mars.market.test-data :as data])
(:require [liu.mars.market.config :as cfg])
(:require [liu.mars.market.order :as o])
(:import (akka.actor ActorSystem)
(akka.testkit.javadsl TestKit)
(liu.mars.market.messages FindOrder LimitAsk LimitBid MarketAsk MarketBid NextOrder Cancel)
(java.util.function Supplier Function)))
(testing "tests for place and find actions by actor"
(let [system (ActorSystem/create "test")
test-kit (TestKit. system)
await #(.awaitCond test-kit (reify Supplier (get [this] (.msgAvailable test-kit))))
self (.getRef test-kit)
host "192.168.50.22"
place-actor (.actorSelection system (str "akka.tcp@" host ":25530/user/place"))
peek-actor (.actorSelection system (str "akka.tcp@" host ":25530/user/peek"))]
(doseq [item (:limit-ask data/note-paper)]
(let [post (doto (LimitAsk.)
(.setPrice (:price item))
(.setQuantity (:quantity item))
(.setAccountId (:account-id item))
(.setSymbol (:symbol item)))
get (FindOrder.)]
(.tell place-actor post self)
(await)
(.expectMsgPF test-kit "should get new order id from place actor"
(reify Function
(apply [this msg]
(is (instance? Long msg))
(.setId get msg))))
(.tell peek-actor get self)
(await)
(.expectMsgPF test-kit "should get limit ask order as save"
(reify Function
(apply [this msg]
(is (instance? LimitAsk msg))
(is (= (.getId get) (.getId msg)))
(is (= 0 (.getCompleted msg)))
(is (= (:quantity item) (.getQuantity msg)))
(is (= (:account-id item) (.getAccountId msg)))
(is (= (:symbol item)) (.getSymbol msg))
(is (= (:price item) (.getPrice msg))))))))
(doseq [item (:limit-bid data/note-paper)]
(let [post (doto (LimitBid.)
(.setPrice (:price item))
(.setQuantity (:quantity item))
(.setAccountId (:account-id item))
(.setSymbol (:symbol item)))
get (FindOrder.)]
(.tell place-actor post self)
(await)
(.expectMsgPF test-kit "should get new order id from place actor"
(reify Function
(apply [this msg]
(is (instance? Long msg))
(.setId get msg))))
(.tell peek-actor get self)
(await)
(.expectMsgPF test-kit "should get limit ask order as save"
(reify Function
(apply [this msg]
(is (instance? LimitBid msg))
(is (= (.getId get) (.getId msg)))
(is (= 0 (.getCompleted msg)))
(is (= (:quantity item) (.getQuantity msg)))
(is (= (:account-id item) (.getAccountId msg)))
(is (= (:symbol item)) (.getSymbol msg))
(is (= (:price item) (.getPrice msg))))))))
(doseq [item (:market-ask data/note-paper)]
(let [post (doto (MarketAsk.)
(.setQuantity (:quantity item))
(.setAccountId (:account-id item))
(.setSymbol (:symbol item)))
get (FindOrder.)]
(.tell place-actor post self)
(await)
(.expectMsgPF test-kit "should get new order id from place actor"
(reify Function
(apply [this msg]
(is (instance? Long msg))
(.setId get msg))))
(.tell peek-actor get self)
(await)
(.expectMsgPF test-kit "should get limit ask order as save"
(reify Function
(apply [this msg]
(is (instance? MarketAsk msg))
(is (= (.getId get) (.getId msg)))
(is (= 0 (.getCompleted msg)))
(is (= (:quantity item) (.getQuantity msg)))
(is (= (:account-id item) (.getAccountId msg)))
(is (= (:symbol item)) (.getSymbol msg)))))))
(doseq [item (:market-bid data/note-paper)]
(let [post (doto (MarketBid.)
(.setQuantity (:quantity item))
(.setAccountId (:account-id item))
(.setSymbol (:symbol item)))
get (FindOrder.)]
(.tell place-actor post self)
(await)
(.expectMsgPF test-kit "should get new order id from place actor"
(reify Function
(apply [this msg]
(is (instance? Long msg))
(.setId get msg))))
(.tell peek-actor get self)
(await)
(.expectMsgPF test-kit "should get limit ask order as save"
(reify Function
(apply [this msg]
(is (instance? MarketBid msg))
(is (= (.getId get) (.getId msg)))
(is (= 0 (.getCompleted msg)))
(is (= (:quantity item) (.getQuantity msg)))
(is (= (:account-id item) (.getAccountId msg)))
(is (= (:symbol item)) (.getSymbol msg)))))))
(doseq [item (:cancel data/note-paper)]
(let [post (doto (Cancel.)
(.setAccountId (:account-id item))
(.setSymbol (:symbol item))
(.setOrderId (:order-id item)))
get (FindOrder.)]
(.tell place-actor post self)
(await)
(.expectMsgPF test-kit "should get new order id from place actor"
(reify Function
(apply [this msg]
(is (instance? Long msg))
(.setId get msg))))
(.tell peek-actor get self)
(await)
(.expectMsgPF test-kit "should get cancel order as save"
(reify Function
(apply [this msg]
(is (instance? Cancel msg))
(is (= (.getId get) (.getId msg)))
(is (= (:order-id item) (.getOrderId msg)))
(is (= (:account-id item) (.getAccountId msg)))
(is (= (:symbol item)) (.getSymbol msg)))))))
(doseq [item (:limit-ask data/note-paper)]
(let [post (doto (LimitAsk.)
(.setPrice (:price item))
(.setQuantity (:quantity item))
(.setAccountId (:account-id item))
(.setSymbol (:symbol item)))
get (NextOrder.)]
(.tell place-actor post self)
(await)
(.expectMsgPF test-kit "should get new order id from place actor"
(reify Function
(apply [this msg]
(is (instance? Long msg))
(.setPositionId get (- msg 1)))))
(.tell peek-actor get self)
(await)
(.expectMsgPF test-kit "should get limit ask order as save"
(reify Function
(apply [this msg]
(is (instance? LimitAsk msg))
(is (= (+ (.getPositionId get) 1) (.getId msg)))
(is (= 0 (.getCompleted msg)))
(is (= (:quantity item) (.getQuantity msg)))
(is (= (:account-id item) (.getAccountId msg)))
(is (= (:symbol item)) (.getSymbol msg))
(is (= (:price item) (.getPrice msg))))))))
(doseq [item (:limit-bid data/note-paper)]
(let [post (doto (LimitBid.)
(.setPrice (:price item))
(.setQuantity (:quantity item))
(.setAccountId (:account-id item))
(.setSymbol (:symbol item)))
get (NextOrder.)]
(.tell place-actor post self)
(await)
(.expectMsgPF test-kit "should get new order id from place actor"
(reify Function
(apply [this msg]
(is (instance? Long msg))
(.setPositionId get (- msg 1)))))
(.tell peek-actor get self)
(await)
(.expectMsgPF test-kit "should get limit ask order as save"
(reify Function
(apply [this msg]
(is (instance? LimitBid msg))
(is (= (inc (.getPositionId get)) (.getId msg)))
(is (= 0 (.getCompleted msg)))
(is (= (:quantity item) (.getQuantity msg)))
(is (= (:account-id item) (.getAccountId msg)))
(is (= (:symbol item)) (.getSymbol msg))
(is (= (:price item) (.getPrice msg))))))))
(doseq [item (:market-ask data/note-paper)]
(let [post (doto (MarketAsk.)
(.setQuantity (:quantity item))
(.setAccountId (:account-id item))
(.setSymbol (:symbol item)))
get (NextOrder.)]
(.tell place-actor post self)
(await)
(.expectMsgPF test-kit "should get new order id from place actor"
(reify Function
(apply [this msg]
(is (instance? Long msg))
(.setPositionId get (- msg 1)))))
(.tell peek-actor get self)
(await)
(.expectMsgPF test-kit "should get limit ask order as save"
(reify Function
(apply [this msg]
(is (instance? MarketAsk msg))
(is (= (+ (.getPositionId get) 1) (.getId msg)))
(is (= 0 (.getCompleted msg)))
(is (= (:quantity item) (.getQuantity msg)))
(is (= (:account-id item) (.getAccountId msg)))
(is (= (:symbol item)) (.getSymbol msg)))))))
(doseq [item (:market-bid data/note-paper)]
(let [post (doto (MarketBid.)
(.setQuantity (:quantity item))
(.setAccountId (:account-id item))
(.setSymbol (:symbol item)))
get (NextOrder.)]
(.tell place-actor post self)
(await)
(.expectMsgPF test-kit "should get new order id from place actor"
(reify Function
(apply [this msg]
(is (instance? Long msg))
(.setPositionId get (- msg 1)))))
(.tell peek-actor get self)
(await)
(.expectMsgPF test-kit "should get limit ask order as save"
(reify Function
(apply [this msg]
(is (instance? MarketBid msg))
(is (= (+ (.getPositionId get) 1) (.getId msg)))
(is (= 0 (.getCompleted msg)))
(is (= (:quantity item) (.getQuantity msg)))
(is (= (:account-id item) (.getAccountId msg)))
(is (= (:symbol item)) (.getSymbol msg)))))))
(doseq [item (:cancel data/note-paper)]
(let [post (doto (Cancel.)
(.setAccountId (:account-id item))
(.setSymbol (:symbol item))
(.setOrderId (:order-id item)))
get (NextOrder.)]
(.tell place-actor post self)
(await)
(.expectMsgPF test-kit "should get new order id from place actor"
(reify Function
(apply [this msg]
(is (instance? Long msg))
(.setPositionId get (- msg 1)))))
(.tell peek-actor get self)
(await)
(.expectMsgPF test-kit "should get cancel order as save"
(reify Function
(apply [this msg]
(is (instance? Cancel msg))
(is (= (inc (.getPositionId get)) (.getId msg)))
(is (= (:order-id item) (.getOrderId msg)))
(is (= (:account-id item) (.getAccountId msg)))
(is (= (:symbol item)) (.getSymbol msg)))))))
(TestKit/shutdownActorSystem system)))
| null | https://raw.githubusercontent.com/MarchLiu/market/7a7daf6c04b41e0f8494be6740da8d54785c5e77/counter/src/test/clojure/liu/mars/market/local_remote_test.clj | clojure | (ns liu.mars.market.local-remote-test
"run with local profile, place run `lein with-profile +local run` prepare the server in local"
(:require [clojure.test :refer :all])
(:require [clojure.java.jdbc :as j])
(:require [liu.mars.market.test-data :as data])
(:require [liu.mars.market.config :as cfg])
(:require [liu.mars.market.order :as o])
(:import (akka.actor ActorSystem)
(akka.testkit.javadsl TestKit)
(liu.mars.market.messages FindOrder LimitAsk LimitBid MarketAsk MarketBid NextOrder Cancel)
(java.util.function Supplier Function)))
(testing "tests for place and find actions by actor"
(let [system (ActorSystem/create "test")
test-kit (TestKit. system)
await #(.awaitCond test-kit (reify Supplier (get [this] (.msgAvailable test-kit))))
self (.getRef test-kit)
host "192.168.50.22"
place-actor (.actorSelection system (str "akka.tcp@" host ":25530/user/place"))
peek-actor (.actorSelection system (str "akka.tcp@" host ":25530/user/peek"))]
(doseq [item (:limit-ask data/note-paper)]
(let [post (doto (LimitAsk.)
(.setPrice (:price item))
(.setQuantity (:quantity item))
(.setAccountId (:account-id item))
(.setSymbol (:symbol item)))
get (FindOrder.)]
(.tell place-actor post self)
(await)
(.expectMsgPF test-kit "should get new order id from place actor"
(reify Function
(apply [this msg]
(is (instance? Long msg))
(.setId get msg))))
(.tell peek-actor get self)
(await)
(.expectMsgPF test-kit "should get limit ask order as save"
(reify Function
(apply [this msg]
(is (instance? LimitAsk msg))
(is (= (.getId get) (.getId msg)))
(is (= 0 (.getCompleted msg)))
(is (= (:quantity item) (.getQuantity msg)))
(is (= (:account-id item) (.getAccountId msg)))
(is (= (:symbol item)) (.getSymbol msg))
(is (= (:price item) (.getPrice msg))))))))
(doseq [item (:limit-bid data/note-paper)]
(let [post (doto (LimitBid.)
(.setPrice (:price item))
(.setQuantity (:quantity item))
(.setAccountId (:account-id item))
(.setSymbol (:symbol item)))
get (FindOrder.)]
(.tell place-actor post self)
(await)
(.expectMsgPF test-kit "should get new order id from place actor"
(reify Function
(apply [this msg]
(is (instance? Long msg))
(.setId get msg))))
(.tell peek-actor get self)
(await)
(.expectMsgPF test-kit "should get limit ask order as save"
(reify Function
(apply [this msg]
(is (instance? LimitBid msg))
(is (= (.getId get) (.getId msg)))
(is (= 0 (.getCompleted msg)))
(is (= (:quantity item) (.getQuantity msg)))
(is (= (:account-id item) (.getAccountId msg)))
(is (= (:symbol item)) (.getSymbol msg))
(is (= (:price item) (.getPrice msg))))))))
(doseq [item (:market-ask data/note-paper)]
(let [post (doto (MarketAsk.)
(.setQuantity (:quantity item))
(.setAccountId (:account-id item))
(.setSymbol (:symbol item)))
get (FindOrder.)]
(.tell place-actor post self)
(await)
(.expectMsgPF test-kit "should get new order id from place actor"
(reify Function
(apply [this msg]
(is (instance? Long msg))
(.setId get msg))))
(.tell peek-actor get self)
(await)
(.expectMsgPF test-kit "should get limit ask order as save"
(reify Function
(apply [this msg]
(is (instance? MarketAsk msg))
(is (= (.getId get) (.getId msg)))
(is (= 0 (.getCompleted msg)))
(is (= (:quantity item) (.getQuantity msg)))
(is (= (:account-id item) (.getAccountId msg)))
(is (= (:symbol item)) (.getSymbol msg)))))))
(doseq [item (:market-bid data/note-paper)]
(let [post (doto (MarketBid.)
(.setQuantity (:quantity item))
(.setAccountId (:account-id item))
(.setSymbol (:symbol item)))
get (FindOrder.)]
(.tell place-actor post self)
(await)
(.expectMsgPF test-kit "should get new order id from place actor"
(reify Function
(apply [this msg]
(is (instance? Long msg))
(.setId get msg))))
(.tell peek-actor get self)
(await)
(.expectMsgPF test-kit "should get limit ask order as save"
(reify Function
(apply [this msg]
(is (instance? MarketBid msg))
(is (= (.getId get) (.getId msg)))
(is (= 0 (.getCompleted msg)))
(is (= (:quantity item) (.getQuantity msg)))
(is (= (:account-id item) (.getAccountId msg)))
(is (= (:symbol item)) (.getSymbol msg)))))))
(doseq [item (:cancel data/note-paper)]
(let [post (doto (Cancel.)
(.setAccountId (:account-id item))
(.setSymbol (:symbol item))
(.setOrderId (:order-id item)))
get (FindOrder.)]
(.tell place-actor post self)
(await)
(.expectMsgPF test-kit "should get new order id from place actor"
(reify Function
(apply [this msg]
(is (instance? Long msg))
(.setId get msg))))
(.tell peek-actor get self)
(await)
(.expectMsgPF test-kit "should get cancel order as save"
(reify Function
(apply [this msg]
(is (instance? Cancel msg))
(is (= (.getId get) (.getId msg)))
(is (= (:order-id item) (.getOrderId msg)))
(is (= (:account-id item) (.getAccountId msg)))
(is (= (:symbol item)) (.getSymbol msg)))))))
(doseq [item (:limit-ask data/note-paper)]
(let [post (doto (LimitAsk.)
(.setPrice (:price item))
(.setQuantity (:quantity item))
(.setAccountId (:account-id item))
(.setSymbol (:symbol item)))
get (NextOrder.)]
(.tell place-actor post self)
(await)
(.expectMsgPF test-kit "should get new order id from place actor"
(reify Function
(apply [this msg]
(is (instance? Long msg))
(.setPositionId get (- msg 1)))))
(.tell peek-actor get self)
(await)
(.expectMsgPF test-kit "should get limit ask order as save"
(reify Function
(apply [this msg]
(is (instance? LimitAsk msg))
(is (= (+ (.getPositionId get) 1) (.getId msg)))
(is (= 0 (.getCompleted msg)))
(is (= (:quantity item) (.getQuantity msg)))
(is (= (:account-id item) (.getAccountId msg)))
(is (= (:symbol item)) (.getSymbol msg))
(is (= (:price item) (.getPrice msg))))))))
(doseq [item (:limit-bid data/note-paper)]
(let [post (doto (LimitBid.)
(.setPrice (:price item))
(.setQuantity (:quantity item))
(.setAccountId (:account-id item))
(.setSymbol (:symbol item)))
get (NextOrder.)]
(.tell place-actor post self)
(await)
(.expectMsgPF test-kit "should get new order id from place actor"
(reify Function
(apply [this msg]
(is (instance? Long msg))
(.setPositionId get (- msg 1)))))
(.tell peek-actor get self)
(await)
(.expectMsgPF test-kit "should get limit ask order as save"
(reify Function
(apply [this msg]
(is (instance? LimitBid msg))
(is (= (inc (.getPositionId get)) (.getId msg)))
(is (= 0 (.getCompleted msg)))
(is (= (:quantity item) (.getQuantity msg)))
(is (= (:account-id item) (.getAccountId msg)))
(is (= (:symbol item)) (.getSymbol msg))
(is (= (:price item) (.getPrice msg))))))))
(doseq [item (:market-ask data/note-paper)]
(let [post (doto (MarketAsk.)
(.setQuantity (:quantity item))
(.setAccountId (:account-id item))
(.setSymbol (:symbol item)))
get (NextOrder.)]
(.tell place-actor post self)
(await)
(.expectMsgPF test-kit "should get new order id from place actor"
(reify Function
(apply [this msg]
(is (instance? Long msg))
(.setPositionId get (- msg 1)))))
(.tell peek-actor get self)
(await)
(.expectMsgPF test-kit "should get limit ask order as save"
(reify Function
(apply [this msg]
(is (instance? MarketAsk msg))
(is (= (+ (.getPositionId get) 1) (.getId msg)))
(is (= 0 (.getCompleted msg)))
(is (= (:quantity item) (.getQuantity msg)))
(is (= (:account-id item) (.getAccountId msg)))
(is (= (:symbol item)) (.getSymbol msg)))))))
(doseq [item (:market-bid data/note-paper)]
(let [post (doto (MarketBid.)
(.setQuantity (:quantity item))
(.setAccountId (:account-id item))
(.setSymbol (:symbol item)))
get (NextOrder.)]
(.tell place-actor post self)
(await)
(.expectMsgPF test-kit "should get new order id from place actor"
(reify Function
(apply [this msg]
(is (instance? Long msg))
(.setPositionId get (- msg 1)))))
(.tell peek-actor get self)
(await)
(.expectMsgPF test-kit "should get limit ask order as save"
(reify Function
(apply [this msg]
(is (instance? MarketBid msg))
(is (= (+ (.getPositionId get) 1) (.getId msg)))
(is (= 0 (.getCompleted msg)))
(is (= (:quantity item) (.getQuantity msg)))
(is (= (:account-id item) (.getAccountId msg)))
(is (= (:symbol item)) (.getSymbol msg)))))))
(doseq [item (:cancel data/note-paper)]
(let [post (doto (Cancel.)
(.setAccountId (:account-id item))
(.setSymbol (:symbol item))
(.setOrderId (:order-id item)))
get (NextOrder.)]
(.tell place-actor post self)
(await)
(.expectMsgPF test-kit "should get new order id from place actor"
(reify Function
(apply [this msg]
(is (instance? Long msg))
(.setPositionId get (- msg 1)))))
(.tell peek-actor get self)
(await)
(.expectMsgPF test-kit "should get cancel order as save"
(reify Function
(apply [this msg]
(is (instance? Cancel msg))
(is (= (inc (.getPositionId get)) (.getId msg)))
(is (= (:order-id item) (.getOrderId msg)))
(is (= (:account-id item) (.getAccountId msg)))
(is (= (:symbol item)) (.getSymbol msg)))))))
(TestKit/shutdownActorSystem system)))
|
|
fac6e608efabcdeab2039b981e1e4b8b16cf8c1be08df4bbde5a2cd7c43a823a | archaelus/erms | erms_test_esme.erl | Copyright ( C ) 2004 Peña < >
%%%
%%% This library is free software; you can redistribute it and/or
%%% modify it under the terms of the GNU Lesser General Public
%%% License as published by the Free Software Foundation; either
%%% version 2.1 of the License, or (at your option) any later version.
%%%
%%% This library is distributed in the hope that it will be useful,
%%% but WITHOUT ANY WARRANTY; without even the implied warranty of
%%% MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
%%% Lesser General Public License for more details.
%%%
%%% You should have received a copy of the GNU Lesser General Public
%%% License along with this library; if not, write to the Free Software
%%% Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
%%% @doc Submit SM example.
%%%
%%% <p>A very simple ESME for submitting short messages.</p>
%%%
2004
@author < >
%%% [/]
%%% @version 1.1, {09 Feb 2004} {@time}.
%%% @end
-module(erms_test_esme).
-behaviour(gen_esme).
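%% Illustrative shell usage (sketch only; the system id, password, host and
%% port below are placeholders, and Msg is assumed to be a #msg{} record as
%% expected by submit_msg/2):
%%
%%   {ok, _Pid} = erms_test_esme:shell_start("smppclient1", "secret"),
%%   {ok, _BindResp} = erms_test_esme:connect("localhost", 2775),
%%   erms_test_esme:submit_msg(Msg, []),
%%   erms_test_esme:stop().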
%%%-------------------------------------------------------------------
%%% Include files
%%%-------------------------------------------------------------------
-include_lib("eunit/include/eunit.hrl").
-include_lib("logging.hrl").
-include_lib("mnesia_model.hrl").
-include_lib("oserl.hrl").
-include_lib("smpp_base.hrl").
%%%-------------------------------------------------------------------
%%% External exports
%%%-------------------------------------------------------------------
-export([shell_start/2,
connect/2,
submit_msg/2,
stop/0,
test_batch/2]).
%%%-------------------------------------------------------------------
%%% Internal ESME exports
%%%-------------------------------------------------------------------
-export([init/1,
handle_outbind/3,
handle_alert_notification/2,
handle_enquire_link_failure/2,
handle_operation/3,
handle_unbind/3,
handle_listen_error/1,
handle_call/3,
handle_cast/2,
handle_info/2,
terminate/2,
code_change/3]).
%%%-------------------------------------------------------------------
%%% Macros
%%%-------------------------------------------------------------------
-define(SERVER, ?MODULE).
%%%-------------------------------------------------------------------
%%% Records
%%%-------------------------------------------------------------------
%% %@spec {state, tx_session, host, port, system, password}
%%
%% %@doc Representation of the server's state
%%
%% <dl>
%% <dt>TxSession: </dt><dd>Pid of the transmitter session.</dd>
%% <dt>Host: </dt><dd>Hostname of the SMSC.</dd>
%% <dt>Port: </dt><dd>Tx port of the SMSC.</dd>
%% <dt>System: </dt><dd>SMPP system id string.</dd>
%% <dt>Password: </dt><dd>SMPP password string.</dd>
%% </dl>
%% %@end
-record(state, {tx_session, host, port, system, password}).
%-record(msg, {id,
% from,
% to,
% text}).
%%%===================================================================
%%% External functions
%%%===================================================================
%% @spec shell_start(System, Pass) -> {ok, pid()} | {error, term()} | ignore
%% System = string()
%% Pass = string()
%% @doc Starts an unlinked esme process that will be used to talk to
%% the SMSC using the System/Pass login/password. The
%% ESME will register itself as erms_test_esme to make other
%% operations easier.
%% @end
shell_start(System, Pass) ->
gen_esme:start({local, ?SERVER}, ?MODULE,
[System, Pass],
[]).
%% @spec connect(Host::term(), Port::integer()) -> Response
%% Response = {ok, Pdu::list()} | {connect_error, Error::term()}
%% @doc Connects the test ESME to the SMSC at Host:Port. Returns the
%% decoded SMPP PDU of the bind_tx command on success and a failure
%% reason if not.
%% @end
connect(Host, Port) ->
case gen_esme:session_start(?SERVER, Host, Port) of
{ok, Tx} ->
gen_esme:call(?SERVER, {new_session, Tx});
Error ->
{cannot_connect, Error}
end.
%% @spec submit_msg(Message::#msg{}, Options::list()) -> ok
%% @doc Submits an erms #msg{} with PDU options Options.
%% @end
submit_msg(Msg = #msg{}, Options) when is_list(Options) ->
{ok, Session} = gen_esme:call(?SERVER, tx_session, timer:seconds(5)),
ParamList = erms_smpp:msg_to_pdu(Msg, Options),
gen_esme_session:submit_sm(Session, ParamList).
%% @spec test_batch(Messages, Count::integer()) -> Errors
%% Messages = #msg{} | list(#msg{})
%% Errors = list({MessageNumber::integer(), Message::#msg{}, Error::term})
%% @doc Sends Count messages (from the list Messages) as fast as
%% possible and reports any submission errors.
%% @end
test_batch(Msg = #msg{}, Count)->
test_batch([Msg], Count);
test_batch(Msgs, Count) when is_integer(Count), is_list(Msgs) ->
test_batch(Msgs, 1, Count, []).
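%% Example call (sketch; the #msg{} fields follow the commented-out record
%% above and may not match the real definition in mnesia_model.hrl):
%%   Msg = #msg{from = "12345", to = "67890", text = "ping"},
%%   Errors = erms_test_esme:test_batch(Msg, 100).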
%% @private
test_batch(_, Iter, Max, Errors) when Iter >= Max ->
Errors;
test_batch([Msg|Msgs], Iter, Max, Errors) ->
RotatedMsgs = lists:append(Msgs, [Msg]),
case submit_msg(Msg, []) of
{ok, _Pdu} ->
test_batch(RotatedMsgs, Iter + 1, Max, Errors);
Error ->
test_batch(RotatedMsgs, Iter + 1, Max, [{Iter, Msg, Error}|Errors])
end.
%% @spec stop() -> ok
%%
%% @doc Stops the ESME server.
%%
%% @see handle_call/3
%%
%% @equiv gen_esme:call(SERVER, die, 10000)
%% @end
stop() ->
gen_esme:call(?SERVER, die, 10000).
%%%===================================================================
%%% Server functions
%%%===================================================================
@private
) - > Result
= term ( )
Result = { ok , State } | { ok , State , Timeout } | ignore | { stop , Reason }
State = term ( )
%% Timeout = int() | infinity
term ( )
%%
%% @doc <a href="#init-1">
%% gen_esme - init/1</a> callback implementation.
%%
%% <p>Initiates the server.</p>
%% @end
init([Host, Port, System, Password]) ->
{ok, #state{host=Host, port=Port, system=System, password=Password}};
init([System, Password]) ->
{ok, #state{system=System, password=Password}}.
@private
@spec handle_outbind(Outbind , From , State ) - > Result
= { outbind , Session , Pdu , IpAddr }
%% Session = pid()
Pdu = pdu ( )
= { int ( ) , int ( ) , int ( ) , int ( ) }
%% From = term()
%% State = term()
Result = { noreply , NewState } |
{ noreply , NewState , Timeout } |
{ stop , , NewState }
ParamList = [ { ParamName , ParamValue } ]
ParamName = atom ( )
ParamValue = term ( )
NewState = term ( )
%% Timeout = int()
%% Reason = term()
%%
%% @doc <a href="#handle_outbind-3">gen_esme - handle_outbind/3</a> callback implementation.
%%
< p > Handle < i > oubind</i > requests from the peer >
%% @end
handle_outbind(R = {outbind, _Session, _Pdu, _IpAddr}, _From, State) ->
?WARN("Unexpected outbind request ~p", [R]),
{noreply, State}.
@private
, State ) - > Result
%% AlertNotification = {alert_notification, Session, Pdu}
%% Session = pid()
Pdu = pdu ( )
%% State = term()
Result = { noreply , NewState } |
{ noreply , NewState , Timeout } |
{ stop , , NewState }
ParamList = [ { ParamName , ParamValue } ]
ParamName = atom ( )
ParamValue = term ( )
NewState = term ( )
%% Timeout = int()
%% Reason = term()
%%
%% @doc <a href="#handle_alert_notification-3">gen_esme - handle_alert_notification/2</a> callback implementation.
%%
< p > Handle < i > alert_notification</i > requests from the peer >
%% @end
handle_alert_notification(R = {alert_notification, _Session, _Pdu}, State) ->
?WARN("Unexpected alert_notification request ~p", [R]),
{noreply, State}.
@private
, State ) - > Result
EnquireLinkFailure = { enquire_link_failure , Session , CommandStatus }
%% Session = pid()
CommandStatus = int ( )
%% State = term()
Result = { noreply , NewState } |
{ noreply , NewState , Timeout } |
{ stop , , NewState }
NewState = term ( )
%% Timeout = int()
%% Reason = term()
%%
%% @doc <a href="#handle_enquire_link_failure-2">gen_esme - handle_enquire_link_failure/2</a> callback implementation.
%%
%% <p>Notifies when an <i>enquire_link</i> failure occurs (i.e. the SMSC did
%% not respond to our <i>enquire_link</i> operation).</p>
%% @end
handle_enquire_link_failure(R = {enquire_link_failure, _Session, _Status}, State) ->
?WARN("Unexpected link failure enqiry request ~p", [R]),
{noreply, State}.
@private
@spec handle_operation(Operation , From , State ) - > Result
%% Operation = {deliver_sm, Session, Pdu} | {data_sm, Session, Pdu}
%% Session = pid()
Pdu = pdu ( )
%% From = term()
%% State = term()
%% Result = {reply, Reply, NewState} |
{ reply , Reply , NewState , Timeout } |
{ noreply , NewState } |
{ noreply , NewState , Timeout } |
%% {stop, Reason, Reply, NewState} |
{ stop , , NewState }
%% Reply = {ok, ParamList} | {error, Error, ParamList}
ParamList = [ { ParamName , ParamValue } ]
ParamName = atom ( )
ParamValue = term ( )
NewState = term ( )
%% Timeout = int()
%% Reason = term()
%%
%% @doc <a href="#handle_operation-3">gen_esme - handle_operation/3</a> callback implementation.
%%
%% <p>Handle <i>deliver_sm</i> and <i>data_sm</i> operations (from the peer
%% SMSCs) to the callback ESME.</p>
%%
%% <p>The <tt>ParamList</tt> included in the response is used to construct
the response PDU . If a command_status other than ESME_ROK is to
be returned by the ESME in the response PDU , the callback should return the
term < tt>{error , Error , ParamList}</tt > , where < tt > Error</tt > is the
desired command_status error code.</p >
%% @end
handle_operation(R = {_CmdName, _Session, _Pdu}, _From, S) ->
% Don't know how to handle CmdName
?WARN("Don't know how to handle operation ~p", [R]),
{reply, {error, ?ESME_RINVCMDID, []}, S}.
@private
, From , State ) - > Result
= { unbind , Session , Pdu }
%% Session = pid()
Pdu = pdu ( )
%% Result = {reply, Reply, NewState} |
{ reply , Reply , NewState , Timeout } |
{ noreply , NewState } |
{ noreply , NewState , Timeout } |
%% {stop, Reason, Reply, NewState} |
{ stop , , NewState }
%% Reply = ok | {error, Error}
%% Error = int()
NewState = term ( )
%% Timeout = int()
%% Reason = term()
%%
%% @doc <a href="#handle_unbind-3">gen_esme - handle_unbind/3</a> callback implementation.
%%
< p > Handle < i > unbind</i > requests from the peer >
%%
< p > If < tt > ok</tt > returned an unbind_resp with a ESME_ROK
command_status is sent to the MC and the session moves into the unbound
state . When < tt>{error , Error}</tt > is returned by the ESME , the
response PDU sent by the session to the MC will have an < tt > Error</tt >
command_status and the session will remain on it 's current bound state
( bound_rx , or >
%% @end
handle_unbind({unbind, _Session, _Pdu}, _From, State) ->
?INFO("Unbinding from smsc.", []),
{reply, ok, State}.
@private
handle_listen_error(State ) - > Result
%% State = term()
Result = { noreply , NewState } |
{ noreply , NewState , Timeout } |
{ stop , , NewState }
NewState = term ( )
%% Timeout = int()
%% Reason = term()
%%
@doc < a href=" / oserl / > callback implementation .
%%
%% <p>Handle listen failures.</p>
%% @end
handle_listen_error(State) ->
?INFO("Need to reconnect - listen error.", []),
{noreply, State}.
@private
, From , State ) - > Result
%% Request = term()
%% From = {pid(), Tag}
State = term ( )
%% Result = {reply, Reply, NewState} |
{ reply , Reply , NewState , Timeout } |
{ noreply , NewState } |
{ noreply , NewState , Timeout } |
{ stop , , Reply , NewState } |
{ stop , , NewState }
%% Reply = term()
NewState = term ( )
Timeout = int ( ) | infinity
%% Reason = term()
%%
%% @doc <a href="#handle_call-3">gen_esme - handle_call/3</a> callback implementation.
%%
< p > Handling call >
%%
%% <ul>
< li > On < tt>{stop , , Reply , NewState}</tt >
%% terminate/2 is called</li>
< li > On < tt>{stop , , NewState}</tt >
%% terminate/2 is called</li>
%% </ul>
%%
%% @see terminate/2
%% @end
handle_call(tx_session, _From, S = #state{tx_session=Session}) ->
{reply, {ok, Session}, S};
handle_call({new_session, Tx}, _From, S = #state{system=System,password=Pass}) ->
ParamList = [{system_id, System},
{password, Pass}],
case gen_esme:bind_transmitter(Tx, ParamList) of
{ok, PduResp} ->
{reply, {ok, PduResp}, S#state{tx_session = Tx}};
BindError ->
{stop, BindError, S}
end;
handle_call({submit_msg, Msg, Options}, _From,
S = #state{tx_session=Session}) ->
ParamList = erms_smpp:msg_to_pdu(Msg, Options),
{reply,
gen_esme:submit_sm(Session, ParamList),
S};
handle_call(die, _From, State) ->
{stop, normal, ok, State}.
@private
, State ) - > Result
%% Request = term()
Result = { noreply , NewState } |
{ noreply , NewState , Timeout } |
{ stop , , NewState }
NewState = term ( )
Timeout = int ( ) | infinity
%% Reason = normal | term()
%%
%% @doc <a href="#handle_cast-2"> gen_esme - handle_cast/2</a> callback implementation.
%%
< p > Handling cast >
%%
%% <ul>
< li > On < tt>{stop , , State}</tt > terminate/2 is called.</li >
%% </ul>
%%
%% @see terminate/2
%% @end
handle_cast(connect, S = #state{tx_session=undefined, host=Host, port=Port}) ->
Pid = self(),
F = fun () ->
case gen_esme:session_start(Pid, Host, Port) of
{ok, Tx} ->
gen_esme:call(Pid, {new_session, Tx}),
ok;
Error ->
exit({cannot_connect, Error})
end
end,
proc_lib:spawn_link(F),
{noreply, S};
handle_cast(Request, State) ->
?WARN("Unexpected cast: ~p", [Request]),
{noreply, State}.
@private
, State ) - > Result
%% Info = timeout | term()
%% State = term()
Result = { noreply , NewState } |
{ noreply , NewState , Timeout } |
{ stop , , NewState }
NewState = term ( )
Timeout = int ( ) | infinity
%% Reason = normal | term()
%%
%% @doc <a href="#handle_info-2"> gen_esme - handle_info/2</a> callback implementation.
%%
%% <p>Handling all non call/cast messages.</p>
%%
%% <ul>
< li > On < tt>{stop , , State}</tt > terminate/2 is called.</li >
%% </ul>
%%
%% @see terminate/2
%% @end
handle_info(Info, State) ->
?WARN("Unexpected info ~p~n", [Info]),
{noreply, State}.
@private
, State ) - > ok
%% Reason = normal | shutdown | term()
State = term ( )
%%
%% @doc <a href="#terminate-2">
%% gen_esme - terminate/2</a> callback implementation.
%%
< p > Shutdown the ESME server.</p >
%%
%% <p>Return value is ignored by <tt>gen_esme</tt>.</p>
%% @end
terminate(kill, _S) ->
ok;
terminate(Reason, S = #state{tx_session=Pid}) when is_pid(Pid) ->
catch gen_smsc:unbind(Pid),
catch gen_smsc:session_stop(Pid),
terminate(Reason, S#state{tx_session=undefined});
terminate(_Reason, _S) ->
ok.
@private
, State , Extra ) - > { ok , NewState }
undefined | term ( )
%% State = term()
%% Extra = term
NewState = term ( )
%%
%% @doc <a href="#code_change-2"> gen_esme - code_change/2</a> callback implementation.
%%
%% <p>Convert process state when code is changed.</p>
%% @end
code_change(_OldVsn, State, _Extra) ->
{ok, State}.
%%%===================================================================
Internal functions
%%%===================================================================
| null | https://raw.githubusercontent.com/archaelus/erms/5dbe5e79516a16e461e7a2a345dd80fbf92ef6fa/src/erms_test_esme.erl | erlang |
This library is free software; you can redistribute it and/or
modify it under the terms of the GNU Lesser General Public
This library is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
Lesser General Public License for more details.
License along with this library; if not, write to the Free Software
@doc Submit SM example.
[/]
@end
-------------------------------------------------------------------
Include files
-------------------------------------------------------------------
-------------------------------------------------------------------
External exports
-------------------------------------------------------------------
-------------------------------------------------------------------
Internal ESME exports
-------------------------------------------------------------------
-------------------------------------------------------------------
-------------------------------------------------------------------
-------------------------------------------------------------------
-------------------------------------------------------------------
%@spec {state, tx_session, host, port, system, password}
%@doc Representation of the server's state
<dl>
<dt>Host: </dt><dd>Hostname of the SMSC.</dd>
<dt>Port: </dt><dd>Tx port of the SMSC.</dd>
</dl>
%@end
-record(msg, {id,
from,
to,
text}).
===================================================================
External functions
===================================================================
System = string()
Pass = string()
@doc Starts an unlinked esme process that will be used to talk to
operations easier.
@end
@spec connect(Host::term(), Port::integer()) -> Response
Response = {ok, Pdu::list()} | {connect_error, Error::term()}
reason if not.
@end
@end
@spec test_batch(Messages, Count::integer()) -> Errors
Messages = #msg{} | list(#msg{})
Errors = list({MessageNumber::integer(), Message::#msg{}, Error::term})
@doc Sends Count messages (from the list Messages) as fast as
possible and reports any submission errors.
@end
@see handle_call/3
@end
===================================================================
Server functions
===================================================================
Timeout = int() | infinity
@doc <a href="#init-1">
gen_esme - init/1</a> callback implementation.
<p>Initiates the server.</p>
@end
Session = pid()
From = term()
State = term()
Timeout = int()
Reason = term()
@doc <a href="#handle_outbind-3">gen_esme - handle_outbind/3</a> callback implementation.
@end
AlertNotification = {alert_notification, Session, Pdu}
Session = pid()
State = term()
Timeout = int()
Reason = term()
@doc <a href="#handle_alert_notification-3">gen_esme - handle_alert_notification/2</a> callback implementation.
@end
Session = pid()
State = term()
Timeout = int()
Reason = term()
@doc <a href="#handle_enquire_link_failure-2">gen_esme - handle_enquire_link_failure/2</a> callback implementation.
<p>Notifies when an <i>enquire_link</i> failure occurs (i.e. the SMSC did
not respond to our <i>enquire_link</i> operation).</p>
@end
Operation = {deliver_sm, Session, Pdu} | {data_sm, Session, Pdu}
Session = pid()
From = term()
State = term()
Result = {reply, Reply, NewState} |
{stop, Reason, Reply, NewState} |
Reply = {ok, ParamList} | {error, Error, ParamList}
Timeout = int()
Reason = term()
@doc <a href="#handle_operation-3">gen_esme - handle_operation/3</a> callback implementation.
<p>Handle <i>deliver_sm</i> and <i>data_sm</i> operations (from the peer
SMSCs) to the callback ESME.</p>
<p>The <tt>ParamList</tt> included in the response is used to construct
@end
Don't know how to handle CmdName
Session = pid()
Result = {reply, Reply, NewState} |
{stop, Reason, Reply, NewState} |
Reply = ok | {error, Error}
Error = int()
Timeout = int()
Reason = term()
@doc <a href="#handle_unbind-3">gen_esme - handle_unbind/3</a> callback implementation.
@end
State = term()
Timeout = int()
Reason = term()
<p>Handle listen failures.</p>
@end
Request = term()
From = {pid(), Tag}
Result = {reply, Reply, NewState} |
Reply = term()
Reason = term()
@doc <a href="#handle_call-3">gen_esme - handle_call/3</a> callback implementation.
<ul>
terminate/2 is called</li>
terminate/2 is called</li>
</ul>
@see terminate/2
@end
Request = term()
Reason = normal | term()
@doc <a href="#handle_cast-2"> gen_esme - handle_cast/2</a> callback implementation.
<ul>
</ul>
@see terminate/2
@end
Info = timeout | term()
State = term()
Reason = normal | term()
@doc <a href="#handle_info-2"> gen_esme - handle_info/2</a> callback implementation.
<p>Handling all non call/cast messages.</p>
<ul>
</ul>
@see terminate/2
@end
Reason = normal | shutdown | term()
@doc <a href="#terminate-2">
gen_esme - terminate/2</a> callback implementation.
<p>Return value is ignored by <tt>gen_esme</tt>.</p>
@end
State = term()
Extra = term
@doc <a href="#code_change-2"> gen_esme - code_change/2</a> callback implementation.
<p>Convert process state when code is changed.</p>
@end
===================================================================
=================================================================== | Copyright ( C ) 2004 Peña < >
License as published by the Free Software Foundation ; either
version 2.1 of the License , or ( at your option ) any later version .
You should have received a copy of the GNU Lesser General Public
Foundation , Inc. , 59 Temple Place , Suite 330 , Boston , MA 02111 - 1307 USA
< p > A very simple ESME for submitting short messages.</p >
2004
@author < >
@version 1.1 , { 09 Feb 2004 } { @time } .
-module(erms_test_esme).
-behaviour(gen_esme).
-include_lib("eunit/include/eunit.hrl").
-include_lib("logging.hrl").
-include_lib("mnesia_model.hrl").
-include_lib("oserl.hrl").
-include_lib("smpp_base.hrl").
-export([shell_start/2,
connect/2,
submit_msg/2,
stop/0,
test_batch/2]).
-export([init/1,
handle_outbind/3,
handle_alert_notification/2,
handle_enquire_link_failure/2,
handle_operation/3,
handle_unbind/3,
handle_listen_error/1,
handle_call/3,
handle_cast/2,
handle_info/2,
terminate/2,
code_change/3]).
Macros
-define(SERVER, ?MODULE).
Records
< dt > TxSession : < /dt><dd > Pid of the transmitter session.</dd >
< dt > System : < /dt><dd > SMPP system i d string.</dd >
< dt > Password : < /dt><dd > SMPP password string.</dd >
-record(state, {tx_session, host, port, system, password}).
, Pass ) - > { ok , pid ( ) } | { error , term ( ) } | ignore
the SMSC using the System / Pass login / password . The
ESME will register itself as erms_test_esme to make other
shell_start(System, Pass) ->
gen_esme:start({local, ?SERVER}, ?MODULE,
[System, Pass],
[]).
@doc Connects the test ESME to the SMSC at . Returns the
decoded SMPP PDU of the bind_tx command on success and a failure
connect(Host, Port) ->
case gen_esme:session_start(?SERVER, Host, Port) of
{ok, Tx} ->
gen_esme:call(?SERVER, {new_session, Tx});
Error ->
{cannot_connect, Error}
end.
@spec submit_msg(Message::#msg { } , Options::list ( ) ) - > ok
@doc Submits an erms # msg { } with PDU options Options .
submit_msg(Msg = #msg{}, Options) when is_list(Options) ->
{ok, Session} = gen_esme:call(?SERVER, tx_session, timer:seconds(5)),
ParamList = erms_smpp:msg_to_pdu(Msg, Options),
gen_esme_session:submit_sm(Session, ParamList).
test_batch(Msg = #msg{}, Count)->
test_batch([Msg], Count);
test_batch(Msgs, Count) when is_integer(Count), is_list(Msgs) ->
test_batch(Msgs, 1, Count, []).
@private
test_batch(_, Iter, Max, Errors) when Iter >= Max ->
Errors;
test_batch([Msg|Msgs], Iter, Max, Errors) ->
RotatedMsgs = lists:append(Msgs, [Msg]),
case submit_msg(Msg, []) of
{ok, _Pdu} ->
test_batch(RotatedMsgs, Iter + 1, Max, Errors);
Error ->
test_batch(RotatedMsgs, Iter + 1, Max, [{Iter, Msg, Error}|Errors])
end.
stop ( ) - > ok
@doc Stops the ESME server .
@equiv gen_esme : call(SERVER , die , 10000 )
stop() ->
gen_esme:call(?SERVER, die, 10000).
@private
) - > Result
= term ( )
Result = { ok , State } | { ok , State , Timeout } | ignore | { stop , Reason }
State = term ( )
term ( )
init([Host, Port, System, Password]) ->
{ok, #state{host=Host, port=Port, system=System, password=Password}};
init([System, Password]) ->
{ok, #state{system=System, password=Password}}.
@private
@spec handle_outbind(Outbind , From , State ) - > Result
= { outbind , Session , Pdu , IpAddr }
Pdu = pdu ( )
= { int ( ) , int ( ) , int ( ) , int ( ) }
Result = { noreply , NewState } |
{ noreply , NewState , Timeout } |
{ stop , , NewState }
ParamList = [ { ParamName , ParamValue } ]
ParamName = atom ( )
ParamValue = term ( )
NewState = term ( )
< p > Handle < i > oubind</i > requests from the peer >
handle_outbind(R = {outbind, _Session, _Pdu, _IpAddr}, _From, State) ->
?WARN("Unexpected outbind request ~p", [R]),
{noreply, State}.
@private
, State ) - > Result
Pdu = pdu ( )
Result = { noreply , NewState } |
{ noreply , NewState , Timeout } |
{ stop , , NewState }
ParamList = [ { ParamName , ParamValue } ]
ParamName = atom ( )
ParamValue = term ( )
NewState = term ( )
< p > Handle < i > alert_notification</i > requests from the peer >
handle_alert_notification(R = {alert_notification, _Session, _Pdu}, State) ->
?WARN("Unexpected alert_notification request ~p", [R]),
{noreply, State}.
@private
, State ) - > Result
EnquireLinkFailure = { enquire_link_failure , Session , CommandStatus }
CommandStatus = int ( )
Result = { noreply , NewState } |
{ noreply , NewState , Timeout } |
{ stop , , NewState }
NewState = term ( )
handle_enquire_link_failure(R = {enquire_link_failure, _Session, _Status}, State) ->
?WARN("Unexpected link failure enqiry request ~p", [R]),
{noreply, State}.
@private
@spec handle_operation(Operation , From , State ) - > Result
Pdu = pdu ( )
{ reply , Reply , NewState , Timeout } |
{ noreply , NewState } |
{ noreply , NewState , Timeout } |
{ stop , , NewState }
ParamList = [ { ParamName , ParamValue } ]
ParamName = atom ( )
ParamValue = term ( )
NewState = term ( )
the response PDU . If a command_status other than ESME_ROK is to
be returned by the ESME in the response PDU , the callback should return the
term < tt>{error , Error , ParamList}</tt > , where < tt > Error</tt > is the
desired command_status error code.</p >
handle_operation(R = {_CmdName, _Session, _Pdu}, _From, S) ->
?WARN("Don't know how to handle operation ~p", [R]),
{reply, {error, ?ESME_RINVCMDID, []}, S}.
@private
, From , State ) - > Result
= { unbind , Session , Pdu }
Pdu = pdu ( )
{ reply , Reply , NewState , Timeout } |
{ noreply , NewState } |
{ noreply , NewState , Timeout } |
{ stop , , NewState }
NewState = term ( )
< p > Handle < i > unbind</i > requests from the peer >
< p > If < tt > ok</tt > returned an unbind_resp with a ESME_ROK
command_status is sent to the MC and the session moves into the unbound
state . When < tt>{error , Error}</tt > is returned by the ESME , the
response PDU sent by the session to the MC will have an < tt > Error</tt >
command_status and the session will remain on it 's current bound state
( bound_rx , or >
handle_unbind({unbind, _Session, _Pdu}, _From, State) ->
?INFO("Unbinding from smsc.", []),
{reply, ok, State}.
%% @private
%% @spec handle_listen_error(State) -> Result
%%    Result = {noreply, NewState} |
%%             {noreply, NewState, Timeout} |
%%             {stop, Reason, NewState}
%%    NewState = term()
%% @doc <a href=".../oserl/...">callback implementation</a>.
handle_listen_error(State) ->
?INFO("Need to reconnect - listen error.", []),
{noreply, State}.
%% @private
%% @spec handle_call(Request, From, State) -> Result
%%    State = term()
%%    Result = {reply, Reply, NewState, Timeout} |
%%             {noreply, NewState} |
%%             {noreply, NewState, Timeout} |
%%             {stop, Reason, Reply, NewState} |
%%             {stop, Reason, NewState}
%%    NewState = term()
%%    Timeout = int() | infinity
%%
%% @doc <p>Handling call messages.</p>
%% <li>On <tt>{stop, Reason, Reply, NewState}</tt> terminate/2 is called.</li>
%% <li>On <tt>{stop, Reason, NewState}</tt> terminate/2 is called.</li>
handle_call(tx_session, _From, S = #state{tx_session=Session}) ->
{reply, {ok, Session}, S};
handle_call({new_session, Tx}, _From, S = #state{system=System,password=Pass}) ->
ParamList = [{system_id, System},
{password, Pass}],
case gen_esme:bind_transmitter(Tx, ParamList) of
{ok, PduResp} ->
{reply, {ok, PduResp}, S#state{tx_session = Tx}};
BindError ->
{stop, BindError, S}
end;
handle_call({submit_msg, Msg, Options}, _From,
S = #state{tx_session=Session}) ->
ParamList = erms_smpp:msg_to_pdu(Msg, Options),
{reply,
gen_esme:submit_sm(Session, ParamList),
S};
handle_call(die, _From, State) ->
{stop, normal, ok, State}.
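%% Illustrative sketch (added, not part of the original module): the public
%% wrappers implied by the clauses above presumably follow the same ?SERVER
%% pattern as stop/0; the timeout value below is an assumption.
%%
%% submit_msg(Msg, Options) ->
%%     gen_esme:call(?SERVER, {submit_msg, Msg, Options}, 10000).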
%% @private
%% @spec handle_cast(Request, State) -> Result
%%    Result = {noreply, NewState} |
%%             {noreply, NewState, Timeout} |
%%             {stop, Reason, NewState}
%%    NewState = term()
%%    Timeout = int() | infinity
%%
%% @doc <p>Handling cast messages.</p>
%% <li>On <tt>{stop, Reason, State}</tt> terminate/2 is called.</li>
handle_cast(connect, S = #state{tx_session=undefined, host=Host, port=Port}) ->
Pid = self(),
F = fun () ->
case gen_esme:session_start(Pid, Host, Port) of
{ok, Tx} ->
gen_esme:call(Pid, {new_session, Tx}),
ok;
Error ->
exit({cannot_connect, Error})
end
end,
proc_lib:spawn_link(F),
{noreply, S};
handle_cast(Request, State) ->
?WARN("Unexpected cast: ~p", [Request]),
{noreply, State}.
%% @private
%% @spec handle_info(Info, State) -> Result
%%    Result = {noreply, NewState} |
%%             {noreply, NewState, Timeout} |
%%             {stop, Reason, NewState}
%%    NewState = term()
%%    Timeout = int() | infinity
%% <li>On <tt>{stop, Reason, State}</tt> terminate/2 is called.</li>
handle_info(Info, State) ->
?WARN("Unexpected info ~p~n", [Info]),
{noreply, State}.
%% @private
%% @spec terminate(Reason, State) -> ok
%%    State = term()
%%
%% @doc <p>Shutdown the ESME server.</p>
terminate(kill, _S) ->
ok;
terminate(Reason, S = #state{tx_session=Pid}) when is_pid(Pid) ->
catch gen_smsc:unbind(Pid),
catch gen_smsc:session_stop(Pid),
terminate(Reason, S#state{tx_session=undefined});
terminate(_Reason, _S) ->
ok.
%% @private
%% @spec code_change(OldVsn, State, Extra) -> {ok, NewState}
%%    OldVsn = undefined | term()
%%    NewState = term()
code_change(_OldVsn, State, _Extra) ->
{ok, State}.
%%% Internal functions
|
40b69f2776f4b4fe78a85109d318b396dff9b5714edd4a188c5ff0d4ac360b60 | arcfide/oleg | sxpath-plus.scm | ;; sxpath+ - a macro implementation of the sxpath function
; $Id: sxpath-plus.scm,v 1.3 2004/01/25 16:14:20 kl Exp kl $
;
; This macro was proposed by on the SSAX-SXML mail list 30/07/2003
;
;
; This version is based on "binary" sxpath, where location step functions
; have two parameters - nodeset and a list of var bindings (including *root*).
; committed to SourceForge CVS Jan 13 2004
;
; The "look and feel" is similar to the normal sxpath function. Instead of
; (sxpath '(div // para))
; one writes
; (sxpath+ (div // para))
;
; Please note that the location path list is not quoted any more!
;
; Like normal sxpath calls, one can unquote to embed an expression result in
; the path expression:
; (sxpath+ (// (chapter (@ (equal? ,a-node)))))
; or to embed a procedure:
; (sxpath+ (,(lift (node-parent tree)) @ name))
;
; Note: location step functions are required to take 2 parameters
; (a node-or-nodeset and environment of variable bindings).
; Thus a small convenience "lift" is used, in order to lift
; a function of type nodeset -> nodeset into one that also accepts
; (but does not use) 2 parameters: node and environment.
;
; This code does use syntax-case, not just syntax-rules, in three
; places: to distinguish symbols and txpath strings in location steps, to
; recognize numbers as abbreviations for (node-pos ...), and to match names
; which are bound-identifiers (eq? and equal?).
;
; dispatch on a number (node-pos) reducer pattern
(define-syntax analyze-red1
(lambda (stx)
(syntax-case stx ()
((_ num nsenv env)
(number? (syntax-object->datum (syntax num)))
(syntax (node-pos num))))))
; analyze a "reducer"
(define-syntax analyze-reduce
(syntax-rules ()
((_ () nsenv env)
(sxml:filter (analyze-path () nsenv env)))
((_ (step ...) nsenv env)
(sxml:filter (analyze-path (step ...) nsenv env)))
((_ item nsenv env)
(analyze-red1 item nsenv env))))
; dispatch on string or symbol within a location step
(define-syntax analyze-1
(lambda (stx)
(syntax-case stx ()
((_ sym nsenv env)
(identifier? (syntax sym))
(syntax (select-kids (ntype?? 'sym))))
((_ str nsenv env)
(string? (syntax-object->datum (syntax str)))
(syntax (lambda (nodeset)
((txpath str nsenv) nodeset env)))))))
; transform a single location step
(define-syntax analyze-step
(lambda (stx)
(syntax-case stx (// *or* *not* ns-id:* unquote)
((_ // nsenv env)
(syntax (node-or (node-self (ntype?? '*any*))
(node-closure (ntype?? '*any*)))))
((_ (*or* item ...) nsenv env)
(syntax (select-kids (ntype-names?? '(item ...)))))
((_ (*not* item ...) nsenv env)
(syntax (select-kids (sxml:invert (ntype-names?? '(item ...))))))
; eq? and equal? must be matched in a non-hygienic way
; in PLT, can use module-or-top-identifier=? for these comparisons
((_ (sym-equal? (unquote x)) nsenv env)
(eq? 'equal? (syntax-object->datum (syntax sym-equal?)))
(syntax (select-kids (node-equal? x))))
((_ (sym-equal? x) nsenv env)
(eq? 'equal? (syntax-object->datum (syntax sym-equal?)))
(syntax (select-kids (node-equal? 'x))))
((_ (sym-eq? (unquote x)) nsenv env)
(eq? 'eq? (syntax-object->datum (syntax sym-eq?)))
(syntax (select-kids (node-eq? x))))
((_ (sym-eq? x) nsenv env)
(eq? 'eq? (syntax-object->datum (syntax sym-eq?)))
(syntax (select-kids (node-eq? 'x))))
((_ (ns-id:* x) nsenv env)
(syntax (select-kids (ntype-namespace-id?? x))))
; exp must evaluate to a procedure (nodeset -> env) -> nodeset
; this is done to be consistent with current sxpath function
; in the past, this would have been a procedure nodeset -> nodeset
((_ (unquote exp) nsenv env)
(syntax (lambda (nodeset)
(exp nodeset env))))
((_ ((step ...) reducer ...) nsenv env)
(syntax (node-reduce (analyze-path (step ...) nsenv env)
(analyze-reduce reducer nsenv env) ...)))
((_ (() reducer ...) nsenv env)
(syntax (node-reduce (analyze-path () nsenv env)
(analyze-reduce reducer nsenv env) ...)))
; this should actually verify that sym is an identifier!!!
((_ (sym reducer ...) nsenv env)
(syntax (node-reduce (analyze-1 sym nsenv env)
(analyze-reduce reducer nsenv env) ...)))
((_ item nsenv env)
(syntax (analyze-1 item nsenv env))))))
; transform a location path
(define-syntax analyze-path
(syntax-rules ()
((_ () nsenv env)
(node-join))
((_ (step ...) nsenv env)
(node-join (analyze-step step nsenv env) ...))
((_ str nsenv env)
(analyze-1 str nsenv env))))
(define-syntax sxpath+
(syntax-rules ()
((_ path)
(sxpath+ path '()))
((_ path ns-binding)
(let ((nsenv ns-binding))
(lambda (node . var-binding)
((analyze-path path nsenv
(if (null? var-binding)
'()
(cadr var-binding))
) node))))))
| null | https://raw.githubusercontent.com/arcfide/oleg/c6826870436925fd4c873c01d7fcc24a7a7f95dc/sxml-tools/sxpath-plus.scm | scheme | sxpath+ - a macro implementation of the sxpath function
This version is based on "binary" sxpath, where location step functions
The "look and feel" is similar to the normal sxpath function. Instead of
(sxpath '(div // para))
one writes
(sxpath+ (div // para))
Please note that the location path list is not quoted any more!
Like normal sxpath calls, one can unquote to embed an expression result in
the path expression:
(sxpath+ (// (chapter (@ (equal? ,a-node)))))
or to embed a procedure:
(sxpath+ (,(lift (node-parent tree)) @ name))
(a node-or-nodeset and environment of variable bindings).
Thus a small convenience "lift" is used, in order to lift
a function of type nodeset -> nodeset into one that also accepts
places: to distinguish symbols and txpath strings in location steps, to
recognize numbers as abbreviations for (node-pos ...), and to match names
which are bound-identifiers (eq? and equal?).
dispatch on a number (node-pos) reducer pattern
analyze a "reducer"
dispatch on string or symbol within a location step
transform a single location step
eq? and equal? must be matched in a non-hygienic way
this is done to be consistent with current sxpath function
transform a location path | $ I d : sxpath - plus.scm , v 1.3 2004/01/25 16:14:20 kl Exp kl $
This macro was proposed by on SSAX - SXML mail list 30/07/2003
have two parameters - nodeset and a list var bindings ( including * root * ) .
commited to SourceForge CVS Jan 13 2004
Note : location step functions are required to take 2 parameters
( but does not use ) 2 parameters : node and environment .
This code does use syntax - case , not just syntax - rules , in three
(define-syntax analyze-red1
(lambda (stx)
(syntax-case stx ()
((_ num nsenv env)
(number? (syntax-object->datum (syntax num)))
(syntax (node-pos num))))))
(define-syntax analyze-reduce
(syntax-rules ()
((_ () nsenv env)
(sxml:filter (analyze-path () nsenv env)))
((_ (step ...) nsenv env)
(sxml:filter (analyze-path (step ...) nsenv env)))
((_ item nsenv env)
(analyze-red1 item nsenv env))))
(define-syntax analyze-1
(lambda (stx)
(syntax-case stx ()
((_ sym nsenv env)
(identifier? (syntax sym))
(syntax (select-kids (ntype?? 'sym))))
((_ str nsenv env)
(string? (syntax-object->datum (syntax str)))
(syntax (lambda (nodeset)
((txpath str nsenv) nodeset env)))))))
(define-syntax analyze-step
(lambda (stx)
(syntax-case stx (// *or* *not* ns-id:* unquote)
((_ // nsenv env)
(syntax (node-or (node-self (ntype?? '*any*))
(node-closure (ntype?? '*any*)))))
((_ (*or* item ...) nsenv env)
(syntax (select-kids (ntype-names?? '(item ...)))))
((_ (*not* item ...) nsenv env)
(syntax (select-kids (sxml:invert (ntype-names?? '(item ...))))))
; in PLT, can use module-or-top-identifier=? for these comparisons
((_ (sym-equal? (unquote x)) nsenv env)
(eq? 'equal? (syntax-object->datum (syntax sym-equal?)))
(syntax (select-kids (node-equal? x))))
((_ (sym-equal? x) nsenv env)
(eq? 'equal? (syntax-object->datum (syntax sym-equal?)))
(syntax (select-kids (node-equal? 'x))))
((_ (sym-eq? (unquote x)) nsenv env)
(eq? 'eq? (syntax-object->datum (syntax sym-eq?)))
(syntax (select-kids (node-eq? x))))
((_ (sym-eq? x) nsenv env)
(eq? 'eq? (syntax-object->datum (syntax sym-eq?)))
(syntax (select-kids (node-eq? 'x))))
((_ (ns-id:* x) nsenv env)
(syntax (select-kids (ntype-namespace-id?? x))))
; exp must evaluate to a procedure (nodeset -> env) -> nodeset
; in the past, this would have been a procedure nodeset -> nodeset
((_ (unquote exp) nsenv env)
(syntax (lambda (nodeset)
(exp nodeset env))))
((_ ((step ...) reducer ...) nsenv env)
(syntax (node-reduce (analyze-path (step ...) nsenv env)
(analyze-reduce reducer nsenv env) ...)))
((_ (() reducer ...) nsenv env)
(syntax (node-reduce (analyze-path () nsenv env)
(analyze-reduce reducer nsenv env) ...)))
; this should actually verify that sym is an identifier!!!
((_ (sym reducer ...) nsenv env)
(syntax (node-reduce (analyze-1 sym nsenv env)
(analyze-reduce reducer nsenv env) ...)))
((_ item nsenv env)
(syntax (analyze-1 item nsenv env))))))
(define-syntax analyze-path
(syntax-rules ()
((_ () nsenv env)
(node-join))
((_ (step ...) nsenv env)
(node-join (analyze-step step nsenv env) ...))
((_ str nsenv env)
(analyze-1 str nsenv env))))
(define-syntax sxpath+
(syntax-rules ()
((_ path)
(sxpath+ path '()))
((_ path ns-binding)
(let ((nsenv ns-binding))
(lambda (node . var-binding)
((analyze-path path nsenv
(if (null? var-binding)
'()
(cadr var-binding))
) node))))))
|
48e87801917480ca4a533e8d7d5073a773207678f4074e0231aeace283d578bc | input-output-hk/cardano-wallet-legacy | Conv.hs | {-# LANGUAGE LambdaCase #-}
-- | Convert to and from V1 types
module Cardano.Wallet.WalletLayer.Kernel.Conv (
-- * From V1 to kernel types
fromRootId
, fromAccountId
, fromAssuranceLevel
, fromRedemptionCode
, fromRedemptionCodePaper
-- * From kernel types to V1 types
, toAccountId
, toRootId
, toAccount
, toWallet
, toEosWallet
, toAddress
, toCardanoAddress
, toAssuranceLevel
, toSyncState
, toInputGrouping
-- * Custom errors
, InvalidRedemptionCode(..)
-- * Convenience re-exports
, runExcept
, runExceptT
, withExceptT
, exceptT
) where
import Universum
import qualified Prelude
import Control.Lens (to)
import Control.Monad.Except
import Crypto.Error (CryptoError)
import Data.ByteString.Base58 (bitcoinAlphabet, decodeBase58)
import Formatting (bprint, build, formatToString, sformat, shown, (%))
import qualified Formatting.Buildable
import qualified Serokell.Util.Base64 as B64
import Pos.Client.Txp.Util (InputSelectionPolicy (..))
import Pos.Core (Address, BlockCount (..))
import Pos.Crypto (AesKey, RedeemSecretKey, aesDecrypt,
redeemDeterministicKeyGen)
import Cardano.Mnemonic (mnemonicToAesKey)
import Cardano.Wallet.API.Types.UnitOfMeasure
import qualified Cardano.Wallet.API.V1.Types as V1
import Cardano.Wallet.Kernel.AddressPoolGap (AddressPoolGap)
import Cardano.Wallet.Kernel.CoinSelection.FromGeneric
(InputGrouping (..))
import Cardano.Wallet.Kernel.DB.BlockMeta (addressMetaIsUsed)
import qualified Cardano.Wallet.Kernel.DB.HdRootId as HD
import qualified Cardano.Wallet.Kernel.DB.HdWallet as HD
import Cardano.Wallet.Kernel.DB.InDb (fromDb)
import Cardano.Wallet.Kernel.DB.Spec (cpAddressMeta)
import Cardano.Wallet.Kernel.DB.Spec.Read
import Cardano.Wallet.Kernel.DB.Util.IxSet (ixedIndexed)
import qualified Cardano.Wallet.Kernel.DB.Util.IxSet as IxSet
import Cardano.Wallet.Kernel.Internal (WalletRestorationProgress,
wrpCurrentSlot, wrpTargetSlot, wrpThroughput)
import qualified Cardano.Wallet.Kernel.Read as Kernel
import UTxO.Util (exceptT)
-- import Cardano.Wallet.WalletLayer (InvalidRedemptionCode (..))
-- Functions 'toWallet' and 'toEosWallet' contain duplications in 'where'
-- sections, but in order to fix it we should change two functions and
-- introduce the new one.
{-# ANN module ("HLint: ignore Reduce duplication" :: Text) #-}
{-------------------------------------------------------------------------------
From V1 to kernel types
Nomenclature based on kernel names, not V1 names.
-------------------------------------------------------------------------------}
fromRootId :: Monad m => V1.WalletId -> ExceptT Text m HD.HdRootId
fromRootId (V1.WalletId wId) = exceptT $ maybe
(Left $ "fromRootId: can't decode given Text to HdRootId: " <> wId)
Right
(HD.decodeHdRootId wId)
fromAccountId :: Monad m
=> V1.WalletId -> V1.AccountIndex -> ExceptT Text m HD.HdAccountId
fromAccountId wId accIx =
aux <$> fromRootId wId
where
aux :: HD.HdRootId -> HD.HdAccountId
aux hdRootId = HD.HdAccountId hdRootId (HD.HdAccountIx $ V1.getAccIndex accIx)
-- | Converts from the @V1@ 'AssuranceLevel' to the HD one.
fromAssuranceLevel :: V1.AssuranceLevel -> HD.AssuranceLevel
fromAssuranceLevel V1.NormalAssurance = HD.AssuranceLevelNormal
fromAssuranceLevel V1.StrictAssurance = HD.AssuranceLevelStrict
-- | Decode redemption key for non-paper wallet
--
-- See also comments for 'fromRedemptionCodePaper'.
fromRedemptionCode :: Monad m
=> V1.ShieldedRedemptionCode
-> ExceptT InvalidRedemptionCode m RedeemSecretKey
fromRedemptionCode (V1.ShieldedRedemptionCode crSeed) = do
bs <- withExceptT (const $ InvalidRedemptionCodeInvalidBase64 crSeed) $
asum [ exceptT $ B64.decode crSeed
, exceptT $ B64.decodeUrl crSeed
]
exceptT $ maybe (Left $ InvalidRedemptionCodeNot32Bytes crSeed) (Right . snd) $
redeemDeterministicKeyGen bs
-- | Decode redemption key for paper wallet
--
-- NOTE: Although both 'fromRedemptionCode' and 'fromRedemptionCodePaper' both
-- take a 'V1.ShieldedRedemptionCode' as argument, note that for paper wallets
-- this must be Base58 encoded, whereas for 'fromRedemptionCode' it must be
-- Base64 encoded.
fromRedemptionCodePaper :: Monad m
=> V1.ShieldedRedemptionCode
-> V1.RedemptionMnemonic
-> ExceptT InvalidRedemptionCode m RedeemSecretKey
fromRedemptionCodePaper (V1.ShieldedRedemptionCode pvSeed)
(V1.RedemptionMnemonic pvBackupPhrase) = do
encBS <- exceptT $ maybe (Left $ InvalidRedemptionCodeInvalidBase58 pvSeed) Right $
decodeBase58 bitcoinAlphabet $ encodeUtf8 pvSeed
decBS <- withExceptT InvalidRedemptionCodeCryptoError $ exceptT $
aesDecrypt encBS aesKey
exceptT $ maybe (Left $ InvalidRedemptionCodeNot32Bytes pvSeed) (Right . snd) $
redeemDeterministicKeyGen decBS
where
aesKey :: AesKey
aesKey = mnemonicToAesKey pvBackupPhrase
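-- Illustrative sketch (added, not part of the original module): dispatching on
-- the two encodings described above. The helper name and its Maybe-mnemonic
-- argument are assumptions made for the example only.
--
-- decodeRedemptionKey :: Monad m
--                     => V1.ShieldedRedemptionCode
--                     -> Maybe V1.RedemptionMnemonic
--                     -> ExceptT InvalidRedemptionCode m RedeemSecretKey
-- decodeRedemptionKey code Nothing         = fromRedemptionCode code                -- Base64(url) seed
-- decodeRedemptionKey code (Just mnemonic) = fromRedemptionCodePaper code mnemonic  -- Base58 seed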
{-------------------------------------------------------------------------------
From kernel to V1 types
-------------------------------------------------------------------------------}
toAccountId :: HD.HdAccountId -> V1.AccountIndex
toAccountId =
V1.unsafeMkAccountIndex -- Invariant: Assuming HD AccountId are valid
. HD.getHdAccountIx
. view HD.hdAccountIdIx
toRootId :: HD.HdRootId -> V1.WalletId
toRootId = V1.WalletId . sformat build
-- | Converts a 'HdAccount' into a V1 'Account'.
--
toAccount :: Kernel.DB -> HD.HdAccount -> V1.Account
toAccount snapshot account = V1.Account {
accIndex = accountIndex
, accAddresses = map (toAddress account . view ixedIndexed) addresses
, accAmount = V1.WalletCoin accountAvailableBalance
, accName = account ^. HD.hdAccountName . to HD.getAccountName
, accWalletId = V1.WalletId (sformat build hdRootId)
}
where
-- NOTE(adn): Perhaps we want the minimum or expected balance here?
accountAvailableBalance = account ^. HD.hdAccountState . HD.hdAccountStateCurrent (to cpAvailableBalance)
hdAccountId = account ^. HD.hdAccountId
accountIndex = toAccountId (account ^. HD.hdAccountId)
hdAddresses = Kernel.addressesByAccountId snapshot hdAccountId
addresses = IxSet.toList hdAddresses
hdRootId = account ^. HD.hdAccountId . HD.hdAccountIdParent
-- | Converts an 'HdRoot' into a V1 'Wallet'.
toWallet :: Kernel.DB -> HD.HdRoot -> V1.Wallet
toWallet db hdRoot = V1.Wallet {
walId = (V1.WalletId walletId)
, walName = hdRoot ^. HD.hdRootName
. to HD.getWalletName
, walBalance = V1.WalletCoin (Kernel.rootTotalBalance db rootId)
, walHasSpendingPassword = hasSpendingPassword
, walSpendingPasswordLastUpdate = V1.WalletTimestamp lastUpdate
, walCreatedAt = V1.WalletTimestamp createdAt
, walAssuranceLevel = v1AssuranceLevel
, walSyncState = V1.Synced
}
where
(hasSpendingPassword, lastUpdate) =
case hdRoot ^. HD.hdRootHasPassword of
HD.NoSpendingPassword lr -> (False, lr ^. fromDb)
HD.HasSpendingPassword lu -> (True, lu ^. fromDb)
-- In case the wallet has no spending password, its last update
-- matches this wallet creation time.
rootId = hdRoot ^. HD.hdRootId
createdAt = hdRoot ^. HD.hdRootCreatedAt . fromDb
walletId = sformat build rootId
v1AssuranceLevel = toAssuranceLevel $ hdRoot ^. HD.hdRootAssurance
-- | Converts an 'HdRoot' into a V1 'EosWallet'.
toEosWallet :: Kernel.DB -> HD.HdRoot -> AddressPoolGap -> V1.EosWallet
toEosWallet db hdRoot gap = V1.EosWallet {
eoswalId = V1.WalletId walletId
, eoswalName = hdRoot ^. HD.hdRootName . to HD.getWalletName
, eoswalAddressPoolGap = gap
, eoswalBalance = V1.WalletCoin (Kernel.rootTotalBalance db rootId)
, eoswalCreatedAt = V1.WalletTimestamp createdAt
, eoswalAssuranceLevel = v1AssuranceLevel
}
where
rootId = hdRoot ^. HD.hdRootId
createdAt = hdRoot ^. HD.hdRootCreatedAt . fromDb
walletId = sformat build rootId
v1AssuranceLevel = toAssuranceLevel $ hdRoot ^. HD.hdRootAssurance
toAssuranceLevel :: HD.AssuranceLevel -> V1.AssuranceLevel
toAssuranceLevel HD.AssuranceLevelNormal = V1.NormalAssurance
toAssuranceLevel HD.AssuranceLevelStrict = V1.StrictAssurance
-- | Converts a Kernel 'HdAddress' into a V1 'WalletAddress'.
toAddress :: HD.HdAccount -> HD.HdAddress -> V1.WalletAddress
toAddress acc hdAddress =
V1.WalletAddress (V1.WalAddress cardanoAddress)
(addressMeta ^. addressMetaIsUsed)
addressOwnership
where
cardanoAddress = hdAddress ^. HD.hdAddressAddress . fromDb
addressMeta = acc ^. HD.hdAccountState . HD.hdAccountStateCurrentCombined (<>) (cpAddressMeta cardanoAddress)
-- NOTE
-- In this particular case, the address had to be known by us. As a matter
-- of fact, to construct a 'WalletAddress', we have to be aware of pieces
-- of information we can only have if the address is ours (like the
-- parent's account derivation index required to build the 'HdAddress' in
-- the first place)!
addressOwnership = V1.AddressIsOurs
-- | Converts a Kernel 'HdAddress' into a Cardano 'Address'.
toCardanoAddress :: HD.HdAddress -> Address
toCardanoAddress hdAddr = hdAddr ^. HD.hdAddressAddress . fromDb
{-------------------------------------------------------------------------------
Custom errors
-------------------------------------------------------------------------------}
data InvalidRedemptionCode =
-- | Seed is invalid base64(url) (used for non-paper wallets)
InvalidRedemptionCodeInvalidBase64 Text
-- | Seed is invalid base58 (used for paper wallets)
| InvalidRedemptionCodeInvalidBase58 Text
-- | AES decryption error (for paper wallets)
| InvalidRedemptionCodeCryptoError CryptoError
-- | Seed is not 32-bytes long (for either paper or non-paper wallets)
--
-- NOTE: For paper wallets the seed is actually AES encrypted so the
-- length would be hard to verify simply by inspecting this text.
| InvalidRedemptionCodeNot32Bytes Text
instance Buildable InvalidRedemptionCode where
build (InvalidRedemptionCodeInvalidBase64 txt) =
bprint ("InvalidRedemptionCodeInvalidBase64 " % build) txt
build (InvalidRedemptionCodeInvalidBase58 txt) =
bprint ("InvalidRedemptionCodeInvalidBase58 " % build) txt
build (InvalidRedemptionCodeCryptoError err) =
bprint ("InvalidRedemptionCodeCryptoError " % shown) err
build (InvalidRedemptionCodeNot32Bytes txt) =
bprint ("InvalidRedemptionCodeNot32Bytes " % build) txt
instance Show InvalidRedemptionCode where
show = formatToString build
-- | Calculate the 'SyncState' from data about the wallet's restoration.
toSyncState :: Maybe WalletRestorationProgress -> V1.SyncState
toSyncState = \case
Nothing -> V1.Synced
Just info -> let MeasuredIn (BlockCount blocksPerSec) = info ^. wrpThroughput
in V1.Restoring $
V1.SyncProgress
{ spEstimatedCompletionTime =
let blocksToGo = (info ^. wrpTargetSlot) - (info ^. wrpCurrentSlot)
bps = max blocksPerSec 1
in V1.mkEstimatedCompletionTime (fromIntegral ((1000 * blocksToGo) `div` bps))
, spThroughput = V1.mkSyncThroughput (BlockCount blocksPerSec)
, spPercentage =
let tgtSlot = info ^. wrpTargetSlot
pct = if tgtSlot /= 0
then (100 * (info ^. wrpCurrentSlot)) `div` tgtSlot
else 0
in V1.mkSyncPercentage (fromIntegral pct)
}
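-- Worked example (added for illustration, hypothetical numbers): with
-- wrpCurrentSlot = 5000, wrpTargetSlot = 10000 and a throughput of 50 blocks/s,
-- blocksToGo = 5000, so the estimated completion time is
-- (1000 * 5000) `div` 50 = 100000 ms and the percentage is
-- (100 * 5000) `div` 10000 = 50.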
-- | Matches the input with the kernel's 'InputGrouping'.
toInputGrouping :: V1.WalletInputSelectionPolicy -> InputGrouping
toInputGrouping (V1.WalletInputSelectionPolicy policy) = case policy of
OptimizeForSecurity -> PreferGrouping
OptimizeForHighThroughput -> IgnoreGrouping
| null | https://raw.githubusercontent.com/input-output-hk/cardano-wallet-legacy/143e6d0dac0b28b3274600c6c49ec87e42ec9f37/src/Cardano/Wallet/WalletLayer/Kernel/Conv.hs | haskell | | Convert to and from V1 types
* From V1 to kernel types
* From kernel types to V1 types
* Custom errors
* Convenience re-exports
Functions 'toWallet' and 'toEosWallet' contain duplications in 'where'
introduce the new one.
------------------------------------------------------------------------------
From V1 to kernel types
Nomenclature based on kernel names, not V1 names.
------------------------------------------------------------------------------
| Converts from the @V1@ 'AssuranceLevel' to the HD one.
| Decode redemption key for non-paper wallet
See also comments for 'fromRedemptionCodePaper'.
| Decode redemption key for paper wallet
NOTE: Although both 'fromRedemptionCode' and 'fromRedemptionCodePaper' both
take a 'V1.ShieldedRedemptionCode' as argument, note that for paper wallets
this must be Base58 encoded, whereas for 'fromRedemptionCode' it must be
Base64 encoded.
------------------------------------------------------------------------------
From kernel to V1 types
------------------------------------------------------------------------------
Invariant: Assuming HD AccountId are valid
NOTE(adn): Perhaps we want the minimum or expected balance here?
| Converts an 'HdRoot' into a V1 'Wallet.
In case the wallet has no spending password, its last update
matches this wallet creation time.
NOTE
In this particular case, the address had to be known by us. As a matter
of informations we can only have if the address is ours (like the
------------------------------------------------------------------------------
Custom errors
------------------------------------------------------------------------------
| Seed is invalid base58 (used for paper wallets)
length would be hard to verify simply by inspecting this text. | {-# LANGUAGE LambdaCase #-}
module Cardano.Wallet.WalletLayer.Kernel.Conv (
fromRootId
, fromAccountId
, fromAssuranceLevel
, fromRedemptionCode
, fromRedemptionCodePaper
, toAccountId
, toRootId
, toAccount
, toWallet
, toEosWallet
, toAddress
, toCardanoAddress
, toAssuranceLevel
, toSyncState
, toInputGrouping
, InvalidRedemptionCode(..)
, runExcept
, runExceptT
, withExceptT
, exceptT
) where
import Universum
import qualified Prelude
import Control.Lens (to)
import Control.Monad.Except
import Crypto.Error (CryptoError)
import Data.ByteString.Base58 (bitcoinAlphabet, decodeBase58)
import Formatting (bprint, build, formatToString, sformat, shown, (%))
import qualified Formatting.Buildable
import qualified Serokell.Util.Base64 as B64
import Pos.Client.Txp.Util (InputSelectionPolicy (..))
import Pos.Core (Address, BlockCount (..))
import Pos.Crypto (AesKey, RedeemSecretKey, aesDecrypt,
redeemDeterministicKeyGen)
import Cardano.Mnemonic (mnemonicToAesKey)
import Cardano.Wallet.API.Types.UnitOfMeasure
import qualified Cardano.Wallet.API.V1.Types as V1
import Cardano.Wallet.Kernel.AddressPoolGap (AddressPoolGap)
import Cardano.Wallet.Kernel.CoinSelection.FromGeneric
(InputGrouping (..))
import Cardano.Wallet.Kernel.DB.BlockMeta (addressMetaIsUsed)
import qualified Cardano.Wallet.Kernel.DB.HdRootId as HD
import qualified Cardano.Wallet.Kernel.DB.HdWallet as HD
import Cardano.Wallet.Kernel.DB.InDb (fromDb)
import Cardano.Wallet.Kernel.DB.Spec (cpAddressMeta)
import Cardano.Wallet.Kernel.DB.Spec.Read
import Cardano.Wallet.Kernel.DB.Util.IxSet (ixedIndexed)
import qualified Cardano.Wallet.Kernel.DB.Util.IxSet as IxSet
import Cardano.Wallet.Kernel.Internal (WalletRestorationProgress,
wrpCurrentSlot, wrpTargetSlot, wrpThroughput)
import qualified Cardano.Wallet.Kernel.Read as Kernel
import UTxO.Util (exceptT)
-- import Cardano.Wallet.WalletLayer (InvalidRedemptionCode (..))
-- sections, but in order to fix it we should change two functions and
{-# ANN module ("HLint: ignore Reduce duplication" :: Text) #-}
fromRootId :: Monad m => V1.WalletId -> ExceptT Text m HD.HdRootId
fromRootId (V1.WalletId wId) = exceptT $ maybe
(Left $ "fromRootId: can't decode given Text to HdRootId: " <> wId)
Right
(HD.decodeHdRootId wId)
fromAccountId :: Monad m
=> V1.WalletId -> V1.AccountIndex -> ExceptT Text m HD.HdAccountId
fromAccountId wId accIx =
aux <$> fromRootId wId
where
aux :: HD.HdRootId -> HD.HdAccountId
aux hdRootId = HD.HdAccountId hdRootId (HD.HdAccountIx $ V1.getAccIndex accIx)
fromAssuranceLevel :: V1.AssuranceLevel -> HD.AssuranceLevel
fromAssuranceLevel V1.NormalAssurance = HD.AssuranceLevelNormal
fromAssuranceLevel V1.StrictAssurance = HD.AssuranceLevelStrict
fromRedemptionCode :: Monad m
=> V1.ShieldedRedemptionCode
-> ExceptT InvalidRedemptionCode m RedeemSecretKey
fromRedemptionCode (V1.ShieldedRedemptionCode crSeed) = do
bs <- withExceptT (const $ InvalidRedemptionCodeInvalidBase64 crSeed) $
asum [ exceptT $ B64.decode crSeed
, exceptT $ B64.decodeUrl crSeed
]
exceptT $ maybe (Left $ InvalidRedemptionCodeNot32Bytes crSeed) (Right . snd) $
redeemDeterministicKeyGen bs
fromRedemptionCodePaper :: Monad m
=> V1.ShieldedRedemptionCode
-> V1.RedemptionMnemonic
-> ExceptT InvalidRedemptionCode m RedeemSecretKey
fromRedemptionCodePaper (V1.ShieldedRedemptionCode pvSeed)
(V1.RedemptionMnemonic pvBackupPhrase) = do
encBS <- exceptT $ maybe (Left $ InvalidRedemptionCodeInvalidBase58 pvSeed) Right $
decodeBase58 bitcoinAlphabet $ encodeUtf8 pvSeed
decBS <- withExceptT InvalidRedemptionCodeCryptoError $ exceptT $
aesDecrypt encBS aesKey
exceptT $ maybe (Left $ InvalidRedemptionCodeNot32Bytes pvSeed) (Right . snd) $
redeemDeterministicKeyGen decBS
where
aesKey :: AesKey
aesKey = mnemonicToAesKey pvBackupPhrase
toAccountId :: HD.HdAccountId -> V1.AccountIndex
toAccountId =
. HD.getHdAccountIx
. view HD.hdAccountIdIx
toRootId :: HD.HdRootId -> V1.WalletId
toRootId = V1.WalletId . sformat build
-- | Converts a 'HdAccount' into a V1 'Account'.
toAccount :: Kernel.DB -> HD.HdAccount -> V1.Account
toAccount snapshot account = V1.Account {
accIndex = accountIndex
, accAddresses = map (toAddress account . view ixedIndexed) addresses
, accAmount = V1.WalletCoin accountAvailableBalance
, accName = account ^. HD.hdAccountName . to HD.getAccountName
, accWalletId = V1.WalletId (sformat build hdRootId)
}
where
accountAvailableBalance = account ^. HD.hdAccountState . HD.hdAccountStateCurrent (to cpAvailableBalance)
hdAccountId = account ^. HD.hdAccountId
accountIndex = toAccountId (account ^. HD.hdAccountId)
hdAddresses = Kernel.addressesByAccountId snapshot hdAccountId
addresses = IxSet.toList hdAddresses
hdRootId = account ^. HD.hdAccountId . HD.hdAccountIdParent
toWallet :: Kernel.DB -> HD.HdRoot -> V1.Wallet
toWallet db hdRoot = V1.Wallet {
walId = (V1.WalletId walletId)
, walName = hdRoot ^. HD.hdRootName
. to HD.getWalletName
, walBalance = V1.WalletCoin (Kernel.rootTotalBalance db rootId)
, walHasSpendingPassword = hasSpendingPassword
, walSpendingPasswordLastUpdate = V1.WalletTimestamp lastUpdate
, walCreatedAt = V1.WalletTimestamp createdAt
, walAssuranceLevel = v1AssuranceLevel
, walSyncState = V1.Synced
}
where
(hasSpendingPassword, lastUpdate) =
case hdRoot ^. HD.hdRootHasPassword of
HD.NoSpendingPassword lr -> (False, lr ^. fromDb)
HD.HasSpendingPassword lu -> (True, lu ^. fromDb)
rootId = hdRoot ^. HD.hdRootId
createdAt = hdRoot ^. HD.hdRootCreatedAt . fromDb
walletId = sformat build rootId
v1AssuranceLevel = toAssuranceLevel $ hdRoot ^. HD.hdRootAssurance
-- | Converts an 'HdRoot' into a V1 'EosWallet'.
toEosWallet :: Kernel.DB -> HD.HdRoot -> AddressPoolGap -> V1.EosWallet
toEosWallet db hdRoot gap = V1.EosWallet {
eoswalId = V1.WalletId walletId
, eoswalName = hdRoot ^. HD.hdRootName . to HD.getWalletName
, eoswalAddressPoolGap = gap
, eoswalBalance = V1.WalletCoin (Kernel.rootTotalBalance db rootId)
, eoswalCreatedAt = V1.WalletTimestamp createdAt
, eoswalAssuranceLevel = v1AssuranceLevel
}
where
rootId = hdRoot ^. HD.hdRootId
createdAt = hdRoot ^. HD.hdRootCreatedAt . fromDb
walletId = sformat build rootId
v1AssuranceLevel = toAssuranceLevel $ hdRoot ^. HD.hdRootAssurance
toAssuranceLevel :: HD.AssuranceLevel -> V1.AssuranceLevel
toAssuranceLevel HD.AssuranceLevelNormal = V1.NormalAssurance
toAssuranceLevel HD.AssuranceLevelStrict = V1.StrictAssurance
-- | Converts a Kernel 'HdAddress' into a V1 'WalletAddress'.
toAddress :: HD.HdAccount -> HD.HdAddress -> V1.WalletAddress
toAddress acc hdAddress =
V1.WalletAddress (V1.WalAddress cardanoAddress)
(addressMeta ^. addressMetaIsUsed)
addressOwnership
where
cardanoAddress = hdAddress ^. HD.hdAddressAddress . fromDb
addressMeta = acc ^. HD.hdAccountState . HD.hdAccountStateCurrentCombined (<>) (cpAddressMeta cardanoAddress)
-- of fact, to construct a 'WalletAddress', we have to be aware of pieces
-- parent's account derivation index required to build the 'HdAddress' in
-- the first place)!
addressOwnership = V1.AddressIsOurs
-- | Converts a Kernel 'HdAddress' into a Cardano 'Address'.
toCardanoAddress :: HD.HdAddress -> Address
toCardanoAddress hdAddr = hdAddr ^. HD.hdAddressAddress . fromDb
data InvalidRedemptionCode =
-- | Seed is invalid base64(url) (used for non-paper wallets)
InvalidRedemptionCodeInvalidBase64 Text
| InvalidRedemptionCodeInvalidBase58 Text
-- | AES decryption error (for paper wallets)
| InvalidRedemptionCodeCryptoError CryptoError
-- | Seed is not 32-bytes long (for either paper or non-paper wallets)
-- NOTE: For paper wallets the seed is actually AES encrypted so the
| InvalidRedemptionCodeNot32Bytes Text
instance Buildable InvalidRedemptionCode where
build (InvalidRedemptionCodeInvalidBase64 txt) =
bprint ("InvalidRedemptionCodeInvalidBase64 " % build) txt
build (InvalidRedemptionCodeInvalidBase58 txt) =
bprint ("InvalidRedemptionCodeInvalidBase58 " % build) txt
build (InvalidRedemptionCodeCryptoError err) =
bprint ("InvalidRedemptionCodeCryptoError " % shown) err
build (InvalidRedemptionCodeNot32Bytes txt) =
bprint ("InvalidRedemptionCodeNot32Bytes " % build) txt
instance Show InvalidRedemptionCode where
show = formatToString build
-- | Calculate the 'SyncState' from data about the wallet's restoration.
toSyncState :: Maybe WalletRestorationProgress -> V1.SyncState
toSyncState = \case
Nothing -> V1.Synced
Just info -> let MeasuredIn (BlockCount blocksPerSec) = info ^. wrpThroughput
in V1.Restoring $
V1.SyncProgress
{ spEstimatedCompletionTime =
let blocksToGo = (info ^. wrpTargetSlot) - (info ^. wrpCurrentSlot)
bps = max blocksPerSec 1
in V1.mkEstimatedCompletionTime (fromIntegral ((1000 * blocksToGo) `div` bps))
, spThroughput = V1.mkSyncThroughput (BlockCount blocksPerSec)
, spPercentage =
let tgtSlot = info ^. wrpTargetSlot
pct = if tgtSlot /= 0
then (100 * (info ^. wrpCurrentSlot)) `div` tgtSlot
else 0
in V1.mkSyncPercentage (fromIntegral pct)
}
-- | Matches the input with the kernel's 'InputGrouping'.
toInputGrouping :: V1.WalletInputSelectionPolicy -> InputGrouping
toInputGrouping (V1.WalletInputSelectionPolicy policy) = case policy of
OptimizeForSecurity -> PreferGrouping
OptimizeForHighThroughput -> IgnoreGrouping
|
96c9bfb1c3efde3b73831cdf046d975b2ca9e2cc9d5e6ea63089d8ed6d48c6a2 | pedestal/pedestal | service_test.clj | ; Copyright 2014-2022 Cognitect, Inc.
; The use and distribution terms for this software are covered by the
; Eclipse Public License 1.0 ()
; which can be found in the file epl-v10.html at the root of this distribution.
;
; By using this software in any fashion, you are agreeing to be bound by
; the terms of this license.
;
; You must not remove this notice, or any other, from this software.
(ns gzip.service-test
(:require [clj-http.client :as http-cl]
[clojure.test :refer :all]
[io.pedestal.test :refer :all]
[io.pedestal.http :as http]
[io.pedestal.http.jetty :as jetty]
[io.pedestal.http.servlet :as servlet]
[io.pedestal.http.impl.servlet-interceptor :as incept]
[gzip.service :as service]
[gzip.server :as server]))
(def service
(::http/service-fn (http/create-servlet service/service)))
(defn jetty-server
[app opts]
(prn :jetty-server :enter)
(let [options (assoc opts :join? false)
service-fn (incept/http-interceptor-service-fn [app])
jetty-server (servlet/servlet :service service-fn)]
(prn :jetty-server :building)
(jetty/server jetty-server opts)))
(defn get-response [addy]
(try
(http-cl/get addy)
(catch clojure.lang.ExceptionInfo ex
(prn "BOOM!\n\n")
(prn ex)
{:body "BOOM"})))
(deftest home-page-test
(let [jetty (server/run-dev)
response (get-response ":8080")
_ (http/stop jetty)]
(testing "service response"
(is (=
(:body response)
"Hello World!")))
(testing "gzip-encoded"
(is (.startsWith
(:orig-content-encoding response)
"gzip")))))
| null | https://raw.githubusercontent.com/pedestal/pedestal/53bfe70143a22cdfd2f0d183023334a199c9e9a2/samples/servlet-filters-gzip/test/gzip/service_test.clj | clojure | The use and distribution terms for this software are covered by the
which can be found in the file epl-v10.html at the root of this distribution.
By using this software in any fashion, you are agreeing to be bound by
the terms of this license.
You must not remove this notice, or any other, from this software. | Copyright 2014 - 2022 Cognitect , Inc.
Eclipse Public License 1.0 ( )
(ns gzip.service-test
(:require [clj-http.client :as http-cl]
[clojure.test :refer :all]
[io.pedestal.test :refer :all]
[io.pedestal.http :as http]
[io.pedestal.http.jetty :as jetty]
[io.pedestal.http.servlet :as servlet]
[io.pedestal.http.impl.servlet-interceptor :as incept]
[gzip.service :as service]
[gzip.server :as server]))
(def service
(::http/service-fn (http/create-servlet service/service)))
(defn jetty-server
[app opts]
(prn :jetty-server :enter)
(let [options (assoc opts :join? false)
service-fn (incept/http-interceptor-service-fn [app])
jetty-server (servlet/servlet :service service-fn)]
(prn :jetty-server :building)
(jetty/server jetty-server opts)))
(defn get-response [addy]
(try
(http-cl/get addy)
(catch clojure.lang.ExceptionInfo ex
(prn "BOOM!\n\n")
(prn ex)
{:body "BOOM"})))
(deftest home-page-test
(let [jetty (server/run-dev)
response (get-response ":8080")
_ (http/stop jetty)]
(testing "service response"
(is (=
(:body response)
"Hello World!")))
(testing "gzip-encoded"
(is (.startsWith
(:orig-content-encoding response)
"gzip")))))
|